├── .github
├── FUNDING.yml
└── workflows
│ └── rust.yml
├── .gitignore
├── Cargo.toml
├── LICENSE-APACHE
├── LICENSE-MIT
├── README.md
├── assets
├── environment_maps
│ ├── diffuse_rgb9e5_zstd.ktx2
│ └── specular_rgb9e5_zstd.ktx2
└── models
│ └── PlaneEngine
│ ├── license.txt
│ ├── scene.bin
│ ├── scene.gltf
│ └── textures
│ └── AppC7_baseColor.jpeg
├── examples
├── cad.rs
├── floating_origin.rs
├── map.rs
├── minimal.rs
├── ortho.rs
├── split_screen.rs
└── zoom_limits.rs
└── src
├── controller
├── component.rs
├── inputs.rs
├── mod.rs
├── momentum.rs
├── motion.rs
├── projections.rs
├── smoothing.rs
└── zoom.rs
├── extensions
├── anchor_indicator.rs
├── dolly_zoom.rs
├── independent_skybox.rs
├── look_to.rs
└── mod.rs
├── input.rs
└── lib.rs
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | # These are supported funding model platforms
2 |
3 | github: aevyrie
4 |
--------------------------------------------------------------------------------
/.github/workflows/rust.yml:
--------------------------------------------------------------------------------
1 | name: CI
2 | on:
3 | pull_request:
4 | push:
5 | branches:
6 | - main
7 | jobs:
8 | format:
9 | runs-on: ubuntu-latest
10 | steps:
11 | - uses: actions/checkout@v3
12 | - uses: dtolnay/rust-toolchain@stable
13 | - uses: Swatinem/rust-cache@v2.7.0
14 | - run: rustup component add rustfmt
15 | - run: cargo fmt --all -- --check
16 |
17 | check:
18 | runs-on: ubuntu-latest
19 | steps:
20 | - uses: actions/checkout@v3
21 | - uses: dtolnay/rust-toolchain@stable
22 | - uses: Swatinem/rust-cache@v2.7.0
23 | - run: sudo apt-get install -yq --no-install-recommends libudev-dev libasound2-dev libxcb-composite0-dev
24 | - run: cargo check --workspace --all-features --all-targets
25 |
26 | check-no-defaults:
27 | runs-on: ubuntu-latest
28 | steps:
29 | - uses: actions/checkout@v3
30 | - uses: dtolnay/rust-toolchain@stable
31 | - uses: Swatinem/rust-cache@v2.7.0
32 | - run: sudo apt-get install -yq --no-install-recommends libudev-dev libasound2-dev libxcb-composite0-dev
33 | - run: cargo check --workspace --no-default-features
34 |
35 | clippy:
36 | runs-on: ubuntu-latest
37 | steps:
38 | - uses: actions/checkout@v3
39 | - uses: dtolnay/rust-toolchain@stable
40 | - uses: Swatinem/rust-cache@v2.7.0
41 | - run: rustup component add clippy
42 | - run: sudo apt-get install -yq --no-install-recommends libudev-dev libasound2-dev libxcb-composite0-dev
43 | - run: cargo clippy --workspace --all-features --all-targets -- -D warnings
44 |
45 | doc:
46 | runs-on: ubuntu-latest
47 | steps:
48 | - uses: actions/checkout@v3
49 | - uses: dtolnay/rust-toolchain@stable
50 | - uses: Swatinem/rust-cache@v2.7.0
51 | - run: sudo apt-get install -yq --no-install-recommends libudev-dev libasound2-dev libxcb-composite0-dev
52 | - run: cargo doc --no-deps --workspace --all-features
53 | env:
54 | RUSTDOCFLAGS: -D warnings
55 |
56 | test:
57 | runs-on: ubuntu-latest
58 | steps:
59 | - uses: actions/checkout@v3
60 | - uses: dtolnay/rust-toolchain@stable
61 | - uses: Swatinem/rust-cache@v2.7.0
62 | - run: sudo apt-get install -yq --no-install-recommends libudev-dev libasound2-dev libxcb-composite0-dev
63 | - run: cargo test --workspace --all-targets --all-features
64 |
65 | test-doc:
66 | runs-on: ubuntu-latest
67 | steps:
68 | - uses: actions/checkout@v3
69 | - uses: dtolnay/rust-toolchain@stable
70 | - uses: Swatinem/rust-cache@v2.7.0
71 | - run: sudo apt-get install -yq --no-install-recommends libudev-dev libasound2-dev libxcb-composite0-dev
72 | - run: cargo test --workspace --doc
73 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | /target
2 | Cargo.lock
3 | /assets/models/scene
--------------------------------------------------------------------------------
/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | name = "bevy_editor_cam"
3 | version = "0.5.0"
4 | edition = "2021"
5 | description = "A camera controller for editors and CAD."
6 | license = "MIT OR Apache-2.0"
7 | keywords = ["controller", "camera", "bevy", "CAD"]
8 | repository = "https://github.com/aevyrie/bevy_editor_cam"
9 | documentation = "https://docs.rs/crate/bevy_editor_cam/latest"
10 | exclude = ["assets/"]
11 |
12 | [features]
13 | default = ["extension_anchor_indicator", "extension_independent_skybox"]
14 | extension_anchor_indicator = ["bevy_gizmos"]
15 | extension_independent_skybox = ["bevy_asset", "bevy_core_pipeline"]
16 |
17 | [dependencies]
18 | bevy_app = "0.15.0"
19 | bevy_color = "0.15.0"
20 | bevy_derive = "0.15.0"
21 | bevy_ecs = "0.15.0"
22 | bevy_image = "0.15.0"
23 | bevy_input = "0.15.0"
24 | bevy_log = "0.15.0"
25 | bevy_math = "0.15.0"
26 | bevy_picking = "0.15.0"
27 | bevy_reflect = "0.15.0"
28 | bevy_render = "0.15.0"
29 | bevy_time = "0.15.0"
30 | bevy_transform = "0.15.0"
31 | bevy_utils = "0.15.0"
32 | bevy_window = "0.15.0"
33 | # Optional
34 | bevy_asset = { version = "0.15.0", optional = true }
35 | bevy_core_pipeline = { version = "0.15.0", optional = true }
36 | bevy_gizmos = { version = "0.15.0", optional = true }
37 |
38 | [dev-dependencies]
39 | bevy_framepace = "0.18"
40 | big_space = "0.8.0"
41 | indoc = "2.0.5"
42 | rand = "0.8"
43 |
44 | [dev-dependencies.bevy]
45 | version = "0.15.0"
46 | features = [
47 | "bevy_gizmos",
48 | "bevy_gltf",
49 | "bevy_scene",
50 | "bevy_text",
51 | "bevy_ui",
52 | "bevy_winit",
53 | "default_font",
54 | "multi_threaded",
55 | "jpeg",
56 | "ktx2",
57 | "tonemapping_luts",
58 | "x11",
59 | "zstd",
60 | ]
61 | # TODO: workaround for https://github.com/bevyengine/bevy/issues/16562
62 | default-features = true
63 |
--------------------------------------------------------------------------------
/LICENSE-APACHE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/LICENSE-MIT:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2024 Aevyrie
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | # bevy_editor_cam
4 |
5 | A production-ready camera controller for 2D/3D editors and CAD.
6 |
7 | [![CI](https://github.com/aevyrie/bevy_editor_cam/actions/workflows/rust.yml/badge.svg?branch=main)](https://github.com/aevyrie/bevy_editor_cam/actions/workflows/rust.yml)
8 | [![docs.rs](https://docs.rs/bevy_editor_cam/badge.svg)](https://docs.rs/bevy_editor_cam)
9 | [![crates.io](https://img.shields.io/crates/v/bevy_editor_cam.svg)](https://crates.io/crates/bevy_editor_cam)
10 |
11 | https://github.com/user-attachments/assets/58b270a9-7ae8-4466-9a8f-1fc8f0896590
12 |
13 |
14 |
15 |
16 | Bevy Version Support
17 |
18 | | bevy | bevy_editor_cam |
19 | | ---- | ---------------- |
20 | | 0.15 | 0.5 |
21 | | 0.14 | 0.3, 0.4 |
22 | | 0.13 | 0.2 |
23 | | 0.12 | 0.1 |
24 |
25 |
--------------------------------------------------------------------------------
/assets/environment_maps/diffuse_rgb9e5_zstd.ktx2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aevyrie/bevy_editor_cam/e2bf53593c3745d6db610bdbc1d0fb9039c5cadf/assets/environment_maps/diffuse_rgb9e5_zstd.ktx2
--------------------------------------------------------------------------------
/assets/environment_maps/specular_rgb9e5_zstd.ktx2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aevyrie/bevy_editor_cam/e2bf53593c3745d6db610bdbc1d0fb9039c5cadf/assets/environment_maps/specular_rgb9e5_zstd.ktx2
--------------------------------------------------------------------------------
/assets/models/PlaneEngine/license.txt:
--------------------------------------------------------------------------------
1 | Model Information:
2 | * title: Plane Engine
3 | * source: https://sketchfab.com/3d-models/plane-engine-3a3e71ec7f6e4f24963f63cf6bd22358
4 | * author: T-FLEX CAD ST (Free) (https://sketchfab.com/tflexcad)
5 |
6 | Model License:
7 | * license type: CC-BY-NC-4.0 (http://creativecommons.org/licenses/by-nc/4.0/)
8 | * requirements: Author must be credited. No commercial use.
9 |
10 | If you use this 3D model in your project be sure to copy paste this credit wherever you share it:
11 | This work is based on "Plane Engine" (https://sketchfab.com/3d-models/plane-engine-3a3e71ec7f6e4f24963f63cf6bd22358) by T-FLEX CAD ST (Free) (https://sketchfab.com/tflexcad) licensed under CC-BY-NC-4.0 (http://creativecommons.org/licenses/by-nc/4.0/)
--------------------------------------------------------------------------------
/assets/models/PlaneEngine/scene.bin:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aevyrie/bevy_editor_cam/e2bf53593c3745d6db610bdbc1d0fb9039c5cadf/assets/models/PlaneEngine/scene.bin
--------------------------------------------------------------------------------
/assets/models/PlaneEngine/textures/AppC7_baseColor.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aevyrie/bevy_editor_cam/e2bf53593c3745d6db610bdbc1d0fb9039c5cadf/assets/models/PlaneEngine/textures/AppC7_baseColor.jpeg
--------------------------------------------------------------------------------
/examples/cad.rs:
--------------------------------------------------------------------------------
1 | use std::time::Duration;
2 |
3 | use bevy::{
4 | core_pipeline::{bloom::Bloom, tonemapping::Tonemapping},
5 | pbr::ScreenSpaceAmbientOcclusion,
6 | prelude::*,
7 | render::primitives::Aabb,
8 | utils::Instant,
9 | window::RequestRedraw,
10 | };
11 | use bevy_core_pipeline::smaa::Smaa;
12 | use bevy_editor_cam::{
13 | extensions::{dolly_zoom::DollyZoomTrigger, look_to::LookToTrigger},
14 | prelude::*,
15 | };
16 |
17 | fn main() {
18 | App::new()
19 | .add_plugins((
20 | DefaultPlugins,
21 | DefaultEditorCamPlugins,
22 | MeshPickingPlugin,
23 | bevy_framepace::FramepacePlugin,
24 | ))
25 | // The camera controller works with reactive rendering:
26 | // .insert_resource(bevy::winit::WinitSettings::desktop_app())
27 | .insert_resource(AmbientLight::NONE)
28 | .add_systems(Startup, setup)
29 | .add_systems(
30 | Update,
31 | (
32 | toggle_projection,
33 | toggle_constraint,
34 | explode,
35 | switch_direction,
36 | )
37 | .chain(),
38 | )
39 | .run();
40 | }
41 |
42 | fn setup(mut commands: Commands, asset_server: Res) {
43 | let diffuse_map = asset_server.load("environment_maps/diffuse_rgb9e5_zstd.ktx2");
44 | let specular_map = asset_server.load("environment_maps/specular_rgb9e5_zstd.ktx2");
45 |
46 | commands.spawn((
47 | SceneRoot(asset_server.load("models/PlaneEngine/scene.gltf#Scene0")),
48 | Transform::from_scale(Vec3::splat(2.0)),
49 | ));
50 |
51 | let cam_trans = Transform::from_xyz(2.0, 2.0, 2.0).looking_at(Vec3::ZERO, Vec3::Y);
52 | let camera = commands
53 | .spawn((
54 | Camera3d::default(),
55 | Camera {
56 | hdr: true,
57 | ..Default::default()
58 | },
59 | cam_trans,
60 | Tonemapping::AcesFitted,
61 | Bloom::default(),
62 | EnvironmentMapLight {
63 | intensity: 1000.0,
64 | diffuse_map: diffuse_map.clone(),
65 | specular_map: specular_map.clone(),
66 | ..Default::default()
67 | },
68 | EditorCam {
69 | orbit_constraint: OrbitConstraint::Free,
70 | last_anchor_depth: -cam_trans.translation.length() as f64,
71 | orthographic: projections::OrthographicSettings {
72 | scale_to_near_clip: 1_000_f32, // Needed for SSAO to work in ortho
73 | ..Default::default()
74 | },
75 | ..Default::default()
76 | },
77 | ScreenSpaceAmbientOcclusion::default(),
78 | Smaa::default(),
79 | Msaa::Off,
80 | ))
81 | .id();
82 |
83 | setup_ui(commands, camera);
84 | }
85 |
86 | fn toggle_projection(
87 | keys: Res>,
88 | mut dolly: EventWriter,
89 | cam: Query>,
90 | mut toggled: Local,
91 | ) {
92 | if keys.just_pressed(KeyCode::KeyP) {
93 | *toggled = !*toggled;
94 | let target_projection = if *toggled {
95 | Projection::Orthographic(OrthographicProjection::default_3d())
96 | } else {
97 | Projection::Perspective(PerspectiveProjection::default())
98 | };
99 | dolly.send(DollyZoomTrigger {
100 | target_projection,
101 | camera: cam.single(),
102 | });
103 | }
104 | }
105 |
106 | fn toggle_constraint(
107 | keys: Res>,
108 | mut cam: Query<(Entity, &Transform, &mut EditorCam)>,
109 | mut look_to: EventWriter,
110 | ) {
111 | if keys.just_pressed(KeyCode::KeyC) {
112 | let (entity, transform, mut editor) = cam.single_mut();
113 | match editor.orbit_constraint {
114 | OrbitConstraint::Fixed { .. } => editor.orbit_constraint = OrbitConstraint::Free,
115 | OrbitConstraint::Free => {
116 | editor.orbit_constraint = OrbitConstraint::Fixed {
117 | up: Vec3::Y,
118 | can_pass_tdc: false,
119 | };
120 |
121 | look_to.send(LookToTrigger::auto_snap_up_direction(
122 | transform.forward(),
123 | entity,
124 | transform,
125 | editor.as_ref(),
126 | ));
127 | }
128 | };
129 | }
130 | }
131 |
132 | fn switch_direction(
133 | keys: Res>,
134 | mut look_to: EventWriter,
135 | cam: Query<(Entity, &Transform, &EditorCam)>,
136 | ) {
137 | let (camera, transform, editor) = cam.single();
138 | if keys.just_pressed(KeyCode::Digit1) {
139 | look_to.send(LookToTrigger::auto_snap_up_direction(
140 | Dir3::X,
141 | camera,
142 | transform,
143 | editor,
144 | ));
145 | }
146 | if keys.just_pressed(KeyCode::Digit2) {
147 | look_to.send(LookToTrigger::auto_snap_up_direction(
148 | Dir3::Z,
149 | camera,
150 | transform,
151 | editor,
152 | ));
153 | }
154 | if keys.just_pressed(KeyCode::Digit3) {
155 | look_to.send(LookToTrigger::auto_snap_up_direction(
156 | Dir3::NEG_X,
157 | camera,
158 | transform,
159 | editor,
160 | ));
161 | }
162 | if keys.just_pressed(KeyCode::Digit4) {
163 | look_to.send(LookToTrigger::auto_snap_up_direction(
164 | Dir3::NEG_Z,
165 | camera,
166 | transform,
167 | editor,
168 | ));
169 | }
170 | if keys.just_pressed(KeyCode::Digit5) {
171 | look_to.send(LookToTrigger::auto_snap_up_direction(
172 | Dir3::Y,
173 | camera,
174 | transform,
175 | editor,
176 | ));
177 | }
178 | if keys.just_pressed(KeyCode::Digit6) {
179 | look_to.send(LookToTrigger::auto_snap_up_direction(
180 | Dir3::NEG_Y,
181 | camera,
182 | transform,
183 | editor,
184 | ));
185 | }
186 | }
187 |
188 | fn setup_ui(mut commands: Commands, camera: Entity) {
189 | let text = indoc::indoc! {"
190 | Left Mouse - Pan
191 | Right Mouse - Orbit
192 | Scroll - Zoom
193 | P - Toggle projection
194 | C - Toggle orbit constraint
195 | E - Toggle explode
196 | 1-6 - Switch direction
197 | "};
198 | commands.spawn((
199 | Text::new(text),
200 | TextFont {
201 | font_size: 20.0,
202 | ..default()
203 | },
204 | Node {
205 | margin: UiRect::all(Val::Px(20.0)),
206 | ..Default::default()
207 | },
208 | TargetCamera(camera),
209 | ));
210 | }
211 |
212 | #[derive(Component)]
213 | struct StartPos(f32);
214 |
215 | #[allow(clippy::type_complexity)]
216 | fn explode(
217 | mut commands: Commands,
218 | keys: Res>,
219 | mut toggle: Local>,
220 | mut explode_amount: Local,
221 | mut redraw: EventWriter,
222 | mut parts: Query<(Entity, &mut Transform, &Aabb, Option<&StartPos>), With>,
223 | mut matls: ResMut>,
224 | ) {
225 | let animation = Duration::from_millis(2000);
226 | if keys.just_pressed(KeyCode::KeyE) {
227 | let new = if let Some((last, ..)) = *toggle {
228 | !last
229 | } else {
230 | true
231 | };
232 | *toggle = Some((new, Instant::now(), *explode_amount));
233 | }
234 | if let Some((toggled, start, start_amount)) = *toggle {
235 | let goal_amount = toggled as usize as f32;
236 | let t = (start.elapsed().as_secs_f32() / animation.as_secs_f32()).clamp(0.0, 1.0);
237 | let progress = CubicSegment::new_bezier((0.25, 0.1), (0.25, 1.0)).ease(t);
238 | *explode_amount = start_amount + (goal_amount - start_amount) * progress;
239 | for (part, mut transform, aabb, start) in &mut parts {
240 | let start = if let Some(start) = start {
241 | start.0
242 | } else {
243 | let start = aabb.max().y;
244 | commands.entity(part).insert(StartPos(start));
245 | start
246 | };
247 | transform.translation.y = *explode_amount * (start) * 2.0;
248 | }
249 | if t < 1.0 {
250 | redraw.send(RequestRedraw);
251 | }
252 | }
253 | for (_, matl) in matls.iter_mut() {
254 | matl.perceptual_roughness = matl.perceptual_roughness.clamp(0.3, 1.0)
255 | }
256 | }
257 |
--------------------------------------------------------------------------------
/examples/floating_origin.rs:
--------------------------------------------------------------------------------
1 | use bevy::{color::palettes, prelude::*};
2 | use bevy_editor_cam::{
3 | controller::component::EditorCam,
4 | prelude::{projections::PerspectiveSettings, zoom::ZoomLimits},
5 | DefaultEditorCamPlugins,
6 | };
7 | use big_space::{
8 | commands::BigSpaceCommands,
9 | reference_frame::{local_origin::ReferenceFrames, ReferenceFrame},
10 | world_query::{GridTransformReadOnly, GridTransformReadOnlyItem},
11 | FloatingOrigin, GridCell,
12 | };
13 |
14 | fn main() {
15 | App::new()
16 | .add_plugins((
17 | DefaultPlugins.build().disable::(),
18 | MeshPickingPlugin,
19 | big_space::BigSpacePlugin::::default(),
20 | big_space::debug::FloatingOriginDebugPlugin::::default(),
21 | bevy_framepace::FramepacePlugin,
22 | ))
23 | .add_plugins(DefaultEditorCamPlugins)
24 | .insert_resource(ClearColor(Color::BLACK))
25 | .insert_resource(AmbientLight {
26 | color: Color::WHITE,
27 | brightness: 20.0,
28 | })
29 | .add_systems(Startup, (setup, ui_setup))
30 | .add_systems(PreUpdate, ui_text_system)
31 | .run();
32 | }
33 |
34 | fn setup(
35 | mut commands: Commands,
36 | mut meshes: ResMut>,
37 | mut materials: ResMut>,
38 | ) {
39 | commands.spawn_big_space(ReferenceFrame::::default(), |root| {
40 | root.spawn_spatial((
41 | Camera3d::default(),
42 | Transform::from_xyz(0.0, 0.0, 8.0).looking_at(Vec3::new(0.0, 0.0, 0.0), Vec3::Y),
43 | Projection::Perspective(PerspectiveProjection {
44 | near: 1e-18,
45 | ..default()
46 | }),
47 | FloatingOrigin, // Important: marks the floating origin entity for rendering.
48 | EditorCam {
49 | zoom_limits: ZoomLimits {
50 | min_size_per_pixel: 1e-20,
51 | ..Default::default()
52 | },
53 | perspective: PerspectiveSettings {
54 | near_clip_limits: 1e-20..0.1,
55 | ..Default::default()
56 | },
57 | ..Default::default()
58 | },
59 | ));
60 |
61 | let mesh_handle = meshes.add(Sphere::new(0.5).mesh().ico(32).unwrap());
62 | let matl_handle = materials.add(StandardMaterial {
63 | base_color: Color::Srgba(palettes::basic::BLUE),
64 | perceptual_roughness: 0.8,
65 | reflectance: 1.0,
66 | ..default()
67 | });
68 |
69 | let mut translation = Vec3::ZERO;
70 | for i in -16..=27 {
71 | let j = 10_f32.powf(i as f32);
72 | let k = 10_f32.powf((i - 1) as f32);
73 | translation.x += j / 2.0 + k;
74 | translation.y = j / 2.0;
75 |
76 | root.spawn_spatial((
77 | Mesh3d(mesh_handle.clone()),
78 | MeshMaterial3d(matl_handle.clone()),
79 | Transform::from_scale(Vec3::splat(j)).with_translation(translation),
80 | ));
81 | }
82 |
83 | // light
84 | root.spawn_spatial(DirectionalLight {
85 | illuminance: 10_000.0,
86 | ..default()
87 | });
88 | });
89 | }
90 |
91 | #[derive(Component, Reflect)]
92 | pub struct BigSpaceDebugText;
93 |
94 | #[derive(Component, Reflect)]
95 | pub struct FunFactText;
96 |
97 | fn ui_setup(mut commands: Commands) {
98 | commands.spawn((
99 | Text::new(""),
100 | TextFont {
101 | font_size: 18.0,
102 | ..default()
103 | },
104 | Node {
105 | margin: UiRect::all(Val::Px(20.0)),
106 | ..Default::default()
107 | },
108 | TextColor(Color::WHITE),
109 | BigSpaceDebugText,
110 | ));
111 | commands.spawn((
112 | Text::new(""),
113 | TextFont {
114 | font_size: 52.0,
115 | ..default()
116 | },
117 | Node {
118 | margin: UiRect::all(Val::Px(20.0)),
119 | ..Default::default()
120 | },
121 | TextColor(Color::WHITE),
122 | FunFactText,
123 | ));
124 | }
125 |
126 | #[allow(clippy::type_complexity)]
127 | fn ui_text_system(
128 | mut debug_text: Query<
129 | (&mut Text, &GlobalTransform),
130 | (With, Without),
131 | >,
132 | ref_frames: ReferenceFrames,
133 | origin: Query<(Entity, GridTransformReadOnly), With>,
134 | ) {
135 | let (origin_entity, origin_pos) = origin.single();
136 | let Some(ref_frame) = ref_frames.parent_frame(origin_entity) else {
137 | return;
138 | };
139 |
140 | let mut debug_text = debug_text.single_mut();
141 | *debug_text.0 = Text::new(ui_text(ref_frame, &origin_pos));
142 | }
143 |
144 | fn ui_text(
145 | ref_frame: &ReferenceFrame,
146 | origin_pos: &GridTransformReadOnlyItem,
147 | ) -> String {
148 | let GridCell {
149 | x: cx,
150 | y: cy,
151 | z: cz,
152 | } = origin_pos.cell;
153 | let [tx, ty, tz] = origin_pos.transform.translation.into();
154 | let [dx, dy, dz] = ref_frame
155 | .grid_position_double(origin_pos.cell, origin_pos.transform)
156 | .into();
157 | let [sx, sy, sz] = [dx as f32, dy as f32, dz as f32];
158 |
159 | indoc::formatdoc! {"
160 | GridCell: {cx}x, {cy}y, {cz}z
161 | Transform: {tx}x, {ty}y, {tz}z
162 | Combined (f64): {dx}x, {dy}y, {dz}z
163 | Combined (f32): {sx}x, {sy}y, {sz}z
164 | "}
165 | }
166 |
--------------------------------------------------------------------------------
/examples/map.rs:
--------------------------------------------------------------------------------
1 | use bevy::{color::palettes, prelude::*};
2 | use bevy_editor_cam::{extensions::dolly_zoom::DollyZoomTrigger, prelude::*};
3 | use rand::Rng;
4 |
5 | fn main() {
6 | App::new()
7 | .add_plugins((
8 | DefaultPlugins,
9 | MeshPickingPlugin,
10 | DefaultEditorCamPlugins,
11 | bevy_framepace::FramepacePlugin,
12 | ))
13 | .add_systems(Startup, (setup, setup_ui))
14 | .add_systems(Update, toggle_projection)
15 | .run();
16 | }
17 |
18 | fn setup(
19 | mut commands: Commands,
20 | asset_server: Res,
21 | mut meshes: ResMut>,
22 | mut matls: ResMut>,
23 | ) {
24 | spawn_buildings(&mut commands, &mut meshes, &mut matls, 20.0);
25 |
26 | let diffuse_map = asset_server.load("environment_maps/diffuse_rgb9e5_zstd.ktx2");
27 | let specular_map = asset_server.load("environment_maps/specular_rgb9e5_zstd.ktx2");
28 | let translation = Vec3::new(7.0, 7.0, 7.0);
29 |
30 | commands.spawn((
31 | Camera3d::default(),
32 | Transform::from_translation(translation).looking_at(Vec3::ZERO, Vec3::Y),
33 | EnvironmentMapLight {
34 | intensity: 1000.0,
35 | diffuse_map: diffuse_map.clone(),
36 | specular_map: specular_map.clone(),
37 | rotation: default(),
38 | },
39 | EditorCam {
40 | orbit_constraint: OrbitConstraint::Fixed {
41 | up: Vec3::Y,
42 | can_pass_tdc: false,
43 | },
44 | last_anchor_depth: -translation.length() as f64,
45 | ..Default::default()
46 | },
47 | bevy_editor_cam::extensions::independent_skybox::IndependentSkybox::new(
48 | diffuse_map,
49 | 1000.0,
50 | default(),
51 | ),
52 | ));
53 | }
54 |
55 | fn spawn_buildings(
56 | commands: &mut Commands,
57 | meshes: &mut Assets,
58 | matls: &mut Assets,
59 | half_width: f32,
60 | ) {
61 | commands.spawn((
62 | Mesh3d(meshes.add(Plane3d::new(Vec3::Y, Vec2::splat(half_width * 20.0)))),
63 | MeshMaterial3d(matls.add(StandardMaterial {
64 | base_color: Color::Srgba(palettes::css::DARK_GRAY),
65 | ..Default::default()
66 | })),
67 | Transform::from_xyz(0.0, 0.0, 0.0),
68 | ));
69 |
70 | let mut rng = rand::thread_rng();
71 | let mesh = meshes.add(Cuboid::default());
72 | let material = [
73 | matls.add(Color::Srgba(palettes::css::GRAY)),
74 | matls.add(Color::srgb(0.3, 0.6, 0.8)),
75 | matls.add(Color::srgb(0.55, 0.4, 0.8)),
76 | matls.add(Color::srgb(0.8, 0.45, 0.5)),
77 | ];
78 |
79 | let w = half_width as isize;
80 | for x in -w..=w {
81 | for z in -w..=w {
82 | let x = x as f32 + rng.gen::() - 0.5;
83 | let z = z as f32 + rng.gen::() - 0.5;
84 | let y = rng.gen::() * rng.gen::() * rng.gen::() * rng.gen::();
85 | let y_scale = 1.02f32.powf(100.0 * y);
86 |
87 | commands.spawn((
88 | Mesh3d(mesh.clone()),
89 | MeshMaterial3d(material[rng.gen_range(0..material.len())].clone()),
90 | Transform::from_xyz(x, y_scale / 2.0, z).with_scale(Vec3::new(
91 | (rng.gen::() + 0.5) * 0.3,
92 | y_scale,
93 | (rng.gen::() + 0.5) * 0.3,
94 | )),
95 | ));
96 | }
97 | }
98 | }
99 |
100 | fn toggle_projection(
101 | keys: Res>,
102 | mut dolly: EventWriter,
103 | cam: Query>,
104 | mut toggled: Local,
105 | ) {
106 | if keys.just_pressed(KeyCode::KeyP) {
107 | *toggled = !*toggled;
108 | let target_projection = if *toggled {
109 | Projection::Orthographic(OrthographicProjection::default_3d())
110 | } else {
111 | Projection::Perspective(PerspectiveProjection::default())
112 | };
113 | dolly.send(DollyZoomTrigger {
114 | target_projection,
115 | camera: cam.single(),
116 | });
117 | }
118 | }
119 |
120 | fn setup_ui(mut commands: Commands) {
121 | let text = indoc::indoc! {"
122 | Left Mouse - Pan
123 | Right Mouse - Orbit
124 | Scroll - Zoom
125 | P - Toggle projection
126 | "};
127 | commands.spawn((
128 | Text::new(text),
129 | TextFont {
130 | font_size: 20.0,
131 | ..default()
132 | },
133 | Node {
134 | margin: UiRect::all(Val::Px(20.0)),
135 | ..Default::default()
136 | },
137 | ));
138 | }
139 |
--------------------------------------------------------------------------------
/examples/minimal.rs:
--------------------------------------------------------------------------------
1 | //! A minimal example showing the steps needed to get started with the plugin.
2 |
3 | use bevy::prelude::*;
4 | use bevy_editor_cam::prelude::*;
5 |
6 | fn main() {
7 | App::new()
8 | .add_plugins((
9 | DefaultPlugins,
10 | MeshPickingPlugin, // Step 0: enable some picking backends for hit detection
11 | DefaultEditorCamPlugins, // Step 1: Add camera controller plugin
12 | ))
13 | .add_systems(Startup, setup)
14 | .run();
15 | }
16 |
17 | fn setup(mut commands: Commands, asset_server: Res) {
18 | commands.spawn((
19 | Camera3d::default(),
20 | EditorCam::default(), // Step 2: add camera controller component to any cameras
21 | EnvironmentMapLight {
22 | diffuse_map: asset_server.load("environment_maps/diffuse_rgb9e5_zstd.ktx2"),
23 | specular_map: asset_server.load("environment_maps/specular_rgb9e5_zstd.ktx2"),
24 | intensity: 500.0,
25 | ..default()
26 | },
27 | Transform::from_xyz(0.0, 0.0, 1.0),
28 | ));
29 | commands.spawn(SceneRoot(
30 | asset_server.load("models/PlaneEngine/scene.gltf#Scene0"),
31 | ));
32 | }
33 |
--------------------------------------------------------------------------------
/examples/ortho.rs:
--------------------------------------------------------------------------------
1 | use bevy::prelude::*;
2 | use bevy_editor_cam::prelude::*;
3 |
4 | fn main() {
5 | App::new()
6 | .add_plugins((
7 | DefaultPlugins,
8 | MeshPickingPlugin,
9 | DefaultEditorCamPlugins,
10 | bevy_framepace::FramepacePlugin,
11 | ))
12 | .add_systems(Startup, (setup, setup_ui))
13 | .run();
14 | }
15 |
16 | fn setup(mut commands: Commands, asset_server: Res) {
17 | let diffuse_map = asset_server.load("environment_maps/diffuse_rgb9e5_zstd.ktx2");
18 | let specular_map = asset_server.load("environment_maps/specular_rgb9e5_zstd.ktx2");
19 | let translation = Vec3::new(10.0, 10.0, 10.0);
20 |
21 | commands.spawn((
22 | Camera3d::default(),
23 | Transform::from_translation(translation).looking_at(Vec3::ZERO, Vec3::Y),
24 | Projection::Orthographic(OrthographicProjection {
25 | scale: 0.01,
26 | ..OrthographicProjection::default_3d()
27 | }),
28 | EnvironmentMapLight {
29 | intensity: 1000.0,
30 | diffuse_map: diffuse_map.clone(),
31 | specular_map: specular_map.clone(),
32 | rotation: default(),
33 | },
34 | // This component makes the camera controllable with this plugin.
35 | //
36 | // Important: the `with_initial_anchor_depth` is critical for an orthographic camera. Unlike
37 | // perspective, we can't rely on distant things being small to hide precision artifacts.
38 | // This means we need to be careful with the near and far plane of the camera, especially
39 | // because in orthographic, the depth precision is linear.
40 | //
41 | // This plugin uses the anchor (the point in space the user is interested in) to set the
42 | // orthographic scale, as well as the near and far planes. This can be a bit tricky if you
43 | // are unfamiliar with orthographic projections. Consider using an pseudo-ortho projection
44 | // (see `pseudo_ortho` example) if you don't need a true ortho projection.
45 | EditorCam::default().with_initial_anchor_depth(-translation.length() as f64),
46 | // This is an extension made specifically for orthographic cameras. Because an ortho camera
47 | // projection has no field of view, a skybox can't be sensibly rendered, only a single point
48 | // on the skybox would be visible to the camera at any given time. While this is technically
49 | // correct to what the camera would see, it is not visually helpful nor appealing. It is
50 | // common for CAD software to render a skybox with a field of view that is decoupled from
51 | // the camera field of view.
52 | bevy_editor_cam::extensions::independent_skybox::IndependentSkybox::new(
53 | diffuse_map,
54 | 500.0,
55 | default(),
56 | ),
57 | ));
58 |
59 | spawn_helmets(27, &asset_server, &mut commands);
60 | }
61 |
62 | fn spawn_helmets(n: usize, asset_server: &AssetServer, commands: &mut Commands) {
63 | let half_width = (((n as f32).powf(1.0 / 3.0) - 1.0) / 2.0) as i32;
64 | let scene = asset_server.load("models/PlaneEngine/scene.gltf#Scene0");
65 | let width = -half_width..=half_width;
66 | for x in width.clone() {
67 | for y in width.clone() {
68 | for z in width.clone() {
69 | commands.spawn((
70 | SceneRoot(scene.clone()),
71 | Transform::from_translation(IVec3::new(x, y, z).as_vec3() * 2.0)
72 | .with_scale(Vec3::splat(1.)),
73 | ));
74 | }
75 | }
76 | }
77 | }
78 |
79 | fn setup_ui(mut commands: Commands) {
80 | let text = indoc::indoc! {"
81 | Left Mouse - Pan
82 | Right Mouse - Orbit
83 | Scroll - Zoom
84 | "};
85 | commands.spawn((
86 | Text::new(text),
87 | TextFont {
88 | font_size: 20.0,
89 | ..default()
90 | },
91 | Node {
92 | margin: UiRect::all(Val::Px(20.0)),
93 | ..Default::default()
94 | },
95 | ));
96 | }
97 |
--------------------------------------------------------------------------------
/examples/split_screen.rs:
--------------------------------------------------------------------------------
1 | //! Renders two cameras to the same window to accomplish "split screen".
2 |
3 | use bevy::{
4 | core_pipeline::tonemapping::Tonemapping, prelude::*, render::camera::Viewport,
5 | window::WindowResized,
6 | };
7 | use bevy_editor_cam::prelude::*;
8 |
9 | fn main() {
10 | App::new()
11 | .add_plugins((
12 | DefaultPlugins,
13 | MeshPickingPlugin,
14 | DefaultEditorCamPlugins,
15 | bevy_framepace::FramepacePlugin,
16 | ))
17 | .add_systems(Startup, setup)
18 | .add_systems(Update, set_camera_viewports)
19 | .run();
20 | }
21 |
22 | /// set up a simple 3D scene
23 | fn setup(mut commands: Commands, asset_server: Res) {
24 | spawn_helmets(27, &asset_server, &mut commands);
25 |
26 | let diffuse_map = asset_server.load("environment_maps/diffuse_rgb9e5_zstd.ktx2");
27 | let specular_map = asset_server.load("environment_maps/specular_rgb9e5_zstd.ktx2");
28 |
29 | // Left Camera
30 | commands.spawn((
31 | Camera3d::default(),
32 | Transform::from_xyz(0.0, 2.0, -1.0).looking_at(Vec3::ZERO, Vec3::Y),
33 | Camera {
34 | hdr: true,
35 | clear_color: ClearColorConfig::None,
36 | ..default()
37 | },
38 | EnvironmentMapLight {
39 | intensity: 1000.0,
40 | diffuse_map: diffuse_map.clone(),
41 | specular_map: specular_map.clone(),
42 | rotation: default(),
43 | },
44 | EditorCam::default(),
45 | bevy_editor_cam::extensions::independent_skybox::IndependentSkybox::new(
46 | diffuse_map.clone(),
47 | 500.0,
48 | default(),
49 | ),
50 | LeftCamera,
51 | ));
52 |
53 | // Right Camera
54 | commands.spawn((
55 | Camera3d::default(),
56 | Transform::from_xyz(1.0, 1.0, 1.5).looking_at(Vec3::ZERO, Vec3::Y),
57 | Camera {
58 | // Renders the right camera after the left camera, which has a default priority of 0
59 | order: 10,
60 | hdr: true,
61 | // don't clear on the second camera because the first camera already cleared the window
62 | clear_color: ClearColorConfig::None,
63 | ..default()
64 | },
65 | Projection::Orthographic(OrthographicProjection {
66 | scale: 0.01,
67 | ..OrthographicProjection::default_3d()
68 | }),
69 | Tonemapping::AcesFitted,
70 | EnvironmentMapLight {
71 | intensity: 1000.0,
72 | diffuse_map: diffuse_map.clone(),
73 | specular_map: specular_map.clone(),
74 | rotation: default(),
75 | },
76 | EditorCam::default(),
77 | bevy_editor_cam::extensions::independent_skybox::IndependentSkybox::new(
78 | diffuse_map,
79 | 500.0,
80 | default(),
81 | ),
82 | RightCamera,
83 | ));
84 | }
85 |
/// Marker for the camera rendering the left half of the window.
#[derive(Component)]
struct LeftCamera;
88 |
/// Marker for the camera rendering the right half of the window.
#[derive(Component)]
struct RightCamera;
91 |
92 | fn set_camera_viewports(
93 | windows: Query<&Window>,
94 | mut resize_events: EventReader,
95 | mut left_camera: Query<&mut Camera, (With, Without)>,
96 | mut right_camera: Query<&mut Camera, With>,
97 | ) {
98 | // We need to dynamically resize the camera's viewports whenever the window size changes
99 | // so then each camera always takes up half the screen.
100 | // A resize_event is sent when the window is first created, allowing us to reuse this system for initial setup.
101 | for resize_event in resize_events.read() {
102 | let window = windows.get(resize_event.window).unwrap();
103 | let mut left_camera = left_camera.single_mut();
104 | left_camera.viewport = Some(Viewport {
105 | physical_position: UVec2::new(0, 0),
106 | physical_size: UVec2::new(
107 | window.resolution.physical_width() / 2,
108 | window.resolution.physical_height(),
109 | ),
110 | ..default()
111 | });
112 |
113 | let mut right_camera = right_camera.single_mut();
114 | right_camera.viewport = Some(Viewport {
115 | physical_position: UVec2::new(window.resolution.physical_width() / 2, 0),
116 | physical_size: UVec2::new(
117 | window.resolution.physical_width() / 2,
118 | window.resolution.physical_height(),
119 | ),
120 | ..default()
121 | });
122 | }
123 | }
124 |
125 | fn spawn_helmets(n: usize, asset_server: &AssetServer, commands: &mut Commands) {
126 | let half_width = (((n as f32).powf(1.0 / 3.0) - 1.0) / 2.0) as i32;
127 | let scene = asset_server.load("models/PlaneEngine/scene.gltf#Scene0");
128 | let width = -half_width..=half_width;
129 | for x in width.clone() {
130 | for y in width.clone() {
131 | for z in width.clone() {
132 | commands.spawn((
133 | SceneRoot(scene.clone()),
134 | Transform::from_translation(IVec3::new(x, y, z).as_vec3() * 2.0)
135 | .with_scale(Vec3::splat(1.)),
136 | ));
137 | }
138 | }
139 | }
140 | }
141 |
--------------------------------------------------------------------------------
/examples/zoom_limits.rs:
--------------------------------------------------------------------------------
1 | //! A minimal example demonstrating setting zoom limits and zooming through objects.
2 |
3 | use bevy::prelude::*;
4 | use bevy_editor_cam::{extensions::dolly_zoom::DollyZoomTrigger, prelude::*};
5 | use zoom::ZoomLimits;
6 |
7 | fn main() {
8 | App::new()
9 | .add_plugins((
10 | DefaultPlugins,
11 | MeshPickingPlugin,
12 | DefaultEditorCamPlugins,
13 | bevy_framepace::FramepacePlugin,
14 | ))
15 | .add_systems(Startup, (setup_camera, setup_scene, setup_ui))
16 | .add_systems(Update, (toggle_projection, toggle_zoom))
17 | .run();
18 | }
19 |
20 | fn setup_camera(mut commands: Commands, asset_server: Res) {
21 | commands.spawn((
22 | Camera3d::default(),
23 | EditorCam {
24 | zoom_limits: ZoomLimits {
25 | min_size_per_pixel: 0.0001,
26 | max_size_per_pixel: 0.01,
27 | zoom_through_objects: true,
28 | },
29 | ..default()
30 | },
31 | EnvironmentMapLight {
32 | intensity: 1000.0,
33 | diffuse_map: asset_server.load("environment_maps/diffuse_rgb9e5_zstd.ktx2"),
34 | specular_map: asset_server.load("environment_maps/specular_rgb9e5_zstd.ktx2"),
35 | rotation: default(),
36 | },
37 | ));
38 | }
39 |
40 | fn toggle_zoom(
41 | keys: Res>,
42 | mut cam: Query<&mut EditorCam>,
43 | mut text: Query<&mut Text>,
44 | ) {
45 | if keys.just_pressed(KeyCode::KeyZ) {
46 | let mut editor = cam.single_mut();
47 | editor.zoom_limits.zoom_through_objects = !editor.zoom_limits.zoom_through_objects;
48 | let mut text = text.single_mut();
49 | *text = Text::new(help_text(editor.zoom_limits.zoom_through_objects));
50 | }
51 | }
52 |
53 | //
54 | // --- The below code is not important for the example ---
55 | //
56 |
57 | fn setup_scene(
58 | mut commands: Commands,
59 | mut meshes: ResMut>,
60 | mut materials: ResMut>,
61 | ) {
62 | let material = materials.add(Color::srgba(0.1, 0.1, 0.9, 0.5));
63 | let mesh = meshes.add(Cuboid::from_size(Vec3::new(1.0, 1.0, 0.1)));
64 |
65 | for i in 1..5 {
66 | commands.spawn((
67 | Mesh3d(mesh.clone()),
68 | MeshMaterial3d(material.clone()),
69 | Transform::from_xyz(0.0, 0.0, -2.0 * i as f32),
70 | ));
71 | }
72 | }
73 |
74 | fn setup_ui(mut commands: Commands) {
75 | commands.spawn((
76 | Text::new(help_text(true)),
77 | TextFont {
78 | font_size: 20.0,
79 | ..default()
80 | },
81 | Node {
82 | margin: UiRect::all(Val::Px(20.0)),
83 | ..Default::default()
84 | },
85 | // TargetCamera(camera),
86 | ));
87 | }
88 |
/// Build the controls help string, reporting the current zoom-through state.
fn help_text(zoom_through: bool) -> String {
    // Same output as the original `formatdoc!` block after dedenting.
    format!(
        "Left Mouse - Pan\n\
         Right Mouse - Orbit\n\
         Scroll - Zoom\n\
         P - Toggle projection\n\
         Z - Toggle zoom through object setting\n\
         Zoom Through: {zoom_through}\n"
    )
}
99 |
100 | fn toggle_projection(
101 | keys: Res>,
102 | mut dolly: EventWriter,
103 | cam: Query>,
104 | mut toggled: Local,
105 | ) {
106 | if keys.just_pressed(KeyCode::KeyP) {
107 | *toggled = !*toggled;
108 | let target_projection = if *toggled {
109 | Projection::Orthographic(OrthographicProjection::default_3d())
110 | } else {
111 | Projection::Perspective(PerspectiveProjection::default())
112 | };
113 | dolly.send(DollyZoomTrigger {
114 | target_projection,
115 | camera: cam.single(),
116 | });
117 | }
118 | }
119 |
--------------------------------------------------------------------------------
/src/controller/component.rs:
--------------------------------------------------------------------------------
1 | //! The primary [`Component`] of the controller, [`EditorCam`].
2 |
3 | use std::{
4 | f32::consts::{FRAC_PI_2, PI},
5 | time::Duration,
6 | };
7 |
8 | use bevy_ecs::prelude::*;
9 | use bevy_log::prelude::*;
10 | use bevy_math::{prelude::*, DMat4, DQuat, DVec2, DVec3};
11 | use bevy_reflect::prelude::*;
12 | use bevy_render::prelude::*;
13 | use bevy_time::prelude::*;
14 | use bevy_transform::prelude::*;
15 | use bevy_utils::Instant;
16 | use bevy_window::RequestRedraw;
17 |
18 | use super::{
19 | inputs::MotionInputs,
20 | momentum::{Momentum, Velocity},
21 | motion::CurrentMotion,
22 | projections::{OrthographicSettings, PerspectiveSettings},
23 | smoothing::{InputQueue, Smoothing},
24 | zoom::ZoomLimits,
25 | };
26 |
/// Tracks all state of a camera's controller, including its inputs, motion, and settings.
///
/// See the documentation on the contained fields and types to learn more about each setting.
///
/// # Moving the Camera
///
/// The [`EditorCamPlugin`](crate::DefaultEditorCamPlugins) will automatically handle sending inputs
/// to the camera controller using [`bevy_picking`] to compute pointer hit locations for mouse,
/// touch, and pen inputs. The picking plugin allows you to specify your own picking backend, or
/// choose from a variety of provided backends. This is important because this camera controller
/// relies on depth information for each pointer, and using the picking plugin means it can do this
/// without forcing you into using a particular hit testing backend, e.g. raycasting, which is used
/// by default.
///
/// To move the camera manually:
///
/// 1. Start a camera motion using one of [`EditorCam::start_orbit`], [`EditorCam::start_pan`],
///    [`EditorCam::start_zoom`].
/// 2. While the motion should be active, send inputs with [`EditorCam::send_screenspace_input`] and
///    [`EditorCam::send_zoom_input`].
/// 3. When the motion should end, call [`EditorCam::end_move`].
#[derive(Debug, Clone, Reflect, Component)]
pub struct EditorCam {
    /// What input motions are currently allowed?
    pub enabled_motion: EnabledMotion,
    /// The type of camera orbit to use.
    pub orbit_constraint: OrbitConstraint,
    /// Set near and far zoom limits, as well as the ability to zoom through objects.
    pub zoom_limits: ZoomLimits,
    /// Input smoothing of camera motion.
    pub smoothing: Smoothing,
    /// Input sensitivity of the camera.
    pub sensitivity: Sensitivity,
    /// Amount of camera momentum after inputs have stopped.
    pub momentum: Momentum,
    /// How long should inputs attempting to start a new motion be ignored, after the last input
    /// ends? This is useful to prevent accidentally killing momentum when, for example, releasing a
    /// two finger right click on a trackpad triggers a scroll input.
    pub input_debounce: Duration,
    /// Settings used when the camera has a perspective [`Projection`].
    pub perspective: PerspectiveSettings,
    /// Settings used when the camera has an orthographic [`Projection`].
    pub orthographic: OrthographicSettings,
    /// Managed by the camera controller, though you may want to change this when spawning or
    /// manually moving the camera.
    ///
    /// This value is always negative: the anchor lies in front of the camera in view space.
    ///
    /// If the camera starts moving, but there is nothing under the pointer, the controller will
    /// rotate, pan, and zoom about a point in the direction the camera is facing, at this depth.
    /// This will be overwritten with the latest depth if a hit is found, to ensure the anchor point
    /// doesn't change suddenly if the user moves the pointer away from an object.
    pub last_anchor_depth: f64,
    /// Current camera motion. Managed by the camera controller, but exposed publicly to allow for
    /// overriding motion.
    pub current_motion: CurrentMotion,
}
82 |
83 | impl Default for EditorCam {
84 | fn default() -> Self {
85 | EditorCam {
86 | orbit_constraint: Default::default(),
87 | zoom_limits: Default::default(),
88 | smoothing: Default::default(),
89 | sensitivity: Default::default(),
90 | momentum: Default::default(),
91 | input_debounce: Duration::from_millis(80),
92 | perspective: Default::default(),
93 | orthographic: Default::default(),
94 | enabled_motion: Default::default(),
95 | current_motion: Default::default(),
96 | last_anchor_depth: -2.0,
97 | }
98 | }
99 | }
100 |
101 | impl EditorCam {
102 | /// Create a new editor camera component.
103 | pub fn new(
104 | orbit: OrbitConstraint,
105 | smoothness: Smoothing,
106 | sensitivity: Sensitivity,
107 | momentum: Momentum,
108 | initial_anchor_depth: f64,
109 | ) -> Self {
110 | Self {
111 | orbit_constraint: orbit,
112 | smoothing: smoothness,
113 | sensitivity,
114 | momentum,
115 | last_anchor_depth: initial_anchor_depth.abs() * -1.0, // ensure depth is correct sign
116 | ..Default::default()
117 | }
118 | }
119 |
120 | /// Set the initial anchor depth of the camera controller.
121 | pub fn with_initial_anchor_depth(self, initial_anchor_depth: f64) -> Self {
122 | Self {
123 | last_anchor_depth: initial_anchor_depth.abs() * -1.0, // ensure depth is correct sign
124 | ..self
125 | }
126 | }
127 |
128 | /// Gets the [`MotionInputs`], if the camera is being actively moved..
129 | pub fn motion_inputs(&self) -> Option<&MotionInputs> {
130 | match &self.current_motion {
131 | CurrentMotion::Stationary => None,
132 | CurrentMotion::Momentum { .. } => None,
133 | CurrentMotion::UserControlled { motion_inputs, .. } => Some(motion_inputs),
134 | }
135 | }
136 |
137 | /// Returns the best guess at an anchor point if none is provided.
138 | ///
139 | /// Updates the fallback value with the latest hit to ensure that if the camera starts orbiting
140 | /// again, but has no hit to anchor onto, the anchor doesn't suddenly change distance, which is
141 | /// what would happen if we used a fixed value.
142 | fn maybe_update_anchor(&mut self, anchor: Option) -> DVec3 {
143 | let anchor = anchor.unwrap_or(DVec3::new(0.0, 0.0, self.last_anchor_depth.abs() * -1.0));
144 | self.last_anchor_depth = anchor.z;
145 | anchor
146 | }
147 |
148 | /// Get the position of the anchor in the camera's view space.
149 | pub fn anchor_view_space(&self) -> Option {
150 | if let CurrentMotion::UserControlled { anchor, .. } = &self.current_motion {
151 | Some(*anchor)
152 | } else {
153 | None
154 | }
155 | }
156 |
157 | /// Get the position of the anchor in world space.
158 | pub fn anchor_world_space(&self, camera_transform: &GlobalTransform) -> Option {
159 | self.anchor_view_space().map(|anchor_view_space| {
160 | camera_transform
161 | .compute_matrix()
162 | .as_dmat4()
163 | .transform_point3(anchor_view_space)
164 | });
165 |
166 | self.anchor_view_space().map(|anchor_view_space| {
167 | let (_, r, t) = camera_transform.to_scale_rotation_translation();
168 | r.as_dquat() * anchor_view_space + t.as_dvec3()
169 | })
170 | }
171 |
172 | /// Should the camera controller prevent new motions from starting, because the user is actively
173 | /// operating the camera?
174 | ///
175 | /// This does not consider zooming as "actively controlled". This is needed because scroll input
176 | /// devices often have their own momentum, and can continue to provide inputs even when the user
177 | /// is not actively providing inputs, like a scroll wheel that keeps spinning, or a trackpad
178 | /// with smooth scrolling. Without this, the controller will feel unresponsive, as a user will
179 | /// be unable to initiate a new motion even though they are not technically providing an input.
180 | pub fn is_actively_controlled(&self) -> bool {
181 | !self.current_motion.is_zooming_only()
182 | && (self.current_motion.is_user_controlled()
183 | || self
184 | .current_motion
185 | .momentum_duration()
186 | .map(|duration| duration < self.input_debounce)
187 | .unwrap_or(false))
188 | }
189 |
190 | /// Call this to start an orbiting motion with the optionally supplied anchor position in view
191 | /// space. See [`EditorCam`] for usage.
192 | pub fn start_orbit(&mut self, anchor: Option) {
193 | if !self.enabled_motion.orbit {
194 | return;
195 | }
196 | self.current_motion = CurrentMotion::UserControlled {
197 | anchor: self.maybe_update_anchor(anchor),
198 | motion_inputs: MotionInputs::OrbitZoom {
199 | screenspace_inputs: InputQueue::default(),
200 | zoom_inputs: InputQueue::default(),
201 | },
202 | }
203 | }
204 |
205 | /// Call this to start an panning motion with the optionally supplied anchor position in view
206 | /// space. See [`EditorCam`] for usage.
207 | pub fn start_pan(&mut self, anchor: Option) {
208 | if !self.enabled_motion.pan {
209 | return;
210 | }
211 | self.current_motion = CurrentMotion::UserControlled {
212 | anchor: self.maybe_update_anchor(anchor),
213 | motion_inputs: MotionInputs::PanZoom {
214 | screenspace_inputs: InputQueue::default(),
215 | zoom_inputs: InputQueue::default(),
216 | },
217 | }
218 | }
219 |
220 | /// Call this to start a zooming motion with the optionally supplied anchor position in view
221 | /// space. See [`EditorCam`] for usage.
222 | pub fn start_zoom(&mut self, anchor: Option) {
223 | if !self.enabled_motion.zoom {
224 | return;
225 | }
226 | let anchor = self.maybe_update_anchor(anchor);
227 |
228 | // Inherit current camera velocity
229 | let zoom_inputs = match self.current_motion {
230 | CurrentMotion::Stationary | CurrentMotion::Momentum { .. } => InputQueue::default(),
231 | CurrentMotion::UserControlled {
232 | ref mut motion_inputs,
233 | ..
234 | } => InputQueue(motion_inputs.zoom_inputs_mut().0.drain(..).collect()),
235 | };
236 | self.current_motion = CurrentMotion::UserControlled {
237 | anchor,
238 | motion_inputs: MotionInputs::Zoom { zoom_inputs },
239 | }
240 | }
241 |
    /// Send screen space camera inputs. This will be interpreted as panning or orbiting depending
    /// on the current motion. See [`EditorCam`] for usage.
    ///
    /// Inputs are only accepted while a user-controlled motion is active; otherwise they are
    /// silently dropped.
    pub fn send_screenspace_input(&mut self, screenspace_input: Vec2) {
        if let CurrentMotion::UserControlled {
            ref mut motion_inputs,
            ..
        } = self.current_motion
        {
            match motion_inputs {
                MotionInputs::OrbitZoom {
                    screenspace_inputs: ref mut movement,
                    ..
                } => movement.process_input(screenspace_input, self.smoothing.orbit),
                MotionInputs::PanZoom {
                    screenspace_inputs: ref mut movement,
                    ..
                } => movement.process_input(screenspace_input, self.smoothing.pan),
                MotionInputs::Zoom { .. } => (), // When in zoom-only, screenspace (pan/orbit) input is ignored
            }
        }
    }
263 |
264 | /// Send zoom inputs. See [`EditorCam`] for usage.
265 | pub fn send_zoom_input(&mut self, zoom_amount: f32) {
266 | if let CurrentMotion::UserControlled { motion_inputs, .. } = &mut self.current_motion {
267 | motion_inputs
268 | .zoom_inputs_mut()
269 | .process_input(zoom_amount, self.smoothing.zoom)
270 | }
271 | }
272 |
    /// End the current camera motion, allowing other motions on this camera to begin. See
    /// [`EditorCam`] for usage.
    ///
    /// Converts the motion's accumulated inputs into a momentum [`Velocity`] so the camera
    /// coasts to a stop instead of halting abruptly.
    pub fn end_move(&mut self) {
        let velocity = match self.current_motion {
            // Nothing to end when no user-controlled motion is in progress.
            CurrentMotion::Stationary => return,
            CurrentMotion::Momentum { .. } => return,
            CurrentMotion::UserControlled {
                anchor,
                ref motion_inputs,
                ..
            } => match motion_inputs {
                MotionInputs::OrbitZoom { .. } => Velocity::Orbit {
                    anchor,
                    velocity: motion_inputs.orbit_momentum(self.momentum.init_orbit),
                },
                MotionInputs::PanZoom { .. } => Velocity::Pan {
                    anchor,
                    velocity: motion_inputs.pan_momentum(self.momentum.init_pan),
                },
                // Zoom-only motions do not carry momentum.
                MotionInputs::Zoom { .. } => Velocity::None,
            },
        };
        let momentum_start = Instant::now();
        self.current_motion = CurrentMotion::Momentum {
            velocity,
            momentum_start,
        };
    }
301 |
302 | /// Called once every frame to compute motions and update the transforms of all [`EditorCam`]s
303 | pub fn update_camera_positions(
304 | mut cameras: Query<(&mut EditorCam, &Camera, &mut Transform, &mut Projection)>,
305 | mut event: EventWriter,
306 | time: Res,
307 | ) {
308 | for (mut camera_controller, camera, ref mut transform, ref mut projection) in
309 | cameras.iter_mut()
310 | {
311 | let dt = time.delta();
312 | camera_controller
313 | .update_transform_and_projection(camera, transform, projection, &mut event, dt);
314 | }
315 | }
316 |
    /// Update this [`EditorCam`]'s transform and projection.
    ///
    /// This is the controller's per-frame workhorse: it decays or samples the current motion,
    /// converts the resulting pan/zoom/orbit values into camera movement, and records the latest
    /// anchor depth in `self.last_anchor_depth`. Returns early, touching nothing, when the
    /// camera is stationary or its momentum has fully decayed.
    //
    // NOTE(review): several generic parameters in this signature and body appear to have been
    // stripped during extraction (e.g. `EventWriter<...>`, the closure's `Option<...>` return
    // type) — confirm against the original source.
    pub fn update_transform_and_projection(
        &mut self,
        camera: &Camera,
        cam_transform: &mut Transform,
        projection: &mut Projection,
        redraw: &mut EventWriter,
        delta_time: Duration,
    ) {
        // Resolve the motion state into a view-space anchor plus this frame's orbit, pan, and
        // zoom amounts. Momentum supplies either orbit or pan (never zoom); user control can
        // supply all three, scaled by the sensitivity settings.
        let (anchor, orbit, pan, zoom) = match &mut self.current_motion {
            CurrentMotion::Stationary => return,
            CurrentMotion::Momentum {
                ref mut velocity, ..
            } => {
                velocity.decay(self.momentum, delta_time);
                match velocity {
                    Velocity::None => {
                        // Momentum has fully decayed; stop moving.
                        self.current_motion = CurrentMotion::Stationary;
                        return;
                    }
                    Velocity::Orbit { anchor, velocity } => (anchor, *velocity, DVec2::ZERO, 0.0),
                    Velocity::Pan { anchor, velocity } => (anchor, DVec2::ZERO, *velocity, 0.0),
                }
            }
            CurrentMotion::UserControlled {
                anchor,
                motion_inputs,
            } => (
                anchor,
                motion_inputs.smooth_orbit_velocity() * self.sensitivity.orbit.as_dvec2(),
                motion_inputs.smooth_pan_velocity(),
                motion_inputs.smooth_zoom_velocity() * self.sensitivity.zoom as f64,
            ),
        };

        // If there is no motion, we will have already early-exited.
        redraw.send(RequestRedraw);

        // Maps a one-pixel screen-space offset to a view-space offset at the given depth, for a
        // perspective projection. Returns None when the camera has no logical viewport.
        let screen_to_view_space_at_depth =
            |perspective: &PerspectiveProjection, depth: f64| -> Option {
                let target_size = camera.logical_viewport_size()?.as_dvec2();
                // This is a strange looking, but key part of the otherwise normal looking
                // screen-to-view transformation. What we are trying to do here is answer "if we
                // move by one pixel in x and y, how much distance do we cover in the world at the
                // specified depth?" Because the viewport position's origin is in the corner, we
                // need to halve the target size, and subtract one pixel. This gets us a viewport
                // position one pixel diagonal offset from the center of the screen.
                let mut viewport_position = target_size / 2.0 - 1.0;
                // Flip the y-coordinate origin from the top to the bottom.
                viewport_position.y = target_size.y - viewport_position.y;
                let ndc = viewport_position * 2. / target_size - DVec2::ONE;
                let ndc_to_view = DMat4::perspective_infinite_reverse_rh(
                    perspective.fov as f64,
                    perspective.aspect_ratio as f64,
                    perspective.near as f64,
                )
                .inverse(); // f64 version replaced .get_projection_matrix().as_dmat4().inverse();

                let view_near_plane = ndc_to_view.project_point3(ndc.extend(1.));
                // Using EPSILON because an ndc with Z = 0 returns NaNs.
                let view_far_plane = ndc_to_view.project_point3(ndc.extend(f64::EPSILON));
                let direction = view_far_plane - view_near_plane;
                let depth_normalized_direction = direction / direction.z;
                let view_pos = depth_normalized_direction * depth;
                debug_assert_eq!(view_pos.z, depth);
                Some(view_pos.truncate())
            };

        // View-space distance covered by one pixel of pointer motion at the anchor depth. For
        // orthographic projections this is simply the projection scale (negated in x).
        let view_offset = match projection {
            Projection::Perspective(perspective) => {
                let Some(offset) = screen_to_view_space_at_depth(perspective, anchor.z) else {
                    error!("Malformed camera");
                    return;
                };
                offset
            }
            Projection::Orthographic(ortho) => DVec2::new(-ortho.scale as f64, ortho.scale as f64),
        };

        // Convert the screen-space pan input (pixels) into a view-space translation.
        let pan_translation_view_space = (pan * view_offset).extend(0.0);

        // World-space size of a single pixel at the anchor; used to enforce zoom limits and to
        // scale zoom speed.
        let size_at_anchor =
            super::zoom::length_per_pixel_at_view_space_pos(camera, *anchor).unwrap_or(0.0);

        // I'm not sure why I created this mapping - maybe it was to prevent zooming through
        // surfaces if the user really whipped the mouse:
        //
        // let zoom_unscaled = (zoom.abs() / 60.0)
        //     .powf(1.3); // Varies from 0 to 1 over x = [0..inf]
        // let zoom_input = (1.0 - 1.0 / (zoom_unscaled + 1.0)) * zoom.signum();
        //
        // It is roughly equivalent to just using
        // let zoom_input = zoom * 0.01;
        //
        // ...so I've opted to just factor this constant out of the other scaling constants below.
        //
        // I recall spending a lot of time on this mapping function, but for the life of me can't
        // remember why. Leaving this comment behind for a few releases, delete me if nothing
        // breaks.

        // The zoom input, bounded to prevent zooming past the limits.
        let zoom_bounded = if size_at_anchor <= self.zoom_limits.min_size_per_pixel {
            zoom.min(0.0) // Prevent zooming in further
        } else if size_at_anchor >= self.zoom_limits.max_size_per_pixel {
            zoom.max(0.0) // Prevent zooming out further
        } else {
            zoom
        };

        let zoom_translation_view_space = match projection {
            Projection::Perspective(perspective) => {
                let zoom_amount = if self.zoom_limits.zoom_through_objects {
                    // Clamp the zoom speed at the limits
                    zoom * size_at_anchor.clamp(
                        self.zoom_limits.min_size_per_pixel,
                        self.zoom_limits.max_size_per_pixel,
                    ) as f64
                } else {
                    // If we cannot zoom through objects, use the bounded input
                    zoom_bounded * size_at_anchor as f64
                };
                // Scale this with the perspective FOV, so zoom speed feels the same regardless.
                anchor.normalize() * zoom_amount / perspective.fov as f64
            }
            Projection::Orthographic(ref mut ortho) => {
                // Constants are hand tuned to feel equivalent between perspective and ortho. Might
                // be a better way to do this correctly, if it matters.
                ortho.scale *= 1.0 - zoom_bounded as f32 * 0.0015;
                // We don't move the camera in z, as this is managed by another ortho system.
                anchor.normalize()
                    * zoom_bounded
                    * anchor.z.abs()
                    * 0.0015
                    * DVec3::new(1.0, 1.0, 0.0)
            }
        };

        // If we can zoom through objects, then scoot the anchor point forward when we hit the
        // limit. This prevents the anchor from getting closer to the camera than the minimum
        // distance, or worse, zooming past the anchor.
        if self.zoom_limits.zoom_through_objects
            && size_at_anchor < self.zoom_limits.min_size_per_pixel
            && matches!(projection, Projection::Perspective(_))
            && zoom > 0.0
        {
            *anchor += zoom_translation_view_space;
        }

        // Pan and zoom were computed in view space; rotate them by the camera's orientation to
        // apply them in world space.
        cam_transform.translation += (cam_transform.rotation.as_dquat()
            * (pan_translation_view_space + zoom_translation_view_space))
            .as_vec3();

        // The anchor is stored relative to the camera, so it moves opposite to the camera.
        *anchor -= pan_translation_view_space + zoom_translation_view_space;

        // Negate the horizontal orbit input before building the rotations below.
        let orbit = orbit * DVec2::new(-1.0, 1.0);
        // World-space anchor position, and the world-space orbit axis perpendicular to the
        // pointer motion (the axis is only used by the `Free` constraint).
        let anchor_world = cam_transform
            .compute_matrix()
            .as_dmat4()
            .transform_point3(*anchor);
        let orbit_dir = orbit.normalize().extend(0.0);
        let orbit_axis_world = cam_transform
            .rotation
            .as_dquat()
            .mul_vec3(orbit_dir.cross(DVec3::NEG_Z).normalize())
            .normalize();

        let rotate_around = |transform: &mut Transform, point: DVec3, rotation: DQuat| {
            // Following lines are f64 versions of Transform::rotate_around
            transform.translation =
                (point + rotation * (transform.translation.as_dvec3() - point)).as_vec3();
            transform.rotation = (rotation * transform.rotation.as_dquat())
                .as_quat()
                .normalize();
        };

        // Scales screenspace orbit input into a rotation angle, in radians.
        let orbit_multiplier = 0.005;
        if orbit.is_finite() && orbit.length() != 0.0 {
            match self.orbit_constraint {
                OrbitConstraint::Fixed { up, can_pass_tdc } => {
                    let epsilon = 1e-3;
                    let motion_threshold = 1e-5;

                    // Angles from the camera's forward direction to bottom and top dead center.
                    let angle_to_bdc = cam_transform.forward().angle_between(up) as f64;
                    let angle_to_tdc = cam_transform.forward().angle_between(-up) as f64;
                    // Clamp pitch so the camera stops just short of the poles, unless passing
                    // over top dead center is allowed.
                    let pitch_angle = {
                        let desired_rotation = orbit.y * orbit_multiplier;
                        if can_pass_tdc {
                            desired_rotation
                        } else if desired_rotation >= 0.0 {
                            desired_rotation.min(angle_to_tdc - (epsilon as f64).min(angle_to_tdc))
                        } else {
                            desired_rotation.max(-angle_to_bdc + (epsilon as f64).min(angle_to_bdc))
                        }
                    };
                    let pitch = if pitch_angle.abs() <= motion_threshold {
                        DQuat::IDENTITY
                    } else {
                        DQuat::from_axis_angle(cam_transform.left().as_dvec3(), pitch_angle)
                    };

                    // Yaw always rotates about the fixed up axis.
                    let yaw_angle = orbit.x * orbit_multiplier;
                    let yaw = if yaw_angle.abs() <= motion_threshold {
                        DQuat::IDENTITY
                    } else {
                        DQuat::from_axis_angle(up.as_dvec3(), yaw_angle)
                    };

                    // Skip whichever rotations are identity to avoid needless work.
                    match [pitch == DQuat::IDENTITY, yaw == DQuat::IDENTITY] {
                        [true, true] => (),
                        [true, false] => rotate_around(cam_transform, anchor_world, yaw),
                        [false, true] => rotate_around(cam_transform, anchor_world, pitch),
                        [false, false] => rotate_around(cam_transform, anchor_world, yaw * pitch),
                    };

                    let how_upright = cam_transform.up().angle_between(up).abs();
                    // Orient the camera so up always points up (roll).
                    if how_upright > epsilon && how_upright < FRAC_PI_2 - epsilon {
                        cam_transform.look_to(cam_transform.forward(), up);
                    } else if how_upright > FRAC_PI_2 + epsilon && how_upright < PI - epsilon {
                        cam_transform.look_to(cam_transform.forward(), -up);
                    }
                }
                OrbitConstraint::Free => {
                    // Rotate about the axis perpendicular to the pointer motion, by an angle
                    // proportional to the drag distance.
                    let rotation =
                        DQuat::from_axis_angle(orbit_axis_world, orbit.length() * orbit_multiplier);
                    rotate_around(cam_transform, anchor_world, rotation);
                }
            }
        }

        // Record the depth for systems that run after this (e.g. the clip-plane updates in the
        // projections module read `last_anchor_depth`).
        self.last_anchor_depth = anchor.z;
    }
549 |
550 | /// Compute the world space size of a pixel at the anchor.
551 | ///
552 | /// This is a robust alternative to using the distance of the camera from the anchor point.
553 | /// Camera distance is not directly related to how large something is on screen - that depends
554 | /// on the camera projection.
555 | ///
556 | /// This function correctly accounts for camera projection, and is particularly useful when
557 | /// doing zoom and scale calculations.
558 | pub fn length_per_pixel_at_anchor(&self, camera: &Camera) -> Option {
559 | let anchor_view = self.anchor_view_space()?;
560 | super::zoom::length_per_pixel_at_view_space_pos(camera, anchor_view)
561 | }
562 |
563 | /// The last known anchor depth. This value will always be negative.
564 | pub fn last_anchor_depth(&self) -> f64 {
565 | self.last_anchor_depth.abs() * -1.0
566 | }
567 | }
568 |
/// Settings that define how camera orbit behaves.
///
/// A fixed up direction produces turntable-style orbiting (yaw about `up`, pitch limited near
/// the poles), while a free up direction rotates about an axis perpendicular to the pointer
/// motion.
#[derive(Debug, Clone, Copy, Reflect)]
pub enum OrbitConstraint {
    /// The camera's up direction is fixed.
    Fixed {
        /// The camera's up direction must always be parallel with this unit vector.
        up: Vec3,
        /// Should the camera be allowed to pass over top dead center (TDC), making the camera
        /// upside down compared to the up direction?
        can_pass_tdc: bool,
    },
    /// The camera's up direction is free.
    Free,
}
583 |
584 | impl Default for OrbitConstraint {
585 | fn default() -> Self {
586 | Self::Fixed {
587 | up: Vec3::Y,
588 | can_pass_tdc: false,
589 | }
590 | }
591 | }
592 |
/// The sensitivity of the camera controller to inputs.
#[derive(Debug, Clone, Copy, Reflect)]
pub struct Sensitivity {
    /// X/Y sensitivity of orbit inputs, multiplied.
    ///
    /// Applied componentwise to the smoothed orbit velocity; values above `1.0` speed orbiting
    /// up, values below slow it down.
    pub orbit: Vec2,
    /// Sensitivity of zoom inputs, multiplied.
    pub zoom: f32,
}
601 |
602 | impl Default for Sensitivity {
603 | fn default() -> Self {
604 | Self {
605 | orbit: Vec2::splat(1.0),
606 | zoom: 1.0,
607 | }
608 | }
609 | }
610 |
/// Controls what kinds of motions are allowed to initiate. Does not affect momentum.
///
/// Disabling a motion here only prevents new motions of that kind from starting; motion already
/// in progress, including decaying momentum, is unaffected.
#[derive(Debug, Clone, Reflect)]
pub struct EnabledMotion {
    /// Should pan be enabled?
    pub pan: bool,
    /// Should orbit be enabled?
    pub orbit: bool,
    /// Should zoom be enabled?
    pub zoom: bool,
}
621 |
622 | impl Default for EnabledMotion {
623 | fn default() -> Self {
624 | Self {
625 | pan: true,
626 | orbit: true,
627 | zoom: true,
628 | }
629 | }
630 | }
631 |
--------------------------------------------------------------------------------
/src/controller/inputs.rs:
--------------------------------------------------------------------------------
1 | //! Defines mutually exclusive camera input motions, and a place to store these input streams.
2 |
3 | use std::time::Duration;
4 |
5 | use bevy_math::{prelude::*, DVec2};
6 | use bevy_reflect::prelude::*;
7 |
8 | use super::smoothing::InputQueue;
9 |
/// Tracks the current exclusive motion type and input queue of the camera controller.
///
/// The variants are mutually exclusive: pan and orbit can never be active at the same time, but
/// zoom is available alongside either.
// NOTE(review): the `InputQueue` fields below appear to have lost their generic parameters
// during extraction (likely `InputQueue<Vec2>` for screenspace inputs and `InputQueue<f32>` for
// zoom, judging by how they are read) — confirm against the original source.
#[derive(Debug, Clone, Reflect)]
pub enum MotionInputs {
    /// The camera can orbit and zoom
    OrbitZoom {
        /// A queue of screenspace orbiting inputs; usually the mouse drag vector.
        screenspace_inputs: InputQueue,
        /// A queue of zoom inputs.
        zoom_inputs: InputQueue,
    },
    /// The camera can pan and zoom
    PanZoom {
        /// A queue of screenspace panning inputs; usually the mouse drag vector.
        screenspace_inputs: InputQueue,
        /// A queue of zoom inputs.
        zoom_inputs: InputQueue,
    },
    /// The camera can only zoom
    Zoom {
        /// A queue of zoom inputs.
        zoom_inputs: InputQueue,
    },
}
33 |
34 | impl MotionInputs {
35 | /// The motion-conserving smoothed orbit velocity in screen space.
36 | pub fn smooth_orbit_velocity(&self) -> DVec2 {
37 | if let Self::OrbitZoom {
38 | screenspace_inputs, ..
39 | } = self
40 | {
41 | let value = screenspace_inputs
42 | .latest_smoothed()
43 | .unwrap_or(Vec2::ZERO)
44 | .as_dvec2();
45 | if value.is_finite() {
46 | value
47 | } else {
48 | DVec2::ZERO
49 | }
50 | } else {
51 | DVec2::ZERO
52 | }
53 | }
54 |
55 | /// The motion-conserving smoothed pan velocity in screen space.
56 | pub fn smooth_pan_velocity(&self) -> DVec2 {
57 | if let Self::PanZoom {
58 | screenspace_inputs, ..
59 | } = self
60 | {
61 | let value = screenspace_inputs
62 | .latest_smoothed()
63 | .unwrap_or(Vec2::ZERO)
64 | .as_dvec2();
65 | if value.is_finite() {
66 | value
67 | } else {
68 | DVec2::ZERO
69 | }
70 | } else {
71 | DVec2::ZERO
72 | }
73 | }
74 |
75 | /// Approximate orbit velocity over the last `window`. to use for momentum calculations.
76 | pub fn orbit_momentum(&self, window: Duration) -> DVec2 {
77 | if let Self::OrbitZoom {
78 | screenspace_inputs, ..
79 | } = self
80 | {
81 | let velocity = screenspace_inputs.average_smoothed_value(window).as_dvec2();
82 | if !velocity.is_finite() {
83 | DVec2::ZERO
84 | } else {
85 | velocity
86 | }
87 | } else {
88 | DVec2::ZERO
89 | }
90 | }
91 |
92 | /// Approximate pan velocity over the last `window`. to use for momentum calculations.
93 | pub fn pan_momentum(&self, window: Duration) -> DVec2 {
94 | if let Self::PanZoom {
95 | screenspace_inputs, ..
96 | } = self
97 | {
98 | let velocity = screenspace_inputs.average_smoothed_value(window).as_dvec2();
99 | if !velocity.is_finite() {
100 | DVec2::ZERO
101 | } else {
102 | velocity
103 | }
104 | } else {
105 | DVec2::ZERO
106 | }
107 | }
108 |
109 | /// Motion-conserving smoothed zoom input velocity.
110 | pub fn smooth_zoom_velocity(&self) -> f64 {
111 | let velocity = self.zoom_inputs().latest_smoothed().unwrap_or(0.0) as f64;
112 | if !velocity.is_finite() {
113 | 0.0
114 | } else {
115 | velocity
116 | }
117 | }
118 |
119 | /// Get a reference to the queue of zoom inputs.
120 | pub fn zoom_inputs(&self) -> &InputQueue {
121 | match self {
122 | MotionInputs::OrbitZoom { zoom_inputs, .. } => zoom_inputs,
123 | MotionInputs::PanZoom { zoom_inputs, .. } => zoom_inputs,
124 | MotionInputs::Zoom { zoom_inputs } => zoom_inputs,
125 | }
126 | }
127 |
128 | /// Get a mutable reference to the queue of zoom inputs.
129 | pub fn zoom_inputs_mut(&mut self) -> &mut InputQueue {
130 | match self {
131 | MotionInputs::OrbitZoom { zoom_inputs, .. } => zoom_inputs,
132 | MotionInputs::PanZoom { zoom_inputs, .. } => zoom_inputs,
133 | MotionInputs::Zoom { zoom_inputs } => zoom_inputs,
134 | }
135 | }
136 |
137 | /// Approximate smoothed absolute value of the zoom velocity over the last `window`.
138 | pub fn zoom_velocity_abs(&self, window: Duration) -> f64 {
139 | let zoom_inputs = match self {
140 | MotionInputs::OrbitZoom { zoom_inputs, .. } => zoom_inputs,
141 | MotionInputs::PanZoom { zoom_inputs, .. } => zoom_inputs,
142 | MotionInputs::Zoom { zoom_inputs } => zoom_inputs,
143 | };
144 |
145 | let velocity = zoom_inputs.approx_smoothed(window, |v| {
146 | *v = v.abs();
147 | }) as f64;
148 | if !velocity.is_finite() {
149 | 0.0
150 | } else {
151 | velocity
152 | }
153 | }
154 | }
155 |
--------------------------------------------------------------------------------
/src/controller/mod.rs:
--------------------------------------------------------------------------------
1 | //! Camera controller implementation.
2 |
3 | use bevy_app::prelude::*;
4 | use bevy_ecs::prelude::*;
5 |
6 | pub mod component;
7 | pub mod inputs;
8 | pub mod momentum;
9 | pub mod motion;
10 | pub mod projections;
11 | pub mod smoothing;
12 | pub mod zoom;
13 |
/// Adds [`bevy_editor_cam`](crate) functionality without an input plugin or any extensions. This
/// requires an input plugin to function! If you don't use the [`crate::input::DefaultInputPlugin`],
/// you will need to provide your own.
///
/// Use this instead of the default plugin when you want to supply your own input mapping or skip
/// the optional extensions entirely.
pub struct MinimalEditorCamPlugin;
18 |
impl Plugin for MinimalEditorCamPlugin {
    fn build(&self, app: &mut App) {
        // Camera updates run in `PreUpdate`, after picking has finished for the frame; the two
        // projection systems are chained after the position update so they read the
        // freshly-written anchor depth.
        app.add_systems(
            PreUpdate,
            (
                crate::controller::component::EditorCam::update_camera_positions,
                crate::controller::projections::update_orthographic,
                crate::controller::projections::update_perspective,
            )
                .chain()
                .after(bevy_picking::PickSet::Last),
        )
        // NOTE(review): the type parameter of `register_type` appears to have been stripped
        // during extraction (probably `register_type::<component::EditorCam>()`) — confirm
        // against the original source.
        .register_type::();
    }
}
34 |
--------------------------------------------------------------------------------
/src/controller/momentum.rs:
--------------------------------------------------------------------------------
1 | //! Provides the [`Momentum`] settings.
2 |
3 | use std::time::Duration;
4 |
5 | use bevy_math::{DVec2, DVec3};
6 | use bevy_reflect::prelude::*;
7 |
/// Defines momentum behavior of this [`super::component::EditorCam`].
///
/// Momentum lets the camera keep moving after an input ends, with its velocity decaying over
/// time. Damping and friction can be tuned independently for pan and orbit.
#[derive(Debug, Clone, Copy, Reflect)]
pub struct Momentum {
    /// Momentum decay scales with velocity.
    ///
    /// Interpreted as a fraction of 256 (e.g. `160` is roughly `0.6`).
    pub pan_damping: u8,
    /// Momentum decay is constant.
    pub pan_friction: f64,
    /// The sampling window to use when a movement ends to determine the velocity of the camera when
    /// momentum decay begins. The higher this value, the easier it is to "flick" the camera, but
    /// the more of a velocity discontinuity will be present when momentum starts.
    pub init_pan: Duration,
    /// Momentum decay scales with velocity.
    ///
    /// Interpreted as a fraction of 256 (e.g. `160` is roughly `0.6`).
    pub orbit_damping: u8,
    /// Momentum decay is constant.
    pub orbit_friction: f64,
    /// The sampling window to use when a movement ends to determine the velocity of the camera when
    /// momentum decay begins. The higher this value, the easier it is to "flick" the camera, but
    /// the more of a velocity discontinuity will be present when momentum starts.
    pub init_orbit: Duration,
}
28 |
29 | impl Default for Momentum {
30 | fn default() -> Self {
31 | Self {
32 | pan_damping: 160,
33 | pan_friction: 0.2,
34 | init_pan: Duration::from_millis(40),
35 | orbit_damping: 160,
36 | orbit_friction: 0.2,
37 | init_orbit: Duration::from_millis(60),
38 | }
39 | }
40 | }
41 |
42 | impl Momentum {
43 | fn decay_velocity_orbit(self, velocity: DVec2, delta_time: Duration) -> DVec2 {
44 | let speed = velocity.length();
45 | let f_damping = self.orbit_damping as f64 / 256.0 * speed * 10.0;
46 | let f_friction = self.orbit_friction * 40.0;
47 | let braking = (f_damping + f_friction) * delta_time.as_secs_f64();
48 | (speed - braking).max(0.0) * velocity.normalize_or_zero()
49 | }
50 |
51 | fn decay_velocity_pan(self, velocity: DVec2, delta_time: Duration) -> DVec2 {
52 | let speed = velocity.length();
53 | let f_damping = self.pan_damping as f64 / 256.0 * speed * 10.0;
54 | let f_friction = self.pan_friction * 40.0;
55 | let braking = (f_damping + f_friction) * delta_time.as_secs_f64();
56 | (speed - braking).max(0.0) * velocity.normalize_or_zero()
57 | }
58 | }
59 |
/// The velocity of the camera.
#[derive(Debug, Clone, Copy, Default, Reflect)]
pub enum Velocity {
    /// The velocity is zero and the camera will transition into the Stationary state.
    #[default]
    None,
    /// Camera is spinning.
    Orbit {
        /// The anchor of rotation being orbited about.
        anchor: DVec3,
        /// The current velocity of the camera about the anchor.
        velocity: DVec2,
    },
    /// Camera is sliding.
    Pan {
        /// The anchor point that should stick to the pointer during panning.
        anchor: DVec3,
        /// The current panning velocity of the camera.
        velocity: DVec2,
    },
}
81 |
82 | impl Velocity {
83 | const DECAY_THRESHOLD: f64 = 1e-3;
84 | /// Decay the velocity based on the momentum setting.
85 | pub fn decay(&mut self, momentum: Momentum, delta_time: Duration) {
86 | let is_none = match self {
87 | Velocity::None => true,
88 | Velocity::Orbit {
89 | ref mut velocity, ..
90 | } => {
91 | *velocity = momentum.decay_velocity_orbit(*velocity, delta_time);
92 | velocity.length() <= Self::DECAY_THRESHOLD
93 | }
94 | Velocity::Pan {
95 | ref mut velocity, ..
96 | } => {
97 | *velocity = momentum.decay_velocity_pan(*velocity, delta_time);
98 | velocity.length() <= Self::DECAY_THRESHOLD
99 | }
100 | };
101 |
102 | if is_none {
103 | *self = Velocity::None;
104 | }
105 | }
106 | }
107 |
--------------------------------------------------------------------------------
/src/controller/motion.rs:
--------------------------------------------------------------------------------
1 | //! The motion state of the camera.
2 |
3 | use std::time::Duration;
4 |
5 | use super::{inputs::MotionInputs, momentum::Velocity};
6 | use bevy_math::DVec3;
7 | use bevy_reflect::prelude::*;
8 | use bevy_utils::Instant;
9 |
/// The current motion state of the camera.
///
/// Typical lifecycle: user input puts the controller in [`CurrentMotion::UserControlled`]; when
/// the input ends, the recent input history is converted into [`CurrentMotion::Momentum`], which
/// decays over time back to [`CurrentMotion::Stationary`].
#[derive(Debug, Clone, Reflect, Default)]
pub enum CurrentMotion {
    /// The camera is not moving.
    #[default]
    Stationary,
    /// The camera is in motion, but not being directly controlled by the user. This happens while
    /// the camera has momentum.
    Momentum {
        /// Contains inherited velocity, if any. This will decay based on momentum settings.
        velocity: Velocity,
        /// Used to compute how long the camera has been in the momentum state. Useful for
        /// debouncing user inputs.
        momentum_start: Instant,
    },
    /// The camera is being directly controlled by the user.
    UserControlled {
        /// The point the camera is rotating about, zooming into, or panning with, in view space
        /// (relative to the camera).
        ///
        /// - Rotation: the direction of the anchor does not change, it is fixed in screenspace.
        /// - Panning: the depth of the anchor does not change, the camera only moves in x and y.
        /// - Zoom: the direction of the anchor does not change, but the length does.
        anchor: DVec3,
        /// Pan and orbit are mutually exclusive, however both can be used with zoom.
        motion_inputs: MotionInputs,
    },
}
38 |
39 | impl CurrentMotion {
40 | /// Returns `true` if the camera is moving due to inputs or momentum.
41 | pub fn is_moving(&self) -> bool {
42 | !matches!(self, CurrentMotion::Stationary)
43 | && !matches!(
44 | self,
45 | CurrentMotion::Momentum {
46 | velocity: Velocity::None,
47 | ..
48 | }
49 | )
50 | }
51 |
52 | /// Returns `true` if the camera is moving due to user inputs.
53 | pub fn is_user_controlled(&self) -> bool {
54 | matches!(self, CurrentMotion::UserControlled { .. })
55 | }
56 |
57 | /// Get the user motion inputs if they exist.
58 | pub fn inputs(&self) -> Option<&MotionInputs> {
59 | match self {
60 | CurrentMotion::Stationary => None,
61 | CurrentMotion::Momentum { .. } => None,
62 | CurrentMotion::UserControlled { motion_inputs, .. } => Some(motion_inputs),
63 | }
64 | }
65 |
66 | /// Returns true if the camera is user controlled and orbiting.
67 | pub fn is_orbiting(&self) -> bool {
68 | matches!(
69 | self,
70 | Self::UserControlled {
71 | motion_inputs: MotionInputs::OrbitZoom { .. },
72 | ..
73 | }
74 | )
75 | }
76 |
77 | /// Returns true if the camera is user controlled and panning.
78 | pub fn is_panning(&self) -> bool {
79 | matches!(
80 | self,
81 | Self::UserControlled {
82 | motion_inputs: MotionInputs::PanZoom { .. },
83 | ..
84 | }
85 | )
86 | }
87 |
88 | /// Returns true if the camera is user controlled and only zooming.
89 | pub fn is_zooming_only(&self) -> bool {
90 | matches!(
91 | self,
92 | Self::UserControlled {
93 | motion_inputs: MotionInputs::Zoom { .. },
94 | ..
95 | }
96 | )
97 | }
98 |
99 | /// How long has the camera been moving with momentum, without user input? This is equivalent to
100 | /// the amount of time since the last input event ended.
101 | pub fn momentum_duration(&self) -> Option {
102 | match self {
103 | CurrentMotion::Momentum { momentum_start, .. } => {
104 | Some(Instant::now().saturating_duration_since(*momentum_start))
105 | }
106 | _ => None,
107 | }
108 | }
109 | }
110 |
--------------------------------------------------------------------------------
/src/controller/projections.rs:
--------------------------------------------------------------------------------
1 | //! Configurable options for the challenge of working with orthographic cameras.
2 |
3 | use bevy_ecs::prelude::*;
4 | use bevy_reflect::prelude::*;
5 | use bevy_render::prelude::*;
6 | use bevy_transform::prelude::*;
7 |
8 | use crate::prelude::*;
9 |
10 | use self::motion::CurrentMotion;
11 |
/// Settings used when the [`EditorCam`] has a perspective [`Projection`].
#[derive(Debug, Clone, Reflect)]
pub struct PerspectiveSettings {
    /// Limits the near clipping plane to always fit inside this range.
    ///
    /// The camera controller will try to make the near clipping plane smaller when you zoom in to
    /// ensure the anchor (the thing you are zooming into) is always within the view frustum
    /// (visible), bounded by this limit.
    ///
    /// Unless the camera is zoomed very close to something, it will spend most of the time at the
    /// high end of this limit - you should treat that like the default near clipping plane. Bevy
    /// defaults to `0.1`, and you should probably use that too unless you have a very good reason
    /// not to. Many rendering effects that rely on depth can break down if the clipping plane is
    /// very far from `0.1`.
    // NOTE(review): this field's generic parameter appears to have been stripped during
    // extraction (likely `std::ops::Range<f32>`, since it is clamped against `f32` values) —
    // confirm against the original source.
    pub near_clip_limits: std::ops::Range,
    /// When computing the near plane position, the anchor depth is multiplied by this value to
    /// determine the new near clip position. This should be smaller than one, to ensure that the
    /// object you are looking at, which will be located at the anchor position, is not being
    /// clipped. Some parts of the object may protrude toward the camera, which is what necessitates
    /// this.
    pub near_clip_multiplier: f32,
}
34 |
35 | impl Default for PerspectiveSettings {
36 | fn default() -> Self {
37 | Self {
38 | near_clip_limits: 1e-9..f32::INFINITY,
39 | near_clip_multiplier: 0.05,
40 | }
41 | }
42 | }
43 |
44 | /// Updates perspective projection properties of editor cameras.
45 | pub fn update_perspective(mut cameras: Query<(&EditorCam, &mut Projection)>) {
46 | for (editor_cam, mut projection) in cameras.iter_mut() {
47 | let Projection::Perspective(ref mut perspective) = *projection else {
48 | continue;
49 | };
50 | let limits = editor_cam.perspective.near_clip_limits.clone();
51 | let multiplier = editor_cam.perspective.near_clip_multiplier;
52 | perspective.near = (editor_cam.last_anchor_depth.abs() as f32 * multiplier)
53 | .clamp(limits.start, limits.end);
54 | }
55 | }
56 |
/// Settings used when the [`EditorCam`] has an orthographic [`Projection`].
#[derive(Debug, Clone, Reflect)]
pub struct OrthographicSettings {
    /// The camera's near clipping plane will move closer and farther from the anchor point during
    /// zoom to maximize precision. The position of the near plane is based on the orthographic
    /// projection `scale`, multiplied by this value.
    ///
    /// To maximize depth precision, make this as small as possible. If the value is too large,
    /// depth-based effects like SSAO will break down. If the value is too small, objects that
    /// should be visible will be clipped. Ideally, the clipping planes should scale with the scene
    /// geometry and camera frustum to tightly bound the visible scene, but this is not yet
    /// implemented.
    pub scale_to_near_clip: f32,
    /// Limits the distance the near clip plane can be to the anchor. The low limit is useful to
    /// prevent geometry clipping when zooming in, while the high limit is useful to prevent the
    /// camera moving too far away from the anchor, causing precision issues.
    // NOTE(review): this field's generic parameter appears to have been stripped during
    // extraction (likely `std::ops::Range<f32>`) — confirm against the original source.
    pub near_clip_limits: std::ops::Range,
    /// The far plane is placed opposite the anchor from the near plane, at this multiple of the
    /// distance from the near plane to the anchor. Setting this to 1.0 means the camera frustum is
    /// centered on the anchor. It might be desirable to make this larger to prevent things in the
    /// background from disappearing when zooming in.
    pub far_clip_multiplier: f32,
}
80 |
81 | impl Default for OrthographicSettings {
82 | fn default() -> Self {
83 | Self {
84 | scale_to_near_clip: 1_000_000.0,
85 | near_clip_limits: 1.0..1_000_000.0,
86 | far_clip_multiplier: 1.0,
87 | }
88 | }
89 | }
90 |
91 | /// Update the ortho camera projection and position based on the [`OrthographicSettings`].
92 | pub fn update_orthographic(mut cameras: Query<(&mut EditorCam, &mut Projection, &mut Transform)>) {
93 | for (mut editor_cam, mut projection, mut cam_transform) in cameras.iter_mut() {
94 | let Projection::Orthographic(ref mut orthographic) = *projection else {
95 | continue;
96 | };
97 |
98 | let anchor_dist = editor_cam.last_anchor_depth().abs() as f32;
99 | let target_dist = (editor_cam.orthographic.scale_to_near_clip * orthographic.scale).clamp(
100 | editor_cam.orthographic.near_clip_limits.start,
101 | editor_cam.orthographic.near_clip_limits.end,
102 | );
103 |
104 | let forward_amount = anchor_dist - target_dist;
105 | let movement = cam_transform.forward() * forward_amount;
106 |
107 | cam_transform.translation += movement;
108 |
109 | editor_cam.last_anchor_depth += forward_amount as f64;
110 | if let CurrentMotion::UserControlled { ref mut anchor, .. } = editor_cam.current_motion {
111 | anchor.z += forward_amount as f64;
112 | }
113 |
114 | orthographic.near = 0.0;
115 | orthographic.far = anchor_dist * (1.0 + editor_cam.orthographic.far_clip_multiplier);
116 | }
117 | }
118 |
--------------------------------------------------------------------------------
/src/controller/smoothing.rs:
--------------------------------------------------------------------------------
1 | //! Implements a motion-conserving smoothed input queue.
2 |
3 | use std::{
4 | collections::VecDeque,
5 | ops::{Add, AddAssign, Mul},
6 | time::Duration,
7 | };
8 |
9 | use bevy_derive::{Deref, DerefMut};
10 | use bevy_reflect::prelude::*;
11 | use bevy_utils::Instant;
12 |
/// How smooth should inputs be? Over what time window should they be averaged.
///
/// Larger windows average more samples, producing smoother motion at the cost of responsiveness.
#[derive(Debug, Clone, Copy, Reflect)]
pub struct Smoothing {
    /// Smoothing window for panning.
    pub pan: Duration,
    /// Smoothing window for orbit.
    pub orbit: Duration,
    /// Smoothing window for zoom.
    pub zoom: Duration,
}
23 |
24 | impl Default for Smoothing {
25 | fn default() -> Self {
26 | Smoothing {
27 | pan: Duration::from_millis(10),
28 | orbit: Duration::from_millis(30),
29 | zoom: Duration::from_millis(60),
30 | }
31 | }
32 | }
33 |
/// A smoothed queue of inputs over time.
///
/// Useful for smoothing to query "what was the average input over the last N milliseconds?". This
/// does some important bookkeeping to ensure samples are not over or under sampled. This means the
/// queue has very useful properties:
///
/// 1. The smoothing can change over time, useful for sampling over changing framerates.
/// 2. The sum of smoothed and unsmoothed inputs will be equal despite (1). This is useful because
/// you can smooth something like pointer motions, and the smoothed output will arrive at the
/// same destination as the unsmoothed input without drifting.
// NOTE(review): the generic parameters on this declaration appear to have been stripped during
// extraction (likely `pub struct InputQueue<T>(pub VecDeque<InputStreamEntry<T>>)`) — confirm
// against the original source.
#[derive(Debug, Clone, Reflect, Deref, DerefMut)]
pub struct InputQueue(pub VecDeque>);
46 |
47 | /// Represents a single input in an [`InputQueue`].
48 | #[derive(Debug, Clone, Reflect)]
49 | pub struct InputStreamEntry {
50 | /// The time the sample was added and smoothed value computed.
51 | time: Instant,
52 | /// The input sample recorded at this time.
53 | sample: T,
54 | /// How much of this entry is available to be consumed, from `0.0` to `1.0`. This is required to
55 | /// ensure that smoothing does not over or under sample any entries as the size of the sampling
56 | /// window changes. This value should always be zero by the time a sample exits the queue.
57 | fraction_remaining: f32,
58 | /// Because we need to do bookkeeping to ensure no samples are under or over sampled, we compute
59 | /// the smoothed value at the same time a sample is inserted. Because consumers of this will
60 | /// want to read the smoothed samples multiple times, we do the computation eagerly so the input
61 | /// stream is always in a valid state, and the act of a user reading a sample multiple times
62 | /// does not change the value they get.
63 | smoothed_value: T,
64 | }
65 |
66 | impl + AddAssign + Mul> Default
67 | for InputQueue
68 | {
69 | fn default() -> Self {
70 | let start = Instant::now();
71 | let interval = Duration::from_secs_f32(1.0 / 60.0);
72 | let mut queue = VecDeque::default();
73 | for time in
74 | // See: https://github.com/aevyrie/bevy_editor_cam/issues/13 There is no guarantee that
75 | // `start` is large enough to subtract from, so we ignore any subtractions that fail, to
76 | // avoid a panic. If this fails, it will manifest as a slight stutter, most noticeable
77 | // during zooming. However, this *should* only happen at the very startup of the app,
78 | // and even then, is unlikely.
79 | (1..Self::MAX_EVENTS)
80 | .filter_map(|i| start.checked_sub(interval.mul_f32(i as f32)))
81 | {
82 | queue.push_back(InputStreamEntry {
83 | time,
84 | sample: T::default(),
85 | fraction_remaining: 1.0,
86 | smoothed_value: T::default(),
87 | })
88 | }
89 | Self(queue)
90 | }
91 | }
92 |
93 | impl + AddAssign + Mul> InputQueue {
94 | const MAX_EVENTS: usize = 256;
95 |
96 | /// Add an input sample to the queue, and compute the smoothed value.
97 | ///
98 | /// The smoothing must be computed at the time a sample is added to ensure no samples are over
99 | /// or under sampled in the smoothing process.
100 | pub fn process_input(&mut self, new_input: T, smoothing: Duration) {
101 | let now = Instant::now();
102 | let queue = &mut self.0;
103 |
104 | // Compute the expected sampling window end index
105 | let window_size = queue
106 | .iter()
107 | .enumerate()
108 | .find(|(_i, entry)| now.duration_since(entry.time) > smoothing)
109 | .map(|(i, _)| i) // `find` breaks *after* we fail, so we don't need to add one
110 | .unwrap_or(0)
111 | + 1; // Add one to account for the new sample being added
112 |
113 | let range_end = (window_size - 1).clamp(0, queue.len());
114 |
115 | // Compute the smoothed value by sampling over the desired window
116 | let target_fraction = 1.0 / window_size as f32;
117 | let mut smoothed_value = new_input * target_fraction;
118 | for entry in queue.range_mut(..range_end) {
119 | // Only consume what is left of a sample, to prevent oversampling
120 | let this_fraction = entry.fraction_remaining.min(target_fraction);
121 | smoothed_value += entry.sample * this_fraction;
122 | entry.fraction_remaining = (entry.fraction_remaining - this_fraction).max(0.0);
123 | }
124 |
125 | // To prevent under sampling, we also need to look at entries older than the window, and add
126 | // those to the smoothed value, to catch up. This happens when the window shrinks, or there
127 | // is a pause in rendering and it needs to catch up.
128 | for old_entry in queue
129 | .range_mut(range_end..)
130 | .filter(|e| e.fraction_remaining > 0.0)
131 | {
132 | smoothed_value += old_entry.sample * old_entry.fraction_remaining;
133 | old_entry.fraction_remaining = 0.0;
134 | }
135 |
136 | queue.truncate(Self::MAX_EVENTS - 1);
137 | queue.push_front(InputStreamEntry {
138 | time: now,
139 | sample: new_input,
140 | fraction_remaining: 1.0 - target_fraction,
141 | smoothed_value,
142 | })
143 | }
144 |
145 | /// Get the latest motion-conserving smoothed input value.
146 | pub fn latest_smoothed(&self) -> Option {
147 | self.iter_smoothed().next().map(|(_, val)| val)
148 | }
149 |
150 | /// Iterator over all smoothed samples.
151 | pub fn iter_smoothed(&self) -> impl Iterator- + '_ {
152 | self.0
153 | .iter()
154 | .map(|entry| (entry.time, entry.smoothed_value))
155 | }
156 |
157 | /// Iterate over the raw samples.
158 | pub fn iter_unsmoothed(&self) -> impl Iterator
- + '_ {
159 | self.0.iter().map(|entry| (entry.time, entry.sample))
160 | }
161 |
162 | /// Approximate the smoothed average sampled in the `window`.
163 | pub fn average_smoothed_value(&self, window: Duration) -> T {
164 | let now = Instant::now();
165 | let mut count = 0;
166 | let sum = self
167 | .iter_smoothed()
168 | .filter(|(t, _)| now.duration_since(*t) < window)
169 | .map(|(_, smoothed_value)| smoothed_value)
170 | .reduce(|acc, v| {
171 | count += 1;
172 | acc + v
173 | })
174 | .unwrap_or_default();
175 | sum * (1.0 / count as f32)
176 | }
177 |
178 | /// Approximate smoothed value with user-supplied modifier function as needed
179 | pub fn approx_smoothed(&self, window: Duration, mut modifier: impl FnMut(&mut T)) -> T {
180 | let now = Instant::now();
181 | let n_elements = &mut 0;
182 | self.iter_unsmoothed()
183 | .filter(|(time, _)| now.duration_since(*time) < window)
184 | .map(|(_, value)| {
185 | *n_elements += 1;
186 | let mut value = value;
187 | modifier(&mut value);
188 | value
189 | })
190 | .reduce(|acc, v| acc + v)
191 | .unwrap_or_default()
192 | * (1.0 / *n_elements as f32)
193 | }
194 | }
195 |
--------------------------------------------------------------------------------
/src/controller/zoom.rs:
--------------------------------------------------------------------------------
1 | //! Provides [`ZoomLimits`] settings.
2 |
3 | use bevy_math::{DVec2, DVec3};
4 | use bevy_reflect::Reflect;
5 | use bevy_render::prelude::*;
6 |
7 | /// Bound zooming scale, and define behavior at the limits of zoom.
8 | #[derive(Debug, Clone, Reflect)]
9 | pub struct ZoomLimits {
10 | /// The smallest size in world space units of a pixel located at the anchor when zooming in.
11 | ///
12 | /// When zooming in, a single pixel will cover a smaller and smaller world space area. This
13 | /// limit will set how small of an area a single pixel can cover. Assuming you are using meters,
14 | /// setting this to 1e-3 would limit the camera zoom so that an object that is one millimeter
15 | /// across and located at the anchor would take up at most a single pixel.
16 | ///
17 | /// Setting this to a small value will let you zoom in further. If this is too small, you may
18 | /// begin to encounter floating point rendering errors.
19 | pub min_size_per_pixel: f64,
20 | /// The largest size in world space units of a pixel located at the anchor when zooming out.
21 | ///
22 | /// When zooming out, a single pixel will cover a larger and larger world space area. This limit
23 | /// will set how large of an area a single pixel can cover. Assuming you are using meters,
24 | /// setting this to 1.0 would only allow you to zoom out until a 1 meter object located at the
25 | /// anchor was the size of a pixel.
26 | ///
27 | /// Setting this to a large value will let you zoom out further.
28 | pub max_size_per_pixel: f64,
29 | /// When true, and when a perspective projection is being used, zooming in can pass through
30 | /// objects. When reaching `min_size_per_pixel`, instead of stopping, the camera will continue
31 | /// moving forward, passing through the object in front of the camera.
32 | ///
33 | /// Additionally, when reaching `max_size_per_pixel`, the camera does not continue zooming out,
34 | /// but instead continues at the same speed.
35 | pub zoom_through_objects: bool,
36 | }
37 |
38 | impl Default for ZoomLimits {
39 | fn default() -> Self {
40 | Self {
41 | min_size_per_pixel: 1e-6, // Any smaller and floating point rendering artifacts appear.
42 | max_size_per_pixel: 1e27, // The diameter of the observable universe is probably a good upper limit.
43 | zoom_through_objects: false,
44 | }
45 | }
46 | }
47 |
48 | /// The size of a pixel at the anchor (under the pointer) in world space units.
49 | ///
50 | /// This is a much better way to compute scale than using camera distance from the anchor (the
51 | /// length of the anchor vector). Anchor distance does not take camera projection into account.
52 | pub fn length_per_pixel_at_view_space_pos(camera: &Camera, view_space_pos: DVec3) -> Option
{
53 | // This is a point offset by scaled_offset units to the right relative to the camera facing the
54 | // anchor point. We can then project the anchor and the offset anchor onto the viewport
55 | // (screen), to see how many pixels apart these two points are on screen. This gives us the
56 | // world units per pixel, at the anchor (pointer) location.
57 | //
58 | // The scaled_offset is important for handling varying scales. If we only offset by a unit value
59 | // (1.0), then at large distances, an offset of 1.0 would round to 0.0 when projected on the
60 | // screen, and the result, a reciprocal, would go to infinity. To combat this, we ensure that
61 | // our offset is a similar scale to the anchor distance itself, and cancel it out later.
62 | let scaled_offset = view_space_pos.length();
63 | let view_space_pos_offset = view_space_pos + DVec3::X * scaled_offset;
64 |
65 | let viewport_pos = view_to_viewport(camera, view_space_pos)?;
66 | let viewport_pos_offset = view_to_viewport(camera, view_space_pos_offset)?;
67 |
68 | let pixels_per_world_unit = (viewport_pos_offset - viewport_pos).length();
69 | // The length per pixel is the inverse of pixels_per_world_unit
70 | let len_per_pixel = pixels_per_world_unit.recip().min(f64::MAX) * scaled_offset;
71 | len_per_pixel.is_finite().then_some(len_per_pixel)
72 | }
73 |
74 | /// Project a point in view space onto the camera's viewport.
75 | fn view_to_viewport(camera: &Camera, view_space_point: DVec3) -> Option {
76 | let ndc_space_coords = camera
77 | .clip_from_view()
78 | .as_dmat4()
79 | .project_point3(view_space_point);
80 |
81 | // NDC z-values outside of 0 < z < 1 are outside the (implicit) camera frustum and are thus not
82 | // in viewport-space
83 | let ndc_space_coords =
84 | (!ndc_space_coords.is_nan() && ndc_space_coords.z >= 0.0 && ndc_space_coords.z <= 1.0)
85 | .then_some(ndc_space_coords)?;
86 |
87 | let target_size = camera.logical_viewport_size()?.as_dvec2();
88 |
89 | // Once in NDC space, we can discard the z element and rescale x/y to fit the screen
90 | let mut viewport_position = (ndc_space_coords.truncate() + DVec2::ONE) / 2.0 * target_size;
91 | // Flip the Y co-ordinate origin from the bottom to the top.
92 | viewport_position.y = target_size.y - viewport_position.y;
93 | Some(viewport_position)
94 | }
95 |
--------------------------------------------------------------------------------
/src/extensions/anchor_indicator.rs:
--------------------------------------------------------------------------------
1 | //! A `bevy_editor_cam` extension that draws an indicator in the scene at the location of the
2 | //! anchor. This makes it more obvious to users what point in space the camera is rotating around,
3 | //! making it easier to use and understand.
4 |
5 | use crate::prelude::*;
6 |
7 | use bevy_app::prelude::*;
8 | use bevy_color::Color;
9 | use bevy_ecs::prelude::*;
10 | use bevy_gizmos::prelude::*;
11 | use bevy_math::prelude::*;
12 | use bevy_reflect::prelude::*;
13 | use bevy_render::prelude::*;
14 | use bevy_transform::prelude::*;
15 |
16 | /// See the [module](self) docs.
17 | pub struct AnchorIndicatorPlugin;
18 |
19 | impl Plugin for AnchorIndicatorPlugin {
20 | fn build(&self, app: &mut App) {
21 | app.add_systems(
22 | PostUpdate,
23 | draw_anchor
24 | .after(bevy_transform::TransformSystem::TransformPropagate)
25 | .after(bevy_render::camera::CameraUpdateSystem),
26 | )
27 | .register_type::();
28 | }
29 | }
30 |
31 | /// Optional. Configures whether or not an [`EditorCam`] should show an anchor indicator when the
32 | /// camera is orbiting. The indicator will be enabled if this component is not present.
33 | #[derive(Debug, Component, Reflect)]
34 | pub struct AnchorIndicator {
35 | /// Should the indicator be visible on this camera?
36 | pub enabled: bool,
37 | }
38 |
39 | impl Default for AnchorIndicator {
40 | fn default() -> Self {
41 | Self { enabled: true }
42 | }
43 | }
44 |
45 | /// Use gizmos to draw the camera anchor in world space.
46 | pub fn draw_anchor(
47 | cameras: Query<(
48 | &EditorCam,
49 | &GlobalTransform,
50 | &Camera,
51 | Option<&AnchorIndicator>,
52 | )>,
53 | mut gizmos: Gizmos,
54 | ) {
55 | for (editor_cam, cam_transform, cam, _) in cameras
56 | .iter()
57 | .filter(|(.., anchor_indicator)| anchor_indicator.map(|a| a.enabled).unwrap_or(true))
58 | {
59 | let Some(anchor_world) = editor_cam.anchor_world_space(cam_transform) else {
60 | continue;
61 | };
62 | let p1 = cam
63 | .world_to_viewport(cam_transform, anchor_world.as_vec3())
64 | .unwrap_or_default();
65 | let p2 = cam
66 | .world_to_viewport(
67 | cam_transform,
68 | anchor_world.as_vec3() + cam_transform.right().as_vec3(),
69 | )
70 | .unwrap_or_default();
71 |
72 | let scale = 8.0 / (p2 - p1).length();
73 |
74 | // Shift the indicator toward the camera to prevent it clipping objects near parallel
75 | let shift = (cam_transform.translation() - anchor_world.as_vec3()).normalize() * scale;
76 | let anchor_world = anchor_world.as_vec3() + shift;
77 |
78 | if editor_cam.current_motion.is_orbiting() {
79 | let gizmo_color = || Color::WHITE;
80 | let arm_length = 0.4;
81 |
82 | gizmos.circle(
83 | Isometry3d::new(anchor_world, cam_transform.rotation()),
84 | scale,
85 | gizmo_color(),
86 | );
87 | let offset = 1.5 * scale;
88 | gizmos.ray(
89 | anchor_world + offset * cam_transform.left(),
90 | offset * arm_length * cam_transform.left(),
91 | gizmo_color(),
92 | );
93 | gizmos.ray(
94 | anchor_world + offset * cam_transform.right(),
95 | offset * arm_length * cam_transform.right(),
96 | gizmo_color(),
97 | );
98 | gizmos.ray(
99 | anchor_world + offset * cam_transform.up(),
100 | offset * arm_length * cam_transform.up(),
101 | gizmo_color(),
102 | );
103 | gizmos.ray(
104 | anchor_world + offset * cam_transform.down(),
105 | offset * arm_length * cam_transform.down(),
106 | gizmo_color(),
107 | );
108 | }
109 | }
110 | }
111 |
--------------------------------------------------------------------------------
/src/extensions/dolly_zoom.rs:
--------------------------------------------------------------------------------
1 | //! A `bevy_editor_cam` extension that adds the ability to smoothly transition between perspective
2 | //! and orthographic projections using what's known as a "dolly zoom" in film. This is useful
3 | //! because it ensures that the object the user is focusing on does not change size even as the
4 | //! projection changes.
5 |
6 | use std::time::Duration;
7 |
8 | use bevy_app::prelude::*;
9 | use bevy_ecs::prelude::*;
10 | use bevy_math::prelude::*;
11 | use bevy_reflect::prelude::*;
12 | use bevy_render::{camera::ScalingMode, prelude::*};
13 | use bevy_transform::prelude::*;
14 | use bevy_utils::{HashMap, Instant};
15 | use bevy_window::RequestRedraw;
16 |
17 | use crate::prelude::{motion::CurrentMotion, EditorCam, EnabledMotion};
18 |
19 | /// See the [module](self) docs.
20 | pub struct DollyZoomPlugin;
21 |
22 | impl Plugin for DollyZoomPlugin {
23 | fn build(&self, app: &mut App) {
24 | app.init_resource::()
25 | .add_event::()
26 | .add_systems(
27 | PreUpdate,
28 | DollyZoom::update
29 | .before(crate::controller::component::EditorCam::update_camera_positions),
30 | )
31 | .add_systems(Last, DollyZoomTrigger::receive) // This mutates camera components, so we want to be sure it runs *after* rendering has happened. We place it in Last to ensure that we wake the next frame if needed. If we run this in PostUpdate, this can result in rendering artifacts because this will mutate projections right before rendering.
32 | .register_type::();
33 | }
34 | }
35 |
36 | /// Used when transitioning from ortho to perspective, this needs to be close to ortho (zero fov).
37 | const ZERO_FOV: f64 = 1e-3;
38 |
39 | /// Triggers a dolly zoom on the specified camera.
40 | #[derive(Debug, Event)]
41 | pub struct DollyZoomTrigger {
42 | /// The new projection.
43 | pub target_projection: Projection,
44 | /// The camera to update.
45 | pub camera: Entity,
46 | }
47 |
48 | impl DollyZoomTrigger {
49 | fn receive(
50 | mut events: EventReader,
51 | mut state: ResMut,
52 | mut cameras: Query<(&Camera, &mut Projection, &mut EditorCam, &mut Transform)>,
53 | mut redraw: EventWriter,
54 | ) {
55 | for event in events.read() {
56 | let Ok((camera, mut proj, mut controller, mut transform)) =
57 | cameras.get_mut(event.camera)
58 | else {
59 | continue;
60 | };
61 | redraw.send(RequestRedraw);
62 | let (fov_start, triangle_base) = match &*proj {
63 | Projection::Perspective(perspective) => {
64 | if let Projection::Perspective(PerspectiveProjection {
65 | fov: target_fov, ..
66 | }) = event.target_projection
67 | {
68 | // If the target and current fov are the same, there is nothing to do.
69 | if (target_fov - perspective.fov).abs() <= f32::EPSILON {
70 | continue;
71 | }
72 | }
73 | (
74 | perspective.fov,
75 | (perspective.fov as f64 / 2.0).tan() * controller.last_anchor_depth.abs(),
76 | )
77 | }
78 | Projection::Orthographic(ortho) => {
79 | if matches!(event.target_projection, Projection::Orthographic(..)) {
80 | // If the camera is in ortho, and wants to go to ortho, early exit.
81 | continue;
82 | }
83 |
84 | let base = ortho.scale as f64 / ortho_tri_base_to_scale_factor(camera, ortho);
85 | let new_anchor_dist = base / (ZERO_FOV / 2.0).tan();
86 | let forward_dist = controller.last_anchor_depth.abs() - new_anchor_dist;
87 | let next_translation = transform.forward().as_dvec3() * forward_dist;
88 |
89 | transform.translation += next_translation.as_vec3();
90 | controller.last_anchor_depth += forward_dist;
91 |
92 | (ZERO_FOV as f32, base)
93 | }
94 | };
95 |
96 | let perspective_start = PerspectiveProjection {
97 | fov: fov_start,
98 | ..Default::default()
99 | };
100 | *proj = Projection::Perspective(perspective_start.clone());
101 |
102 | state
103 | .map
104 | .entry(event.camera)
105 | .and_modify(|e| {
106 | e.perspective_start = perspective_start.clone();
107 | e.proj_end = event.target_projection.clone();
108 | e.triangle_base = triangle_base;
109 | e.start = Instant::now();
110 | e.complete = false;
111 | })
112 | .or_insert(ZoomEntry {
113 | perspective_start,
114 | proj_end: event.target_projection.clone(),
115 | triangle_base,
116 | start: Instant::now(),
117 | initial_enabled: controller.enabled_motion.clone(),
118 | complete: false,
119 | });
120 |
121 | controller.end_move();
122 | controller.current_motion = CurrentMotion::Stationary;
123 | controller.enabled_motion = EnabledMotion {
124 | pan: false,
125 | orbit: false,
126 | zoom: false,
127 | };
128 | }
129 | }
130 | }
131 |
132 | struct ZoomEntry {
133 | perspective_start: PerspectiveProjection,
134 | proj_end: Projection,
135 | triangle_base: f64,
136 | start: Instant,
137 | initial_enabled: EnabledMotion,
138 | complete: bool,
139 | }
140 |
141 | /// Stores settings and state for the dolly zoom plugin.
142 | #[derive(Resource, Reflect)]
143 | pub struct DollyZoom {
144 | /// The duration of the dolly zoom transition animation.
145 | pub animation_duration: Duration,
146 | /// The cubic curve used to animate the camera during a dolly zoom.
147 | #[reflect(ignore)]
148 | pub animation_curve: CubicSegment,
149 | #[reflect(ignore)]
150 | map: HashMap,
151 | }
152 |
153 | impl Default for DollyZoom {
154 | fn default() -> Self {
155 | Self {
156 | animation_duration: Duration::from_millis(300),
157 | animation_curve: CubicSegment::new_bezier((0.65, 0.0), (0.35, 1.0)),
158 | map: Default::default(),
159 | }
160 | }
161 | }
162 |
163 | impl DollyZoom {
164 | fn update(
165 | mut state: ResMut,
166 | mut cameras: Query<(&Camera, &mut Projection, &mut Transform, &mut EditorCam)>,
167 | mut redraw: EventWriter,
168 | ) {
169 | let animation_duration = state.animation_duration;
170 | let animation_curve = state.animation_curve;
171 | for (
172 | camera,
173 | ZoomEntry {
174 | perspective_start,
175 | proj_end,
176 | triangle_base,
177 | start,
178 | initial_enabled,
179 | complete,
180 | },
181 | ) in state.map.iter_mut()
182 | {
183 | let Ok((camera, mut projection, mut transform, mut controller)) =
184 | cameras.get_mut(*camera)
185 | else {
186 | *complete = true;
187 | continue;
188 | };
189 |
190 | let Projection::Perspective(last_perspective) = projection.clone() else {
191 | *projection = proj_end.clone();
192 | controller.enabled_motion = initial_enabled.clone();
193 | *complete = true;
194 | continue;
195 | };
196 |
197 | let last_fov = last_perspective.fov as f64;
198 | let fov_start = perspective_start.fov as f64;
199 |
200 | let fov_end = match &*proj_end {
201 | Projection::Perspective(perspective) => perspective.fov as f64,
202 | Projection::Orthographic(_) => ZERO_FOV,
203 | };
204 | let progress = start.elapsed().as_secs_f32() / animation_duration.as_secs_f32();
205 | let progress = animation_curve.ease(progress);
206 | let next_fov = (1.0 - progress as f64) * fov_start + progress as f64 * fov_end;
207 |
208 | let last_dist = *triangle_base / (last_fov / 2.0).tan();
209 | let next_dist = *triangle_base / (next_fov / 2.0).tan();
210 | let forward_dist = last_dist - next_dist;
211 | let next_translation = transform.forward().as_dvec3() * forward_dist;
212 |
213 | transform.translation += next_translation.as_vec3();
214 | controller.last_anchor_depth += forward_dist;
215 |
216 | if progress < 1.0 {
217 | *projection = Projection::Perspective(PerspectiveProjection {
218 | fov: next_fov as f32,
219 | ..last_perspective
220 | })
221 | } else {
222 | *projection = proj_end.clone();
223 | if let Projection::Orthographic(ortho) = &mut *projection {
224 | let multiplier = ortho_tri_base_to_scale_factor(camera, ortho);
225 |
226 | ortho.scale = (*triangle_base * multiplier) as f32;
227 | }
228 | controller.enabled_motion = initial_enabled.clone();
229 | *complete = true;
230 | }
231 | redraw.send(RequestRedraw);
232 | }
233 | state.map.retain(|_, v| !v.complete);
234 | }
235 | }
236 |
237 | fn ortho_tri_base_to_scale_factor(camera: &Camera, ortho: &OrthographicProjection) -> f64 {
238 | if let Some(size) = camera.logical_viewport_size() {
239 | let (width, height) = (size.x as f64, size.y as f64);
240 | 2.0 / match ortho.scaling_mode {
241 | ScalingMode::WindowSize => height,
242 | ScalingMode::AutoMin {
243 | min_width,
244 | min_height,
245 | } => {
246 | if width * min_height as f64 > min_width as f64 * height {
247 | min_height as f64
248 | } else {
249 | height * min_width as f64 / width
250 | }
251 | }
252 | ScalingMode::AutoMax {
253 | max_width,
254 | max_height,
255 | } => {
256 | if (width * max_height as f64) < max_width as f64 * height {
257 | max_height as f64
258 | } else {
259 | height * max_width as f64 / width
260 | }
261 | }
262 | ScalingMode::FixedVertical { viewport_height } => viewport_height as f64,
263 | ScalingMode::FixedHorizontal { viewport_width } => {
264 | height * viewport_width as f64 / width
265 | }
266 | ScalingMode::Fixed { height, .. } => height as f64,
267 | }
268 | } else {
269 | 0.00278
270 | }
271 | }
272 |
--------------------------------------------------------------------------------
/src/extensions/independent_skybox.rs:
--------------------------------------------------------------------------------
1 | //! A `bevy_editor_cam` extension that provides a skybox rendered by a different camera with a
2 | //! different field of view than the camera it is added to. This allows you to use very narrow
3 | //! camera FOVs, or even orthographic projections, while keeping the appearance of the skybox
4 | //! unchanged.
5 | //!
6 | //! To use it, add a [`IndependentSkybox`] component to a camera.
7 |
8 | use bevy_app::prelude::*;
9 | use bevy_asset::Handle;
10 | use bevy_core_pipeline::{prelude::*, Skybox};
11 | use bevy_ecs::prelude::*;
12 | use bevy_image::Image;
13 | use bevy_math::Quat;
14 | use bevy_reflect::prelude::*;
15 | use bevy_render::{prelude::*, view::RenderLayers};
16 | use bevy_transform::prelude::*;
17 |
18 | /// See the [module](self) docs.
19 | pub struct IndependentSkyboxPlugin;
20 |
21 | impl Plugin for IndependentSkyboxPlugin {
22 | fn build(&self, app: &mut App) {
23 | app.add_systems(
24 | Update,
25 | (
26 | IndependentSkyboxCamera::spawn,
27 | IndependentSkyboxCamera::despawn,
28 | apply_deferred,
29 | IndependentSkyboxCamera::update,
30 | )
31 | .chain(),
32 | )
33 | .register_type::();
34 | }
35 | }
36 |
37 | /// Add this to a camera to enable rendering a skybox with these parameters.
38 | #[derive(Debug, Clone, Reflect, Component)]
39 | pub struct IndependentSkybox {
40 | /// The image to render as a skybox.
41 | pub skybox: Handle,
42 | /// Used to set [`Skybox::brightness`].
43 | pub brightness: f32,
44 | /// Used to set [`Skybox::rotation`].
45 | pub rotation: Quat,
46 | /// The [`Camera::order`] of the skybox camera, offset from the camera it is tracking. This
47 | /// should be lower than the order of the primary camera controller camera. the default value
48 | /// should be sufficient for most cases. You can override this if you have a more complex use
49 | /// case with multiple cameras.
50 | pub skybox_cam_order_offset: isize,
51 | /// The field of view of the skybox.
52 | pub fov: SkyboxFov,
53 | /// The corresponding skybox camera entity.
54 | skybox_cam: Option,
55 | }
56 |
57 | impl IndependentSkybox {
58 | /// Create a new [`IndependentSkybox`] with default settings and the provided skybox image.
59 | pub fn new(skybox: Handle, brightness: f32, rotation: Quat) -> Self {
60 | Self {
61 | skybox,
62 | brightness,
63 | rotation,
64 | ..Default::default()
65 | }
66 | }
67 | }
68 |
69 | impl Default for IndependentSkybox {
70 | fn default() -> Self {
71 | Self {
72 | skybox: Default::default(),
73 | brightness: 500.0,
74 | rotation: Quat::IDENTITY,
75 | skybox_cam_order_offset: -1_000,
76 | fov: Default::default(),
77 | skybox_cam: Default::default(),
78 | }
79 | }
80 | }
81 |
82 | /// Field of view setting for the [`IndependentSkybox`]
83 | #[derive(Debug, Clone, Reflect)]
84 | pub enum SkyboxFov {
85 | /// Match the [`PerspectiveProjection::fov`] of the camera this skybox camera is following.
86 | Auto,
87 | /// Use a fixed value for the skybox field of view. This value is equivalent to
88 | /// [`PerspectiveProjection::fov`].
89 | Fixed(f32),
90 | }
91 |
92 | impl Default for SkyboxFov {
93 | fn default() -> Self {
94 | Self::Fixed(PerspectiveProjection::default().fov)
95 | }
96 | }
97 |
98 | /// Used to track the camera that is used to render a skybox, using the [`IndependentSkybox`]
99 | /// component settings placed on a camera.
100 | #[derive(Component)]
101 | pub struct IndependentSkyboxCamera {
102 | /// The camera that this skybox camera is observing.
103 | driven_by: Entity,
104 | }
105 |
106 | impl IndependentSkyboxCamera {
107 | /// Spawns [`IndependentSkyboxCamera`]s when a [`IndependentSkybox`] exists without a skybox
108 | /// entity.
109 | pub fn spawn(
110 | mut commands: Commands,
111 | mut editor_cams: Query<(Entity, &mut IndependentSkybox, &mut Camera, &Msaa)>,
112 | skybox_cams: Query<&IndependentSkyboxCamera>,
113 | ) {
114 | for (editor_cam_entity, mut editor_without_skybox, mut camera, msaa) in
115 | editor_cams.iter_mut().filter(|(_, config, ..)| {
116 | config
117 | .skybox_cam
118 | .and_then(|e| skybox_cams.get(e).ok())
119 | .is_none()
120 | })
121 | {
122 | camera.clear_color = ClearColorConfig::None;
123 | camera.hdr = true;
124 |
125 | let entity = commands
126 | .spawn((
127 | Camera3d::default(),
128 | Camera {
129 | order: camera.order + editor_without_skybox.skybox_cam_order_offset,
130 | hdr: true,
131 | clear_color: ClearColorConfig::None,
132 | ..Default::default()
133 | },
134 | Projection::Perspective(PerspectiveProjection {
135 | fov: match editor_without_skybox.fov {
136 | SkyboxFov::Auto => PerspectiveProjection::default().fov,
137 | SkyboxFov::Fixed(fov) => fov,
138 | },
139 | ..Default::default()
140 | }),
141 | RenderLayers::none(),
142 | Skybox {
143 | image: editor_without_skybox.skybox.clone(),
144 | brightness: editor_without_skybox.brightness,
145 | rotation: editor_without_skybox.rotation,
146 | },
147 | IndependentSkyboxCamera {
148 | driven_by: editor_cam_entity,
149 | },
150 | *msaa,
151 | ))
152 | .id();
153 | editor_without_skybox.skybox_cam = Some(entity);
154 | }
155 | }
156 |
157 | /// Despawns [`IndependentSkyboxCamera`]s when their corresponding [`IndependentSkybox`] entity
158 | /// does not exist.
159 | pub fn despawn(
160 | mut commands: Commands,
161 | skybox_cams: Query<(Entity, &IndependentSkyboxCamera)>,
162 | editor_cams: Query<&IndependentSkybox>,
163 | ) {
164 | for (skybox_entity, skybox) in &skybox_cams {
165 | if editor_cams.get(skybox.driven_by).is_err() {
166 | commands.entity(skybox_entity).despawn();
167 | }
168 | }
169 | }
170 |
171 | /// Update the position and projection of this [`IndependentSkyboxCamera`] to copy the camera it
172 | /// is following.
173 | #[allow(clippy::type_complexity)]
174 | pub fn update(
175 | mut editor_cams: Query<
176 | (&IndependentSkybox, &Transform, &Projection, &Camera),
177 | (
178 | Or<(Changed, Changed)>,
179 | Without,
180 | ),
181 | >,
182 | mut skybox_cams: Query<(&mut Transform, &mut Projection, &mut Camera), With>,
183 | ) {
184 | for (editor_cam, editor_transform, editor_projection, camera) in &mut editor_cams {
185 | let Some(skybox_entity) = editor_cam.skybox_cam else {
186 | continue;
187 | };
188 | let Ok((mut skybox_transform, mut skybox_projection, mut skybox_camera)) =
189 | skybox_cams.get_mut(skybox_entity)
190 | else {
191 | continue;
192 | };
193 |
194 | skybox_camera.viewport.clone_from(&camera.viewport);
195 |
196 | if let Projection::Perspective(editor_perspective) = editor_projection {
197 | *skybox_projection = Projection::Perspective(PerspectiveProjection {
198 | fov: match editor_cam.fov {
199 | SkyboxFov::Auto => editor_perspective.fov,
200 | SkyboxFov::Fixed(fov) => fov,
201 | },
202 | ..editor_perspective.clone()
203 | })
204 | }
205 |
206 | *skybox_transform = *editor_transform;
207 | }
208 | }
209 | }
210 |
--------------------------------------------------------------------------------
/src/extensions/look_to.rs:
--------------------------------------------------------------------------------
1 | //! A `bevy_editor_cam` extension that adds the ability to smoothly rotate the camera about its
2 | //! anchor point until it is looking in the specified direction.
3 |
4 | use std::{f32::consts::PI, time::Duration};
5 |
6 | use bevy_app::prelude::*;
7 | use bevy_ecs::prelude::*;
8 | use bevy_math::{prelude::*, DQuat, DVec3};
9 | use bevy_reflect::prelude::*;
10 | use bevy_transform::prelude::*;
11 | use bevy_utils::{HashMap, Instant};
12 | use bevy_window::RequestRedraw;
13 |
14 | use crate::prelude::*;
15 |
16 | /// See the [module](self) docs.
17 | pub struct LookToPlugin;
18 |
19 | impl Plugin for LookToPlugin {
20 | fn build(&self, app: &mut App) {
21 | app.init_resource::()
22 | .add_event::()
23 | .add_systems(
24 | PreUpdate,
25 | LookTo::update
26 | .before(crate::controller::component::EditorCam::update_camera_positions),
27 | )
28 | .add_systems(PostUpdate, LookToTrigger::receive) // In PostUpdate so we don't miss users sending this in Update. LookTo::update will catch the changes next frame.
29 | .register_type::();
30 | }
31 | }
32 |
33 | /// Send this event to rotate the camera about its anchor until it is looking in the given direction
34 | /// with the given up direction. Animation speed is configured with the [`LookTo`] resource.
#[derive(Debug, Event)]
pub struct LookToTrigger {
    /// The new direction to face.
    pub target_facing_direction: Dir3,
    /// The camera's "up" direction when finished moving.
    pub target_up_direction: Dir3,
    /// The camera to update. Expected to be an entity with an [`EditorCam`] component.
    pub camera: Entity,
}
44 |
45 | impl LookToTrigger {
46 | /// Constructs a [`LookToTrigger`] with the up direction automatically selected.
47 | ///
48 | /// If the camera is set to [`OrbitConstraint::Fixed`], the fixed up direction will be used, as
49 | /// long as it is not parallel to the facing direction. If set to [`OrbitConstraint::Free`] or
50 | /// the facing direction is parallel to the fixed up direction, the up direction will be
51 | /// automatically selected by choosing the axis that results in the least amount of rotation.
52 | pub fn auto_snap_up_direction(
53 | facing: Dir3,
54 | cam_entity: Entity,
55 | cam_transform: &Transform,
56 | cam_editor: &EditorCam,
57 | ) -> Self {
58 | const EPSILON: f32 = 0.01;
59 | let constraint = match cam_editor.orbit_constraint {
60 | OrbitConstraint::Fixed { up, .. } => Some(up),
61 | OrbitConstraint::Free => None,
62 | }
63 | .filter(|up| {
64 | let angle = facing.angle_between(*up).abs();
65 | angle > EPSILON && angle < PI - EPSILON
66 | });
67 |
68 | let up = constraint.unwrap_or_else(|| {
69 | let current = cam_transform.rotation;
70 | let options = [
71 | Vec3::X,
72 | Vec3::NEG_X,
73 | Vec3::Y,
74 | Vec3::NEG_Y,
75 | Vec3::Z,
76 | Vec3::NEG_Z,
77 | ];
78 | *options
79 | .iter()
80 | .map(|d| (d, Transform::default().looking_to(*facing, *d).rotation))
81 | .map(|(d, rot)| (d, rot.angle_between(current).abs()))
82 | .reduce(|acc, this| if this.1 < acc.1 { this } else { acc })
83 | .map(|nearest| nearest.0)
84 | .unwrap_or(&Vec3::Y)
85 | });
86 |
87 | LookToTrigger {
88 | target_facing_direction: facing,
89 | target_up_direction: Dir3::new_unchecked(up.normalize()),
90 | camera: cam_entity,
91 | }
92 | }
93 | }
94 |
95 | impl LookToTrigger {
96 | fn receive(
97 | mut events: EventReader,
98 | mut state: ResMut,
99 | mut cameras: Query<(&mut EditorCam, &Transform)>,
100 | mut redraw: EventWriter,
101 | ) {
102 | for event in events.read() {
103 | let Ok((mut controller, transform)) = cameras.get_mut(event.camera) else {
104 | continue;
105 | };
106 | redraw.send(RequestRedraw);
107 |
108 | state
109 | .map
110 | .entry(event.camera)
111 | .and_modify(|e| {
112 | e.start = Instant::now();
113 | e.initial_facing_direction = transform.forward();
114 | e.initial_up_direction = transform.up();
115 | e.target_facing_direction = event.target_facing_direction;
116 | e.target_up_direction = event.target_up_direction;
117 | e.complete = false;
118 | })
119 | .or_insert(LookToEntry {
120 | start: Instant::now(),
121 | initial_facing_direction: transform.forward(),
122 | initial_up_direction: transform.up(),
123 | target_facing_direction: event.target_facing_direction,
124 | target_up_direction: event.target_up_direction,
125 | complete: false,
126 | });
127 |
128 | controller.end_move();
129 | controller.current_motion = motion::CurrentMotion::Stationary;
130 | }
131 | }
132 | }
133 |
/// Tracks a single camera's in-flight "look to" animation.
struct LookToEntry {
    /// When the animation was started (or last restarted).
    start: Instant,
    /// The camera's facing direction at the start of the animation.
    initial_facing_direction: Dir3,
    /// The camera's up direction at the start of the animation.
    initial_up_direction: Dir3,
    /// The facing direction the animation ends at.
    target_facing_direction: Dir3,
    /// The up direction the animation ends at.
    target_up_direction: Dir3,
    /// Set when the animation has finished (or its camera no longer exists),
    /// marking the entry for removal.
    complete: bool,
}
142 |
143 | /// Stores settings and state for the dolly zoom plugin.
144 | #[derive(Resource, Reflect)]
145 | pub struct LookTo {
146 | /// The duration of the "look to" transition animation.
147 | pub animation_duration: Duration,
148 | /// The cubic curve used to animate the camera during a "look to".
149 | #[reflect(ignore)]
150 | pub animation_curve: CubicSegment,
151 | #[reflect(ignore)]
152 | map: HashMap,
153 | }
154 |
155 | impl Default for LookTo {
156 | fn default() -> Self {
157 | Self {
158 | animation_duration: Duration::from_millis(400),
159 | animation_curve: CubicSegment::new_bezier((0.25, 0.0), (0.25, 1.0)),
160 | map: Default::default(),
161 | }
162 | }
163 | }
164 |
165 | impl LookTo {
166 | fn update(
167 | mut state: ResMut,
168 | mut cameras: Query<(&mut Transform, &EditorCam)>,
169 | mut redraw: EventWriter,
170 | ) {
171 | let animation_duration = state.animation_duration;
172 | let animation_curve = state.animation_curve;
173 | for (
174 | camera,
175 | LookToEntry {
176 | start,
177 | initial_facing_direction,
178 | initial_up_direction,
179 | target_facing_direction,
180 | target_up_direction,
181 | complete,
182 | },
183 | ) in state.map.iter_mut()
184 | {
185 | let Ok((mut transform, controller)) = cameras.get_mut(*camera) else {
186 | *complete = true;
187 | continue;
188 | };
189 | let progress_t =
190 | (start.elapsed().as_secs_f32() / animation_duration.as_secs_f32()).clamp(0.0, 1.0);
191 | let progress = animation_curve.ease(progress_t);
192 |
193 | let rotate_around = |transform: &mut Transform, point: DVec3, rotation: DQuat| {
194 | // Following lines are f64 versions of Transform::rotate_around
195 | transform.translation =
196 | (point + rotation * (transform.translation.as_dvec3() - point)).as_vec3();
197 | transform.rotation = (rotation * transform.rotation.as_dquat())
198 | .as_quat()
199 | .normalize();
200 | };
201 |
202 | let anchor_view_space = controller.anchor_view_space().unwrap_or(DVec3::new(
203 | 0.0,
204 | 0.0,
205 | controller.last_anchor_depth(),
206 | ));
207 |
208 | let anchor_world = {
209 | let (r, t) = (transform.rotation, transform.translation);
210 | r.as_dquat() * anchor_view_space + t.as_dvec3()
211 | };
212 |
213 | let rot_init = Transform::default()
214 | .looking_to(**initial_facing_direction, **initial_up_direction)
215 | .rotation;
216 | let rot_target = Transform::default()
217 | .looking_to(**target_facing_direction, **target_up_direction)
218 | .rotation;
219 |
220 | let rot_next = rot_init.slerp(rot_target, progress);
221 | let rot_last = transform.rotation;
222 | let rot_delta = rot_next * rot_last.inverse();
223 |
224 | rotate_around(&mut transform, anchor_world, rot_delta.as_dquat());
225 |
226 | if progress_t >= 1.0 {
227 | *complete = true;
228 | }
229 | redraw.send(RequestRedraw);
230 | }
231 | state.map.retain(|_, v| !v.complete);
232 | }
233 | }
234 |
--------------------------------------------------------------------------------
/src/extensions/mod.rs:
--------------------------------------------------------------------------------
1 | //! Extensions to the base camera controller.
2 |
3 | pub mod dolly_zoom;
4 | pub mod look_to;
5 |
6 | #[cfg(feature = "extension_anchor_indicator")]
7 | pub mod anchor_indicator;
8 | #[cfg(feature = "extension_independent_skybox")]
9 | pub mod independent_skybox;
10 |
--------------------------------------------------------------------------------
/src/input.rs:
--------------------------------------------------------------------------------
1 | //! Provides a default input plugin for the camera. See [`DefaultInputPlugin`].
2 |
3 | use bevy_app::prelude::*;
4 | use bevy_derive::{Deref, DerefMut};
5 | use bevy_ecs::prelude::*;
6 | use bevy_input::{
7 | mouse::{MouseScrollUnit, MouseWheel},
8 | prelude::*,
9 | };
10 | use bevy_math::{prelude::*, DVec2, DVec3};
11 | use bevy_reflect::prelude::*;
12 | use bevy_render::{camera::CameraProjection, prelude::*};
13 | use bevy_transform::prelude::*;
14 | use bevy_utils::hashbrown::HashMap;
15 | use bevy_window::PrimaryWindow;
16 |
17 | use bevy_picking::pointer::{
18 | PointerAction, PointerId, PointerInput, PointerInteraction, PointerLocation, PointerMap,
19 | };
20 |
21 | use crate::prelude::{component::EditorCam, inputs::MotionInputs};
22 |
/// The type of mutually exclusive camera motion.
///
/// Used by [`EditorCamInputEvent::Start`] to select which motion to begin.
#[derive(Debug, Clone, Copy, Reflect, PartialEq, Eq)]
pub enum MotionKind {
    /// The camera is orbiting and zooming.
    OrbitZoom,
    /// The camera is panning and zooming.
    PanZoom,
    /// The camera is only zooming.
    Zoom,
}
33 |
34 | impl From<&MotionInputs> for MotionKind {
35 | fn from(value: &MotionInputs) -> Self {
36 | match value {
37 | MotionInputs::OrbitZoom { .. } => MotionKind::OrbitZoom,
38 | MotionInputs::PanZoom { .. } => MotionKind::PanZoom,
39 | MotionInputs::Zoom { .. } => MotionKind::Zoom,
40 | }
41 | }
42 | }
43 |
44 | /// A plugin that provides a default input mapping. Intended to be replaced by users with their own
45 | /// version of this code, if needed.
46 | ///
47 | /// The input plugin is responsible for starting motions, sending inputs, and ending motions. See
48 | /// [`EditorCam`] for more details on how to implement this yourself.
49 | pub struct DefaultInputPlugin;
50 | impl Plugin for DefaultInputPlugin {
51 | fn build(&self, app: &mut App) {
52 | app.add_event::()
53 | .init_resource::()
54 | .add_systems(
55 | PreUpdate,
56 | (
57 | default_camera_inputs,
58 | EditorCamInputEvent::receive_events,
59 | EditorCamInputEvent::send_pointer_inputs,
60 | )
61 | .chain()
62 | .after(bevy_picking::PickSet::Last)
63 | .before(crate::controller::component::EditorCam::update_camera_positions),
64 | )
65 | .register_type::()
66 | .register_type::();
67 | }
68 | }
69 |
70 | /// A default implementation of an input system
71 | pub fn default_camera_inputs(
72 | pointers: Query<(&PointerId, &PointerLocation)>,
73 | pointer_map: Res,
74 | mut controller: EventWriter,
75 | mut mouse_wheel: EventReader,
76 | mouse_input: Res>,
77 | cameras: Query<(Entity, &Camera, &EditorCam)>,
78 | primary_window: Query>,
79 | ) {
80 | let orbit_start = MouseButton::Right;
81 | let pan_start = MouseButton::Left;
82 | let zoom_stop = 0.0;
83 |
84 | if let Some(&camera) = pointer_map.get(&PointerId::Mouse) {
85 | let camera_query = cameras.get(camera).ok();
86 | let is_in_zoom_mode = camera_query
87 | .map(|(.., editor_cam)| editor_cam.current_motion.is_zooming_only())
88 | .unwrap_or_default();
89 | let zoom_amount_abs = camera_query
90 | .and_then(|(.., editor_cam)| {
91 | editor_cam
92 | .current_motion
93 | .inputs()
94 | .map(|inputs| inputs.zoom_velocity_abs(editor_cam.smoothing.zoom.mul_f32(2.0)))
95 | })
96 | .unwrap_or(0.0);
97 | let should_zoom_end = is_in_zoom_mode && zoom_amount_abs <= zoom_stop;
98 |
99 | if mouse_input.any_just_released([orbit_start, pan_start]) || should_zoom_end {
100 | controller.send(EditorCamInputEvent::End { camera });
101 | }
102 | }
103 |
104 | for (&pointer, pointer_location) in pointers
105 | .iter()
106 | .filter_map(|(id, loc)| loc.location().map(|loc| (id, loc)))
107 | {
108 | match pointer {
109 | PointerId::Mouse => {
110 | let Some((camera, ..)) = cameras.iter().find(|(_, camera, _)| {
111 | pointer_location.is_in_viewport(camera, &primary_window)
112 | }) else {
113 | continue; // Pointer must be in viewport to start a motion.
114 | };
115 |
116 | if mouse_input.just_pressed(orbit_start) {
117 | controller.send(EditorCamInputEvent::Start {
118 | kind: MotionKind::OrbitZoom,
119 | camera,
120 | pointer,
121 | });
122 | } else if mouse_input.just_pressed(pan_start) {
123 | controller.send(EditorCamInputEvent::Start {
124 | kind: MotionKind::PanZoom,
125 | camera,
126 | pointer,
127 | });
128 | } else if mouse_wheel.read().map(|mw| mw.y.abs()).sum::() > 0.0 {
129 | // Note we can't just check if the mouse wheel inputs are empty, we need to
130 | // check if the y value abs greater than zero, otherwise we get a bunch of false
131 | // positives, which can cause issues with figuring out what the user is trying
132 | // to do.
133 | controller.send(EditorCamInputEvent::Start {
134 | kind: MotionKind::Zoom,
135 | camera,
136 | pointer,
137 | });
138 | }
139 | }
140 | PointerId::Touch(_) => continue,
141 | PointerId::Custom(_) => continue,
142 | }
143 | }
144 |
145 | // This must be cleared manually because reading these inputs is conditional - we are not
146 | // guaranteed to be flushing the events every frame.
147 | mouse_wheel.clear();
148 | }
149 |
150 | /// Maps pointers to the camera they are currently controlling.
151 | ///
152 | /// This is needed so we can automatically track pointer movements and update camera movement after
153 | /// a [`EditorCamInputEvent::Start`] has been received.
154 | #[derive(Debug, Clone, Default, Deref, DerefMut, Reflect, Resource)]
155 | pub struct CameraPointerMap(HashMap);
156 |
157 | /// Events used when implementing input systems for the [`EditorCam`].
#[derive(Debug, Clone, Reflect, Event)]
pub enum EditorCamInputEvent {
    /// Send this event to start moving the camera. The anchor and inputs will be computed
    /// automatically until the [`EditorCamInputEvent::End`] event is received.
    Start {
        /// The kind of camera movement that is being started.
        kind: MotionKind,
        /// The camera to move.
        camera: Entity,
        /// The pointer that will be controlling the camera. The rotation anchor point in the world
        /// will be automatically computed using picking backends.
        pointer: PointerId,
    },
    /// Send this event when a user's input ends, e.g. the button is released. This ends the active
    /// motion and releases the pointer-to-camera mapping in [`CameraPointerMap`].
    End {
        /// The entity of the camera that should end its current input motion.
        camera: Entity,
    },
}
177 |
178 | impl EditorCamInputEvent {
179 | /// Get the camera entity associated with this event.
180 | pub fn camera(&self) -> Entity {
181 | match self {
182 | EditorCamInputEvent::Start { camera, .. } => *camera,
183 | EditorCamInputEvent::End { camera } => *camera,
184 | }
185 | }
186 |
187 | /// Receive [`EditorCamInputEvent`]s, and use these to start and end moves on the [`EditorCam`].
188 | pub fn receive_events(
189 | mut events: EventReader,
190 | mut controllers: Query<(&mut EditorCam, &GlobalTransform)>,
191 | mut camera_map: ResMut,
192 | pointer_map: Res,
193 | pointer_interactions: Query<&PointerInteraction>,
194 | pointer_locations: Query<&PointerLocation>,
195 | cameras: Query<(&Camera, &Projection)>,
196 | ) {
197 | for event in events.read() {
198 | let Ok((mut controller, cam_transform)) = controllers.get_mut(event.camera()) else {
199 | continue;
200 | };
201 |
202 | match event {
203 | EditorCamInputEvent::Start { kind, pointer, .. } => {
204 | if controller.is_actively_controlled() {
205 | continue;
206 | }
207 | let anchor = pointer_map
208 | .get_entity(*pointer)
209 | .and_then(|entity| pointer_interactions.get(entity).ok())
210 | .and_then(|interaction| interaction.get_nearest_hit())
211 | .and_then(|(_, hit)| hit.position)
212 | .map(|world_space_hit| {
213 | // Convert the world space hit to view (camera) space
214 | cam_transform
215 | .compute_matrix()
216 | .as_dmat4()
217 | .inverse()
218 | .transform_point3(world_space_hit.into())
219 | })
220 | .or_else(|| {
221 | let camera = cameras.get(event.camera()).ok();
222 | let pointer_location = pointer_map
223 | .get_entity(*pointer)
224 | .and_then(|entity| pointer_locations.get(entity).ok())
225 | .and_then(|l| l.location());
226 | if let Some(((camera, proj), pointer_location)) =
227 | camera.zip(pointer_location)
228 | {
229 | screen_to_view_space(
230 | camera,
231 | proj,
232 | &controller,
233 | pointer_location.position,
234 | )
235 | } else {
236 | None
237 | }
238 | });
239 |
240 | match kind {
241 | MotionKind::OrbitZoom => controller.start_orbit(anchor),
242 | MotionKind::PanZoom => controller.start_pan(anchor),
243 | MotionKind::Zoom => controller.start_zoom(anchor),
244 | }
245 | camera_map.insert(*pointer, event.camera());
246 | }
247 | EditorCamInputEvent::End { .. } => {
248 | controller.end_move();
249 | if let Some(pointer) = camera_map
250 | .iter()
251 | .find(|(.., &camera)| camera == event.camera())
252 | .map(|(&pointer, ..)| pointer)
253 | {
254 | camera_map.remove(&pointer);
255 | }
256 | }
257 | }
258 | }
259 | }
260 |
261 | /// While a camera motion is active, this system will take care of sending new pointer motion to
262 | /// the camera controller. The camera controller assumes that pan and orbit movements are tied
263 | /// to screen space pointer motion.
264 | ///
265 | /// This is because some of the pixel-perfect features of the controller require that data be
266 | /// passed in as screen space deltas, to compute perfect first-order control. This is also
267 | /// because the plugin uses pointer information to know which camera is being controlled.
268 | ///
269 | /// If you want to control the camera with different inputs, you will need to replace this
270 | /// system with one that tracks other input methods, and sends the required zoom and screenspace
271 | /// movement information.
272 | pub fn send_pointer_inputs(
273 | camera_map: Res,
274 | mut camera_controllers: Query<&mut EditorCam>,
275 | mut mouse_wheel: EventReader,
276 | mut moves: EventReader,
277 | ) {
278 | let moves_list: Vec<_> = moves.read().collect();
279 | for (pointer, camera) in camera_map.iter() {
280 | let Ok(mut camera_controller) = camera_controllers.get_mut(*camera) else {
281 | continue;
282 | };
283 |
284 | let screenspace_input = moves_list
285 | .iter()
286 | .filter(|m| m.pointer_id.eq(pointer))
287 | .filter_map(|m| match m.action {
288 | PointerAction::Moved { delta } => Some(delta),
289 | PointerAction::Pressed { .. } => None,
290 | PointerAction::Canceled => None,
291 | })
292 | .sum();
293 |
294 | let zoom_amount = match pointer {
295 | // TODO: add pinch zoom support
296 | PointerId::Mouse => mouse_wheel
297 | .read()
298 | .map(|mw| {
299 | let scroll_multiplier = match mw.unit {
300 | MouseScrollUnit::Line => 150.0,
301 | MouseScrollUnit::Pixel => 1.0,
302 | };
303 | mw.y * scroll_multiplier
304 | })
305 | .sum::(),
306 | _ => 0.0,
307 | };
308 |
309 | camera_controller.send_screenspace_input(screenspace_input);
310 | camera_controller.send_zoom_input(zoom_amount);
311 | }
312 | // This must be cleared manually because reading these inputs is conditional - we are not
313 | // guaranteed to be flushing the events every frame.
314 | mouse_wheel.clear();
315 | }
316 | }
317 |
318 | fn screen_to_view_space(
319 | camera: &Camera,
320 | proj: &Projection,
321 | controller: &EditorCam,
322 | target_position: Vec2,
323 | ) -> Option {
324 | let mut viewport_position = if let Some(rect) = camera.logical_viewport_rect() {
325 | target_position.as_dvec2() - rect.min.as_dvec2()
326 | } else {
327 | target_position.as_dvec2()
328 | };
329 | let target_size = camera.logical_viewport_size()?.as_dvec2();
330 | // Flip the Y co-ordinate origin from the top to the bottom.
331 | viewport_position.y = target_size.y - viewport_position.y;
332 | let ndc = viewport_position * 2. / target_size - DVec2::ONE;
333 | let ndc_to_view = proj.get_clip_from_view().as_dmat4().inverse();
334 | let view_near_plane = ndc_to_view.project_point3(ndc.extend(1.));
335 | match &proj {
336 | Projection::Perspective(_) => {
337 | // Using EPSILON because an ndc with Z = 0 returns NaNs.
338 | let view_far_plane = ndc_to_view.project_point3(ndc.extend(f64::EPSILON));
339 | let direction = (view_far_plane - view_near_plane).normalize();
340 | Some((direction / direction.z) * controller.last_anchor_depth())
341 | }
342 | Projection::Orthographic(_) => Some(DVec3::new(
343 | view_near_plane.x,
344 | view_near_plane.y,
345 | controller.last_anchor_depth(),
346 | )),
347 | }
348 | }
349 |
--------------------------------------------------------------------------------
/src/lib.rs:
--------------------------------------------------------------------------------
1 | //! A production-ready camera controller for 3D editors; intended for anyone who needs to rapidly
2 | //! and intuitively navigate virtual spaces.
3 | //!
4 | //! Camera controllers are very subjective! As someone who has spent years using camera controllers
5 | //! in mechanical engineering CAD software, I've developed my own opinions about what matters in a
6 | //! camera controller. This is my attempt to make the controller I've always wanted, that fixes the
7 | //! annoyances I've encountered.
8 | //!
9 | //! *Because* camera controllers are so subjective, I felt the need to write out the impetus for
10 | //! making this thing, what matters to me, and how I decided between conflicting goals. Somehow,
11 | //! this ended up as a manifesto of sorts. If you came here to learn how to use or extend this
12 | //! plugin, I've boiled the manifesto down into two sentences:
13 | //!
14 | //! > A camera controller needs to be responsive, robust, and satisfying to use. When there is
15 | //! > conflict between these needs, they should be prioritized in that order.
16 | //!
17 | //! Now that you've absorbed my wisdom, feel free to skip ahead to the [Usage](crate#usage) section.
18 | //!
19 | //! Or don't. It's up to you.
20 | //!
21 | //! # Philosophy
22 | //!
23 | //! These are the properties of a good editor camera controller, in order of importance. These are
24 | //! the driving values for the choices I've made here. You might disagree and have different values
25 | //! or priorities!
26 | //!
27 | //! ## Responsive
28 | //!
29 | //! A good camera controller should never feel floaty or disconnected. It should go exactly where
30 | //! the user commands it to go. Responsiveness isn't simply "low latency", it's about respecting the
31 | //! user's intent.
32 | //!
33 | //! #### First-order input
34 | //!
35 | //! The most precise inputs are first-order, that is, controlling the position of something
36 | //! directly, instead of its velocity (second-order) or acceleration (third-order). An example of
37 | //! this is using a mouse vs. a gamepad for controlling the rotation of a first person view. The
38 | //! mouse is first order, the position of the mouse on the mousepad directly corresponds with the
39 | //! direction the player is facing. Conversely, a joystick controls the velocity of the view
40 | //! rotation. All that is to say, where possible, the camera controller should use pointer inputs
41 | //! *directly*.
42 | //!
43 | //! #### Pixel-perfect panning
44 | //!
45 | //! When you click and drag to pan the scene, the thing you click on should stick to your pointer,
46 | //! and never drift. This should hold true even if inputs are being smoothed.
47 | //!
48 | //! #### Intuitive zoom
49 | //!
50 | //! The camera should zoom in and out in the direction you are pointing. If the user is hovering
51 | //! over something, the speed of the camera should automatically adjust to quickly zoom up to it
52 | //! without clipping through it.
53 | //!
54 | //! #### Predictable rotation
55 | //!
56 | //! When you click and drag to orbit the scene in 3d, the center of rotation should be located where
57 | //! your pointer was when the drag started.
58 | //!
59 | //! #### Intuitive perspective toggle
60 | //!
61 | //! Toggling between different fields of view, or between perspective and orthographic projections,
62 | //! should not cause the camera view to jump or change suddenly. The view should smoothly warp,
63 | //! keeping the last interacted point stationary on the screen.
64 | //!
65 | //! ## Robust
66 | //!
67 | //! A camera controller should work in any scenario, and handle failure gracefully and
68 | //! unsurprisingly when inputs are ambiguous.
69 | //!
70 | //! #### Works in all conditions:
71 | //!
//! All of the features in the previous section should work regardless of framerate, distance, scale,
73 | //! camera field of view, and camera projection - including orthographic.
74 | //!
75 | //! #### Graceful fallback
76 | //!
77 | //! if nothing is under the pointer when a camera motion starts, the last-known depth should be
78 | //! used, to prevent erratic behavior when the hit test fails. If a user was orbiting around a point
79 | //! on an object, then clicks to rotate about empty space, the camera should not shoot off into
80 | //! space because nothing was under the cursor.
81 | //!
//! ## Satisfying
83 | //!
84 | //! The controller should *feel* good to use.
85 | //!
86 | //! #### Momentum
87 | //!
88 | //! Panning and orbiting should support configurable momentum, to allow you to "flick" the camera
89 | //! through the scene to cover distance and make the feel of the camera tunable. This is especially
90 | //! useful for trackpad and touch users.
91 | //!
92 | //! #### Smoothness
93 | //!
94 | //! The smoothness of inputs should be configurable as a tradeoff between fluidity of motion and
95 | //! responsiveness. This is particularly useful when showing the screen to other people, where fast
96 | //! motions can be disorienting or even nauseating.
97 | //!
98 | //! # Usage
99 | //!
100 | //! This plugin only requires three things to work. The `bevy_picking` plugin for hit tests, the
101 | //! [`DefaultEditorCamPlugins`] plugin group, and the [`EditorCam`](crate::prelude::EditorCam)
102 | //! component. Controller settings are configured per-camera in the
103 | //! [`EditorCam`](crate::prelude::EditorCam) component.
104 | //!
105 | //! ## Getting Started
106 | //!
107 | //! #### 1. Add `bevy_picking`
108 | //!
//! The camera controller uses [`bevy_picking`] for pointer interactions. If you already use it along
110 | //! with a picking backend, then using this camera controller is essentially free because it can
111 | //! reuse those same hit tests you are already running.
112 | //!
113 | //! #### 2. Add `DefaultEditorCamPlugins`
114 | //!
115 | //! This is a plugin group that adds the camera controller, as well as all the [extensions]. You can
116 | //! instead add [`controller::MinimalEditorCamPlugin`], though you will need to add your own input
117 | //! plugin if you do.
118 | //!
119 | //! ```
120 | //! # let mut app = bevy::app::App::new();
121 | //! app.add_plugins(bevy_editor_cam::DefaultEditorCamPlugins);
122 | //! ```
123 | //!
124 | //! #### 3. Insert the `EditorCam` component
125 | //!
126 | //! Finally, insert [`controller::component::EditorCam`] onto any cameras that you want to control.
127 | //! This marks the cameras as controllable and holds all camera controller settings.
128 | //!
129 | //! ```
130 | //! # use bevy::ecs::system::Commands;
131 | //! # use bevy_editor_cam::prelude::*;
132 | //! # fn test(mut commands: Commands) {
133 | //! commands.spawn((
134 | //! // Camera
135 | //! EditorCam::default(),
136 | //! ));
137 | //! # }
138 | //! ```
139 | //!
140 | //! # Other notable features
141 | //!
142 | //! I've also implemented a few other features that are handy for a camera controller like this.
143 | //!
144 | //! ### Compatible with floating origins and other controllers
145 | //!
146 | //! This controller does all computations in view space. The result of this is that you can move the
147 | //! camera wherever you want, update its transform, and it will continue to behave normally, as long
148 | //! as the camera isn't being controlled by the user while you do this. This means you can control
149 | //! this camera with another camera controller, or use it in a floating origin system.
150 | //!
151 | //! ### Independent skybox
152 | //!
153 | //! When working in a CAD context, it is common to use orthographic projections to remove
154 | //! perspective distortion from the image. However, because an ortho projection has zero field of
155 | //! view, the view of the skybox is infinitesimally small, i.e. only a single pixel of the skybox is
156 | //! visible. To fix this, an [extension](extensions) is provided to attach a skybox to a camera that
157 | //! is independent from that camera's field of view.
158 | //!
159 | //! ### Pointer and Hit Test Agnostic
160 | //!
161 | //! Users of this library shouldn't be forced into using any particular hit testing method, like CPU
162 | //! raycasting. The controller uses [`bevy_picking`] to work with:
163 | //!
164 | //! - Arbitrary hit testing backends, including those written by users. See
165 | //! [`bevy_picking::backend`] for more information.
166 | //! - Any number of pointing inputs, including touch.
167 | //! - Viewports and multi-pass rendering.
168 |
169 | #![warn(missing_docs)]
170 |
171 | pub mod controller;
172 | pub mod extensions;
173 | pub mod input;
174 |
175 | /// Common imports.
pub mod prelude {
    // Re-exports the controller, its component/settings types, and the
    // top-level plugin group for convenient glob importing.
    pub use crate::{
        controller::{component::*, *},
        DefaultEditorCamPlugins,
    };
}
182 |
183 | use bevy_app::{prelude::*, PluginGroupBuilder};
184 |
185 | /// Adds [`bevy_editor_cam`](crate) functionality with all extensions and the default input plugin.
186 | pub struct DefaultEditorCamPlugins;
187 |
188 | impl PluginGroup for DefaultEditorCamPlugins {
189 | #[allow(clippy::let_and_return)] // Needed for conditional compilation
190 | fn build(self) -> PluginGroupBuilder {
191 | let group = PluginGroupBuilder::start::()
192 | .add(input::DefaultInputPlugin)
193 | .add(controller::MinimalEditorCamPlugin)
194 | .add(extensions::dolly_zoom::DollyZoomPlugin)
195 | .add(extensions::look_to::LookToPlugin);
196 |
197 | #[cfg(feature = "extension_anchor_indicator")]
198 | let group = group.add(extensions::anchor_indicator::AnchorIndicatorPlugin);
199 |
200 | #[cfg(feature = "extension_independent_skybox")]
201 | let group = group.add(extensions::independent_skybox::IndependentSkyboxPlugin);
202 |
203 | group
204 | }
205 | }
206 |
--------------------------------------------------------------------------------