├── .github ├── dependabot.yml └── workflows │ └── rust.yml ├── .gitignore ├── Cargo.toml ├── LICENSE-APACHE ├── LICENSE-MIT ├── README.md ├── code_of_conduct.md ├── examples ├── 3d-to-2d.rs ├── ray-intersect.rs └── render-cube.rs └── src ├── camera.rs ├── extrinsics.rs ├── intrinsic_test_utils.rs ├── intrinsics_orthographic.rs ├── intrinsics_perspective.rs ├── lib.rs ├── linearize.rs ├── ray_bundle_types.rs └── ray_intersection.rs /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: cargo 4 | directory: "/" 5 | schedule: 6 | interval: daily 7 | time: "04:00" 8 | open-pull-requests-limit: 10 9 | -------------------------------------------------------------------------------- /.github/workflows/rust.yml: -------------------------------------------------------------------------------- 1 | name: build 2 | 3 | on: 4 | push: 5 | branches: [ '**' ] 6 | pull_request: 7 | branches: [ '**' ] 8 | 9 | jobs: 10 | std: 11 | 12 | runs-on: ${{ matrix.os }} 13 | strategy: 14 | matrix: 15 | os: [macos-latest, windows-latest, ubuntu-latest] 16 | 17 | steps: 18 | - uses: actions/checkout@v3 19 | - name: Build 20 | run: cargo build 21 | - name: Run tests 22 | run: cargo test 23 | 24 | nostd: 25 | 26 | runs-on: ${{ matrix.os }} 27 | strategy: 28 | matrix: 29 | os: [macos-latest, windows-latest, ubuntu-latest] 30 | 31 | steps: 32 | - uses: actions/checkout@v3 33 | - name: Install ARM target 34 | run: rustup target add thumbv7em-none-eabihf 35 | - name: Build 36 | run: cargo build --no-default-features --target thumbv7em-none-eabihf 37 | - name: Build 38 | run: cargo build --no-default-features --features alloc --target thumbv7em-none-eabihf 39 | 40 | serde: 41 | 42 | runs-on: ${{ matrix.os }} 43 | strategy: 44 | matrix: 45 | os: [macos-latest, windows-latest, ubuntu-latest] 46 | 47 | steps: 48 | - uses: actions/checkout@v3 49 | - name: Build 50 | run: cargo build --features 
serde-serialize 51 | - name: Run tests 52 | run: cargo test --features serde-serialize 53 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | **/*.rs.bk 3 | Cargo.lock 4 | cube-ortho.svg 5 | cube-perspective.svg 6 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "cam-geom" 3 | version = "0.15.3" 4 | authors = ["Andrew Straw "] 5 | edition = "2021" 6 | description = "Geometric models of cameras for photogrammetry" 7 | license = "MIT OR Apache-2.0" 8 | readme = "README.md" 9 | repository = "https://github.com/strawlab/cam-geom" 10 | keywords = ["computer", "vision", "photogrammetry"] 11 | categories = [ 12 | "algorithms", 13 | "computer-vision", 14 | "no-std", 15 | "science", 16 | "science::robotics", 17 | ] 18 | rust-version = "1.65" 19 | 20 | [dependencies] 21 | nalgebra = { version = "0.33", default-features = false, features = ["libm"] } 22 | approx = { version = "0.5", default-features = false } 23 | num-iter = { version = "0.1.40", default-features = false, optional = true } 24 | itertools = { version = "0.14", default-features = false } 25 | serde = { version = "1.0", default-features = false, features = [ 26 | "derive", 27 | ], optional = true } 28 | 29 | [dev-dependencies] 30 | serde_json = "1.0" 31 | dlt = "0.13" 32 | 33 | [features] 34 | default = ["std"] 35 | 36 | std = ["nalgebra/std", "dep:num-iter", "alloc"] 37 | serde-serialize = ["dep:serde", "nalgebra/serde-serialize"] 38 | alloc = ["nalgebra/alloc"] 39 | 40 | [package.metadata.docs.rs] 41 | features = ["serde-serialize"] 42 | -------------------------------------------------------------------------------- /LICENSE-APACHE: -------------------------------------------------------------------------------- 1 | 
Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 
39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2020 Andrew Straw 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /LICENSE-MIT: -------------------------------------------------------------------------------- 1 | Copyright (c) 2020 Andrew D. Straw 2 | 3 | Permission is hereby granted, free of charge, to any 4 | person obtaining a copy of this software and associated 5 | documentation files (the "Software"), to deal in the 6 | Software without restriction, including without 7 | limitation the rights to use, copy, modify, merge, 8 | publish, distribute, sublicense, and/or sell copies of 9 | the Software, and to permit persons to whom the Software 10 | is furnished to do so, subject to the following 11 | conditions: 12 | 13 | The above copyright notice and this permission notice 14 | shall be included in all copies or substantial portions 15 | of the Software. 16 | 17 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF 18 | ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED 19 | TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 20 | PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT 21 | SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY 22 | CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 23 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR 24 | IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER 25 | DEALINGS IN THE SOFTWARE. 26 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![Crates.io](https://img.shields.io/crates/v/cam-geom.svg)](https://crates.io/crates/cam-geom) 2 | [![Documentation](https://docs.rs/cam-geom/badge.svg)](https://docs.rs/cam-geom/) 3 | [![Crate License](https://img.shields.io/crates/l/cam-geom.svg)](https://crates.io/crates/cam-geom) 4 | [![Dependency status](https://deps.rs/repo/github/strawlab/cam-geom/status.svg)](https://deps.rs/repo/github/strawlab/cam-geom) 5 | [![build](https://github.com/strawlab/cam-geom/actions/workflows/rust.yml/badge.svg?branch=main)](https://github.com/strawlab/cam-geom/actions?query=branch%3Amain) 6 | 7 | 📷 📐 Geometric models of cameras for photogrammetry 8 | 9 | ![pinhole model 10 | image](https://strawlab.org/assets/images/pinhole-model-ladybug.png) 11 | 12 | (3D model by 13 | [Adan](https://sketchfab.com/3d-models/lowpoly-lady-bug-90b59b5185b14c52944573f236eb7175), 14 | [CC by 4.0](https://creativecommons.org/licenses/by/4.0/)) 15 | 16 | ## About 17 | 18 | The crate implements geometric models of cameras which may be useful for 19 | [photogrammetry](https://en.wikipedia.org/wiki/Photogrammetry). 20 | 21 | The crate provides a couple camera models, [the pinhole perspective 22 | camera](https://en.wikipedia.org/wiki/Pinhole_camera_model) and the 23 | [orthographic 24 | camera](https://en.wikipedia.org/wiki/Orthographic_projection). Adding 25 | another camera model entails implementing the 26 | [`IntrinsicParameters`](trait.IntrinsicParameters.html) trait. 
See the 27 | [`opencv_ros_camera`](https://crates.io/crates/opencv-ros-camera) crate 28 | for one example. 29 | 30 | Also provided is the function 31 | [`best_intersection_of_rays()`](fn.best_intersection_of_rays.html) which 32 | determines the best 3D point corresponding to the intersection of multiple 33 | rays. Thus, this crate is also useful for multiple view geometry. 34 | 35 | Characteristics: 36 | 37 | * Extensive use of static typing to ensure no unpleasant runtime surprises 38 | with coordinate system, matrix dimensions, and so on. 39 | * Serialization and deserialization using [`serde`](https://docs.rs/serde). 40 | Enable with the `serde-serialize` cargo feature. 41 | * Linear algebra and types from the [`nalgebra`](https://docs.rs/nalgebra) 42 | crate. 43 | * Possible to create new camera models by implementing the 44 | [`IntrinsicParameters`](trait.IntrinsicParameters.html) trait. While the 45 | camera models implemented in this crate are linear, there is no 46 | requirement that implementations are linear. For example, the 47 | [`opencv_ros_camera`](https://crates.io/crates/opencv-ros-camera) crate 48 | exhibits [distortion](https://en.wikipedia.org/wiki/Distortion_(optics)). 49 | * [`ExtrinsicParameters`](struct.ExtrinsicParameters.html) based on the 50 | [`nalgebra::Isometry3`](https://docs.rs/nalgebra/latest/nalgebra/geometry/type.Isometry3.html) 51 | type to handle the camera pose. 52 | * No standard library is required (disable the default features to disable 53 | use of `std`) and no heap allocations. In other words, this can run on a 54 | bare-metal microcontroller with no OS. 55 | * Extensive documentation and tests. 56 | * Requires rust version 1.65 or greater. 
57 | 58 | ## Testing 59 | 60 | ### Unit tests 61 | 62 | To run the basic unit tests: 63 | 64 | ```text 65 | cargo test 66 | ``` 67 | 68 | To run all unit tests: 69 | 70 | ```text 71 | cargo test --features serde-serialize 72 | ``` 73 | 74 | ### Test for `no_std` 75 | 76 | Since the `thumbv7em-none-eabihf` target does not have `std` available, we 77 | can build for it to check that our crate does not inadvertently pull in 78 | std. The unit tests require std, so cannot be run on a `no_std` platform. 79 | The following will fail if a std dependency is present: 80 | 81 | ```text 82 | # install target with: "rustup target add thumbv7em-none-eabihf" 83 | cargo build --no-default-features --target thumbv7em-none-eabihf 84 | ``` 85 | 86 | ## Code of conduct 87 | 88 | Anyone who interacts with this software in any space, including but not limited 89 | to this GitHub repository, must follow our [code of 90 | conduct](code_of_conduct.md). 91 | 92 | ## License 93 | 94 | Licensed under either of these: 95 | 96 | * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or 97 | ) 98 | * MIT license ([LICENSE-MIT](LICENSE-MIT) or 99 | ) 100 | -------------------------------------------------------------------------------- /code_of_conduct.md: -------------------------------------------------------------------------------- 1 | # Contributor Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | In the interest of fostering an open and welcoming environment, we as 6 | contributors and maintainers pledge to making participation in our project and 7 | our community a harassment-free experience for everyone, regardless of age, body 8 | size, disability, ethnicity, gender identity and expression, level of experience, 9 | nationality, personal appearance, race, religion, or sexual identity and 10 | orientation. 
11 | 12 | ## Our Standards 13 | 14 | Examples of behavior that contributes to creating a positive environment 15 | include: 16 | 17 | * Using welcoming and inclusive language 18 | * Being respectful of differing viewpoints and experiences 19 | * Gracefully accepting constructive criticism 20 | * Focusing on what is best for the community 21 | * Showing empathy towards other community members 22 | 23 | Examples of unacceptable behavior by participants include: 24 | 25 | * The use of sexualized language or imagery and unwelcome sexual attention or 26 | advances 27 | * Trolling, insulting/derogatory comments, and personal or political attacks 28 | * Public or private harassment 29 | * Publishing others' private information, such as a physical or electronic 30 | address, without explicit permission 31 | * Other conduct which could reasonably be considered inappropriate in a 32 | professional setting 33 | 34 | ## Our Responsibilities 35 | 36 | Project maintainers are responsible for clarifying the standards of acceptable 37 | behavior and are expected to take appropriate and fair corrective action in 38 | response to any instances of unacceptable behavior. 39 | 40 | Project maintainers have the right and responsibility to remove, edit, or 41 | reject comments, commits, code, wiki edits, issues, and other contributions 42 | that are not aligned to this Code of Conduct, or to ban temporarily or 43 | permanently any contributor for other behaviors that they deem inappropriate, 44 | threatening, offensive, or harmful. 45 | 46 | ## Scope 47 | 48 | This Code of Conduct applies both within project spaces and in public spaces 49 | when an individual is representing the project or its community. Examples of 50 | representing a project or community include using an official project e-mail 51 | address, posting via an official social media account, or acting as an appointed 52 | representative at an online or offline event. 
Representation of a project may be 53 | further defined and clarified by project maintainers. 54 | 55 | ## Enforcement 56 | 57 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 58 | reported by contacting a project maintainer at: 59 | 60 | * Andrew Straw 61 | 62 | All complaints will be reviewed and investigated and will result in a response 63 | that is deemed necessary and appropriate to the circumstances. The project team 64 | is obligated to maintain confidentiality with regard to the reporter of an 65 | incident. Further details of specific enforcement policies may be posted 66 | separately. 67 | 68 | Project maintainers who do not follow or enforce the Code of Conduct in good 69 | faith may face temporary or permanent repercussions as determined by other 70 | members of the project's leadership. 71 | 72 | ## Attribution 73 | 74 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, 75 | available at [http://contributor-covenant.org/version/1/4][version] 76 | 77 | [homepage]: http://contributor-covenant.org 78 | [version]: http://contributor-covenant.org/version/1/4/ 79 | -------------------------------------------------------------------------------- /examples/3d-to-2d.rs: -------------------------------------------------------------------------------- 1 | // This is cut-and-pasted from the example in the docs of src/lib.rs. 2 | 3 | fn main() { 4 | use cam_geom::*; 5 | use nalgebra::{Matrix2x3, Unit, Vector3}; 6 | 7 | // Create two points in the world coordinate frame. 
8 | let world_coords = Points::new(Matrix2x3::new( 9 | 1.0, 0.0, 0.0, // point 1 10 | 0.0, 1.0, 0.0, // point 2 11 | )); 12 | 13 | // perepective parameters - focal length of 100, no skew, pixel center at (640,480) 14 | let intrinsics = IntrinsicParametersPerspective::from(PerspectiveParams { 15 | fx: 100.0, 16 | fy: 100.0, 17 | skew: 0.0, 18 | cx: 640.0, 19 | cy: 480.0, 20 | }); 21 | 22 | // Set extrinsic parameters - camera at (10,0,0), looing at (0,0,0), up (0,0,1) 23 | let camcenter = Vector3::new(10.0, 0.0, 0.0); 24 | let lookat = Vector3::new(0.0, 0.0, 0.0); 25 | let up = Unit::new_normalize(Vector3::new(0.0, 0.0, 1.0)); 26 | let pose = ExtrinsicParameters::from_view(&camcenter, &lookat, &up); 27 | 28 | // Create a `Camera` with both intrinsic and extrinsic parameters. 29 | let camera = Camera::new(intrinsics, pose); 30 | 31 | // Project the original 3D coordinates to 2D pixel coordinates. 32 | let pixel_coords = camera.world_to_pixel(&world_coords); 33 | 34 | // Print the results. 35 | for i in 0..world_coords.data.nrows() { 36 | let wc = world_coords.data.row(i); 37 | let pix = pixel_coords.data.row(i); 38 | println!("{} -> {}", wc, pix); 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /examples/ray-intersect.rs: -------------------------------------------------------------------------------- 1 | // This is cut-and-pasted from the example in the docs of src/lib.rs. 2 | 3 | fn main() { 4 | use cam_geom::*; 5 | use nalgebra::RowVector3; 6 | 7 | // Create the first ray. 8 | let ray1 = Ray::::new( 9 | RowVector3::new(1.0, 0.0, 0.0), // origin 10 | RowVector3::new(0.0, 1.0, 0.0), // direction 11 | ); 12 | 13 | // Create the second ray. 14 | let ray2 = Ray::::new( 15 | RowVector3::new(0.0, 1.0, 0.0), // origin 16 | RowVector3::new(1.0, 0.0, 0.0), // direction 17 | ); 18 | 19 | // Compute the best intersection. 20 | let result = best_intersection_of_rays(&[ray1, ray2]).unwrap(); 21 | 22 | // Print the result. 
23 | println!("result: {}", result.data); 24 | } 25 | -------------------------------------------------------------------------------- /examples/render-cube.rs: -------------------------------------------------------------------------------- 1 | //! This example renders a cube to an SVG file using both a perspective camera 2 | //! and an orthographic camera. 3 | 4 | use cam_geom::*; 5 | use nalgebra::{ 6 | allocator::Allocator, storage::Storage, DefaultAllocator, Dim, Matrix, SMatrix, Unit, Vector3, 7 | U1, U2, U3, 8 | }; 9 | 10 | /// Create a perspective camera. 11 | fn get_perspective_cam() -> Camera> { 12 | // Set intrinsic parameters 13 | let intrinsics = PerspectiveParams { 14 | fx: 100.0, 15 | fy: 100.0, 16 | skew: 0.0, 17 | cx: 640.0, 18 | cy: 480.0, 19 | }; 20 | 21 | // Set extrinsic parameters. 22 | let camcenter = Vector3::new(10.0, 3.0, 5.0); 23 | let lookat = Vector3::new(0.0, 0.0, 0.0); 24 | let up = Unit::new_normalize(Vector3::new(0.0, 0.0, 1.0)); 25 | let pose = ExtrinsicParameters::from_view(&camcenter, &lookat, &up); 26 | 27 | // Create camera with both intrinsic and extrinsic parameters. 28 | Camera::new(intrinsics.into(), pose) 29 | } 30 | 31 | /// Create an orthographic camera. 32 | fn get_ortho_cam() -> Camera> { 33 | let intrinsics = OrthographicParams { 34 | sx: 100.0, 35 | sy: 102.0, 36 | cx: 321.0, 37 | cy: 239.9, 38 | }; 39 | 40 | // Set extrinsic parameters. 41 | let camcenter = Vector3::new(10.0, 3.0, 5.0); 42 | let lookat = Vector3::new(0.0, 0.0, 0.0); 43 | let up = Unit::new_normalize(Vector3::new(0.0, 0.0, 1.0)); 44 | let pose = ExtrinsicParameters::from_view(&camcenter, &lookat, &up); 45 | 46 | // Create camera with both intrinsic and extrinsic parameters. 
47 | Camera::new(intrinsics.into(), pose) 48 | } 49 | 50 | /// A simple SVG file writer 51 | struct SvgWriter { 52 | segs: Vec<((f64, f64), (f64, f64))>, 53 | xmin: f64, 54 | xmax: f64, 55 | ymin: f64, 56 | ymax: f64, 57 | } 58 | 59 | impl SvgWriter { 60 | fn new() -> Self { 61 | Self { 62 | xmin: std::f64::INFINITY, 63 | xmax: -std::f64::INFINITY, 64 | ymin: std::f64::INFINITY, 65 | ymax: -std::f64::INFINITY, 66 | segs: Vec::new(), 67 | } 68 | } 69 | fn add_edge(&mut self, pt0: &Matrix, pt1: &Matrix) 70 | where 71 | S: Storage, 72 | { 73 | self.xmin = self.xmin.min(pt0[0]); 74 | self.xmin = self.xmin.min(pt1[0]); 75 | 76 | self.ymin = self.ymin.min(pt0[1]); 77 | self.ymin = self.ymin.min(pt1[1]); 78 | 79 | self.xmax = self.xmax.max(pt0[0]); 80 | self.xmax = self.xmax.max(pt1[0]); 81 | 82 | self.ymax = self.ymax.max(pt0[1]); 83 | self.ymax = self.ymax.max(pt1[1]); 84 | 85 | self.segs.push(((pt0[0], pt0[1]), (pt1[0], pt1[1]))); 86 | } 87 | fn save(&self, fname: &str) -> Result<(), std::io::Error> { 88 | use std::io::prelude::*; 89 | 90 | let header = "\n"; 94 | let footer = "\n"; 95 | 96 | let width = 300.0; 97 | let height = 200.0; 98 | let border = 5.0; 99 | let mut xscale = (width - 2.0 * border) / (self.xmax - self.xmin); 100 | let mut yscale = (height - 2.0 * border) / (self.ymax - self.ymin); 101 | 102 | // Keep aspect ratio equal for x and y dimensions. 
103 | if xscale > yscale { 104 | xscale = yscale; 105 | } else { 106 | yscale = xscale; 107 | } 108 | 109 | let xoffset = -self.xmin * xscale + border; 110 | let yoffset = -self.ymin * yscale + border; 111 | 112 | let mut file = std::fs::File::create(fname)?; 113 | 114 | file.write_all(header.as_bytes())?; 115 | 116 | let radius = border; 117 | let stroke_width = 2.0; 118 | 119 | for seg in &self.segs { 120 | let x1 = (seg.0).0 * xscale + xoffset; 121 | let x2 = (seg.1).0 * xscale + xoffset; 122 | let y1 = (seg.0).1 * yscale + yoffset; 123 | let y2 = (seg.1).1 * yscale + yoffset; 124 | 125 | let buf = format!("", 126 | x1, x2, y1, y2, stroke_width); 127 | file.write_all(buf.as_bytes())?; 128 | let buf = format!( 129 | "\n", 130 | x1, y1, radius 131 | ); 132 | file.write_all(buf.as_bytes())?; 133 | let buf = format!( 134 | "\n", 135 | x2, y2, radius 136 | ); 137 | file.write_all(buf.as_bytes())?; 138 | } 139 | file.write_all(footer.as_bytes())?; 140 | Ok(()) 141 | } 142 | } 143 | 144 | // Save a wireframe rendering of the vertices and edges to an SVG file. 145 | fn render_wireframe( 146 | verts: &Points, 147 | edges: &[(usize, usize)], 148 | cam: &Camera, 149 | fname: &str, 150 | ) -> Result<(), std::io::Error> 151 | where 152 | NPTS: Dim, 153 | I: IntrinsicParameters, 154 | S: Storage, 155 | DefaultAllocator: Allocator, 156 | DefaultAllocator: Allocator, 157 | { 158 | // Project the original 3D coordinates to 2D pixel coordinates. 159 | let pixel_coords = cam.world_to_pixel(verts); 160 | 161 | let mut wtr = SvgWriter::new(); 162 | 163 | for edge in edges { 164 | let (i0, i1) = edge; 165 | let pt0 = pixel_coords.data.row(*i0); 166 | let pt1 = pixel_coords.data.row(*i1); 167 | wtr.add_edge(&pt0, &pt1); 168 | } 169 | wtr.save(fname)?; 170 | 171 | Ok(()) 172 | } 173 | 174 | fn main() -> Result<(), std::io::Error> { 175 | // Create cube vertices in the world coordinate frame. 
176 | let world_coords = Points::::new(SMatrix::::from_row_slice(&[ 177 | -1.0, -1.0, -1.0, // v1 178 | 1.0, -1.0, -1.0, // v2 179 | 1.0, 1.0, -1.0, // v3 180 | -1.0, 1.0, -1.0, // v4 181 | -1.0, -1.0, 1.0, // v5 182 | 1.0, -1.0, 1.0, // v6 183 | 1.0, 1.0, 1.0, // v7 184 | -1.0, 1.0, 1.0, // v8 185 | ])); 186 | let edges = [ 187 | (0, 1), 188 | (1, 2), 189 | (2, 3), 190 | (3, 0), 191 | (4, 5), 192 | (5, 6), 193 | (6, 7), 194 | (7, 4), 195 | (0, 4), 196 | (1, 5), 197 | (2, 6), 198 | (3, 7), 199 | ]; 200 | 201 | let cam = get_perspective_cam(); 202 | render_wireframe(&world_coords, &edges, &cam, "cube-perspective.svg")?; 203 | 204 | let cam = get_ortho_cam(); 205 | render_wireframe(&world_coords, &edges, &cam, "cube-ortho.svg")?; 206 | 207 | Ok(()) 208 | } 209 | -------------------------------------------------------------------------------- /src/camera.rs: -------------------------------------------------------------------------------- 1 | use nalgebra::{ 2 | allocator::Allocator, 3 | base::storage::{Owned, Storage}, 4 | convert, 5 | geometry::{Point3, Rotation3, UnitQuaternion}, 6 | DefaultAllocator, Dim, Matrix, Matrix3, RealField, SMatrix, Vector3, U1, U2, U3, U4, 7 | }; 8 | 9 | #[cfg(feature = "serde-serialize")] 10 | use serde::{Deserialize, Serialize}; 11 | 12 | use crate::{ 13 | coordinate_system::WorldFrame, 14 | intrinsics_perspective::{IntrinsicParametersPerspective, PerspectiveParams}, 15 | Bundle, Error, ExtrinsicParameters, IntrinsicParameters, Pixels, Points, RayBundle, 16 | }; 17 | 18 | /// A camera model that can convert world coordinates into pixel coordinates. 
19 | /// 20 | /// # Examples 21 | /// 22 | /// Creates a new perspective camera: 23 | /// 24 | /// ``` 25 | /// use cam_geom::*; 26 | /// use nalgebra::*; 27 | /// 28 | /// // perspective parameters - focal length of 100, no skew, pixel center at (640,480) 29 | /// let intrinsics = IntrinsicParametersPerspective::from(PerspectiveParams { 30 | /// fx: 100.0, 31 | /// fy: 100.0, 32 | /// skew: 0.0, 33 | /// cx: 640.0, 34 | /// cy: 480.0, 35 | /// }); 36 | /// 37 | /// // Set extrinsic parameters - camera at (10,0,10), looking at (0,0,0), up (0,0,1) 38 | /// let camcenter = Vector3::new(10.0, 0.0, 10.0); 39 | /// let lookat = Vector3::new(0.0, 0.0, 0.0); 40 | /// let up = Unit::new_normalize(Vector3::new(0.0, 0.0, 1.0)); 41 | /// let pose = ExtrinsicParameters::from_view(&camcenter, &lookat, &up); 42 | /// 43 | /// // Create camera with both intrinsic and extrinsic parameters. 44 | /// let cam = Camera::new(intrinsics, pose); 45 | /// ``` 46 | /// 47 | /// Creates a new orthographic camera: 48 | /// 49 | /// ``` 50 | /// use cam_geom::*; 51 | /// use nalgebra::*; 52 | /// 53 | /// // orthographic parameters - scale of 100, pixel center at (640,480) 54 | /// let intrinsics = IntrinsicParametersOrthographic::from(OrthographicParams { 55 | /// sx: 100.0, 56 | /// sy: 100.0, 57 | /// cx: 640.0, 58 | /// cy: 480.0, 59 | /// }); 60 | /// 61 | /// // Set extrinsic parameters - camera at (10,0,10), looking at (0,0,0), up (0,0,1) 62 | /// let camcenter = Vector3::new(10.0, 0.0, 10.0); 63 | /// let lookat = Vector3::new(0.0, 0.0, 0.0); 64 | /// let up = Unit::new_normalize(Vector3::new(0.0, 0.0, 1.0)); 65 | /// let pose = ExtrinsicParameters::from_view(&camcenter, &lookat, &up); 66 | /// 67 | /// // Create camera with both intrinsic and extrinsic parameters. 
68 | /// let cam = Camera::new(intrinsics, pose); 69 | /// ``` 70 | #[derive(Debug, Clone, PartialEq)] 71 | #[cfg_attr(feature = "serde-serialize", derive(Serialize, Deserialize))] 72 | pub struct Camera 73 | where 74 | I: IntrinsicParameters, 75 | R: RealField, 76 | { 77 | intrinsics: I, 78 | extrinsics: ExtrinsicParameters, 79 | } 80 | 81 | impl Camera 82 | where 83 | I: IntrinsicParameters, 84 | R: RealField, 85 | { 86 | /// Create a new camera from intrinsic and extrinsic parameters. 87 | /// 88 | /// # Arguments 89 | /// Intrinsic parameters and extrinsic parameters 90 | #[inline] 91 | pub fn new(intrinsics: I, extrinsics: ExtrinsicParameters) -> Self { 92 | Self { 93 | intrinsics, 94 | extrinsics, 95 | } 96 | } 97 | 98 | /// Return a reference to the extrinsic parameters. 99 | #[inline] 100 | pub fn extrinsics(&self) -> &ExtrinsicParameters { 101 | &self.extrinsics 102 | } 103 | 104 | /// Return a reference to the intrinsic parameters. 105 | #[inline] 106 | pub fn intrinsics(&self) -> &I { 107 | &self.intrinsics 108 | } 109 | 110 | /// take 3D coordinates in world frame and convert to pixel coordinates 111 | pub fn world_to_pixel( 112 | &self, 113 | world: &Points, 114 | ) -> Pixels> 115 | where 116 | NPTS: Dim, 117 | InStorage: Storage, 118 | DefaultAllocator: Allocator, 119 | DefaultAllocator: Allocator, 120 | { 121 | let camera_frame = self.extrinsics.world_to_camera(world); 122 | self.intrinsics.camera_to_pixel(&camera_frame) 123 | } 124 | 125 | /// take pixel coordinates and project to 3D in world frame 126 | /// 127 | /// output arguments: 128 | /// `camera` - camera frame coordinate rays 129 | /// `world` - world frame coordinate rays 130 | /// 131 | /// Note that the camera frame coordinates are returned as they must 132 | /// be computed anyway, so this additional data is "free". 
133 | pub fn pixel_to_world( 134 | &self, 135 | pixels: &Pixels, 136 | ) -> RayBundle> 137 | where 138 | I::BundleType: Bundle, 139 | IN: Storage, 140 | NPTS: Dim, 141 | I::BundleType: Bundle, 142 | DefaultAllocator: Allocator, 143 | DefaultAllocator: Allocator, 144 | DefaultAllocator: Allocator, 145 | { 146 | // get camera frame rays 147 | let camera = self.intrinsics.pixel_to_camera(pixels); 148 | 149 | // get world frame rays 150 | self.extrinsics.ray_camera_to_world(&camera) 151 | } 152 | } 153 | 154 | impl Camera> { 155 | /// Create a `Camera` from a 3x4 perspective projection matrix. 156 | pub fn from_perspective_matrix(pmat: &Matrix) -> Result 157 | where 158 | S: Storage + Clone, 159 | { 160 | let m = pmat.clone().remove_column(3); 161 | let (rquat, k) = rq_decomposition(m)?; 162 | 163 | let k22: R = k[(2, 2)].clone(); 164 | 165 | let one: R = convert(1.0); 166 | 167 | let k = k * (one / k22); // normalize 168 | 169 | let params = PerspectiveParams { 170 | fx: k[(0, 0)].clone(), 171 | fy: k[(1, 1)].clone(), 172 | skew: k[(0, 1)].clone(), 173 | cx: k[(0, 2)].clone(), 174 | cy: k[(1, 2)].clone(), 175 | }; 176 | 177 | let camcenter = pmat2cam_center(pmat); 178 | let extrinsics = ExtrinsicParameters::from_rotation_and_camcenter(rquat, camcenter); 179 | 180 | Ok(Self::new(params.into(), extrinsics)) 181 | } 182 | 183 | /// Create a 3x4 perspective projection matrix modeling this camera. 
184 | pub fn as_camera_matrix(&self) -> SMatrix { 185 | let m = { 186 | let p33 = self.intrinsics().as_intrinsics_matrix(); 187 | p33 * self.extrinsics().cache.qt.clone() 188 | }; 189 | 190 | // flip sign if focal length < 0 191 | let m = if m[(0, 0)] < nalgebra::convert(0.0) { 192 | -m 193 | } else { 194 | m 195 | }; 196 | 197 | m.clone() / m[(2, 3)].clone() // normalize 198 | } 199 | } 200 | 201 | #[cfg(test)] 202 | pub fn roundtrip_camera( 203 | cam: Camera, 204 | width: usize, 205 | height: usize, 206 | step: usize, 207 | border: usize, 208 | eps: R, 209 | ) where 210 | R: RealField, 211 | I: IntrinsicParameters, 212 | I::BundleType: Bundle, 213 | { 214 | let pixels = crate::intrinsic_test_utils::generate_uv_raw(width, height, step, border); 215 | 216 | let world_coords = cam.pixel_to_world(&pixels); 217 | let world_coords_points = world_coords.point_on_ray(); 218 | 219 | // project back to pixel coordinates 220 | let pixel_actual = cam.world_to_pixel(&world_coords_points); 221 | approx::assert_abs_diff_eq!(pixels.data, pixel_actual.data, epsilon = convert(eps)); 222 | } 223 | 224 | #[allow(non_snake_case)] 225 | fn rq(A: Matrix3) -> (Matrix3, Matrix3) { 226 | let zero: R = convert(0.0); 227 | let one: R = convert(1.0); 228 | 229 | // see https://math.stackexchange.com/a/1640762 230 | #[rustfmt::skip] 231 | let P = Matrix3::::new( 232 | zero.clone(), zero.clone(), one.clone(), // row 1 233 | zero.clone(), one.clone(), zero.clone(), // row 2 234 | one, zero.clone(), zero, // row 3 235 | ); 236 | let Atilde = P.clone() * A; 237 | 238 | let (Qtilde, Rtilde) = { 239 | let qrm = nalgebra::linalg::QR::new(Atilde.transpose()); 240 | (qrm.q(), qrm.r()) 241 | }; 242 | let Q = P.clone() * Qtilde.transpose(); 243 | let R = P.clone() * Rtilde.transpose() * P; 244 | (R, Q) 245 | } 246 | 247 | /// perform RQ decomposition and return results as right-handed quaternion and intrinsics matrix 248 | fn rq_decomposition( 249 | orig: Matrix3, 250 | ) -> Result<(UnitQuaternion, 
Matrix3), Error> { 251 | // Perform RQ decomposition to separate intrinsic matrix from orthonormal rotation matrix. 252 | let (mut intrin, mut q) = rq(orig); 253 | 254 | // Flip signs so that diagonal of intrinsic matrix is positive. 255 | let zero: R = convert(0.0); 256 | for i in 0..3 { 257 | if intrin[(i, i)] < zero { 258 | for j in 0..3 { 259 | intrin[(j, i)] = -intrin[(j, i)].clone(); 260 | q[(i, j)] = -q[(i, j)].clone(); 261 | } 262 | } 263 | } 264 | 265 | // Now we could have either a pure rotation matrix in q or an improper 266 | // rotation matrix. Excluding numerical issues, the determinant will be 1 or 267 | // -1, respectively. To do deal with potential numerical issues, we pick the 268 | // matrix with the largest determinant. 269 | 270 | let r1 = q.clone(); // option 1 271 | let r2 = -q; // option 1 272 | 273 | if r1.determinant() > r2.determinant() { 274 | let intrin1 = intrin.clone(); 275 | let rotmat1 = Rotation3::from_matrix_unchecked(r1); 276 | let rquat1 = UnitQuaternion::from_rotation_matrix(&rotmat1); 277 | Ok((rquat1, intrin1)) 278 | } else { 279 | let intrin2 = -intrin; 280 | let rotmat2 = Rotation3::from_matrix_unchecked(r2); 281 | let rquat2 = UnitQuaternion::from_rotation_matrix(&rotmat2); 282 | Ok((rquat2, intrin2)) 283 | } 284 | } 285 | 286 | /// get the camera center from a 3x4 camera projection matrix 287 | #[allow(clippy::many_single_char_names)] 288 | fn pmat2cam_center(p: &Matrix) -> Point3 289 | where 290 | R: RealField, 291 | S: Storage + Clone, 292 | { 293 | let x = p.clone().remove_column(0).determinant(); 294 | let y = -p.clone().remove_column(1).determinant(); 295 | let z = p.clone().remove_column(2).determinant(); 296 | let w = -p.clone().remove_column(3).determinant(); 297 | Point3::from(Vector3::new(x / w.clone(), y / w.clone(), z / w)) 298 | } 299 | 300 | #[cfg(test)] 301 | mod tests { 302 | #[test] 303 | #[cfg(feature = "serde-serialize")] 304 | fn test_serde() { 305 | use nalgebra::{Unit, Vector3}; 306 | 307 | use 
super::PerspectiveParams; 308 | use crate::{Camera, ExtrinsicParameters, IntrinsicParametersPerspective}; 309 | 310 | let params = PerspectiveParams { 311 | fx: 100.0, 312 | fy: 102.0, 313 | skew: 0.1, 314 | cx: 321.0, 315 | cy: 239.9, 316 | }; 317 | 318 | let intrinsics: IntrinsicParametersPerspective<_> = params.into(); 319 | 320 | let camcenter = Vector3::new(10.0, 0.0, 10.0); 321 | let lookat = Vector3::new(0.0, 0.0, 0.0); 322 | let up = Unit::new_normalize(Vector3::new(0.0, 0.0, 1.0)); 323 | let pose = ExtrinsicParameters::from_view(&camcenter, &lookat, &up); 324 | 325 | let expected = Camera::new(intrinsics, pose); 326 | 327 | let buf = serde_json::to_string(&expected).unwrap(); 328 | let actual: Camera<_, _> = serde_json::from_str(&buf).unwrap(); 329 | assert!(expected == actual); 330 | } 331 | 332 | #[test] 333 | fn test_rotation_rh() { 334 | use super::Camera; 335 | 336 | // Reproduces https://github.com/strawlab/cam-geom/issues/17 337 | let image_points = [ 338 | [102.92784, 141.48114], 339 | [250.00058, 141.4969], 340 | [397.07214, 141.48114], 341 | [141.12552, 247.49763], 342 | [250.0, 247.50888], 343 | [358.8746, 247.4984], 344 | [163.47325, 309.60333], 345 | [250.0, 309.61908], 346 | [336.52673, 309.60333], 347 | ]; 348 | let object_points = [ 349 | [140., 140., 0.], 350 | [500., 140., 0.], 351 | [860., 140., 0.], 352 | [140., 500., 0.], 353 | [500., 500., 0.], 354 | [860., 500., 0.], 355 | [140., 860., 0.], 356 | [500., 860., 0.], 357 | [860., 860., 0.], 358 | ]; 359 | let corresponding_points = std::iter::zip(image_points, object_points) 360 | .map(|(image_point, object_point)| dlt::CorrespondingPoint { 361 | object_point, 362 | image_point, 363 | }) 364 | .collect::>(); 365 | let pmat = dlt::dlt_corresponding(&corresponding_points, 1e-10).unwrap(); 366 | let cam_f32 = Camera::from_perspective_matrix(&pmat.cast::()).unwrap(); 367 | println!("cam_f32 = {cam_f32:?}"); 368 | let cam_f64 = Camera::from_perspective_matrix(&pmat).unwrap(); 369 | 
println!("cam_f64 = {cam_f64:?}"); 370 | } 371 | } 372 | -------------------------------------------------------------------------------- /src/extrinsics.rs: -------------------------------------------------------------------------------- 1 | use nalgebra::geometry::{Isometry3, Point3, Rotation3, Translation, UnitQuaternion}; 2 | use nalgebra::{ 3 | allocator::Allocator, 4 | storage::{Owned, Storage}, 5 | DefaultAllocator, RealField, 6 | }; 7 | use nalgebra::{convert, Dim, OMatrix, SMatrix, Unit, Vector3, U3}; 8 | 9 | #[cfg(feature = "serde-serialize")] 10 | use serde::{Deserialize, Serialize}; 11 | 12 | use crate::{ 13 | coordinate_system::{CameraFrame, WorldFrame}, 14 | Bundle, Points, RayBundle, 15 | }; 16 | 17 | /// Defines the pose of a camera in the world coordinate system. 18 | #[derive(Clone, PartialEq)] 19 | #[cfg_attr(feature = "serde-serialize", derive(Serialize))] 20 | pub struct ExtrinsicParameters { 21 | pub(crate) rquat: UnitQuaternion, 22 | pub(crate) camcenter: Point3, 23 | #[cfg_attr(feature = "serde-serialize", serde(skip))] 24 | pub(crate) cache: ExtrinsicsCache, 25 | } 26 | 27 | impl std::fmt::Debug for ExtrinsicParameters { 28 | fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 29 | // This should match the auto derived Debug implementation but not print 30 | // the cache field. 
31 | fmt.debug_struct("ExtrinsicParameters") 32 | .field("rquat", &self.rquat) 33 | .field("camcenter", &self.camcenter) 34 | .finish() 35 | } 36 | } 37 | 38 | #[derive(Clone, PartialEq)] 39 | pub(crate) struct ExtrinsicsCache { 40 | pub(crate) q: Rotation3, 41 | pub(crate) translation: Point3, 42 | pub(crate) qt: SMatrix, 43 | pub(crate) q_inv: Rotation3, 44 | pub(crate) camcenter_z0: Point3, 45 | pub(crate) pose: Isometry3, 46 | pub(crate) pose_inv: Isometry3, 47 | } 48 | 49 | impl std::fmt::Debug for ExtrinsicsCache { 50 | fn fmt(&self, _f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 51 | // do not show cache 52 | Ok(()) 53 | } 54 | } 55 | 56 | impl ExtrinsicParameters { 57 | /// Create a new instance from a rotation and a camera center. 58 | pub fn from_rotation_and_camcenter(rotation: UnitQuaternion, camcenter: Point3) -> Self { 59 | let q = rotation.clone().to_rotation_matrix(); 60 | let translation = -(q.clone() * camcenter.clone()); 61 | #[rustfmt::skip] 62 | let qt = { 63 | let q = q.matrix(); 64 | SMatrix::::new( 65 | q[(0,0)].clone(), q[(0,1)].clone(), q[(0,2)].clone(), translation[0].clone(), 66 | q[(1,0)].clone(), q[(1,1)].clone(), q[(1,2)].clone(), translation[1].clone(), 67 | q[(2,0)].clone(), q[(2,1)].clone(), q[(2,2)].clone(), translation[2].clone(), 68 | ) 69 | }; 70 | let q_inv = q.inverse(); 71 | let camcenter_z0 = Point3::from(Vector3::new( 72 | camcenter[0].clone(), 73 | camcenter[1].clone(), 74 | convert::<_, R>(0.0), 75 | )); 76 | let pose = Isometry3::from_parts( 77 | Translation { 78 | vector: translation.clone().coords, 79 | }, 80 | rotation.clone(), 81 | ); 82 | let pose_inv = pose.inverse(); 83 | let cache = ExtrinsicsCache { 84 | q, 85 | translation, 86 | qt, 87 | q_inv, 88 | camcenter_z0, 89 | pose, 90 | pose_inv, 91 | }; 92 | 93 | Self { 94 | rquat: rotation, 95 | camcenter, 96 | cache, 97 | } 98 | } 99 | 100 | /// Create a new instance from an 
[`nalgebra::Isometry3`](https://docs.rs/nalgebra/latest/nalgebra/geometry/type.Isometry3.html). 101 | pub fn from_pose(pose: &Isometry3) -> Self { 102 | let rquat = pose.clone().rotation; 103 | let translation = pose.clone().translation.vector; 104 | let q = rquat.inverse().to_rotation_matrix(); 105 | let camcenter = -(q * translation); 106 | let cc = Point3 { coords: camcenter }; 107 | Self::from_rotation_and_camcenter(rquat, cc) 108 | } 109 | 110 | /// Create a new instance from a camera center, a lookat vector, and an up vector. 111 | pub fn from_view(camcenter: &Vector3, lookat: &Vector3, up: &Unit>) -> Self { 112 | let dir = lookat - camcenter; 113 | let dir_unit = nalgebra::Unit::new_normalize(dir); 114 | let q = UnitQuaternion::look_at_lh(dir_unit.as_ref(), up.as_ref()); 115 | let pi: R = convert(std::f64::consts::PI); 116 | 117 | let q2 = UnitQuaternion::from_axis_angle(&dir_unit, pi); 118 | let q3 = q * q2; 119 | 120 | Self::from_rotation_and_camcenter( 121 | q3, 122 | Point3 { 123 | coords: camcenter.clone(), 124 | }, 125 | ) 126 | } 127 | 128 | /// Return the camera center 129 | #[inline] 130 | pub fn camcenter(&self) -> &Point3 { 131 | &self.camcenter 132 | } 133 | 134 | /// Return the camera pose 135 | #[inline] 136 | pub fn pose(&self) -> &Isometry3 { 137 | &self.cache.pose 138 | } 139 | 140 | /// Return the pose as a 3x4 matrix 141 | #[inline] 142 | pub fn matrix(&self) -> &SMatrix { 143 | &self.cache.qt 144 | } 145 | 146 | /// Return the rotation part of the pose 147 | /// 148 | /// To obtain the rotation as a quaternion, use [Self::pose] to obtain an 149 | /// [Isometry3] and access the [field@Isometry3::rotation] field. 150 | #[inline] 151 | pub fn rotation(&self) -> &Rotation3 { 152 | &self.cache.q 153 | } 154 | 155 | /// Return the translation part of the pose 156 | #[inline] 157 | pub fn translation(&self) -> &Point3 { 158 | &self.cache.translation 159 | } 160 | 161 | /// Return a unit vector aligned along our look (+Z) direction. 
162 | pub fn forward(&self) -> Unit> { 163 | let pt_cam = Point3::new(R::zero(), R::zero(), R::one()); 164 | self.lookdir(&pt_cam) 165 | } 166 | 167 | /// Return a unit vector aligned along our up (-Y) direction. 168 | pub fn up(&self) -> Unit> { 169 | let pt_cam = Point3::new(R::zero(), -R::one(), R::zero()); 170 | self.lookdir(&pt_cam) 171 | } 172 | 173 | /// Return a unit vector aligned along our right (+X) direction. 174 | pub fn right(&self) -> Unit> { 175 | let pt_cam = Point3::new(R::one(), R::zero(), R::zero()); 176 | self.lookdir(&pt_cam) 177 | } 178 | 179 | /// Return a world coords unit vector aligned along the given direction 180 | /// 181 | /// `pt_cam` is specified in camera coords. 182 | fn lookdir(&self, pt_cam: &Point3) -> Unit> { 183 | let cc = self.camcenter(); 184 | let pt = self.cache.pose_inv.transform_point(pt_cam) - cc; 185 | nalgebra::Unit::new_normalize(pt) 186 | } 187 | 188 | /// Convert points in camera coordinates to world coordinates. 189 | pub fn camera_to_world( 190 | &self, 191 | cam_coords: &Points, 192 | ) -> Points> 193 | where 194 | NPTS: Dim, 195 | InStorage: Storage, 196 | DefaultAllocator: Allocator, 197 | { 198 | let mut world = Points::new(OMatrix::zeros_generic( 199 | NPTS::from_usize(cam_coords.data.nrows()), 200 | U3::from_usize(3), 201 | )); 202 | 203 | // Potential optimization: remove for loops 204 | let in_mult = &cam_coords.data; 205 | let out_mult = &mut world.data; 206 | 207 | for i in 0..in_mult.nrows() { 208 | let tmp = self.cache.pose_inv.transform_point(&Point3::new( 209 | in_mult[(i, 0)].clone(), 210 | in_mult[(i, 1)].clone(), 211 | in_mult[(i, 2)].clone(), 212 | )); 213 | for j in 0..3 { 214 | out_mult[(i, j)] = tmp[j].clone(); 215 | } 216 | } 217 | world 218 | } 219 | 220 | /// Convert rays in camera coordinates to world coordinates. 
221 | #[inline] 222 | pub fn ray_camera_to_world( 223 | &self, 224 | camera: &RayBundle, 225 | ) -> RayBundle> 226 | where 227 | BType: Bundle, 228 | NPTS: Dim, 229 | StorageCamera: Storage, 230 | DefaultAllocator: Allocator, 231 | { 232 | camera.to_pose(self.cache.pose_inv.clone()) 233 | } 234 | 235 | /// Convert points in world coordinates to camera coordinates. 236 | pub fn world_to_camera( 237 | &self, 238 | world: &Points, 239 | ) -> Points> 240 | where 241 | NPTS: Dim, 242 | InStorage: Storage, 243 | DefaultAllocator: Allocator, 244 | { 245 | let mut cam_coords = Points::new(OMatrix::zeros_generic( 246 | NPTS::from_usize(world.data.nrows()), 247 | U3::from_usize(3), 248 | )); 249 | 250 | // Potential optimization: remove for loops 251 | let in_mult = &world.data; 252 | let out_mult = &mut cam_coords.data; 253 | 254 | for i in 0..in_mult.nrows() { 255 | let tmp = self.cache.pose.transform_point(&Point3::new( 256 | in_mult[(i, 0)].clone(), 257 | in_mult[(i, 1)].clone(), 258 | in_mult[(i, 2)].clone(), 259 | )); 260 | for j in 0..3 { 261 | out_mult[(i, j)] = tmp[j].clone(); 262 | } 263 | } 264 | cam_coords 265 | } 266 | } 267 | 268 | // So far, I could not figure out how to get serde derive to construct a cache 269 | // only after rquat and camcenter are created. Instead serde derive wants the 270 | // struct to be deserialized to implement the Default trait. 
271 | #[cfg(feature = "serde-serialize")] 272 | impl<'de, R: RealField + serde::Deserialize<'de>> serde::Deserialize<'de> 273 | for ExtrinsicParameters 274 | { 275 | fn deserialize(deserializer: D) -> std::result::Result 276 | where 277 | D: serde::Deserializer<'de>, 278 | { 279 | use serde::de; 280 | use std::fmt; 281 | 282 | #[derive(Deserialize)] 283 | #[serde(field_identifier, rename_all = "lowercase")] 284 | enum Field { 285 | RQuat, 286 | CamCenter, 287 | } 288 | 289 | struct ExtrinsicParametersVisitor<'de, R2: RealField + serde::Deserialize<'de>>( 290 | std::marker::PhantomData<&'de R2>, 291 | ); 292 | 293 | impl<'de, R2: RealField + serde::Deserialize<'de>> serde::de::Visitor<'de> 294 | for ExtrinsicParametersVisitor<'de, R2> 295 | { 296 | type Value = ExtrinsicParameters; 297 | 298 | fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { 299 | formatter.write_str("struct ExtrinsicParameters") 300 | } 301 | 302 | fn visit_seq( 303 | self, 304 | mut seq: V, 305 | ) -> std::result::Result, V::Error> 306 | where 307 | V: serde::de::SeqAccess<'de>, 308 | { 309 | let rquat = seq 310 | .next_element()? 311 | .ok_or_else(|| de::Error::invalid_length(0, &self))?; 312 | let camcenter = seq 313 | .next_element()? 314 | .ok_or_else(|| de::Error::invalid_length(1, &self))?; 315 | Ok(ExtrinsicParameters::from_rotation_and_camcenter( 316 | rquat, camcenter, 317 | )) 318 | } 319 | 320 | fn visit_map( 321 | self, 322 | mut map: V, 323 | ) -> std::result::Result, V::Error> 324 | where 325 | V: serde::de::MapAccess<'de>, 326 | { 327 | let mut rquat = None; 328 | let mut camcenter = None; 329 | while let Some(key) = map.next_key()? 
{ 330 | match key { 331 | Field::RQuat => { 332 | if rquat.is_some() { 333 | return Err(de::Error::duplicate_field("rquat")); 334 | } 335 | rquat = Some(map.next_value()?); 336 | } 337 | Field::CamCenter => { 338 | if camcenter.is_some() { 339 | return Err(de::Error::duplicate_field("camcenter")); 340 | } 341 | camcenter = Some(map.next_value()?); 342 | } 343 | } 344 | } 345 | let rquat = rquat.ok_or_else(|| de::Error::missing_field("rquat"))?; 346 | let camcenter = camcenter.ok_or_else(|| de::Error::missing_field("camcenter"))?; 347 | Ok(ExtrinsicParameters::from_rotation_and_camcenter( 348 | rquat, camcenter, 349 | )) 350 | } 351 | } 352 | 353 | const FIELDS: &[&str] = &["rquat", "camcenter"]; 354 | deserializer.deserialize_struct( 355 | "ExtrinsicParameters", 356 | FIELDS, 357 | ExtrinsicParametersVisitor(std::marker::PhantomData), 358 | ) 359 | } 360 | } 361 | 362 | #[cfg(feature = "serde-serialize")] 363 | fn _test_extrinsics_is_serialize() { 364 | // Compile-time test to ensure ExtrinsicParameters implements Serialize trait. 365 | fn implements() {} 366 | implements::>(); 367 | } 368 | 369 | #[cfg(feature = "serde-serialize")] 370 | fn _test_extrinsics_is_deserialize() { 371 | // Compile-time test to ensure ExtrinsicParameters implements Deserialize trait. 
372 | fn implements<'de, T: serde::Deserialize<'de>>() {} 373 | implements::>(); 374 | } 375 | 376 | #[cfg(test)] 377 | mod tests { 378 | use super::*; 379 | use nalgebra::{convert as c, UnitVector3}; 380 | 381 | #[test] 382 | fn to_from_pose_f64() { 383 | to_from_pose_generic::(1e-10) 384 | } 385 | 386 | #[test] 387 | fn to_from_pose_f32() { 388 | to_from_pose_generic::(1e-5) 389 | } 390 | 391 | fn to_from_pose_generic(epsilon: R) { 392 | let zero: R = convert(0.0); 393 | let one: R = convert(1.0); 394 | 395 | let e1 = ExtrinsicParameters::::from_view( 396 | &Vector3::new(c(1.2), c(3.4), c(5.6)), // camcenter 397 | &Vector3::new(c(2.2), c(3.4), c(5.6)), // lookat 398 | &nalgebra::Unit::new_normalize(Vector3::new(zero.clone(), zero.clone(), one.clone())), // up 399 | ); 400 | let pose1 = e1.pose(); 401 | let e2 = ExtrinsicParameters::::from_pose(pose1); 402 | 403 | approx::assert_abs_diff_eq!(e1.rotation(), e2.rotation(), epsilon = epsilon.clone()); 404 | approx::assert_abs_diff_eq!(e1.camcenter(), e2.camcenter(), epsilon = epsilon.clone()); 405 | } 406 | 407 | #[test] 408 | fn roundtrip_f64() { 409 | roundtrip_generic::(1e-10) 410 | } 411 | 412 | #[test] 413 | fn roundtrip_f32() { 414 | roundtrip_generic::(1e-5) 415 | } 416 | 417 | fn roundtrip_generic(epsilon: R) { 418 | let zero: R = convert(0.0); 419 | let one: R = convert(1.0); 420 | 421 | let e1 = ExtrinsicParameters::::from_view( 422 | &Vector3::new(c(1.2), c(3.4), c(5.6)), // camcenter 423 | &Vector3::new(c(2.2), c(3.4), c(5.6)), // lookat 424 | &nalgebra::Unit::new_normalize(Vector3::new(zero.clone(), zero.clone(), one.clone())), // up 425 | ); 426 | 427 | #[rustfmt::skip] 428 | let cam_coords = Points { 429 | coords: std::marker::PhantomData, 430 | data: SMatrix::::new( 431 | zero.clone(), zero.clone(), zero.clone(), // at camera center 432 | zero.clone(), zero.clone(), one.clone(), // one unit in +Z - exactly in camera direction 433 | one.clone(), zero.clone(), zero.clone(), // one unit in +X - right of 
camera axis 434 | zero.clone(), one, zero, // one unit in +Y - down from camera axis 435 | ), 436 | }; 437 | 438 | #[rustfmt::skip] 439 | let world_expected = SMatrix::::new( 440 | c(1.2), c(3.4), c(5.6), 441 | c(2.2), c(3.4), c(5.6), 442 | c(1.2), c(2.4), c(5.6), 443 | c(1.2), c(3.4), c(4.6), 444 | ); 445 | 446 | let world_actual = e1.camera_to_world(&cam_coords); 447 | approx::assert_abs_diff_eq!(world_expected, world_actual.data, epsilon = epsilon.clone()); 448 | 449 | // test roundtrip 450 | let camera_actual = e1.world_to_camera(&world_actual); 451 | approx::assert_abs_diff_eq!(cam_coords.data, camera_actual.data, epsilon = epsilon); 452 | } 453 | 454 | #[test] 455 | #[cfg(feature = "serde-serialize")] 456 | fn test_serde() { 457 | let expected = ExtrinsicParameters::::from_view( 458 | &Vector3::new(1.2, 3.4, 5.6), // camcenter 459 | &Vector3::new(2.2, 3.4, 5.6), // lookat 460 | &nalgebra::Unit::new_normalize(Vector3::new(0.0, 0.0, 1.0)), // up 461 | ); 462 | let buf = serde_json::to_string(&expected).unwrap(); 463 | let actual: crate::ExtrinsicParameters = serde_json::from_str(&buf).unwrap(); 464 | assert!(expected == actual); 465 | } 466 | 467 | #[test] 468 | fn test_from_view() { 469 | // These values had previously cause problems. 
470 | let camcenter = Vector3::new(10.0, 0.0, 10.0); 471 | let lookat = Vector3::new(0.0, 0.0, 0.0); 472 | let up = Unit::new_normalize(Vector3::new(0.0, 0.0, 1.0)); 473 | let e = ExtrinsicParameters::from_view(&camcenter, &lookat, &up); 474 | let axis = UnitVector3::new_normalize(Vector3::new( 475 | 0.6785983445458471, 476 | 0.6785983445458469, 477 | -0.28108463771482023, 478 | )); 479 | let expected_rot = UnitQuaternion::from_axis_angle(&axis, 2.5935642459694805); 480 | approx::assert_abs_diff_eq!(camcenter, e.camcenter().coords); 481 | approx::assert_abs_diff_eq!(expected_rot, e.pose().rotation); 482 | } 483 | } 484 | -------------------------------------------------------------------------------- /src/intrinsic_test_utils.rs: -------------------------------------------------------------------------------- 1 | //! Utilities for testing `cam_geom` implementations. 2 | use super::*; 3 | use nalgebra::{ 4 | base::{dimension::Dyn, VecStorage}, 5 | convert, 6 | }; 7 | 8 | pub(crate) fn generate_uv_raw( 9 | width: usize, 10 | height: usize, 11 | step: usize, 12 | border: usize, 13 | ) -> Pixels> { 14 | let mut uv_raws: Vec<[R; 2]> = Vec::new(); 15 | for row in num_iter::range_step(border, height - border, step) { 16 | for col in num_iter::range_step(border, width - border, step) { 17 | uv_raws.push([convert(col as f64), convert(row as f64)]); 18 | } 19 | } 20 | 21 | let mut data = nalgebra::OMatrix::::from_element(uv_raws.len(), convert(0.0)); 22 | for i in 0..uv_raws.len() { 23 | for j in 0..2 { 24 | data[(i, j)] = uv_raws[i][j].clone(); 25 | } 26 | } 27 | Pixels { data } 28 | } 29 | 30 | /// Test roundtrip projection from pixels to camera rays for an intrinsic camera model. 31 | /// 32 | /// Generate pixel coordinates, project them to rays, convert to points on the 33 | /// rays, convert the points back to pixels, and then compare with the original 34 | /// pixel coordinates. 
35 | pub fn roundtrip_intrinsics( 36 | cam: &CAM, 37 | width: usize, 38 | height: usize, 39 | step: usize, 40 | border: usize, 41 | eps: R, 42 | ) where 43 | R: RealField, 44 | CAM: IntrinsicParameters, 45 | CAM::BundleType: Bundle, 46 | { 47 | let pixels = generate_uv_raw(width, height, step, border); 48 | 49 | let camcoords = cam.pixel_to_camera(&pixels); 50 | let camera_coords_points = camcoords.point_on_ray(); 51 | 52 | // project back to pixel coordinates 53 | let pixel_actual = cam.camera_to_pixel(&camera_coords_points); 54 | approx::assert_abs_diff_eq!(pixels.data, pixel_actual.data, epsilon = convert(eps)); 55 | } 56 | -------------------------------------------------------------------------------- /src/intrinsics_orthographic.rs: -------------------------------------------------------------------------------- 1 | use nalgebra::{ 2 | allocator::Allocator, 3 | base::storage::{Owned, Storage}, 4 | convert, DefaultAllocator, Dim, OMatrix, RealField, U2, U3, 5 | }; 6 | 7 | #[cfg(feature = "serde-serialize")] 8 | use serde::{Deserialize, Serialize}; 9 | 10 | use crate::{ 11 | coordinate_system::CameraFrame, Bundle, IntrinsicParameters, Pixels, Points, RayBundle, 12 | }; 13 | 14 | use crate::ray_bundle_types::SharedDirectionRayBundle; 15 | 16 | // TODO: implement ortho camera with near and far clipping? 17 | 18 | /// Parameters defining the intrinsic part of an orthographic camera model. 19 | /// 20 | /// These parameters describe the intrinsic parameters, the transformation from 21 | /// camera coordinates to pixel coordinates, for an orthographic camera model. 22 | /// For a full transformation from world coordinates to pixel coordinates, use a 23 | /// [`Camera`](struct.Camera.html), which can be constructed with these intrinsic 24 | /// parameters and extrinsic parameters. 25 | /// 26 | /// Read more about the [orthographic 27 | /// projection](https://en.wikipedia.org/wiki/Orthographic_projection). 
28 | /// 29 | /// Can be converted into 30 | /// [`IntrinsicParametersOrthographic`](struct.IntrinsicParametersOrthographic.html) 31 | /// via the `.into()` method like so: 32 | /// 33 | /// ``` 34 | /// use cam_geom::*; 35 | /// let params = OrthographicParams { 36 | /// sx: 100.0, 37 | /// sy: 100.0, 38 | /// cx: 640.0, 39 | /// cy: 480.0, 40 | /// }; 41 | /// let intrinsics: IntrinsicParametersOrthographic<_> = params.into(); 42 | /// ``` 43 | #[derive(Debug, Clone, PartialEq)] 44 | #[cfg_attr(feature = "serde-serialize", derive(Serialize, Deserialize))] 45 | pub struct OrthographicParams { 46 | /// Horizontal scale. 47 | pub sx: R, 48 | /// Vertical scale. 49 | pub sy: R, 50 | /// Horizontal component of image center. 51 | pub cx: R, 52 | /// Vertical component of image center. 53 | pub cy: R, 54 | } 55 | 56 | impl From> for IntrinsicParametersOrthographic { 57 | #[inline] 58 | fn from(params: OrthographicParams) -> Self { 59 | Self { params } 60 | } 61 | } 62 | 63 | /// An orthographic camera model. Implements [`IntrinsicParameters`](trait.IntrinsicParameters.html). 64 | /// 65 | /// Create an `IntrinsicParametersOrthographic` as described for 66 | /// [`OrthographicParams`](struct.OrthographicParams.html) by using `.into()`. 
67 | #[derive(Debug, Clone, PartialEq)] 68 | #[cfg_attr(feature = "serde-serialize", derive(Serialize, Deserialize))] 69 | pub struct IntrinsicParametersOrthographic { 70 | params: OrthographicParams, 71 | } 72 | 73 | impl IntrinsicParameters for IntrinsicParametersOrthographic 74 | where 75 | R: RealField, 76 | { 77 | type BundleType = SharedDirectionRayBundle; 78 | 79 | fn pixel_to_camera( 80 | &self, 81 | pixels: &Pixels, 82 | ) -> RayBundle> 83 | where 84 | Self::BundleType: Bundle, 85 | IN: Storage, 86 | NPTS: Dim, 87 | DefaultAllocator: Allocator, 88 | { 89 | let zero: R = convert(0.0); 90 | 91 | // allocate zeros, fill later 92 | let mut result = RayBundle::new_shared_plusz_direction(OMatrix::zeros_generic( 93 | NPTS::from_usize(pixels.data.nrows()), 94 | U3::from_usize(3), 95 | )); 96 | 97 | let origin = &mut result.data; 98 | 99 | // It seems broadcasting is not (yet) supported in nalgebra, so we loop 100 | // through the data. See 101 | // https://discourse.nphysics.org/t/array-broadcasting-support/375/3 . 102 | 103 | for i in 0..pixels.data.nrows() { 104 | let u = pixels.data[(i, 0)].clone(); 105 | let v = pixels.data[(i, 1)].clone(); 106 | 107 | let x: R = (u - self.params.cx.clone()) / self.params.sx.clone(); 108 | let y: R = (v - self.params.cy.clone()) / self.params.sy.clone(); 109 | 110 | origin[(i, 0)] = x; 111 | origin[(i, 1)] = y; 112 | origin[(i, 2)] = zero.clone(); 113 | } 114 | result 115 | } 116 | 117 | fn camera_to_pixel( 118 | &self, 119 | camera: &Points, 120 | ) -> Pixels> 121 | where 122 | IN: Storage, 123 | NPTS: Dim, 124 | DefaultAllocator: Allocator, 125 | { 126 | let mut result = Pixels::new(OMatrix::zeros_generic( 127 | NPTS::from_usize(camera.data.nrows()), 128 | U2::from_usize(2), 129 | )); 130 | 131 | // It seems broadcasting is not (yet) supported in nalgebra, so we loop 132 | // through the data. See 133 | // https://discourse.nphysics.org/t/array-broadcasting-support/375/3 . 
134 | 135 | for i in 0..camera.data.nrows() { 136 | result.data[(i, 0)] = 137 | camera.data[(i, 0)].clone() * self.params.sx.clone() + self.params.cx.clone(); 138 | result.data[(i, 1)] = 139 | camera.data[(i, 1)].clone() * self.params.sy.clone() + self.params.cy.clone(); 140 | } 141 | result 142 | } 143 | } 144 | 145 | #[cfg(test)] 146 | mod tests { 147 | use nalgebra::Vector3; 148 | 149 | use super::{IntrinsicParametersOrthographic, OrthographicParams}; 150 | use crate::camera::{roundtrip_camera, Camera}; 151 | use crate::extrinsics::ExtrinsicParameters; 152 | use crate::intrinsic_test_utils::roundtrip_intrinsics; 153 | 154 | #[test] 155 | fn roundtrip() { 156 | let params = OrthographicParams { 157 | sx: 100.0, 158 | sy: 102.0, 159 | cx: 321.0, 160 | cy: 239.9, 161 | }; 162 | let cam: IntrinsicParametersOrthographic<_> = params.into(); 163 | 164 | roundtrip_intrinsics(&cam, 640, 480, 5, 0, nalgebra::convert(1e-10)); 165 | 166 | let extrinsics = ExtrinsicParameters::from_view( 167 | &Vector3::new(1.2, 3.4, 5.6), // camcenter 168 | &Vector3::new(2.2, 3.4, 5.6), // lookat 169 | &nalgebra::Unit::new_normalize(Vector3::new(0.0, 0.0, 1.0)), // up 170 | ); 171 | 172 | let full_cam = Camera::new(cam, extrinsics); 173 | roundtrip_camera(full_cam, 640, 480, 5, 0, nalgebra::convert(1e-10)); 174 | } 175 | 176 | #[test] 177 | #[cfg(feature = "serde-serialize")] 178 | fn test_serde() { 179 | let params = OrthographicParams { 180 | sx: 100.0, 181 | sy: 102.0, 182 | cx: 321.0, 183 | cy: 239.9, 184 | }; 185 | let expected: IntrinsicParametersOrthographic<_> = params.into(); 186 | 187 | let buf = serde_json::to_string(&expected).unwrap(); 188 | let actual: IntrinsicParametersOrthographic = serde_json::from_str(&buf).unwrap(); 189 | assert!(expected == actual); 190 | } 191 | } 192 | -------------------------------------------------------------------------------- /src/intrinsics_perspective.rs: -------------------------------------------------------------------------------- 1 | use 
nalgebra::{ 2 | allocator::Allocator, 3 | convert, 4 | storage::{Owned, Storage}, 5 | DefaultAllocator, Dim, Matrix, OMatrix, RealField, SMatrix, U1, U2, U3, 6 | }; 7 | 8 | #[cfg(feature = "serde-serialize")] 9 | use serde::{Deserialize, Serialize}; 10 | 11 | use crate::{ 12 | coordinate_system::CameraFrame, Bundle, Error, IntrinsicParameters, Pixels, Points, RayBundle, 13 | }; 14 | 15 | use crate::ray_bundle_types::SharedOriginRayBundle; 16 | 17 | /// Parameters defining a pinhole perspective camera model. 18 | /// 19 | /// These will be used to make the 3x4 intrinsic parameter matrix 20 | /// ```text 21 | /// [[fx, skew, cx, 0], 22 | /// [ 0, fy, cy, 0], 23 | /// [ 0, 0, 1, 0]] 24 | /// ``` 25 | /// 26 | /// These parameters describe the intrinsic parameters, the transformation from 27 | /// camera coordinates to pixel coordinates, for a perspective camera model. For 28 | /// a full transformation from world coordinates to pixel coordinates, use a 29 | /// [`Camera`](struct.Camera.html), which can be constructed with these intinsic 30 | /// parameters and extrinsic parameters. 31 | /// 32 | /// Read more about the [pinhole perspective 33 | /// projection](https://en.wikipedia.org/wiki/Pinhole_camera_model). 34 | /// 35 | /// Can be converted into 36 | /// [`IntrinsicParametersPerspective`](struct.IntrinsicParametersPerspective.html) 37 | /// via the `.into()` method like so: 38 | /// 39 | /// ``` 40 | /// use cam_geom::*; 41 | /// let params = PerspectiveParams { 42 | /// fx: 100.0, 43 | /// fy: 100.0, 44 | /// skew: 0.0, 45 | /// cx: 640.0, 46 | /// cy: 480.0, 47 | /// }; 48 | /// let intrinsics: IntrinsicParametersPerspective<_> = params.into(); 49 | /// ``` 50 | #[derive(Debug, Clone, PartialEq)] 51 | #[cfg_attr(feature = "serde-serialize", derive(Serialize, Deserialize))] 52 | pub struct PerspectiveParams { 53 | /// Horizontal focal length. 54 | pub fx: R, 55 | /// Vertical focal length. 56 | pub fy: R, 57 | /// Skew between horizontal and vertical axes. 
58 | pub skew: R, 59 | /// Horizontal component of the principal point. 60 | pub cx: R, 61 | /// Vertical component of the principal point. 62 | pub cy: R, 63 | } 64 | 65 | impl From> for IntrinsicParametersPerspective { 66 | fn from(params: PerspectiveParams) -> Self { 67 | use nalgebra::convert as c; 68 | #[rustfmt::skip] 69 | let cache_p = nalgebra::SMatrix::::new( 70 | params.fx.clone(), params.skew.clone(), params.cx.clone(), c(0.0), 71 | c(0.0), params.fy.clone(), params.cy.clone(), c(0.0), 72 | c(0.0), c(0.0), c(1.0), c(0.0), 73 | ); 74 | Self { params, cache_p } 75 | } 76 | } 77 | 78 | /// A pinhole perspective camera model. Implements [`IntrinsicParameters`](trait.IntrinsicParameters.html). 79 | /// 80 | /// Create an `IntrinsicParametersPerspective` as described for 81 | /// [`PerspectiveParams`](struct.PerspectiveParams.html) by using `.into()`. 82 | #[derive(Clone, PartialEq)] 83 | #[cfg_attr(feature = "serde-serialize", derive(Serialize))] 84 | pub struct IntrinsicParametersPerspective { 85 | params: PerspectiveParams, 86 | #[cfg_attr(feature = "serde-serialize", serde(skip))] 87 | pub(crate) cache_p: SMatrix, 88 | } 89 | 90 | impl IntrinsicParametersPerspective { 91 | /// Create a new instance given an intrinsic parameter matrix. 92 | /// 93 | /// Returns an error if the intrinsic parameter matrix is not normalized or 94 | /// otherwise does not represent a perspective camera model. 
95 | pub fn from_normalized_3x4_matrix(p: SMatrix) -> std::result::Result { 96 | let params: PerspectiveParams = PerspectiveParams { 97 | fx: p[(0, 0)].clone(), 98 | fy: p[(1, 1)].clone(), 99 | skew: p[(0, 1)].clone(), 100 | cx: p[(0, 2)].clone(), 101 | cy: p[(1, 2)].clone(), 102 | }; 103 | if approx::relative_ne!(p[(0, 3)], nalgebra::convert(0.0)) { 104 | return Err(Error::InvalidInput); 105 | } 106 | 107 | if approx::relative_ne!(p[(1, 0)], nalgebra::convert(0.0)) { 108 | return Err(Error::InvalidInput); 109 | } 110 | 111 | if approx::relative_ne!(p[(1, 3)], nalgebra::convert(0.0)) { 112 | return Err(Error::InvalidInput); 113 | } 114 | 115 | if approx::relative_ne!(p[(2, 0)], nalgebra::convert(0.0)) { 116 | return Err(Error::InvalidInput); 117 | } 118 | 119 | if approx::relative_ne!(p[(2, 1)], nalgebra::convert(0.0)) { 120 | return Err(Error::InvalidInput); 121 | } 122 | 123 | if approx::relative_ne!(p[(2, 2)], nalgebra::convert(1.0)) { 124 | return Err(Error::InvalidInput); // camera matrix must be normalized 125 | } 126 | 127 | if approx::relative_ne!(p[(2, 3)], nalgebra::convert(0.0)) { 128 | return Err(Error::InvalidInput); 129 | } 130 | 131 | Ok(params.into()) 132 | } 133 | 134 | /// Get X focal length 135 | #[inline] 136 | pub fn fx(&self) -> R { 137 | self.params.fx.clone() 138 | } 139 | 140 | /// Get Y focal length 141 | #[inline] 142 | pub fn fy(&self) -> R { 143 | self.params.fy.clone() 144 | } 145 | 146 | /// Get skew 147 | #[inline] 148 | pub fn skew(&self) -> R { 149 | self.params.skew.clone() 150 | } 151 | 152 | /// Get X center 153 | #[inline] 154 | pub fn cx(&self) -> R { 155 | self.params.cx.clone() 156 | } 157 | 158 | /// Get Y center 159 | #[inline] 160 | pub fn cy(&self) -> R { 161 | self.params.cy.clone() 162 | } 163 | 164 | /// Get intrinsic parameters 165 | #[inline] 166 | pub fn params(&self) -> &PerspectiveParams { 167 | &self.params 168 | } 169 | 170 | /// Create a 3x3 projection matrix. 
171 | #[inline] 172 | pub(crate) fn as_intrinsics_matrix( 173 | &self, 174 | ) -> Matrix> { 175 | // TODO: implement similar functionality for orthographic camera and 176 | // make a new trait which exposes this functionality. Note that not all 177 | // intrinsic parameter implementations will be able to implement this 178 | // hypothetical new trait, because not all cameras are linear. 179 | self.cache_p.fixed_view::<3, 3>(0, 0) 180 | } 181 | } 182 | 183 | impl IntrinsicParameters for IntrinsicParametersPerspective 184 | where 185 | R: RealField, 186 | { 187 | type BundleType = SharedOriginRayBundle; 188 | 189 | fn pixel_to_camera( 190 | &self, 191 | pixels: &Pixels, 192 | ) -> RayBundle> 193 | where 194 | Self::BundleType: Bundle, 195 | IN: Storage, 196 | NPTS: Dim, 197 | DefaultAllocator: Allocator, 198 | { 199 | let one: R = convert(1.0); 200 | 201 | // allocate zeros, fill later 202 | let mut result = RayBundle::new_shared_zero_origin(OMatrix::zeros_generic( 203 | NPTS::from_usize(pixels.data.nrows()), 204 | U3::from_usize(3), 205 | )); 206 | 207 | let cam_dir = &mut result.data; 208 | 209 | // It seems broadcasting is not (yet) supported in nalgebra, so we loop 210 | // through the data. See 211 | // https://discourse.nphysics.org/t/array-broadcasting-support/375/3 . 
212 | 213 | for i in 0..pixels.data.nrows() { 214 | let u = pixels.data[(i, 0)].clone(); 215 | let v = pixels.data[(i, 1)].clone(); 216 | 217 | // point in camcoords at distance 1.0 from image plane 218 | let y = (v - self.params.cy.clone()) / self.params.fy.clone(); 219 | cam_dir[(i, 0)] = (u - self.params.skew.clone() * y.clone() - self.params.cx.clone()) 220 | / self.params.fx.clone(); // x 221 | cam_dir[(i, 1)] = y; 222 | cam_dir[(i, 2)] = one.clone(); // z 223 | } 224 | result 225 | } 226 | 227 | fn camera_to_pixel( 228 | &self, 229 | camera: &Points, 230 | ) -> Pixels> 231 | where 232 | IN: Storage, 233 | NPTS: Dim, 234 | DefaultAllocator: Allocator, 235 | { 236 | let mut result = Pixels::new(OMatrix::zeros_generic( 237 | NPTS::from_usize(camera.data.nrows()), 238 | U2::from_usize(2), 239 | )); 240 | 241 | // It seems broadcasting is not (yet) supported in nalgebra, so we loop 242 | // through the data. See 243 | // https://discourse.nphysics.org/t/array-broadcasting-support/375/3 . 244 | 245 | for i in 0..camera.data.nrows() { 246 | let x = nalgebra::Point3::new( 247 | camera.data[(i, 0)].clone(), 248 | camera.data[(i, 1)].clone(), 249 | camera.data[(i, 2)].clone(), 250 | ) 251 | .to_homogeneous(); 252 | let rst = self.cache_p.clone() * x; 253 | result.data[(i, 0)] = rst[0].clone() / rst[2].clone(); 254 | result.data[(i, 1)] = rst[1].clone() / rst[2].clone(); 255 | } 256 | result 257 | } 258 | } 259 | 260 | impl std::fmt::Debug for IntrinsicParametersPerspective { 261 | fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 262 | // This should match the auto derived Debug implementation but not print 263 | // the cache_p field. 264 | fmt.debug_struct("IntrinsicParametersPerspective") 265 | .field("params", &self.params) 266 | .finish() 267 | } 268 | } 269 | 270 | // See note about serde derive for ExtrinsicParameters Deserialize, which 271 | // applies here, too. 
272 | #[cfg(feature = "serde-serialize")] 273 | impl<'de, R: RealField + serde::Deserialize<'de>> serde::Deserialize<'de> 274 | for IntrinsicParametersPerspective 275 | { 276 | fn deserialize(deserializer: D) -> std::result::Result 277 | where 278 | D: serde::Deserializer<'de>, 279 | { 280 | use serde::de; 281 | use std::fmt; 282 | 283 | #[derive(Deserialize)] 284 | #[serde(field_identifier, rename_all = "lowercase")] 285 | enum Field { 286 | Params, 287 | } 288 | 289 | struct IntrinsicParametersPerspectiveVisitor<'de, R2: RealField + serde::Deserialize<'de>>( 290 | std::marker::PhantomData<&'de R2>, 291 | ); 292 | 293 | impl<'de, R2: RealField + serde::Deserialize<'de>> serde::de::Visitor<'de> 294 | for IntrinsicParametersPerspectiveVisitor<'de, R2> 295 | { 296 | type Value = IntrinsicParametersPerspective; 297 | 298 | fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { 299 | formatter.write_str("struct IntrinsicParametersPerspective") 300 | } 301 | 302 | fn visit_seq( 303 | self, 304 | mut seq: V, 305 | ) -> std::result::Result, V::Error> 306 | where 307 | V: serde::de::SeqAccess<'de>, 308 | { 309 | let params: PerspectiveParams<_> = seq 310 | .next_element()? 311 | .ok_or_else(|| de::Error::invalid_length(0, &self))?; 312 | Ok(IntrinsicParametersPerspective::from(params)) 313 | } 314 | 315 | fn visit_map( 316 | self, 317 | mut map: V, 318 | ) -> std::result::Result, V::Error> 319 | where 320 | V: serde::de::MapAccess<'de>, 321 | { 322 | let mut params = None; 323 | while let Some(key) = map.next_key()? 
{ 324 | match key { 325 | Field::Params => { 326 | if params.is_some() { 327 | return Err(de::Error::duplicate_field("params")); 328 | } 329 | params = Some(map.next_value()?); 330 | } 331 | } 332 | } 333 | let params: PerspectiveParams<_> = 334 | params.ok_or_else(|| de::Error::missing_field("params"))?; 335 | Ok(IntrinsicParametersPerspective::from(params)) 336 | } 337 | } 338 | 339 | const FIELDS: &[&str] = &["params"]; 340 | deserializer.deserialize_struct( 341 | "IntrinsicParametersPerspective", 342 | FIELDS, 343 | IntrinsicParametersPerspectiveVisitor(std::marker::PhantomData), 344 | ) 345 | } 346 | } 347 | 348 | #[cfg(feature = "serde-serialize")] 349 | fn _test_is_serialize() { 350 | // Compile-time test to ensure IntrinsicParametersPerspective implements Serialize trait. 351 | fn implements() {} 352 | implements::>(); 353 | } 354 | 355 | #[cfg(feature = "serde-serialize")] 356 | fn _test_is_deserialize() { 357 | // Compile-time test to ensure IntrinsicParametersPerspective implements Deserialize trait. 
358 | fn implements<'de, T: serde::Deserialize<'de>>() {} 359 | implements::>(); 360 | } 361 | 362 | #[cfg(test)] 363 | mod tests { 364 | use nalgebra::{SMatrix, Vector3}; 365 | 366 | use super::{IntrinsicParametersPerspective, PerspectiveParams}; 367 | use crate::camera::{roundtrip_camera, Camera}; 368 | use crate::extrinsics::ExtrinsicParameters; 369 | use crate::intrinsic_test_utils::roundtrip_intrinsics; 370 | use crate::Points; 371 | 372 | #[test] 373 | fn roundtrip() { 374 | let params = PerspectiveParams { 375 | fx: 100.0, 376 | fy: 102.0, 377 | skew: 0.1, 378 | cx: 321.0, 379 | cy: 239.9, 380 | }; 381 | 382 | let cam: IntrinsicParametersPerspective<_> = params.into(); 383 | roundtrip_intrinsics(&cam, 640, 480, 5, 0, nalgebra::convert(1e-10)); 384 | 385 | let extrinsics = ExtrinsicParameters::from_view( 386 | &Vector3::new(1.2, 3.4, 5.6), // camcenter 387 | &Vector3::new(2.2, 3.4, 5.6), // lookat 388 | &nalgebra::Unit::new_normalize(Vector3::new(0.0, 0.0, 1.0)), // up 389 | ); 390 | 391 | let full_cam = Camera::new(cam, extrinsics); 392 | roundtrip_camera(full_cam, 640, 480, 5, 0, nalgebra::convert(1e-10)); 393 | } 394 | 395 | #[test] 396 | fn reject_invalid_projection_matrix() { 397 | #[rustfmt::skip] 398 | let p_valid = nalgebra::SMatrix::::new( 399 | 10.0, 0.0, 0.0, 0.0, 400 | 0.0, 10.0, 0.0, 0.0, 401 | 0.0, 0.0, 1.0, 0.0, 402 | ); 403 | assert!(IntrinsicParametersPerspective::from_normalized_3x4_matrix(p_valid).is_ok()); 404 | 405 | let mut p = p_valid; 406 | p[(2, 2)] = 1.1; 407 | assert!(IntrinsicParametersPerspective::from_normalized_3x4_matrix(p).is_err()); 408 | 409 | let mut p = p_valid; 410 | p[(0, 3)] = 1.1; 411 | assert!(IntrinsicParametersPerspective::from_normalized_3x4_matrix(p).is_err()); 412 | 413 | let mut p = p_valid; 414 | p[(1, 0)] = 1.1; 415 | assert!(IntrinsicParametersPerspective::from_normalized_3x4_matrix(p).is_err()); 416 | 417 | let mut p = p_valid; 418 | p[(1, 3)] = 1.1; 419 | 
assert!(IntrinsicParametersPerspective::from_normalized_3x4_matrix(p).is_err()); 420 | 421 | let mut p = p_valid; 422 | p[(2, 0)] = 1.1; 423 | assert!(IntrinsicParametersPerspective::from_normalized_3x4_matrix(p).is_err()); 424 | 425 | let mut p = p_valid; 426 | p[(2, 1)] = 1.1; 427 | assert!(IntrinsicParametersPerspective::from_normalized_3x4_matrix(p).is_err()); 428 | 429 | let mut p = p_valid; 430 | p[(2, 2)] = 1.1; 431 | assert!(IntrinsicParametersPerspective::from_normalized_3x4_matrix(p).is_err()); 432 | } 433 | 434 | fn assert_is_pmat_same( 435 | cam: &Camera>, 436 | pmat: &SMatrix, 437 | ) { 438 | let camcoord_pts = Points::new(SMatrix::::new( 439 | 1.23, 4.56, 7.89, // pt 1 440 | 1.0, 2.0, 3.0, // pt 2 441 | )); 442 | 443 | // Convert world to pixel using Camera method. 444 | let pts1 = cam.world_to_pixel(&camcoord_pts); 445 | 446 | for i in 0..pts1.data.nrows() { 447 | let pt1 = pts1.data.row(i); 448 | 449 | // Convert world to pixel using matrix multiply. 450 | let cc = camcoord_pts.data.row(i); 451 | let coords = nalgebra::Point3::new(cc[(0, 0)], cc[(0, 1)], cc[(0, 2)]); 452 | let cc = pmat * coords.to_homogeneous(); 453 | let pt2 = SMatrix::::new(cc[0] / cc[2], cc[1] / cc[2]); 454 | 455 | approx::assert_abs_diff_eq!(pt1[0], pt2[0], epsilon = 1e-5); 456 | approx::assert_abs_diff_eq!(pt1[1], pt2[1], epsilon = 1e-5); 457 | } 458 | } 459 | 460 | #[test] 461 | fn test_to_from_pmat() { 462 | for (name, cam) in &get_test_cameras() { 463 | println!("\n\n\ntesting camera {}", name); 464 | 465 | // Get camera matrix from this Camera instance and check it. 466 | let pmat = cam.as_camera_matrix(); 467 | assert_is_pmat_same(cam, &pmat); 468 | 469 | // Create a new Camera instance from this matrix and check it. 
470 | let cam2 = Camera::from_perspective_matrix(&pmat).unwrap(); 471 | let pmat2 = cam2.as_camera_matrix(); 472 | assert_is_pmat_same(&cam2, &pmat); 473 | assert_is_pmat_same(cam, &pmat2); 474 | } 475 | } 476 | 477 | fn get_test_cameras() -> Vec<(String, Camera>)> { 478 | let mut result = Vec::new(); 479 | 480 | // camera 1 - from perspective parameters 481 | let params = PerspectiveParams { 482 | fx: 100.0, 483 | fy: 102.0, 484 | skew: 0.1, 485 | cx: 321.0, 486 | cy: 239.9, 487 | }; 488 | 489 | let cam: IntrinsicParametersPerspective<_> = params.into(); 490 | roundtrip_intrinsics(&cam, 640, 480, 5, 0, nalgebra::convert(1e-10)); 491 | 492 | let extrinsics = ExtrinsicParameters::from_view( 493 | &Vector3::new(1.2, 3.4, 5.6), // camcenter 494 | &Vector3::new(2.2, 3.4, 5.6), // lookat 495 | &nalgebra::Unit::new_normalize(Vector3::new(0.0, 0.0, 1.0)), // up 496 | ); 497 | 498 | let from_params = Camera::new(cam, extrinsics); 499 | result.push(("from-params".into(), from_params)); 500 | 501 | // in the future - more cameras 502 | 503 | result 504 | } 505 | 506 | #[test] 507 | #[cfg(feature = "serde-serialize")] 508 | fn test_serde() { 509 | let params = PerspectiveParams { 510 | fx: 100.0, 511 | fy: 102.0, 512 | skew: 0.1, 513 | cx: 321.0, 514 | cy: 239.9, 515 | }; 516 | 517 | let expected: IntrinsicParametersPerspective<_> = params.into(); 518 | 519 | let buf = serde_json::to_string(&expected).unwrap(); 520 | let actual: IntrinsicParametersPerspective = serde_json::from_str(&buf).unwrap(); 521 | assert!(expected == actual); 522 | assert!(actual.fx() == 100.0); 523 | assert!(actual.fy() == 102.0); 524 | assert!(actual.skew() == 0.1); 525 | assert!(actual.cx() == 321.0); 526 | assert!(actual.cy() == 239.9); 527 | } 528 | } 529 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | #![cfg_attr(not(feature = "std"), no_std)] 2 | 
#![deny(rust_2018_idioms, unsafe_code, missing_docs)] 3 | #![doc = include_str!("../README.md")] 4 | 5 | //! # Examples 6 | //! 7 | //! ## Example - projecting 3D world coordinates to 2D pixel coordinates. 8 | //! 9 | //! ``` 10 | //! use cam_geom::*; 11 | //! use nalgebra::{Matrix2x3, Unit, Vector3}; 12 | //! 13 | //! // Create two points in the world coordinate frame. 14 | //! let world_coords = Points::new(Matrix2x3::new( 15 | //! 1.0, 0.0, 0.0, // point 1 16 | //! 0.0, 1.0, 0.0, // point 2 17 | //! )); 18 | //! 19 | //! // perspective parameters - focal length of 100, no skew, pixel center at (640,480) 20 | //! let intrinsics = IntrinsicParametersPerspective::from(PerspectiveParams { 21 | //! fx: 100.0, 22 | //! fy: 100.0, 23 | //! skew: 0.0, 24 | //! cx: 640.0, 25 | //! cy: 480.0, 26 | //! }); 27 | //! 28 | //! // Set extrinsic parameters - camera at (10,0,0), looking at (0,0,0), up (0,0,1) 29 | //! let camcenter = Vector3::new(10.0, 0.0, 0.0); 30 | //! let lookat = Vector3::new(0.0, 0.0, 0.0); 31 | //! let up = Unit::new_normalize(Vector3::new(0.0, 0.0, 1.0)); 32 | //! let pose = ExtrinsicParameters::from_view(&camcenter, &lookat, &up); 33 | //! 34 | //! // Create a `Camera` with both intrinsic and extrinsic parameters. 35 | //! let camera = Camera::new(intrinsics, pose); 36 | //! 37 | //! // Project the original 3D coordinates to 2D pixel coordinates. 38 | //! let pixel_coords = camera.world_to_pixel(&world_coords); 39 | //! 40 | //! // Print the results. 41 | //! for i in 0..world_coords.data.nrows() { 42 | //! let wc = world_coords.data.row(i); 43 | //! let pix = pixel_coords.data.row(i); 44 | //! println!("{} -> {}", wc, pix); 45 | //! } 46 | //! ``` 47 | //! 48 | //! This will print: 49 | //! 50 | //! ```text 51 | //! ┌ ┐ 52 | //! │ 1 0 0 │ 53 | //! └ ┘ 54 | //! 55 | //! -> 56 | //! ┌ ┐ 57 | //! │ 640 480 │ 58 | //! └ ┘ 59 | //! 60 | //! 61 | //! 62 | //! ┌ ┐ 63 | //! │ 0 1 0 │ 64 | //! └ ┘ 65 | //! 66 | //! -> 67 | //! ┌ ┐ 68 | //! │ 650 480 │ 69 | //! 
└ ┘ 70 | //! ``` 71 | //! 72 | //! 73 | //! ## Example - intersection of rays 74 | //! 75 | //! ``` 76 | //! use cam_geom::*; 77 | //! use nalgebra::RowVector3; 78 | //! 79 | //! // Create the first ray. 80 | //! let ray1 = Ray::::new( 81 | //! RowVector3::new(1.0, 0.0, 0.0), // origin 82 | //! RowVector3::new(0.0, 1.0, 0.0), // direction 83 | //! ); 84 | //! 85 | //! // Create the second ray. 86 | //! let ray2 = Ray::::new( 87 | //! RowVector3::new(0.0, 1.0, 0.0), // origin 88 | //! RowVector3::new(1.0, 0.0, 0.0), // direction 89 | //! ); 90 | //! 91 | //! // Compute the best intersection. 92 | //! let result = best_intersection_of_rays(&[ray1, ray2]).unwrap(); 93 | //! 94 | //! // Print the result. 95 | //! println!("result: {}", result.data); 96 | //! ``` 97 | //! 98 | //! This will print: 99 | //! 100 | //! ```text 101 | //! result: 102 | //! ┌ ┐ 103 | //! │ 1 1 0 │ 104 | //! └ ┘ 105 | //! ``` 106 | 107 | #[cfg(not(feature = "std"))] 108 | extern crate core as std; 109 | 110 | #[cfg(feature = "serde-serialize")] 111 | use serde::{Deserialize, Serialize}; 112 | 113 | use nalgebra::{ 114 | allocator::Allocator, 115 | storage::{Owned, Storage}, 116 | DefaultAllocator, Dim, DimName, Isometry3, Matrix, Point3, RealField, SMatrix, Vector3, U1, U2, 117 | U3, 118 | }; 119 | 120 | #[cfg(feature = "std")] 121 | pub mod intrinsic_test_utils; 122 | 123 | mod intrinsics_perspective; 124 | pub use intrinsics_perspective::{IntrinsicParametersPerspective, PerspectiveParams}; 125 | 126 | mod intrinsics_orthographic; 127 | pub use intrinsics_orthographic::{IntrinsicParametersOrthographic, OrthographicParams}; 128 | 129 | mod extrinsics; 130 | pub use extrinsics::ExtrinsicParameters; 131 | 132 | mod camera; 133 | pub use camera::Camera; 134 | 135 | /// Defines the different possible types of ray bundles. 
136 | pub mod ray_bundle_types; 137 | 138 | #[cfg(feature = "alloc")] 139 | mod ray_intersection; 140 | #[cfg(feature = "alloc")] 141 | pub use ray_intersection::best_intersection_of_rays; 142 | 143 | pub mod linearize; 144 | 145 | /// All possible errors. 146 | #[cfg_attr(feature = "std", derive(Debug))] 147 | #[non_exhaustive] 148 | pub enum Error { 149 | /// Invalid input. 150 | InvalidInput, 151 | /// Singular Value Decomposition did not converge. 152 | SvdFailed, 153 | /// At least two rays are needed to compute their intersection. 154 | MinimumTwoRaysNeeded, 155 | /// Invalid rotation matrix 156 | InvalidRotationMatrix, 157 | } 158 | 159 | #[cfg(feature = "std")] 160 | impl std::error::Error for Error {} 161 | 162 | #[cfg(feature = "std")] 163 | impl std::fmt::Display for Error { 164 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 165 | std::fmt::Debug::fmt(self, f) 166 | } 167 | } 168 | 169 | /// 2D pixel locations on the image sensor. 170 | /// 171 | /// These pixels are "distorted" - with barrel and pincushion distortion - if 172 | /// the camera model incorporates such. (Undistorted pixels are handled 173 | /// internally within the camera model.) 174 | /// 175 | /// This is a newtype wrapping an `nalgebra::Matrix`. 176 | #[derive(Clone)] 177 | pub struct Pixels { 178 | /// The matrix storing pixel locations. 179 | pub data: nalgebra::Matrix, 180 | } 181 | 182 | impl Pixels { 183 | /// Create a new Pixels instance 184 | #[inline] 185 | pub fn new(data: nalgebra::Matrix) -> Self { 186 | Self { data } 187 | } 188 | } 189 | 190 | /// A coordinate system in which points and rays can be defined. 191 | pub trait CoordinateSystem {} 192 | 193 | /// Implementations of [`CoordinateSystem`](trait.CoordinateSystem.html). 194 | pub mod coordinate_system { 195 | 196 | #[cfg(feature = "serde-serialize")] 197 | use serde::{Deserialize, Serialize}; 198 | 199 | /// Coordinates in the camera coordinate system. 
200 | /// 201 | /// The camera center is at (0,0,0) at looking at (0,0,1) with up as 202 | /// (0,-1,0) in this coordinate frame. 203 | #[derive(Debug, Clone, PartialEq)] 204 | #[cfg_attr(feature = "serde-serialize", derive(Serialize, Deserialize))] 205 | pub struct CameraFrame {} 206 | impl crate::CoordinateSystem for CameraFrame {} 207 | 208 | /// Coordinates in the world coordinate system. 209 | /// 210 | /// The camera center is may be located at an arbitrary position and pointed 211 | /// in an arbitrary direction in this coordinate frame. 212 | #[derive(Debug, Clone, PartialEq)] 213 | #[cfg_attr(feature = "serde-serialize", derive(Serialize, Deserialize))] 214 | pub struct WorldFrame {} 215 | impl crate::CoordinateSystem for WorldFrame {} 216 | } 217 | pub use coordinate_system::{CameraFrame, WorldFrame}; 218 | 219 | /// 3D points. Can be in any [`CoordinateSystem`](trait.CoordinateSystem.html). 220 | /// 221 | /// This is a newtype wrapping an `nalgebra::Matrix`. 222 | pub struct Points { 223 | coords: std::marker::PhantomData, 224 | /// The matrix storing point locations. 225 | pub data: nalgebra::Matrix, 226 | } 227 | 228 | #[cfg(feature = "std")] 229 | impl std::fmt::Debug 230 | for Points 231 | { 232 | fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { 233 | f.debug_struct("Points") 234 | .field("coords", &self.coords) 235 | .field("data", &self.data) 236 | .finish() 237 | } 238 | } 239 | 240 | impl Points 241 | where 242 | Coords: CoordinateSystem, 243 | R: RealField, 244 | NPTS: Dim, 245 | { 246 | /// Create a new Points instance from the underlying storage. 247 | #[inline] 248 | pub fn new(data: nalgebra::Matrix) -> Self { 249 | Self { 250 | coords: std::marker::PhantomData, 251 | data, 252 | } 253 | } 254 | } 255 | 256 | /// 3D rays. Can be in any [`CoordinateSystem`](trait.CoordinateSystem.html). 
257 | /// 258 | /// Any given `RayBundle` will have a particular bundle type, which implements 259 | /// the [`Bundle`](trait.Bundle.html) trait. 260 | #[derive(Debug, Clone, PartialEq)] 261 | #[cfg_attr(feature = "serde-serialize", derive(Serialize, Deserialize))] 262 | pub struct RayBundle 263 | where 264 | Coords: CoordinateSystem, 265 | BType: Bundle, 266 | R: RealField, 267 | NPTS: Dim, 268 | StorageMultiple: Storage, 269 | { 270 | coords: std::marker::PhantomData, 271 | /// The matrix storing the ray data. 272 | pub data: Matrix, 273 | bundle_type: BType, 274 | } 275 | 276 | impl RayBundle 277 | where 278 | Coords: CoordinateSystem, 279 | BType: Bundle, 280 | R: RealField, 281 | NPTS: DimName, 282 | StorageMultiple: Storage, 283 | DefaultAllocator: Allocator, 284 | { 285 | /// get directions of each ray in bundle 286 | #[inline] 287 | pub fn directions(&self) -> Matrix> { 288 | self.bundle_type.directions(&self.data) 289 | } 290 | 291 | /// get centers (origins) of each ray in bundle 292 | #[inline] 293 | pub fn centers(&self) -> Matrix> { 294 | self.bundle_type.centers(&self.data) 295 | } 296 | } 297 | 298 | impl RayBundle> 299 | where 300 | Coords: CoordinateSystem, 301 | BType: Bundle, 302 | R: RealField, 303 | { 304 | /// Return the single ray from the RayBundle with exactly one ray. 305 | #[inline] 306 | pub fn to_single_ray(&self) -> Ray { 307 | self.bundle_type.to_single_ray(&self.data) 308 | } 309 | } 310 | 311 | impl 312 | RayBundle, R, NPTS, StorageMultiple> 313 | where 314 | Coords: CoordinateSystem, 315 | R: RealField, 316 | NPTS: Dim, 317 | StorageMultiple: Storage, 318 | { 319 | /// Create a new RayBundle instance in which all rays share origin at zero. 320 | /// 321 | /// The number of points allocated is given by the `npts` parameter, which 322 | /// should agree with the `NPTS` type. The coordinate system is given by the 323 | /// `Coords` type. 
324 | pub fn new_shared_zero_origin(data: Matrix) -> Self { 325 | let bundle_type = crate::ray_bundle_types::SharedOriginRayBundle::new_shared_zero_origin(); 326 | Self::new(bundle_type, data) 327 | } 328 | } 329 | 330 | impl 331 | RayBundle< 332 | Coords, 333 | crate::ray_bundle_types::SharedDirectionRayBundle, 334 | R, 335 | NPTS, 336 | StorageMultiple, 337 | > 338 | where 339 | Coords: CoordinateSystem, 340 | R: RealField, 341 | NPTS: Dim, 342 | StorageMultiple: Storage, 343 | { 344 | /// Create a new RayBundle instance in which all rays share +z direction. 345 | /// 346 | /// The number of points allocated is given by the `npts` parameter, which 347 | /// should agree with the `NPTS` type. The coordinate system is given by the 348 | /// `Coords` type. 349 | pub fn new_shared_plusz_direction(data: Matrix) -> Self { 350 | let bundle_type = 351 | crate::ray_bundle_types::SharedDirectionRayBundle::new_plusz_shared_direction(); 352 | Self::new(bundle_type, data) 353 | } 354 | } 355 | 356 | impl RayBundle 357 | where 358 | Coords: CoordinateSystem, 359 | BType: Bundle, 360 | R: RealField, 361 | NPTS: Dim, 362 | StorageMultiple: Storage, 363 | { 364 | /// Create a new RayBundle instance from the underlying storage. 365 | /// 366 | /// The coordinate system is given by the `Coords` type and the bundle type 367 | /// (e.g. shared origin or shared direction) is given by the `BType`. 368 | #[inline] 369 | fn new(bundle_type: BType, data: nalgebra::Matrix) -> Self { 370 | Self { 371 | coords: std::marker::PhantomData, 372 | data, 373 | bundle_type, 374 | } 375 | } 376 | 377 | /// get a 3D point on the ray, obtained by adding the direction(s) to the origin(s) 378 | /// 379 | /// The distance of the point from the ray bundle center is not definted and 380 | /// can be arbitrary. 
381 | #[inline] 382 | pub fn point_on_ray(&self) -> Points> 383 | where 384 | DefaultAllocator: Allocator, 385 | { 386 | self.bundle_type.point_on_ray(&self.data) 387 | } 388 | 389 | /// get a 3D point on the ray at a defined distance from the origin(s) 390 | #[inline] 391 | pub fn point_on_ray_at_distance( 392 | &self, 393 | distance: R, 394 | ) -> Points> 395 | where 396 | DefaultAllocator: Allocator, 397 | { 398 | self.bundle_type 399 | .point_on_ray_at_distance(&self.data, distance) 400 | } 401 | 402 | #[inline] 403 | fn to_pose( 404 | &self, 405 | pose: Isometry3, 406 | ) -> RayBundle> 407 | where 408 | R: RealField, 409 | NPTS: Dim, 410 | OutFrame: CoordinateSystem, 411 | DefaultAllocator: Allocator, 412 | { 413 | self.bundle_type.to_pose(pose, &self.data) 414 | } 415 | } 416 | 417 | /// A single ray. Can be in any [`CoordinateSystem`](trait.CoordinateSystem.html). 418 | /// 419 | /// A `RayBundle` with only one ray can be converted to this with 420 | /// `RayBundle::to_single_ray()`. 421 | pub struct Ray { 422 | /// The center (origin) of the ray. 423 | pub center: SMatrix, 424 | /// The direction of the ray. 425 | pub direction: SMatrix, 426 | c: std::marker::PhantomData, 427 | } 428 | 429 | impl Ray { 430 | /// Create a new ray from center (origin) and direction. 431 | #[inline] 432 | pub fn new(center: SMatrix, direction: SMatrix) -> Self { 433 | Self { 434 | center, 435 | direction, 436 | c: std::marker::PhantomData, 437 | } 438 | } 439 | } 440 | 441 | /// Specifies operations which any RayBundle must implement. 442 | pub trait Bundle 443 | where 444 | R: RealField, 445 | { 446 | /// Return a single ray from a `RayBundle` with exactly one ray. 447 | fn to_single_ray(&self, self_data: &SMatrix) -> Ray 448 | where 449 | Coords: CoordinateSystem; 450 | 451 | /// Get directions of each ray in bundle. 
452 | /// 453 | /// This can be inefficient, because when not every ray has a different 454 | /// direction (which is the case for the `SharedDirectionRayBundle` type), 455 | /// this will nevertheless copy the single direction `NPTS` times. 456 | fn directions( 457 | &self, 458 | self_data: &Matrix, 459 | ) -> Matrix> 460 | where 461 | NPTS: DimName, 462 | StorageIn: Storage, 463 | DefaultAllocator: Allocator; 464 | 465 | /// Get centers of each ray in bundle. 466 | /// 467 | /// This can be inefficient, because when not every ray has a different 468 | /// center (which is the case for the `SharedOriginRayBundle` type), 469 | /// this will nevertheless copy the single center `NPTS` times. 470 | fn centers( 471 | &self, 472 | self_data: &Matrix, 473 | ) -> Matrix> 474 | where 475 | NPTS: DimName, 476 | StorageIn: Storage, 477 | DefaultAllocator: Allocator; 478 | 479 | /// Return points on on the input rays. 480 | /// 481 | /// The distance of the point from the ray bundle center is not definted and 482 | /// can be arbitrary. 483 | fn point_on_ray( 484 | &self, 485 | self_data: &Matrix, 486 | ) -> Points> 487 | where 488 | Self: Sized, 489 | NPTS: Dim, 490 | StorageIn: Storage, 491 | OutFrame: CoordinateSystem, 492 | DefaultAllocator: Allocator; 493 | 494 | /// Return points on on the input rays at a defined distance from the origin(s). 495 | fn point_on_ray_at_distance( 496 | &self, 497 | self_data: &Matrix, 498 | distance: R, 499 | ) -> Points> 500 | where 501 | Self: Sized, 502 | NPTS: Dim, 503 | StorageIn: Storage, 504 | OutFrame: CoordinateSystem, 505 | DefaultAllocator: Allocator; 506 | 507 | /// Convert the input rays by the pose given. 
508 | fn to_pose( 509 | &self, 510 | pose: Isometry3, 511 | self_data: &Matrix, 512 | ) -> RayBundle> 513 | where 514 | Self: Sized, 515 | R: RealField, 516 | NPTS: Dim, 517 | StorageIn: Storage, 518 | OutFrame: CoordinateSystem, 519 | DefaultAllocator: Allocator; 520 | } 521 | 522 | /// A geometric model of camera coordinates to pixels (and vice versa). 523 | pub trait IntrinsicParameters: std::fmt::Debug + Clone 524 | where 525 | R: RealField, 526 | { 527 | /// What type of ray bundle is returned when projecting pixels to rays. 528 | type BundleType; 529 | 530 | /// project pixels to camera coords 531 | fn pixel_to_camera( 532 | &self, 533 | pixels: &Pixels, 534 | ) -> RayBundle> 535 | where 536 | Self::BundleType: Bundle, 537 | IN: Storage, 538 | NPTS: Dim, 539 | DefaultAllocator: Allocator, // needed to make life easy for implementors 540 | DefaultAllocator: Allocator, // needed to make life easy for implementors 541 | DefaultAllocator: Allocator; 542 | 543 | /// project camera coords to pixel coordinates 544 | fn camera_to_pixel( 545 | &self, 546 | camera: &Points, 547 | ) -> Pixels> 548 | where 549 | IN: Storage, 550 | NPTS: Dim, 551 | DefaultAllocator: Allocator; 552 | } 553 | 554 | #[cfg(test)] 555 | mod tests { 556 | use super::*; 557 | use nalgebra::convert; 558 | 559 | #[cfg(not(feature = "std"))] 560 | compile_error!("tests require std"); 561 | 562 | #[test] 563 | fn rays_shared_origin() { 564 | // Create rays in world coorindates all with a shared origin at zero. 565 | let b1 = 566 | RayBundle::::new_shared_zero_origin(SMatrix::<_, 2, 3>::new( 567 | 1.0, 2.0, 3.0, // ray 1 568 | 4.0, 5.0, 6.0, // ray 2 569 | )); 570 | 571 | // Get points on rays at a specific distance. 572 | let actual_dist1 = b1.point_on_ray_at_distance(1.0).data; 573 | 574 | { 575 | // Manually compte what these points should be. 
576 | let r1m = (1.0_f64 + 4.0 + 9.0).sqrt(); 577 | let r2m = (16.0_f64 + 25.0 + 36.0).sqrt(); 578 | let expected = SMatrix::<_, 2, 3>::new( 579 | 1.0 / r1m, 580 | 2.0 / r1m, 581 | 3.0 / r1m, // ray 1 582 | 4.0 / r2m, 583 | 5.0 / r2m, 584 | 6.0 / r2m, // ray 2 585 | ); 586 | 587 | // Check the points vs the manually computed versions. 588 | approx::assert_abs_diff_eq!(actual_dist1, expected, epsilon = 1e-10); 589 | } 590 | 591 | // Get points on rays at a specific distance. 592 | let actual_dist10 = b1.point_on_ray_at_distance(10.0).data; 593 | 594 | // Get points on rays at arbitrary distance. 595 | let actual = b1.point_on_ray().data; 596 | 597 | for i in 0..actual_dist1.nrows() { 598 | assert_on_line(actual_dist1.row(i), actual_dist10.row(i), actual.row(i)); 599 | } 600 | } 601 | 602 | #[test] 603 | fn rays_shared_direction() { 604 | // Create rays in world coorindates all with a shared direction (+z). 605 | let b1 = RayBundle::::new_shared_plusz_direction( 606 | SMatrix::<_, 2, 3>::new( 607 | 1.0, 2.0, 0.0, // ray 1 608 | 3.0, 4.0, 0.0, // ray 2 609 | ), 610 | ); 611 | 612 | // Get points on rays at a specific distance. 613 | let actual_dist10 = b1.point_on_ray_at_distance(10.0).data; 614 | 615 | { 616 | // Manually compte what these points should be. 617 | let expected_dist10 = SMatrix::<_, 2, 3>::new( 618 | 1.0, 2.0, 10.0, // ray 1 619 | 3.0, 4.0, 10.0, // ray 2 620 | ); 621 | 622 | // Check the points vs the manually computed versions. 623 | approx::assert_abs_diff_eq!(actual_dist10, expected_dist10, epsilon = 1e-10); 624 | } 625 | 626 | // Get points on rays at a specific distance. 627 | let actual_dist0 = b1.point_on_ray_at_distance(0.0).data; 628 | 629 | { 630 | // Manually compte what these points should be. 631 | let expected_dist0 = SMatrix::<_, 2, 3>::new( 632 | 1.0, 2.0, 0.0, // ray 1 633 | 3.0, 4.0, 0.0, // ray 2 634 | ); 635 | 636 | // Check the points vs the manually computed versions. 
637 | approx::assert_abs_diff_eq!(actual_dist0, expected_dist0, epsilon = 1e-10); 638 | } 639 | 640 | // Get points on rays at arbitrary distance. 641 | let actual = b1.point_on_ray().data; 642 | 643 | for i in 0..actual_dist0.nrows() { 644 | assert_on_line(actual_dist0.row(i), actual_dist10.row(i), actual.row(i)); 645 | } 646 | } 647 | 648 | fn assert_on_line( 649 | line_a: Matrix, 650 | line_b: Matrix, 651 | test_pt: Matrix, 652 | ) where 653 | R: RealField, 654 | S1: Storage, 655 | S2: Storage, 656 | S3: Storage, 657 | { 658 | let dir = &line_b - &line_a; 659 | let testx = &test_pt - &line_a; 660 | let mag_dir = (dir[0].clone() * dir[0].clone() 661 | + dir[1].clone() * dir[1].clone() 662 | + dir[2].clone() * dir[2].clone()) 663 | .sqrt(); 664 | let mag_testx = (testx[0].clone() * testx[0].clone() 665 | + testx[1].clone() * testx[1].clone() 666 | + testx[2].clone() * testx[2].clone()) 667 | .sqrt(); 668 | let scale = mag_dir / mag_testx; 669 | 670 | for j in 0..3 { 671 | approx::assert_abs_diff_eq!( 672 | testx[j].clone() * scale.clone(), 673 | dir[j].clone(), 674 | epsilon = convert(1e-10) 675 | ); 676 | } 677 | } 678 | 679 | #[test] 680 | #[cfg(feature = "serde-serialize")] 681 | fn test_ray_bundle_serde() { 682 | let expected = 683 | RayBundle::::new_shared_plusz_direction( 684 | SMatrix::<_, 2, 3>::new( 685 | 1.0, 2.0, 0.0, // ray 1 686 | 3.0, 4.0, 0.0, // ray 2 687 | ), 688 | ); 689 | 690 | let buf = serde_json::to_string(&expected).unwrap(); 691 | let actual: RayBundle<_, _, _, _, _> = serde_json::from_str(&buf).unwrap(); 692 | assert!(expected == actual); 693 | } 694 | } 695 | -------------------------------------------------------------------------------- /src/linearize.rs: -------------------------------------------------------------------------------- 1 | //! Linearize camera models by computing the Jacobian matrix. 
2 | 3 | use crate::{Camera, IntrinsicParametersPerspective, Points, WorldFrame}; 4 | use nalgebra::{storage::Storage, RealField, SMatrix, U1, U3}; 5 | 6 | /// Required data required for finding Jacobian of perspective camera models. 7 | /// 8 | /// Create this with the [`new()`](struct.JacobianPerspectiveCache.html#method.new) method. 9 | pub struct JacobianPerspectiveCache { 10 | m: SMatrix, 11 | } 12 | 13 | impl JacobianPerspectiveCache { 14 | /// Create a new `JacobianPerspectiveCache` from a `Camera` with a perspective model. 15 | pub fn new(cam: &Camera>) -> Self { 16 | let m = { 17 | let p33 = cam.intrinsics().as_intrinsics_matrix(); 18 | p33 * cam.extrinsics().matrix() 19 | }; 20 | 21 | // flip sign if focal length < 0 22 | let m = if m[(0, 0)] < nalgebra::zero() { -m } else { m }; 23 | 24 | let m = m.clone() / m[(2, 3)].clone(); // normalize 25 | 26 | Self { m } 27 | } 28 | 29 | /// Linearize camera model by evaluating around input point `p`. 30 | /// 31 | /// Returns Jacobian matrix `A` (shape 2x3) such that `Ao = (u,v)` where `o` 32 | /// is 3D world coords offset from `p` and `(u,v)` are the shift in pixel 33 | /// coords from the projected location of `p`. In other words, for a camera 34 | /// model `F(x)`, if `F(p) = (a,b)` and `F(p+o) = (a,b) 35 | /// + Ao = (a,b) + (u,v) = (a+u,b+v)`, this function returns `A`. 36 | pub fn linearize_at(&self, p: &Points) -> SMatrix 37 | where 38 | STORAGE: Storage, 39 | { 40 | let pt3d = &p.data; 41 | 42 | // See pinhole_jacobian_demo.py in flydra for the original source of this. It has 43 | // been manually factored it a bit futher. 
44 | // https://github.com/strawlab/flydra/blob/3ab1b5843b095d73f796bf707e6680b923993899/flydra_core/sympy_demo/pinhole_jacobian_demo.py 45 | let x = pt3d[(0, 0)].clone(); 46 | let y = pt3d[(0, 1)].clone(); 47 | let z = pt3d[(0, 2)].clone(); 48 | 49 | let p = &self.m; 50 | let denom = p[(2, 0)].clone() * x.clone() 51 | + p[(2, 1)].clone() * y.clone() 52 | + p[(2, 2)].clone() * z.clone() 53 | + p[(2, 3)].clone(); 54 | let denom_sqrt = denom.clone().powi(-2); 55 | 56 | let factor_u = p[(0, 0)].clone() * x.clone() 57 | + p[(0, 1)].clone() * y.clone() 58 | + p[(0, 2)].clone() * z.clone() 59 | + p[(0, 3)].clone(); 60 | let ux = -p[(2, 0)].clone() * denom_sqrt.clone() * factor_u.clone() 61 | + p[(0, 0)].clone() / denom.clone(); 62 | let uy = -p[(2, 1)].clone() * denom_sqrt.clone() * factor_u.clone() 63 | + p[(0, 1)].clone() / denom.clone(); 64 | let uz = 65 | -p[(2, 2)].clone() * denom_sqrt.clone() * factor_u + p[(0, 2)].clone() / denom.clone(); 66 | 67 | let factor_v = p[(1, 0)].clone() * x 68 | + p[(1, 1)].clone() * y 69 | + p[(1, 2)].clone() * z 70 | + p[(1, 3)].clone(); 71 | let vx = -p[(2, 0)].clone() * denom_sqrt.clone() * factor_v.clone() 72 | + p[(1, 0)].clone() / denom.clone(); 73 | let vy = -p[(2, 1)].clone() * denom_sqrt.clone() * factor_v.clone() 74 | + p[(1, 1)].clone() / denom.clone(); 75 | let vz = -p[(2, 2)].clone() * denom_sqrt * factor_v + p[(1, 2)].clone() / denom; 76 | 77 | SMatrix::::new(ux, uy, uz, vx, vy, vz) 78 | } 79 | } 80 | 81 | #[test] 82 | fn test_jacobian_perspective() { 83 | use nalgebra::{OMatrix, RowVector2, RowVector3, Unit, Vector3}; 84 | 85 | use super::*; 86 | use crate::{Camera, ExtrinsicParameters, IntrinsicParametersPerspective}; 87 | 88 | // create a perspective camera 89 | let params = PerspectiveParams { 90 | fx: 100.0, 91 | fy: 102.0, 92 | skew: 0.1, 93 | cx: 321.0, 94 | cy: 239.9, 95 | }; 96 | 97 | let intrinsics: IntrinsicParametersPerspective<_> = params.into(); 98 | 99 | let camcenter = Vector3::new(10.0, 0.0, 10.0); 100 | 
let lookat = Vector3::new(0.0, 0.0, 0.0); 101 | let up = Unit::new_normalize(Vector3::new(0.0, 0.0, 1.0)); 102 | let pose = ExtrinsicParameters::from_view(&camcenter, &lookat, &up); 103 | 104 | let cam = Camera::new(intrinsics, pose); 105 | 106 | // cache the required data to compute a jacobian 107 | let cam_jac = JacobianPerspectiveCache::new(&cam); 108 | 109 | // We are going to linearize around this center 3D center point 110 | let center = Points::new(RowVector3::new(0.01, 0.02, 0.03)); 111 | 112 | // We will test a 3D point at this offset from the center 113 | let offset = Vector3::new(0.0, 0.0, 0.01); 114 | 115 | // Get the 2D projection (in pixels) of our center. 116 | let center_projected: OMatrix = cam.world_to_pixel(¢er).data; 117 | 118 | // Linearize the camera model around the center 3D point. 119 | let linearized_cam = cam_jac.linearize_at(¢er); 120 | 121 | // Compute the 3D point which we now want to view. 122 | let new_point = Points::new(center.data + offset.transpose()); 123 | 124 | // Get the 2D projection (in pixels) using the original, non-linear camera model 125 | let nonlin = cam.world_to_pixel(&new_point).data; 126 | 127 | // Get the 2D projection (in pixels) point with the linearized camera model 128 | let o = linearized_cam * offset; 129 | let linear_prediction = RowVector2::new(center_projected.x + o[0], center_projected.y + o[1]); 130 | 131 | // Check both approaches are equal 132 | approx::assert_relative_eq!(linear_prediction, nonlin, epsilon = nalgebra::convert(1e-4)); 133 | } 134 | -------------------------------------------------------------------------------- /src/ray_bundle_types.rs: -------------------------------------------------------------------------------- 1 | use nalgebra::{OMatrix, SMatrix}; 2 | 3 | use crate::*; 4 | 5 | #[cfg(feature = "serde-serialize")] 6 | use serde::{Deserialize, Serialize}; 7 | 8 | /// A bundle of rays with the same arbitrary origin 9 | #[derive(Debug, Clone, PartialEq)] 10 | #[cfg_attr(feature = 
"serde-serialize", derive(Serialize, Deserialize))] 11 | pub struct SharedOriginRayBundle { 12 | center: Point3, 13 | } 14 | 15 | impl SharedOriginRayBundle 16 | where 17 | R: RealField, 18 | { 19 | /// Create a new SharedOriginRayBundle with origin (center) at zero. 20 | #[inline] 21 | pub fn new_shared_zero_origin() -> Self { 22 | // center is at (0,0,0) 23 | let zero: R = nalgebra::convert(0.0); 24 | Self { 25 | center: Point3::new(zero.clone(), zero.clone(), zero), 26 | } 27 | } 28 | } 29 | 30 | impl Bundle for SharedOriginRayBundle 31 | where 32 | R: RealField, 33 | { 34 | #[inline] 35 | fn to_single_ray(&self, self_data: &SMatrix) -> Ray 36 | where 37 | Coords: CoordinateSystem, 38 | { 39 | Ray { 40 | direction: self_data.clone(), 41 | center: self.center.coords.transpose(), 42 | c: std::marker::PhantomData, 43 | } 44 | } 45 | 46 | fn directions( 47 | &self, 48 | self_data: &Matrix, 49 | ) -> Matrix> 50 | where 51 | NPTS: nalgebra::DimName, 52 | StorageIn: Storage, 53 | DefaultAllocator: Allocator, 54 | { 55 | // TODO: do this more smartly/efficiently 56 | let mut result = nalgebra::OMatrix::::zeros(); 57 | for i in 0..self_data.nrows() { 58 | for j in 0..3 { 59 | result[(i, j)] = self_data[(i, j)].clone(); 60 | } 61 | } 62 | result 63 | } 64 | 65 | fn centers( 66 | &self, 67 | self_data: &Matrix, 68 | ) -> Matrix> 69 | where 70 | NPTS: nalgebra::DimName, 71 | StorageIn: Storage, 72 | DefaultAllocator: Allocator, 73 | { 74 | // TODO: do this more smartly/efficiently 75 | let mut result = nalgebra::OMatrix::::zeros(); 76 | for i in 0..self_data.nrows() { 77 | for j in 0..3 { 78 | result[(i, j)] = self.center[j].clone(); 79 | } 80 | } 81 | result 82 | } 83 | 84 | fn point_on_ray( 85 | &self, 86 | directions: &Matrix, 87 | ) -> Points> 88 | where 89 | Self: Sized, 90 | NPTS: Dim, 91 | StorageIn: Storage, 92 | OutFrame: CoordinateSystem, 93 | DefaultAllocator: Allocator, 94 | { 95 | let mut result = Points::new(OMatrix::zeros_generic( 96 | 
NPTS::from_usize(directions.nrows()), 97 | U3::from_usize(3), 98 | )); 99 | let center = [ 100 | self.center[0].clone(), 101 | self.center[1].clone(), 102 | self.center[2].clone(), 103 | ]; 104 | for i in 0..directions.nrows() { 105 | for j in 0..3 { 106 | result.data[(i, j)] = center[j].clone() + directions[(i, j)].clone(); 107 | } 108 | } 109 | result 110 | } 111 | 112 | fn point_on_ray_at_distance( 113 | &self, 114 | directions: &Matrix, 115 | distance: R, 116 | ) -> Points> 117 | where 118 | Self: Sized, 119 | NPTS: Dim, 120 | StorageIn: Storage, 121 | OutFrame: CoordinateSystem, 122 | DefaultAllocator: Allocator, 123 | { 124 | let mut result = Points::new(OMatrix::zeros_generic( 125 | NPTS::from_usize(directions.nrows()), 126 | U3::from_usize(3), 127 | )); 128 | let center = [ 129 | self.center[0].clone(), 130 | self.center[1].clone(), 131 | self.center[2].clone(), 132 | ]; 133 | for i in 0..directions.nrows() { 134 | let dx = directions[(i, 0)].clone(); 135 | let dy = directions[(i, 1)].clone(); 136 | let dz = directions[(i, 2)].clone(); 137 | let mag2 = dx.clone() * dx + dy.clone() * dy + dz.clone() * dz; 138 | let mag = mag2.sqrt(); 139 | let scale = distance.clone() / mag; 140 | for j in 0..3 { 141 | result.data[(i, j)] = 142 | center[j].clone() + scale.clone() * directions[(i, j)].clone(); 143 | } 144 | } 145 | result 146 | } 147 | 148 | fn to_pose( 149 | &self, 150 | pose: Isometry3, 151 | self_data: &Matrix, 152 | ) -> RayBundle> 153 | where 154 | NPTS: Dim, 155 | StorageIn: Storage, 156 | OutFrame: CoordinateSystem, 157 | DefaultAllocator: Allocator, 158 | { 159 | let bundle_type = Self::new_shared_zero_origin(); 160 | let mut reposed = RayBundle::new( 161 | bundle_type, 162 | OMatrix::zeros_generic(NPTS::from_usize(self_data.nrows()), U3::from_usize(3)), 163 | ); 164 | // transform single bundle center point 165 | let new_center = pose.transform_point(&self.center); 166 | 167 | // transform multiple bundle directions 168 | for i in 
0..self_data.nrows() { 169 | let orig_vec = self_data.row(i).transpose(); 170 | let new_vec = pose.transform_vector(&orig_vec); 171 | for j in 0..3 { 172 | reposed.data[(i, j)] = new_vec[j].clone(); 173 | } 174 | } 175 | 176 | reposed.bundle_type = SharedOriginRayBundle { center: new_center }; 177 | reposed 178 | } 179 | } 180 | 181 | /// A bundle of rays with the same arbitrary direction 182 | #[derive(Debug, Clone, PartialEq)] 183 | #[cfg_attr(feature = "serde-serialize", derive(Serialize, Deserialize))] 184 | pub struct SharedDirectionRayBundle { 185 | direction: Vector3, 186 | } 187 | 188 | impl SharedDirectionRayBundle { 189 | /// Create a new SharedDirectionRayBundle with direction +z. 190 | pub fn new_plusz_shared_direction() -> Self { 191 | // in +z direction 192 | Self { 193 | direction: Vector3::new(R::zero(), R::zero(), R::one()), 194 | } 195 | } 196 | } 197 | 198 | impl Bundle for SharedDirectionRayBundle { 199 | fn to_single_ray(&self, self_data: &SMatrix) -> Ray 200 | where 201 | Coords: CoordinateSystem, 202 | { 203 | Ray { 204 | direction: self.direction.transpose(), 205 | center: self_data.clone(), 206 | c: std::marker::PhantomData, 207 | } 208 | } 209 | 210 | fn directions( 211 | &self, 212 | self_data: &Matrix, 213 | ) -> Matrix> 214 | where 215 | NPTS: nalgebra::DimName, 216 | StorageIn: Storage, 217 | DefaultAllocator: Allocator, 218 | { 219 | // TODO: do this more smartly/efficiently 220 | let mut result = nalgebra::OMatrix::::zeros(); 221 | for i in 0..self_data.nrows() { 222 | for j in 0..3 { 223 | result[(i, j)] = self.direction[j].clone(); 224 | } 225 | } 226 | result 227 | } 228 | 229 | fn centers( 230 | &self, 231 | self_data: &Matrix, 232 | ) -> Matrix> 233 | where 234 | NPTS: nalgebra::DimName, 235 | StorageIn: Storage, 236 | DefaultAllocator: Allocator, 237 | { 238 | // TODO: do this more smartly/efficiently 239 | let mut result = nalgebra::OMatrix::::zeros(); 240 | for i in 0..self_data.nrows() { 241 | for j in 0..3 { 242 | 
result[(i, j)] = self_data[(i, j)].clone(); 243 | } 244 | } 245 | result 246 | } 247 | 248 | fn point_on_ray( 249 | &self, 250 | centers: &Matrix, 251 | ) -> Points> 252 | where 253 | Self: Sized, 254 | NPTS: Dim, 255 | StorageIn: Storage, 256 | OutFrame: CoordinateSystem, 257 | DefaultAllocator: Allocator, 258 | { 259 | let mut result = Points::new(OMatrix::zeros_generic( 260 | NPTS::from_usize(centers.nrows()), 261 | U3::from_usize(3), 262 | )); 263 | let direction = [ 264 | self.direction[0].clone(), 265 | self.direction[1].clone(), 266 | self.direction[2].clone(), 267 | ]; 268 | for i in 0..centers.nrows() { 269 | for j in 0..3 { 270 | result.data[(i, j)] = direction[j].clone() + centers[(i, j)].clone(); 271 | } 272 | } 273 | result 274 | } 275 | 276 | fn point_on_ray_at_distance( 277 | &self, 278 | centers: &Matrix, 279 | distance: R, 280 | ) -> Points> 281 | where 282 | Self: Sized, 283 | NPTS: Dim, 284 | StorageIn: Storage, 285 | OutFrame: CoordinateSystem, 286 | DefaultAllocator: Allocator, 287 | { 288 | let mut result = Points::new(OMatrix::zeros_generic( 289 | NPTS::from_usize(centers.nrows()), 290 | U3::from_usize(3), 291 | )); 292 | 293 | let d = &self.direction; 294 | let dx = d[0].clone(); 295 | let dy = d[1].clone(); 296 | let dz = d[2].clone(); 297 | let mag2 = dx.clone() * dx.clone() + dy.clone() * dy.clone() + dz.clone() * dz.clone(); 298 | let mag = mag2.sqrt(); 299 | let scale = distance / mag; 300 | let dist_dir = Vector3::new(scale.clone() * dx, scale.clone() * dy, scale * dz); 301 | 302 | for i in 0..centers.nrows() { 303 | for j in 0..3 { 304 | result.data[(i, j)] = dist_dir[j].clone() + centers[(i, j)].clone(); 305 | } 306 | } 307 | result 308 | } 309 | 310 | fn to_pose( 311 | &self, 312 | pose: Isometry3, 313 | self_data: &Matrix, 314 | ) -> RayBundle> 315 | where 316 | NPTS: Dim, 317 | StorageIn: Storage, 318 | OutFrame: CoordinateSystem, 319 | DefaultAllocator: Allocator, 320 | { 321 | let bundle_type = 
Self::new_plusz_shared_direction(); 322 | 323 | let mut reposed = RayBundle::new( 324 | bundle_type, 325 | OMatrix::zeros_generic(NPTS::from_usize(self_data.nrows()), U3::from_usize(3)), 326 | ); 327 | 328 | // transform single bundle direction 329 | let new_direction = pose.transform_vector(&self.direction); 330 | 331 | // transform multiple bundle origins 332 | for i in 0..self_data.nrows() { 333 | let orig_point = Point3 { 334 | coords: self_data.row(i).transpose(), 335 | }; 336 | let new_point = pose.transform_point(&orig_point); 337 | for j in 0..3 { 338 | reposed.data[(i, j)] = new_point[j].clone(); 339 | } 340 | } 341 | 342 | reposed.bundle_type = SharedDirectionRayBundle { 343 | direction: new_direction, 344 | }; 345 | reposed 346 | } 347 | } 348 | -------------------------------------------------------------------------------- /src/ray_intersection.rs: -------------------------------------------------------------------------------- 1 | #![deny(unsafe_code, missing_docs)] 2 | 3 | use nalgebra as na; 4 | 5 | use na::{ 6 | allocator::Allocator, base::storage::Owned, DefaultAllocator, Dim, Dyn, Matrix3, OMatrix, 7 | RealField, SMatrix, Vector3, U1, U3, 8 | }; 9 | 10 | use itertools::izip; 11 | 12 | use crate::{CoordinateSystem, Error, Points, Ray}; 13 | 14 | /// Iter::Sum is not implemented for R. 15 | macro_rules! sum_as_f64 { 16 | ($iter:expr,$R:ty) => {{ 17 | $iter 18 | .map(|x| na::try_convert::<$R, f64>(x.clone()).unwrap()) 19 | .sum() 20 | }}; 21 | } 22 | // just like above but without * dereference. 23 | macro_rules! refsum_as_f64 { 24 | ($iter:expr,$R:ty) => {{ 25 | $iter.map(|x| na::try_convert::<$R, f64>(x).unwrap()).sum() 26 | }}; 27 | } 28 | 29 | /// convert from specialized type (e.g. f64) into generic type $R 30 | macro_rules! 
despecialize { 31 | ($a:expr, $R:ty, $rows:expr, $cols:expr) => {{ 32 | SMatrix::<$R, $rows, $cols>::from_iterator( 33 | $a.as_slice().into_iter().map(|x| na::convert(*x)), 34 | ) 35 | }}; 36 | } 37 | 38 | /// A single point. (This is a `Points` instance defined to have only one point). 39 | pub type SinglePoint = Points>; 40 | 41 | /// Return the 3D point which is the best intersection of rays. 42 | #[allow(non_snake_case)] 43 | pub fn best_intersection_of_rays( 44 | rays: &[Ray], 45 | ) -> Result, Error> 46 | where 47 | Coords: CoordinateSystem, 48 | R: RealField, 49 | DefaultAllocator: Allocator, 50 | DefaultAllocator: Allocator, 51 | DefaultAllocator: Allocator, 52 | { 53 | if rays.len() < 2 { 54 | return Err(Error::MinimumTwoRaysNeeded); 55 | } 56 | 57 | let npts = Dyn(rays.len()); 58 | let u1 = U1::from_usize(1); 59 | let u3 = U3::from_usize(3); 60 | 61 | let mut line_dirs = OMatrix::::zeros_generic(npts, u3); 62 | let mut line_points = OMatrix::::zeros_generic(npts, u3); 63 | 64 | for i in 0..rays.len() { 65 | let ray_wc = rays.get(i).unwrap(); 66 | let d = &ray_wc.direction; 67 | 68 | // Normalize the vector length. 
69 | let dir = nalgebra::base::Unit::new_normalize(Vector3::new( 70 | d.data.0[0][0].clone(), 71 | d.data.0[1][0].clone(), 72 | d.data.0[2][0].clone(), 73 | )); 74 | 75 | line_dirs[(i, 0)] = dir[0].clone(); 76 | line_dirs[(i, 1)] = dir[1].clone(); 77 | line_dirs[(i, 2)] = dir[2].clone(); 78 | 79 | line_points[(i, 0)] = ray_wc.center.data.0[0][0].clone(); 80 | line_points[(i, 1)] = ray_wc.center.data.0[1][0].clone(); 81 | line_points[(i, 2)] = ray_wc.center.data.0[2][0].clone(); 82 | } 83 | 84 | let mut xxm1 = OMatrix::::zeros_generic(u1, npts); 85 | let mut yym1 = OMatrix::::zeros_generic(u1, npts); 86 | let mut zzm1 = OMatrix::::zeros_generic(u1, npts); 87 | let mut xy = OMatrix::::zeros_generic(u1, npts); 88 | let mut xz = OMatrix::::zeros_generic(u1, npts); 89 | let mut yz = OMatrix::::zeros_generic(u1, npts); 90 | 91 | // TODO element-wise add, mul with nalgebra matrices 92 | 93 | let nx = line_dirs.column(0); 94 | let ny = line_dirs.column(1); 95 | let nz = line_dirs.column(2); 96 | 97 | let minusone: R = na::convert(-1.0); 98 | 99 | for (x, xxm1) in nx.iter().zip(xxm1.iter_mut()) { 100 | *xxm1 = x.clone().powi(2).add(minusone.clone()); 101 | } 102 | 103 | for (y, yym1) in ny.iter().zip(yym1.iter_mut()) { 104 | *yym1 = y.clone().powi(2).add(minusone.clone()); 105 | } 106 | 107 | for (z, zzm1) in nz.iter().zip(zzm1.iter_mut()) { 108 | *zzm1 = z.clone().powi(2).add(minusone.clone()); 109 | } 110 | 111 | for (x, y, xy) in izip!(nx.iter(), ny.iter(), xy.iter_mut()) { 112 | *xy = x.clone() * y.clone(); 113 | } 114 | 115 | for (x, z, xz) in izip!(nx.iter(), nz.iter(), xz.iter_mut()) { 116 | *xz = x.clone() * z.clone(); 117 | } 118 | 119 | for (y, z, yz) in izip!(ny.iter(), nz.iter(), yz.iter_mut()) { 120 | *yz = y.clone() * z.clone(); 121 | } 122 | 123 | let SXX: f64 = sum_as_f64!(xxm1.iter(), R); 124 | let SYY: f64 = sum_as_f64!(yym1.iter(), R); 125 | let SZZ: f64 = sum_as_f64!(zzm1.iter(), R); 126 | let SXY: f64 = sum_as_f64!(xy.iter(), R); 127 | let SXZ: f64 = 
sum_as_f64!(xz.iter(), R); 128 | let SYZ: f64 = sum_as_f64!(yz.iter(), R); 129 | 130 | let S = Matrix3::new(SXX, SXY, SXZ, SXY, SYY, SYZ, SXZ, SYZ, SZZ); 131 | 132 | let px = line_points.column(0); 133 | let py = line_points.column(1); 134 | let pz = line_points.column(2); 135 | 136 | let xt1 = px 137 | .iter() 138 | .zip(xxm1.iter()) 139 | .map(|(a, b)| a.clone() * b.clone()); 140 | let xt2 = py.iter().zip(xy.iter()).map(|(a, b)| a.clone() * b.clone()); 141 | let xt3 = pz.iter().zip(xz.iter()).map(|(a, b)| a.clone() * b.clone()); 142 | let CX: f64 = refsum_as_f64!(izip!(xt1, xt2, xt3).map(|(t1, t2, t3)| t1 + t2 + t3), R); 143 | let CX: R = na::convert(CX); 144 | 145 | let yt1 = px.iter().zip(xy.iter()).map(|(a, b)| a.clone() * b.clone()); 146 | let yt2 = py 147 | .iter() 148 | .zip(yym1.iter()) 149 | .map(|(a, b)| a.clone() * b.clone()); 150 | let yt3 = pz.iter().zip(yz.iter()).map(|(a, b)| a.clone() * b.clone()); 151 | let CY: f64 = refsum_as_f64!(izip!(yt1, yt2, yt3).map(|(t1, t2, t3)| t1 + t2 + t3), R); 152 | let CY: R = na::convert(CY); 153 | 154 | let zt1 = px.iter().zip(xz.iter()).map(|(a, b)| a.clone() * b.clone()); 155 | let zt2 = py.iter().zip(yz.iter()).map(|(a, b)| a.clone() * b.clone()); 156 | let zt3 = pz 157 | .iter() 158 | .zip(zzm1.iter()) 159 | .map(|(a, b)| a.clone() * b.clone()); 160 | let CZ: f64 = refsum_as_f64!(izip!(zt1, zt2, zt3).map(|(t1, t2, t3)| t1 + t2 + t3), R); 161 | let CZ: R = na::convert(CZ); 162 | 163 | let C = Vector3::new(CX, CY, CZ); 164 | 165 | let s_f64_pinv = my_pinv(&S)?; 166 | let s_pinv = despecialize!(s_f64_pinv, R, 3, 3); 167 | let r: Vector3 = s_pinv * C; 168 | 169 | let mut result = crate::Points::new(nalgebra::OMatrix::::zeros_generic(u1, u3)); 170 | for j in 0..3 { 171 | result.data[(0, j)] = r[j].clone(); 172 | } 173 | Ok(result) 174 | } 175 | 176 | fn my_pinv(m: &SMatrix) -> Result, Error> { 177 | Ok( 178 | na::linalg::SVD::try_new(m.clone(), true, true, na::convert(1e-7), 100) 179 | .ok_or(Error::SvdFailed)? 
180 | .pseudo_inverse(na::convert(1.0e-7)) 181 | .unwrap(), 182 | ) 183 | // The unwrap() above is safe because, as of nalgebra 0.19, this could only error if the SVD has not been computed. 184 | // But this is not possible here, so the unwrap() should never result in a panic. 185 | } 186 | 187 | #[cfg(test)] 188 | mod tests { 189 | use nalgebra::Vector3; 190 | 191 | use crate::{Camera, ExtrinsicParameters, IntrinsicParametersPerspective, PerspectiveParams}; 192 | 193 | use super::*; 194 | 195 | #[test] 196 | fn roundtrip() { 197 | let i1: IntrinsicParametersPerspective<_> = PerspectiveParams { 198 | fx: 100.0, 199 | fy: 102.0, 200 | skew: 0.1, 201 | cx: 321.0, 202 | cy: 239.9, 203 | } 204 | .into(); 205 | let e1 = ExtrinsicParameters::from_view( 206 | &Vector3::new(1.2, 3.4, 5.6), // camcenter 207 | &Vector3::new(2.2, 3.4, 5.6), // lookat 208 | &nalgebra::Unit::new_normalize(Vector3::new(0.0, 0.0, 1.0)), // up 209 | ); 210 | let cam1 = Camera::new(i1, e1); 211 | 212 | let i2: IntrinsicParametersPerspective<_> = PerspectiveParams { 213 | fx: 200.0, 214 | fy: 202.0, 215 | skew: 0.01, 216 | cx: 321.0, 217 | cy: 239.9, 218 | } 219 | .into(); 220 | let e2 = ExtrinsicParameters::from_view( 221 | &Vector3::new(3.4, 1.2, 5.6), // camcenter 222 | &Vector3::new(2.2, 3.4, 5.6), // lookat 223 | &nalgebra::Unit::new_normalize(Vector3::new(0.0, 0.0, 1.0)), // up 224 | ); 225 | let cam2 = Camera::new(i2, e2); 226 | 227 | let expected_point = Points::new(SMatrix::<_, 1, 3>::new(2.21, 3.41, 5.61)); 228 | 229 | let image1 = cam1.world_to_pixel(&expected_point); 230 | 231 | let image2 = cam2.world_to_pixel(&expected_point); 232 | 233 | let ray1 = cam1.pixel_to_world(&image1).to_single_ray(); 234 | let ray2 = cam2.pixel_to_world(&image2).to_single_ray(); 235 | let rays = [ray1, ray2]; 236 | 237 | let point3d = best_intersection_of_rays(&rays).unwrap(); 238 | 239 | // check reprojection for cam1 240 | let image1_actual = cam1.world_to_pixel(&point3d); 241 | 
approx::assert_abs_diff_eq!( 242 | image1_actual.data, 243 | image1.data, 244 | epsilon = nalgebra::convert(1e-10) 245 | ); 246 | 247 | // check reprojection for cam2 248 | let image2_actual = cam2.world_to_pixel(&point3d); 249 | approx::assert_abs_diff_eq!( 250 | image2_actual.data, 251 | image2.data, 252 | epsilon = nalgebra::convert(1e-10) 253 | ); 254 | 255 | // check 3d reconstruction 256 | approx::assert_abs_diff_eq!( 257 | point3d.data, 258 | expected_point.data, 259 | epsilon = nalgebra::convert(1e-10) 260 | ); 261 | } 262 | } 263 | --------------------------------------------------------------------------------