├── .cargo
└── config.toml
├── .devcontainer
└── devcontainer.json
├── .github
├── FUNDING.yml
├── ISSUE_TEMPLATE
│ ├── bug_report.yaml
│ └── config.yml
├── dependabot.yml
└── workflows
│ ├── ci.yml
│ └── release.yml
├── .gitignore
├── CHANGLOG.md
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── Cargo.lock
├── Cargo.toml
├── LICENSE
├── README.md
├── SECURITY.md
├── assets
├── brand
│ ├── Product.ico
│ ├── banner.png
│ ├── no_video.png
│ └── white.png
├── dlls
│ ├── concrt140.dll
│ ├── msvcp140.dll
│ ├── onnxruntime.dll
│ ├── opencv_world490.dll
│ ├── vcruntime140.dll
│ └── vcruntime140_1.dll
├── fonts
│ ├── Inter-Regular.ttf
│ ├── Silkscreen-Regular.ttf
│ └── icons.ttf
├── model
│ ├── blazeface-320.onnx
│ ├── data.json
│ └── mb05_120x120.onnx
├── onnx_osx
│ └── .gitkeep
└── updates
│ ├── jan-03-2023-update.gif
│ └── may-12-2023-update.gif
├── src
├── camera.rs
├── consts.rs
├── enums
│ ├── crop_policy.rs
│ ├── extreme.rs
│ ├── message.rs
│ └── mod.rs
├── face.rs
├── filter.rs
├── gui
│ ├── app.rs
│ ├── mod.rs
│ ├── style.rs
│ └── view.rs
├── main.rs
├── network.rs
├── process.rs
├── structs
│ ├── app.rs
│ ├── camera.rs
│ ├── data.rs
│ ├── face.rs
│ ├── mod.rs
│ ├── network.rs
│ ├── pose.rs
│ ├── release.rs
│ ├── state.rs
│ └── tddfa.rs
├── tddfa.rs
└── utils
│ ├── common.rs
│ ├── headpose.rs
│ ├── image.rs
│ ├── mod.rs
│ ├── tddfa.rs
│ └── visualize.rs
└── wix
├── License.rtf
└── main.wxs
/.cargo/config.toml:
--------------------------------------------------------------------------------
1 | # On Windows
2 | #
3 | # cargo install -f cargo-binutils
4 | # rustup component add llvm-tools-preview
5 |
6 | [target.x86_64-pc-windows-msvc]
7 | rustflags = ["-C", "target-feature=+crt-static", "--emit=asm"]
8 |
9 | [target.x86_64-pc-windows-gnu]
10 | rustflags = ["-C", "target-feature=+crt-static", "--emit=asm"]
11 |
12 | # On Linux:
13 | # - Ubuntu, sudo apt-get install lld clang
14 | # - Arch, sudo pacman -S lld clang
15 | [target.x86_64-unknown-linux-gnu]
16 | rustflags = ["-C", "linker=clang", "-C", "link-arg=-fuse-ld=lld"]
--------------------------------------------------------------------------------
/.devcontainer/devcontainer.json:
--------------------------------------------------------------------------------
1 | {
2 | "image": "mcr.microsoft.com/devcontainers/universal:2",
3 | "features": {
4 | "ghcr.io/devcontainers/features/rust:1": {}
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | # These are supported funding model platforms
2 |
3 | github: Shubhamai # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
4 | patreon: # Replace with a single Patreon username
5 | open_collective: # Replace with a single Open Collective username
6 | ko_fi: # Replace with a single Ko-fi username
7 | tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
8 | community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
9 | liberapay: # Replace with a single Liberapay username
10 | issuehunt: # Replace with a single IssueHunt username
11 | otechie: # Replace with a single Otechie username
12 | lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
13 | custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
14 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.yaml:
--------------------------------------------------------------------------------
1 | name: Bug Report
2 | description: File a bug report
3 | labels: [Bug]
4 | body:
5 | - type: markdown
6 | attributes:
7 | value: "# Bug Report Form"
8 | - type: textarea
9 | id: logs
10 | attributes:
11 | label: Logs
12 | placeholder: |
13 | * INFO StableView::network: Version 0.1.0 on windows
14 | * INFO StableView::network: Sending data to 127.0.0.1 on port 4242
15 | description: Please paste the contents from the logs.txt (located in C:/Users/USERNAME/AppData/Roaming/StableView/data) into the field.
16 | render: StringTemplate
17 | validations:
18 | required: true
19 | - type: textarea
20 | id: desc
21 | attributes:
22 | label: Describe the bug
23 | description: A clear and concise description of what the bug is and what happened.
24 | validations:
25 | required: true
26 | - type: textarea
27 | id: reproduce
28 | attributes:
29 | label: Steps to reproduce
30 | placeholder: |
31 | 1.
32 | 2.
33 | 3.
34 | ...
35 | description: We need to know how you encountered the bug to properly troubleshoot the issue.
36 | validations:
37 | required: true
38 | - type: textarea
39 | id: references
40 | attributes:
41 | label: References (optional)
42 | description: If applicable, add screenshots or videos to help explain your problem.
43 | validations:
44 | required: false
45 | - type: textarea
46 | id: misc
47 | attributes:
48 | label: Additional info (optional)
49 | description: Add any other context about the problem here. Was this working before? When did the issue start occurring?
50 | validations:
51 | required: false
52 | - type: input
53 | id: discord
54 | attributes:
55 | label: Discord Username (optional)
56 | description: You may optionally provide your Discord username, so that we may contact you directly about the issue.
57 | placeholder: ex. username#1234
58 | validations:
59 | required: false
60 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/config.yml:
--------------------------------------------------------------------------------
1 | blank_issues_enabled: false
2 | contact_links:
3 | - name: Discuss
4 | url: https://github.com/Shubhamai/StableView/discussions
5 | about: Please ask and answer questions here.
6 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | # To get started with Dependabot version updates, you'll need to specify which
2 | # package ecosystems to update and where the package manifests are located.
3 | # Please see the documentation for all configuration options:
4 | # https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
5 |
6 | version: 2
7 | updates:
8 | - package-ecosystem: "cargo" # See documentation for possible values
9 | directory: "/" # Location of package manifests
10 | schedule:
11 | interval: "weekly"
12 |
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | name: CI
2 |
3 | on:
4 | push:
5 | branches:
6 | - "*"
7 | pull_request:
8 | branches:
9 | - "*"
10 |
11 | env:
12 | CARGO_TERM_COLOR: always
13 |
14 | jobs:
15 | windows:
16 | runs-on: windows-latest
17 |
18 | defaults:
19 | run:
20 | shell: bash
21 |
22 | steps:
23 | - uses: actions/checkout@v3
24 | - name: Cargo Cache
25 | uses: Swatinem/rust-cache@v2
26 |
27 | - name: Add msbuild to PATH
28 | uses: microsoft/setup-msbuild@v1.1
29 |
30 | - name: Install WiX
31 | run: dotnet tool install --global wix
32 |
33 | - name: Install cargo-wix
34 | run: cargo install cargo-wix
35 |
36 | - name: Installing Opencv, LLVM
37 | run: choco install llvm opencv
38 |
39 | - name: Test
40 |
41 | env:
42 | OPENCV_LINK_LIBS: opencv_world490
43 | OPENCV_INCLUDE_PATHS: C:\tools\opencv\build\include
44 | OPENCV_LINK_PATHS: C:\tools\opencv\build\x64\vc16\lib
45 | run: cargo test --release
--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
1 | name: Release
2 |
3 | on:
4 | push:
5 | tags:
6 | - "v*" # Push events to matching v*, i.e. v1.0, v20.15.10
7 | workflow_dispatch:
8 |
9 | jobs:
10 | windows:
11 | runs-on: windows-latest
12 |
13 | defaults:
14 | run:
15 | shell: bash
16 |
17 | steps:
18 | - uses: actions/checkout@v3
19 | - name: Cargo Cache
20 | uses: Swatinem/rust-cache@v2
21 |
22 |
23 | - name: Add msbuild to PATH
24 | uses: microsoft/setup-msbuild@v1.1
25 |
26 | - name: Install WiX
27 | run: dotnet tool install --global wix
28 |
29 | - name: Install cargo-wix
30 | run: cargo install cargo-wix
31 |
32 | - name: Installing Opencv, LLVM
33 | run: choco install llvm opencv
34 | #- name: Test
35 | # env:
36 | # OPENCV_LINK_LIBS: opencv_world460
37 | # OPENCV_INCLUDE_PATHS: C:\tools\opencv\build\include
38 | # OPENCV_LINK_PATHS: C:\tools\opencv\build\x64\vc15\lib
39 | # run: cargo test --release
40 | - name: Build
41 | env:
42 | OPENCV_LINK_LIBS: opencv_world490
43 | OPENCV_INCLUDE_PATHS: C:\tools\opencv\build\include
44 | OPENCV_LINK_PATHS: C:\tools\opencv\build\x64\vc16\lib
45 | run: cargo build --release
46 | - name: Create msi installer
47 | env:
48 | OPENCV_LINK_LIBS: opencv_world490
49 | OPENCV_INCLUDE_PATHS: C:\tools\opencv\build\include
50 | OPENCV_LINK_PATHS: C:\tools\opencv\build\x64\vc16\lib
51 | run: cargo wix --nocapture
52 | - name: Upload installer
53 | uses: actions/upload-artifact@v3
54 | with:
55 | name: StableView.msi
56 | path: target/wix/*.msi
57 | - name: Release
58 | uses: softprops/action-gh-release@v1
59 | if: startsWith(github.ref, 'refs/tags/')
60 | with:
61 | files: |
62 | target/wix/*.msi
63 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | target/
2 |
--------------------------------------------------------------------------------
/CHANGLOG.md:
--------------------------------------------------------------------------------
1 | # Change Log
2 |
3 | All StableView releases with the relative changes are documented in this file.
4 |
5 | > The application is currently in development.
6 |
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Contributor Covenant Code of Conduct
2 |
3 | ## Our Pledge
4 |
5 | We as members, contributors, and leaders pledge to make participation in our
6 | community a harassment-free experience for everyone, regardless of age, body
7 | size, visible or invisible disability, ethnicity, sex characteristics, gender
8 | identity and expression, level of experience, education, socio-economic status,
9 | nationality, personal appearance, race, religion, or sexual identity
10 | and orientation.
11 |
12 | We pledge to act and interact in ways that contribute to an open, welcoming,
13 | diverse, inclusive, and healthy community.
14 |
15 | ## Our Standards
16 |
17 | Examples of behavior that contributes to a positive environment for our
18 | community include:
19 |
20 | * Demonstrating empathy and kindness toward other people
21 | * Being respectful of differing opinions, viewpoints, and experiences
22 | * Giving and gracefully accepting constructive feedback
23 | * Accepting responsibility and apologizing to those affected by our mistakes,
24 | and learning from the experience
25 | * Focusing on what is best not just for us as individuals, but for the
26 | overall community
27 |
28 | Examples of unacceptable behavior include:
29 |
30 | * The use of sexualized language or imagery, and sexual attention or
31 | advances of any kind
32 | * Trolling, insulting or derogatory comments, and personal or political attacks
33 | * Public or private harassment
34 | * Publishing others' private information, such as a physical or email
35 | address, without their explicit permission
36 | * Other conduct which could reasonably be considered inappropriate in a
37 | professional setting
38 |
39 | ## Enforcement Responsibilities
40 |
41 | Community leaders are responsible for clarifying and enforcing our standards of
42 | acceptable behavior and will take appropriate and fair corrective action in
43 | response to any behavior that they deem inappropriate, threatening, offensive,
44 | or harmful.
45 |
46 | Community leaders have the right and responsibility to remove, edit, or reject
47 | comments, commits, code, wiki edits, issues, and other contributions that are
48 | not aligned to this Code of Conduct, and will communicate reasons for moderation
49 | decisions when appropriate.
50 |
51 | ## Scope
52 |
53 | This Code of Conduct applies within all community spaces, and also applies when
54 | an individual is officially representing the community in public spaces.
55 | Examples of representing our community include using an official e-mail address,
56 | posting via an official social media account, or acting as an appointed
57 | representative at an online or offline event.
58 |
59 | ## Enforcement
60 |
61 | Instances of abusive, harassing, or otherwise unacceptable behavior may be
62 | reported to the community leaders responsible for enforcement at
63 | shubham.aiengineer@gmail.com.
64 | All complaints will be reviewed and investigated promptly and fairly.
65 |
66 | All community leaders are obligated to respect the privacy and security of the
67 | reporter of any incident.
68 |
69 | ## Enforcement Guidelines
70 |
71 | Community leaders will follow these Community Impact Guidelines in determining
72 | the consequences for any action they deem in violation of this Code of Conduct:
73 |
74 | ### 1. Correction
75 |
76 | **Community Impact**: Use of inappropriate language or other behavior deemed
77 | unprofessional or unwelcome in the community.
78 |
79 | **Consequence**: A private, written warning from community leaders, providing
80 | clarity around the nature of the violation and an explanation of why the
81 | behavior was inappropriate. A public apology may be requested.
82 |
83 | ### 2. Warning
84 |
85 | **Community Impact**: A violation through a single incident or series
86 | of actions.
87 |
88 | **Consequence**: A warning with consequences for continued behavior. No
89 | interaction with the people involved, including unsolicited interaction with
90 | those enforcing the Code of Conduct, for a specified period of time. This
91 | includes avoiding interactions in community spaces as well as external channels
92 | like social media. Violating these terms may lead to a temporary or
93 | permanent ban.
94 |
95 | ### 3. Temporary Ban
96 |
97 | **Community Impact**: A serious violation of community standards, including
98 | sustained inappropriate behavior.
99 |
100 | **Consequence**: A temporary ban from any sort of interaction or public
101 | communication with the community for a specified period of time. No public or
102 | private interaction with the people involved, including unsolicited interaction
103 | with those enforcing the Code of Conduct, is allowed during this period.
104 | Violating these terms may lead to a permanent ban.
105 |
106 | ### 4. Permanent Ban
107 |
108 | **Community Impact**: Demonstrating a pattern of violation of community
109 | standards, including sustained inappropriate behavior, harassment of an
110 | individual, or aggression toward or disparagement of classes of individuals.
111 |
112 | **Consequence**: A permanent ban from any sort of public interaction within
113 | the community.
114 |
115 | ## Attribution
116 |
117 | This Code of Conduct is adapted from the [Contributor Covenant][homepage],
118 | version 2.0, available at
119 | https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
120 |
121 | Community Impact Guidelines were inspired by [Mozilla's code of conduct
122 | enforcement ladder](https://github.com/mozilla/diversity).
123 |
124 | [homepage]: https://www.contributor-covenant.org
125 |
126 | For answers to common questions about this code of conduct, see the FAQ at
127 | https://www.contributor-covenant.org/faq. Translations are available at
128 | https://www.contributor-covenant.org/translations.
129 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing
2 |
3 | Thank you for considering contributing to StableView!
4 |
5 | When contributing to this repository, please first discuss the change you wish to make via [discussions](https://github.com/shubhamai/StableView/discussions), [issue](https://github.com/shubhamai/StableView/issues/new/choose), email, or any other method with the owners of this repository before making a change.
6 |
7 | Please note we have a [code of conduct](CODE_OF_CONDUCT.md); please follow it in all your interactions with the project.
8 |
9 | ## Development environment setup
10 |
11 | To set up a development environment, please follow these steps:
12 |
13 | 1. Install [Rust](https://www.rust-lang.org/).
14 |
15 | 2. Clone the repo
16 |
17 | ```sh
18 | git clone https://github.com/shubhamai/StableView
19 | ```
20 |
21 | 3. Install [opencv-rust](https://github.com/twistedfall/opencv-rust) on your system.
22 |
23 | 4. Run `cargo run` to run the application without any optimizations. To run the application fully optimized, add `--release` to the command, ie. `cargo run --release`
24 |
25 | 5. To build the `.msi` installer for windows -
26 | 1. Install [WiX Toolset v3.14.1](https://github.com/wixtoolset/wix3/releases/).
27 | 2. Install [cargo-wix](https://github.com/volks73/cargo-wix).
28 | 3. Run `cargo wix`. A new folder will be created in `target` folder containing the `.msi` file.
29 |
30 | ### Apple Silicon
31 |
32 | To build stableview on apple silicon
33 |
34 | 1. Download `onnxruntime-osx-arm64-1.18.0.tgz` from [ONNX Runtime v1.18.0](https://github.com/microsoft/onnxruntime/releases/tag/v1.18.0) and unzip it to `assets/onnx_osx` folder.
35 |
36 | `sudo ORT_LIB_LOCATION=./assets/onnx_osx ORT_STRATEGY=system DYLD_FALLBACK_LIBRARY_PATH=./assets/onnx_osx/lib cargo run --release`
37 |
38 | ## Issues and feature requests
39 |
40 | You've found a bug in the source code, a mistake in the wiki or maybe you'd like a new feature? Take a look at [GitHub Discussions](https://github.com/shubhamai/StableView/discussions) to see if it's already being discussed. You can help us by [submitting an issue on GitHub](https://github.com/shubhamai/StableView/issues/new/choose). Before you create an issue, make sure to search the issue archive, your issue may have already been addressed!
41 |
42 | ### How to submit a Pull Request
43 |
44 | 1. Search our repository for open or closed
45 | [Pull Requests](https://github.com/shubhamai/StableView/pulls)
46 | that relate to your submission. You don't want to duplicate effort.
47 | 2. Fork the project
48 | 3. Create your feature branch (`git checkout -b feat/amazing_feature`)
49 | 4. Commit your changes (`git commit -m 'feat: add amazing_feature'`).
50 | 5. Push to the branch (`git push origin feat/amazing_feature`)
51 | 6. [Open a Pull Request](https://github.com/shubhamai/StableView/compare?expand=1)
52 |
--------------------------------------------------------------------------------
/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | name = "StableView"
3 | description = "A Head tracking application using only webcam"
4 | version = "1.1.0"
5 | edition = "2021"
6 | authors = ["Shubhamai"]
7 | publish = false # To prevent accidental publish on crates.io
8 | license = "MIT"
9 | repository = "https://github.com/Shubhamai/StableView"
10 | homepage = "https://github.com/Shubhamai/StableView"
11 | readme = "README.md"
12 |
13 |
14 | [dependencies]
15 | opencv = {version = "0.92.0", features=["clang-runtime"]} # 0.89.0
16 | serde = { version = "1.0", features = ["derive"] }
17 | serde_json = "1.0"
18 | once_cell = "1.16.0"
19 | rand = "0.8.5"
20 | tracing = "0.1"
21 | tracing-subscriber = "0.3"
22 | tracing-appender = "0.2.2"
23 | nokhwa = {version = "0.10.3", features = ["input-msmf", "input-v4l"]}
24 | confy = "0.5.1"
25 | directories = "5.0.0"
26 | iced = {version = "0.12.1", features = ["image", "smol"]}
27 | iced_native = "0.10.3"
28 | image = "0.24.6"
29 | onnxruntime = {git = "https://github.com/nbigaouette/onnxruntime-rs"}
30 | crossbeam-channel = "0.5.6"
31 | anyhow = "1.0.70"
32 | reqwest = { version = "0.11.6", features = ["blocking", "json"] }
33 | version-compare = "0.1"
34 | itertools = "0.13.0"
35 |
36 | [profile.release]
37 | debug = 0
38 | strip = "symbols"
39 | overflow-checks = false
40 | panic = "abort"
41 | opt-level = 3 # optimizing for speed
42 | lto = "thin"
43 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2023 Shubhamai
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |

2 | Background image credits - Les Chevaliers du Ciel
3 |
4 |
5 |
6 | Easy, fast and efficient Head Tracking application using only webcam
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
35 |
36 |
37 |
38 | # Status
39 |
40 | **Last Updated - 6 June, 2024**
41 |
42 | 
43 |
44 | - Recent Updates
45 | - [x] Supporting Linux and Apple Silicon
46 | - [x] Fixing bug causing reduced performance #78
47 |
48 | # Usage
49 |
50 | 1. Visit the [releases page](https://github.com/shubhamai/StableView/releases/latest) and download the latest version on your platform. For Windows, a `.msi` installer will be provided, simply double-click on the installer and follow the installation steps. After Installing, you can simply run `StableView` from the start menu.
51 |
52 | - Make sure you have internet connectivity while installing the application as it downloads the model weights for the first time.
53 |
54 | 2. The application uses opentrack to send the tracking data to respective applications. Please install it from their [Github repo](https://github.com/opentrack/opentrack).
55 |
56 | After installing OpenTrack, select Input as **UDP over network** so that OpenTrack can receive data from StableView and send it to the required application.
57 |
58 | ### Linux
59 |
60 | Run the following command in the terminal inside the folder:
61 |
62 | ```bash
63 | LD_LIBRARY_PATH=. ./StableView
64 | ```
65 |
66 | ### MacOS ( Apple Silicon )
67 |
68 | Run the following command in the terminal inside the folder:
69 |
70 | ```bash
71 | DYLD_FALLBACK_LIBRARY_PATH=. ./StableView
72 | ```
73 |
74 |
75 | # Features
76 |
77 | - Uses your regular old webcam with AI for head tracking. Uses extremely low CPU (<3% at 60 fps on a Ryzen 5 3600H) while delivering high performance.
78 | - Works with [opentrack](https://github.com/opentrack/opentrack) to run on any modern simulator including Microsoft Flight Simulator, Digital Combat Simulator, Xplane & more.
79 | - Easy to install :)
80 |
81 | # Shoutouts
82 |
83 | - Thanks to the authors of the paper [3DDFA_V2 : Towards Fast, Accurate and Stable 3D Dense Face Alignment](https://paperswithcode.com/paper/towards-fast-accurate-and-stable-3d-dense-1), without them, this application wouldn't have been possible, the majority of the model inference code is based on their work. Thanks, [Jianzhu Guo](https://guojianzhu.com), [Xiangyu Zhu](http://www.cbsr.ia.ac.cn/users/xiangyuzhu/), [Yang Yang](http://www.cbsr.ia.ac.cn/users/yyang/main.htm), Fan Yang, [Zhen Lei](http://www.cbsr.ia.ac.cn/users/zlei/) and [Stan Z. Li](https://scholar.google.com/citations?user=Y-nyLGIAAAAJ).
84 | - [Rust Faces](https://github.com/rustybuilder/rust-faces) by [rustybuilder](https://github.com/rustybuilder/rust-faces) provides face detection in Rust, used to recapture the face when it's lost.
85 | - [Sniffer](https://github.com/GyulyVGC/sniffnet/) for GUI inspirations, code structure, readme, etc.
86 | - [ChatGPT](https://openai.com/blog/chatgpt/) for assisting me to convert some of the Python code to Rust.
87 | - Product Icon from [Leonardo Yip](https://unsplash.com/@yipleonardo) on [Unsplash](https://unsplash.com/photos/rn-NLirHQPY).
88 |
89 | ## Bug Report
90 |
91 | If you see an error message or run into an issue, please [open a new issue](https://github.com/Shubhamai/StableView/issues/new/choose). This effort is valued and helps all the users.
92 |
93 | ## Feature Request
94 |
95 | If you have any idea or a missing feature you would like to see, please [submit a feature request](https://github.com/Shubhamai/StableView/issues/new/choose) or [discuss](https://github.com/Shubhamai/StableView/discussions) it with other users.
96 |
97 | ## Contributing
98 |
99 | Contributions are greatly appreciated! If you want to contribute to the project, please read [Contributing.md](CONTRIBUTING.md) for more details.
100 |
101 | ## Contributors
102 |
103 | Thanks to all the people who contributed to the project.
104 |
105 |
106 |
107 |
108 |
109 | ## License
110 |
111 | StableView is open-source and free software released under the [MIT License](LICENSE).
112 |
113 | ## Citations
114 |
115 | ```bibtex
116 | @inproceedings{guo2020towards,
117 | title = {Towards Fast, Accurate and Stable 3D Dense Face Alignment},
118 | author = {Guo, Jianzhu and Zhu, Xiangyu and Yang, Yang and Yang, Fan and Lei, Zhen and Li, Stan Z},
119 | booktitle = {Proceedings of the European Conference on Computer Vision (ECCV)},
120 | year = {2020}
121 | }
122 | ```
123 |
124 | ```bibtex
125 | @misc{3ddfa_cleardusk,
126 | author = {Guo, Jianzhu and Zhu, Xiangyu and Lei, Zhen},
127 | title = {3DDFA},
128 | howpublished = {\url{https://github.com/cleardusk/3DDFA}},
129 | year = {2018}
130 | }
131 | ```
132 |
--------------------------------------------------------------------------------
/SECURITY.md:
--------------------------------------------------------------------------------
1 | # Security
2 |
3 | If you believe you have found a security vulnerability in the repository, please report it to the email shubham.aiengineer@gmail.com.
--------------------------------------------------------------------------------
/assets/brand/Product.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Shubhamai/StableView/dca48d9c9148cd9c228c1492a0ecf3fc1df13fe8/assets/brand/Product.ico
--------------------------------------------------------------------------------
/assets/brand/banner.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Shubhamai/StableView/dca48d9c9148cd9c228c1492a0ecf3fc1df13fe8/assets/brand/banner.png
--------------------------------------------------------------------------------
/assets/brand/no_video.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Shubhamai/StableView/dca48d9c9148cd9c228c1492a0ecf3fc1df13fe8/assets/brand/no_video.png
--------------------------------------------------------------------------------
/assets/brand/white.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Shubhamai/StableView/dca48d9c9148cd9c228c1492a0ecf3fc1df13fe8/assets/brand/white.png
--------------------------------------------------------------------------------
/assets/dlls/concrt140.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Shubhamai/StableView/dca48d9c9148cd9c228c1492a0ecf3fc1df13fe8/assets/dlls/concrt140.dll
--------------------------------------------------------------------------------
/assets/dlls/msvcp140.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Shubhamai/StableView/dca48d9c9148cd9c228c1492a0ecf3fc1df13fe8/assets/dlls/msvcp140.dll
--------------------------------------------------------------------------------
/assets/dlls/onnxruntime.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Shubhamai/StableView/dca48d9c9148cd9c228c1492a0ecf3fc1df13fe8/assets/dlls/onnxruntime.dll
--------------------------------------------------------------------------------
/assets/dlls/opencv_world490.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Shubhamai/StableView/dca48d9c9148cd9c228c1492a0ecf3fc1df13fe8/assets/dlls/opencv_world490.dll
--------------------------------------------------------------------------------
/assets/dlls/vcruntime140.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Shubhamai/StableView/dca48d9c9148cd9c228c1492a0ecf3fc1df13fe8/assets/dlls/vcruntime140.dll
--------------------------------------------------------------------------------
/assets/dlls/vcruntime140_1.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Shubhamai/StableView/dca48d9c9148cd9c228c1492a0ecf3fc1df13fe8/assets/dlls/vcruntime140_1.dll
--------------------------------------------------------------------------------
/assets/fonts/Inter-Regular.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Shubhamai/StableView/dca48d9c9148cd9c228c1492a0ecf3fc1df13fe8/assets/fonts/Inter-Regular.ttf
--------------------------------------------------------------------------------
/assets/fonts/Silkscreen-Regular.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Shubhamai/StableView/dca48d9c9148cd9c228c1492a0ecf3fc1df13fe8/assets/fonts/Silkscreen-Regular.ttf
--------------------------------------------------------------------------------
/assets/fonts/icons.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Shubhamai/StableView/dca48d9c9148cd9c228c1492a0ecf3fc1df13fe8/assets/fonts/icons.ttf
--------------------------------------------------------------------------------
/assets/model/blazeface-320.onnx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Shubhamai/StableView/dca48d9c9148cd9c228c1492a0ecf3fc1df13fe8/assets/model/blazeface-320.onnx
--------------------------------------------------------------------------------
/assets/model/mb05_120x120.onnx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Shubhamai/StableView/dca48d9c9148cd9c228c1492a0ecf3fc1df13fe8/assets/model/mb05_120x120.onnx
--------------------------------------------------------------------------------
/assets/onnx_osx/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Shubhamai/StableView/dca48d9c9148cd9c228c1492a0ecf3fc1df13fe8/assets/onnx_osx/.gitkeep
--------------------------------------------------------------------------------
/assets/updates/jan-03-2023-update.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Shubhamai/StableView/dca48d9c9148cd9c228c1492a0ecf3fc1df13fe8/assets/updates/jan-03-2023-update.gif
--------------------------------------------------------------------------------
/assets/updates/may-12-2023-update.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Shubhamai/StableView/dca48d9c9148cd9c228c1492a0ecf3fc1df13fe8/assets/updates/may-12-2023-update.gif
--------------------------------------------------------------------------------
/src/camera.rs:
--------------------------------------------------------------------------------
1 | /// Running camera on a seperate thread and returning the frames
2 | use crossbeam_channel::Sender;
3 |
4 | use opencv::{
5 | prelude::{Mat, VideoCaptureTrait, VideoCaptureTraitConst},
6 | videoio,
7 | };
8 | use std::{
9 | sync::{
10 | self,
11 | atomic::{AtomicBool, Ordering},
12 | },
13 | thread,
14 | };
15 |
16 | use std::collections::HashMap;
17 |
18 | use crate::structs::camera::ThreadedCamera;
19 |
20 | use anyhow::Result;
21 |
22 | impl ThreadedCamera {
23 | pub fn get_available_cameras() -> Result> {
24 | let mut devices_list = HashMap::new();
25 |
26 | let available_devices = nokhwa::query(match nokhwa::native_api_backend() {
27 | Some(native_api_backend) => native_api_backend,
28 | None => {
29 | return Err(anyhow::anyhow!(
30 | "Unable to read native API backend for camera."
31 | ));
32 | }
33 | });
34 |
35 | match available_devices {
36 | Ok(available_devices) => {
37 | if available_devices.is_empty() {
38 | tracing::error!(
39 | "No Camera devices found. Setting default (No Device Found, -1)",
40 | );
41 | devices_list.insert("No Device Found".to_string(), -1);
42 | } else {
43 | for device_info in available_devices {
44 | tracing::warn!(
45 | "Detected : {} @ index {}",
46 | device_info.human_name(),
47 | device_info.index()
48 | );
49 | devices_list.insert(
50 | format!("{:<4} {}", device_info.human_name(), device_info.index()),
51 | match device_info.index().as_index() {
52 | Ok(index) => index as i32,
53 | Err(error) => {
54 | tracing::error!("Unable to get camera index : {:?}, adding (No Device Found, -1)", error);
55 | devices_list.insert("No Device Found".to_string(), -1);
56 | return Ok(devices_list);
57 | }
58 | },
59 | );
60 | }
61 | }
62 | }
63 | Err(error) => {
64 | tracing::error!("Unable to read camera devices : {:?}", error);
65 | return Err(anyhow::anyhow!(
66 | "Unable to read camera devices : {:?}",
67 | error
68 | ));
69 | // devices_list.insert("No Device Found".to_string(), 0);
70 | }
71 | };
72 |
73 | Ok(devices_list)
74 | }
75 |
76 | pub fn start_camera_thread(
77 | tx: Sender,
78 | camera_index: i32,
79 | camera_name: String,
80 | ) -> Result {
81 | // Serving as a signal to stop the thread when needed
82 | let keep_running = sync::Arc::new(AtomicBool::new(false));
83 | keep_running.store(true, Ordering::SeqCst);
84 |
85 | let cloned_keep_running = keep_running.clone();
86 |
87 | let mut cam = match videoio::VideoCapture::new(camera_index, videoio::CAP_ANY) {
88 | Ok(cam) => cam,
89 | Err(error) => {
90 | return Err(anyhow::anyhow!(
91 | "Unable to open camera {camera_name} with index {camera_index} : {:?}",
92 | error
93 | ));
94 | }
95 | };
96 | let opened = match videoio::VideoCapture::is_opened(&cam) {
97 | Ok(opened) => opened,
98 | Err(error) => {
99 | return Err(anyhow::anyhow!(
100 | "Unable to open camera {camera_name} with index {camera_index} : {:?}",
101 | error
102 | ));
103 | }
104 | };
105 |
106 | if !opened {
107 | return Err(anyhow::anyhow!("Unable to open the camera!"));
108 | }
109 |
110 | let cam_thread = Some(thread::spawn(move || {
111 | // Running loop as long as keep_running is true
112 | while cloned_keep_running.load(Ordering::SeqCst) {
113 | // Reading frame
114 | let mut frame = Mat::default();
115 | match cam.read(&mut frame) {
116 | Ok(_) => (),
117 | Err(error) => {
118 | panic!("Unable to read frame from camera : {:?}", error);
119 | // return Err(anyhow::anyhow!(
120 | // "Unable to read frame from camera : {:?}",
121 | // error
122 | // ));
123 | }
124 | }
125 |
126 | // Send the frame to the other thread for processing
127 | if tx.send(frame).is_err() {
128 | break;
129 | }
130 | }
131 | }));
132 |
133 | Ok(Self {
134 | cam_thread,
135 | keep_running,
136 | })
137 | }
138 |
139 | pub fn shutdown(&mut self) {
140 | tracing::warn!("Shutting down camera thread...");
141 |
142 | self.keep_running.store(false, Ordering::SeqCst);
143 | match self.cam_thread.take() {
144 | Some(cam_thread) => match cam_thread.join() {
145 | Ok(_) => (),
146 | Err(error) => {
147 | tracing::error!("Unable to join camera thread : {:?}", error);
148 | // panic!("Unable to join camera thread : {:?}", error);
149 | }
150 | },
151 | None => {
152 | tracing::error!("Called shutdown on a camera thread that was already shutdown");
153 | // panic!("Called shutdown on a camera thread that was already shutdown");
154 | }
155 | }
156 | }
157 | }
158 |
159 | #[test]
160 | #[ignore = "Can only test this offline since it requires webcam, run cargo test -- --ignored"]
161 | pub fn test_threaded_camera() -> Result<()> {
162 | let (tx, rx) = crossbeam_channel::unbounded::();
163 |
164 | println!("{:?}", ThreadedCamera::get_available_cameras());
165 |
166 | let mut thr_cam = ThreadedCamera::start_camera_thread(tx, 0, "Default Camera".to_owned())?;
167 |
168 | for _ in 0..100 {
169 | let _frame = rx.recv()?;
170 | }
171 |
172 | thr_cam.shutdown();
173 |
174 | Ok(())
175 | }
176 |
--------------------------------------------------------------------------------
/src/consts.rs:
--------------------------------------------------------------------------------
1 | pub const APP_NAME: &str = env!("CARGO_PKG_NAME");
2 | pub const APP_VERSION: &str = env!("CARGO_PKG_VERSION");
3 | pub const APP_REPOSITORY: &str = env!("CARGO_PKG_REPOSITORY");
4 | pub const APP_AUTHORS: &str = env!("CARGO_PKG_AUTHORS");
5 | pub const APP_GITHUB_API: &str =
6 | "https://api.github.com/repos/shubhamai/stableview/releases/latest";
7 |
8 | pub const MODEL: &[u8] = include_bytes!("../assets/model/mb05_120x120.onnx");
9 | pub const DATA: &[u8] = include_bytes!("../assets/model/data.json");
10 | pub const BLAZE_FACE_MODEL: &[u8] = include_bytes!("../assets/model/blazeface-320.onnx");
11 |
12 | pub const ICON: &[u8] = include_bytes!("../assets/brand/Product.ico");
13 | pub const INTER_FONT: &[u8] = include_bytes!("../assets/fonts/Inter-Regular.ttf");
14 | pub const NO_VIDEO_IMG: &[u8] = include_bytes!("../assets/brand/no_video.png");
15 |
16 | pub const ICONS_FONT: &[u8] = include_bytes!("../assets/fonts/icons.ttf");
17 |
--------------------------------------------------------------------------------
/src/enums/crop_policy.rs:
--------------------------------------------------------------------------------
1 | // Cropping policy for the face in the frame.
2 |
3 | pub enum CropPolicy {
4 | Box,
5 | Landmark,
6 | }
7 |
--------------------------------------------------------------------------------
/src/enums/extreme.rs:
--------------------------------------------------------------------------------
1 | // Get the min of max value from an array/vector of floats
2 |
3 | pub enum Extreme {
4 | Min,
5 | Max,
6 | }
7 |
--------------------------------------------------------------------------------
/src/enums/message.rs:
--------------------------------------------------------------------------------
1 | // Events than can be triggered by the user in the GUI
2 |
3 | use iced::event::{Event};
4 |
5 | #[derive(Debug, Clone)]
6 | pub enum Message {
7 | Toggle,
8 | DefaultSettings,
9 | Tick,
10 | MinCutoffSliderChanged(u32),
11 | BetaSliderChanged(u32),
12 | FPSSliderChanged(u32),
13 | InputIP(String),
14 | InputPort(String),
15 | Camera(String),
16 | HideCamera(bool),
17 | OpenURL(String),
18 | OpenLogs,
19 | EventOccurred(Event),
20 | }
21 |
--------------------------------------------------------------------------------
/src/enums/mod.rs:
--------------------------------------------------------------------------------
1 | pub mod crop_policy;
2 | pub mod extreme;
3 | pub mod message;
4 |
--------------------------------------------------------------------------------
/src/face.rs:
--------------------------------------------------------------------------------
1 | // FROM https://github.com/rustybuilder/rust-faces
2 |
3 | use std::ops::Deref;
4 |
5 | use onnxruntime::environment::Environment;
6 | use onnxruntime::ndarray::{Array4, ArrayBase, Axis, Dim, OwnedRepr};
7 | use onnxruntime::tensor::OrtOwnedTensor;
8 | use onnxruntime::GraphOptimizationLevel;
9 | use opencv::prelude::MatTraitConstManual;
10 | use opencv::{
11 | core::{Mat, Size, Vec3b},
12 | imgproc,
13 | };
14 |
15 | use itertools::Itertools;
16 | use once_cell::sync::Lazy;
17 |
18 | use anyhow::Result;
19 |
20 | use crate::consts::BLAZE_FACE_MODEL;
21 | use crate::structs::face::FaceDetect;
22 |
23 | use itertools::iproduct;
24 |
25 | use std::collections::HashMap;
26 |
27 | /// Face detection result.
28 | #[derive(Debug, Clone)]
29 | pub struct Face {
30 | /// Face's bounding rectangle.
31 | pub rect: Rect,
32 | /// Confidence of the detection.
33 | pub confidence: f32,
34 | /// Landmarks of the face.
35 | pub landmarks: Option>,
36 | }
37 |
38 | /// Non-maximum suppression.
39 | #[derive(Copy, Clone, Debug)]
40 | pub struct Nms {
41 | pub iou_threshold: f32,
42 | }
43 |
44 | impl Default for Nms {
45 | fn default() -> Self {
46 | Self { iou_threshold: 0.3 }
47 | }
48 | }
49 |
50 | impl Nms {
51 | /// Suppress non-maxima faces.
52 | ///
53 | /// # Arguments
54 | ///
55 | /// * `faces` - Faces to suppress.
56 | ///
57 | /// # Returns
58 | ///
59 | /// * `Vec` - Suppressed faces.
60 | pub fn suppress_non_maxima(&self, mut faces: Vec) -> Vec {
61 | faces.sort_by(|a, b| a.confidence.partial_cmp(&b.confidence).unwrap());
62 |
63 | let mut faces_map = HashMap::new();
64 | faces.iter().rev().enumerate().for_each(|(i, face)| {
65 | faces_map.insert(i, face);
66 | });
67 |
68 | let mut nms_faces = Vec::with_capacity(faces.len());
69 | let mut count = 0;
70 | while !faces_map.is_empty() {
71 | if let Some((_, face)) = faces_map.remove_entry(&count) {
72 | nms_faces.push(face.clone());
73 | //faces_map.retain(|_, face2| face.rect.iou(&face2.rect) < self.iou_threshold);
74 | faces_map.retain(|_, face2| face.rect.iou(&face2.rect) < self.iou_threshold);
75 | }
76 | count += 1;
77 | }
78 |
79 | nms_faces
80 | }
81 | }
82 |
83 | #[derive(Debug, Clone)]
84 | pub struct PriorBoxesParams {
85 | min_sizes: Vec>,
86 | steps: Vec,
87 | variance: (f32, f32),
88 | }
89 |
90 | impl Default for PriorBoxesParams {
91 | fn default() -> Self {
92 | Self {
93 | min_sizes: vec![vec![8, 11], vec![14, 19, 26, 38, 64, 149]],
94 | steps: vec![8, 16],
95 | variance: (0.1, 0.2),
96 | }
97 | }
98 | }
99 |
100 | pub struct PriorBoxes {
101 | pub anchors: Vec<(f32, f32, f32, f32)>,
102 | variances: (f32, f32),
103 | }
104 |
105 | impl PriorBoxes {
106 | pub fn new(params: &PriorBoxesParams, image_size: (usize, usize)) -> Self {
107 | let feature_map_sizes: Vec<(usize, usize)> = params
108 | .steps
109 | .iter()
110 | .map(|&step| (image_size.0 / step, image_size.1 / step))
111 | .collect();
112 |
113 | let mut anchors = Vec::new();
114 |
115 | for ((f, min_sizes), step) in feature_map_sizes
116 | .iter()
117 | .zip(params.min_sizes.iter())
118 | .zip(params.steps.iter())
119 | {
120 | let step = *step;
121 | for (i, j) in iproduct!(0..f.1, 0..f.0) {
122 | for min_size in min_sizes {
123 | let s_kx = *min_size as f32 / image_size.0 as f32;
124 | let s_ky = *min_size as f32 / image_size.1 as f32;
125 | let cx = (j as f32 + 0.5) * step as f32 / image_size.0 as f32;
126 | let cy = (i as f32 + 0.5) * step as f32 / image_size.1 as f32;
127 | anchors.push((cx, cy, s_kx, s_ky));
128 | }
129 | }
130 | }
131 |
132 | Self {
133 | anchors,
134 | variances: params.variance,
135 | }
136 | }
137 |
138 | pub fn decode_box(&self, prior: &(f32, f32, f32, f32), pred: &(f32, f32, f32, f32)) -> Rect {
139 | let (anchor_cx, anchor_cy, s_kx, s_ky) = prior;
140 | let (x1, y1, x2, y2) = pred;
141 |
142 | let cx = anchor_cx + x1 * self.variances.0 * s_kx;
143 | let cy = anchor_cy + y1 * self.variances.0 * s_ky;
144 | let width = s_kx * (x2 * self.variances.1).exp();
145 | let height = s_ky * (y2 * self.variances.1).exp();
146 | let x_start = cx - width / 2.0;
147 | let y_start = cy - height / 2.0;
148 | Rect::at(x_start, y_start).ending_at(width + x_start, height + y_start)
149 | }
150 | }
151 |
152 | use std::fmt::Display;
153 |
154 | /// Rectangle.
155 | #[derive(Debug, Clone, Copy)]
156 | pub struct Rect {
157 | /// X coordinate of the top-left corner.
158 | pub x: f32,
159 | /// Y coordinate of the top-left corner.
160 | pub y: f32,
161 | /// Width of the rectangle.
162 | pub width: f32,
163 | /// Height of the rectangle.
164 | pub height: f32,
165 | }
166 |
167 | /// Rectangle position used for chaining constructors.
168 | pub struct RectPosition {
169 | pub x: f32,
170 | pub y: f32,
171 | }
172 |
173 | impl RectPosition {
174 | /// Makes a rectangle with the given end point.
175 | pub fn ending_at(&self, x: f32, y: f32) -> Rect {
176 | Rect {
177 | x: self.x,
178 | y: self.y,
179 | width: x - self.x,
180 | height: y - self.y,
181 | }
182 | }
183 | }
184 |
185 | impl Rect {
186 | /// Starts a rectangle with the given position.
187 | pub fn at(x: f32, y: f32) -> RectPosition {
188 | RectPosition { x, y }
189 | }
190 |
191 | /// Right end of the rectangle.
192 | pub fn right(&self) -> f32 {
193 | self.x + self.width
194 | }
195 |
196 | /// Bottom end of the rectangle.
197 | pub fn bottom(&self) -> f32 {
198 | self.y + self.height
199 | }
200 |
201 | pub fn iou(&self, other: &Rect) -> f32 {
202 | let left = self.x.max(other.x);
203 | let right = (self.right()).min(other.right());
204 | let top = self.y.max(other.y);
205 | let bottom = (self.bottom()).min(other.bottom());
206 |
207 | let intersection = (right - left).max(0.0) * (bottom - top).max(0.0);
208 | let area_self = self.width * self.height;
209 | let area_other = other.width * other.height;
210 |
211 | intersection / (area_self + area_other - intersection)
212 | }
213 |
214 | /// Scales the rectangle.
215 | pub fn scale(&self, x_scale: f32, y_scale: f32) -> Rect {
216 | Rect {
217 | x: self.x * x_scale,
218 | y: self.y * y_scale,
219 | width: self.width * x_scale,
220 | height: self.height * y_scale,
221 | }
222 | }
223 | }
224 |
225 | impl Display for Rect {
226 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
227 | write!(
228 | f,
229 | "{{x: {}, y: {}, width: {}, height: {}}}",
230 | self.x, self.y, self.width, self.height
231 | )
232 | }
233 | }
234 |
235 | impl FaceDetect {
236 | pub fn new() -> Result {
237 | static ENVIRONMENT: Lazy = Lazy::new(|| {
238 | match Environment::builder()
239 | .with_name("Face Detector")
240 | .with_log_level(onnxruntime::LoggingLevel::Warning)
241 | .build()
242 | {
243 | Ok(environment) => environment,
244 | Err(error) => {
245 | tracing::error!("Unable to create environment : {:?}", error);
246 | std::process::exit(1);
247 | }
248 | }
249 | });
250 |
251 | let face_detector = ENVIRONMENT
252 | .new_session_builder()?
253 | .with_optimization_level(GraphOptimizationLevel::All)?
254 | .with_number_threads(1)?
255 | .with_model_from_memory(BLAZE_FACE_MODEL)?;
256 |
257 | Ok(Self { face_detector })
258 | }
259 |
260 | pub fn preprocess_frame(
261 | &self,
262 | frame: Mat,
263 | ) -> Result, Dim<[usize; 4]>>> {
264 | let bgr_frame = frame;
265 |
266 | // bgr to rgb on new frame
267 | // let mut bgr_frame = Mat::default();
268 | // imgproc::cvt_color(&frame, &mut bgr_frame, imgproc::COLOR_BGR2RGB, 0)?;
269 |
270 | // let cropped_image = crop_img(&bgr_frame, &[150., 150., 400., 400.])?;
271 |
272 | // Resizing the frame
273 | let mut resized_frame = Mat::default();
274 | imgproc::resize(
275 | &bgr_frame,
276 | &mut resized_frame,
277 | Size {
278 | width: 320,
279 | height: 320,
280 | },
281 | 0.0,
282 | 0.0,
283 | imgproc::INTER_LINEAR, //*INTER_AREA, // https://stackoverflow.com/a/51042104 | Speed -> https://stackoverflow.com/a/44278268
284 | )?; // ! Error handling here
285 |
286 | let vec = Mat::data_typed::(&resized_frame)?;
287 |
288 | // use the shape [height, width, channels] instead of [channels, height, width].
289 | // Ok(Array3::from_shape_fn((120, 120, 3), |(y, x, c)| {
290 | // Vec3b::deref(&vec[x + y * 120])[c]
291 | // }))
292 |
293 | Ok(Array4::from_shape_fn((1, 3, 320, 320), |(_, c, y, x)| {
294 | f32::from(Vec3b::deref(&vec[x + y * 320])[c])
295 | }))
296 | }
297 |
298 | pub fn detect(&mut self, frame: Mat) -> Result<[f32; 4]> {
299 | // -> Result {
300 | // -> Result> {
301 | let array = match self.preprocess_frame(frame) {
302 | Ok(array) => vec![array],
303 | Err(e) => {
304 | tracing::error!("Error preprocessing frame: {:?}", e);
305 | // return Ok(vec![]);
306 | panic!("Error preprocessing frame: {:?}", e);
307 | }
308 | };
309 | let output_tensors: Vec> = self.face_detector.run(array)?;
310 | let boxes = &output_tensors[0];
311 | let scores = &output_tensors[1];
312 | let num_boxes = boxes.view().shape()[1];
313 | let input_width = 320;
314 | let input_height = 320;
315 | let priors = PriorBoxes::new(
316 | &PriorBoxesParams::default(),
317 | (input_width as usize, input_height as usize),
318 | );
319 |
320 | let ratio = 320. / 640.0;
321 | let scale_ratios = (input_width as f32 / ratio, input_height as f32 / ratio);
322 |
323 | // let faces: Vec<[f32; 4]> =
324 | let faces: Vec = boxes
325 | .view()
326 | .to_shape((num_boxes, 4))
327 | .unwrap()
328 | .axis_iter(Axis(0))
329 | .zip(priors.anchors.iter())
330 | .zip(
331 | scores
332 | .view()
333 | .to_shape((num_boxes, 2))
334 | .unwrap()
335 | .axis_iter(Axis(0)),
336 | )
337 | .filter_map(|((rect, prior), score)| {
338 | let score = score[1];
339 |
340 | if score > 0.5 {
341 | let rect = priors.decode_box(prior, &(rect[0], rect[1], rect[2], rect[3]));
342 | let rect = rect.scale(scale_ratios.0, scale_ratios.1);
343 |
344 | // Some([rect.x, rect.y, rect.width, rect.height])
345 | Some(Face {
346 | rect,
347 | landmarks: None,
348 | confidence: score,
349 | })
350 | } else {
351 | None
352 | }
353 | })
354 | .collect_vec();
355 |
356 | let nms = Nms::default();
357 | let nms_faces = nms.suppress_non_maxima(faces);
358 |
359 | if !nms_faces.is_empty() {
360 | return Ok([
361 | (nms_faces[0].rect.x), // * 640.) / 120.,
362 | (nms_faces[0].rect.y), // * 480.) / 120.,
363 | (nms_faces[0].rect.width), // * 640.) / 120.,
364 | (nms_faces[0].rect.height), // * 480.) / 120.,
365 | ]);
366 | } else {
367 | return Ok([0.0; 4]);
368 | }
369 | }
370 | }
371 |
--------------------------------------------------------------------------------
/src/filter.rs:
--------------------------------------------------------------------------------
1 | /// Rust Implementation of OneEuroFilter https://gery.casiez.net/1euro/ to filter real-time noisy signals
2 | /// Visit the site to learn more about the parameters involved and how to tune them
3 | /// The pseudocode is originajlly from https://github.com/jaantollander/OneEuroFilter, which is further modified for our use case
4 | use std::f32;
5 |
6 | // ! Need Default values
7 | struct OneEuroFilter {
8 | // Parameters
9 | min_cutoff: f32,
10 | beta: f32,
11 | d_cutoff: f32,
12 |
13 | // Previous Values
14 | x_prev: f32,
15 | dx_prev: f32,
16 | }
17 |
18 | // TODO : A way to have default value in filter
19 | impl OneEuroFilter {
20 | fn new(x0: f32, dx0: f32, min_cutoff: f32, beta: f32, d_cutoff: f32) -> Self {
21 | Self {
22 | min_cutoff,
23 | beta,
24 | d_cutoff,
25 |
26 | x_prev: x0,
27 | dx_prev: dx0,
28 | }
29 | }
30 |
31 | fn smoothing_factor(&self, t_e: f32, cutoff: f32) -> f32 {
32 | let r = 2.0 * std::f32::consts::PI * cutoff * t_e;
33 | r / (r + 1.0)
34 | }
35 |
36 | fn exponential_smoothing(&self, a: f32, x: f32, x_prev: f32) -> f32 {
37 | a.mul_add(x, (1.0 - a) * x_prev)
38 | }
39 |
40 | fn run(&mut self, x: f32, min_cutoff: Option, beta: Option) -> f32 {
41 | let min_cutoff = match min_cutoff {
42 | Some(min_cutoff) => min_cutoff,
43 | None => self.min_cutoff,
44 | };
45 |
46 | let beta = match beta {
47 | Some(beta) => beta,
48 | None => self.beta,
49 | };
50 |
51 | let t_e = 1.; // constant change in time
52 |
53 | let a_d = self.smoothing_factor(t_e, self.d_cutoff);
54 | let dx = (x - self.x_prev) / t_e;
55 |
56 | self.dx_prev = self.exponential_smoothing(a_d, dx, self.dx_prev);
57 |
58 | let cutoff = beta.mul_add(self.dx_prev.abs(), min_cutoff);
59 | let a = self.smoothing_factor(t_e, cutoff);
60 | self.x_prev = self.exponential_smoothing(a, x, self.x_prev);
61 |
62 | self.x_prev
63 | }
64 | }
65 |
66 | // TODO : Need to clean this up
67 | pub struct EuroDataFilter {
68 | x: OneEuroFilter,
69 | y: OneEuroFilter,
70 | z: OneEuroFilter,
71 | yaw: OneEuroFilter,
72 | pitch: OneEuroFilter,
73 | roll: OneEuroFilter,
74 | }
75 |
76 | impl EuroDataFilter {
77 | pub fn new(min_cutoff: f32, beta: f32) -> Self {
78 | Self {
79 | x: OneEuroFilter::new(0., 0., min_cutoff, beta, 1.),
80 | y: OneEuroFilter::new(0., 0., min_cutoff, beta, 1.),
81 | z: OneEuroFilter::new(0., 0., min_cutoff, beta, 1.),
82 | yaw: OneEuroFilter::new(0., 0., min_cutoff, beta, 1.),
83 | pitch: OneEuroFilter::new(0., 0., min_cutoff, beta, 1.),
84 | roll: OneEuroFilter::new(0., 0., min_cutoff, beta, 1.),
85 | }
86 | }
87 |
88 | pub fn filter_data(
89 | &mut self,
90 | data: [f32; 6],
91 | min_cutoff: Option,
92 | beta: Option,
93 | ) -> [f32; 6] {
94 | let mut filtered_data = [0.; 6];
95 |
96 | filtered_data[0] = self.x.run(data[0], min_cutoff, beta);
97 | filtered_data[1] = self.y.run(data[1], min_cutoff, beta);
98 | filtered_data[2] = self.z.run(data[2], min_cutoff, beta);
99 | filtered_data[3] = self.yaw.run(data[3], min_cutoff, beta);
100 | filtered_data[4] = self.pitch.run(data[4], min_cutoff, beta);
101 | filtered_data[5] = self.roll.run(data[5], min_cutoff, beta);
102 |
103 | filtered_data
104 | }
105 | }
106 |
107 | #[test]
108 | fn test_euro_filter() {
109 | use rand::Rng;
110 |
111 | // Create the filter with the initial values
112 | let mut filter = OneEuroFilter::new(1., 0.0, 0.0001, 0.1, 1.0);
113 |
114 | // Iterate over the sin values and apply the filter
115 | for i in 1..100 {
116 | // Compute the noisy sin value
117 | let x = (0.1 * i as f32).sin();
118 | let x_noisy = x + (rand::thread_rng().gen_range(0..10) as f32 / 10.0);
119 |
120 | // Filter the noisy sin value
121 | let x_filtered = filter.run(x_noisy, None, None);
122 |
123 | // Print the original and filtered sin values
124 | println!(
125 | "x {:.2}, noisy {:.2}, filtered {:.2}",
126 | x, x_noisy, x_filtered
127 | );
128 | }
129 | }
130 |
--------------------------------------------------------------------------------
/src/gui/app.rs:
--------------------------------------------------------------------------------
1 | // Handing the events and updating the state of the application
2 |
3 | use crate::consts::APP_NAME;
4 | use crate::gui::view::run_page;
5 | use crate::{
6 | enums::message::Message,
7 | filter::EuroDataFilter,
8 | structs::{app::HeadTracker, state::AppConfig},
9 | structs::{camera::ThreadedCamera, network::SocketNetwork, pose::ProcessHeadPose},
10 | };
11 | use iced::{
12 | executor, widget::Container, Application, Command, Element, Length,
13 | Theme,
14 | };
15 | use iced::{mouse, window};
16 | use std::{
17 | sync::atomic::Ordering,
18 | thread,
19 | time::{Duration, Instant},
20 | };
21 | use iced::Subscription;
22 | use iced::event::{self, Event};
23 |
24 | // Log the error and break the block expression
25 | macro_rules! trace_error {
26 | ($error:expr) => {
27 | let error_message = $error.to_string();
28 | tracing::error!(error_message);
29 | };
30 | }
31 |
32 | impl Application for HeadTracker {
33 | type Executor = executor::Default;
34 | type Flags = HeadTracker;
35 | type Message = Message;
36 | type Theme = Theme;
37 |
38 | fn new(flags: HeadTracker) -> (HeadTracker, Command) {
39 | (flags, Command::none())
40 | }
41 |
42 | fn title(&self) -> String {
43 | String::from(APP_NAME)
44 | }
45 |
46 | fn subscription(&self) -> Subscription {
47 | // If camera is hidden, only listen for events, otherwise listen for events and ticks to update camera frame in GUI
48 | match self.config.hide_camera {
49 | true => event::listen().map(Message::EventOccurred),
50 | false => {
51 | if self.headtracker_running.load(Ordering::SeqCst) {
52 | let ticks = iced::time::every(Duration::from_millis(1)).map(|_| Message::Tick);
53 | let runtime_events =
54 | event::listen().map(Message::EventOccurred);
55 | Subscription::batch(vec![runtime_events, ticks])
56 | } else {
57 | event::listen().map(Message::EventOccurred)
58 | }
59 | }
60 | }
61 | }
62 |
63 | fn theme(&self) -> Theme {
64 | Theme::Light
65 | }
66 |
67 | // fn style(&self) -> theme::Application {
68 | // fn dark_background(_theme: &Theme) -> application::Appearance {
69 | // application::Appearance {
70 | // background_color: Color::from_rgb8(245, 245, 245),
71 | // text_color: Color::BLACK,
72 | // }
73 | // }
74 |
75 | // theme::Application::from(dark_background as fn(&Theme) -> _)
76 | // }
77 |
78 | fn update(&mut self, message: Message) -> Command {
79 | match message {
80 | // Handles the event of the user clicking on the Start/Stop button
81 | Message::Toggle => {
82 | // If headtracker is not running, clicking the button will run it, otherwise it will stop it
83 | if !self.headtracker_running.load(Ordering::SeqCst) {
84 | // Setting headtracker to running
85 | self.headtracker_running.store(true, Ordering::SeqCst);
86 |
87 | // Getting the index of the selected camera
88 | let camera_index = match self.camera_list.get(&self.config.selected_camera) {
89 | Some(index) => *index,
90 | // ! Should this be 0 or something else ?
91 | None => {
92 | tracing::error!("Unable to find camera index, setting default to 0");
93 | 0
94 | }
95 | };
96 |
97 | // Copying the necessary data to the thread
98 | let camera_name = self.config.selected_camera.clone();
99 | let config = self.config.clone();
100 | let headtracker_running = self.headtracker_running.clone();
101 | let tx = self.sender.clone();
102 | let rx = self.receiver.clone();
103 | let error_tracker = self.error_tracker.clone();
104 |
105 | // Spawning the thread
106 | self.headtracker_thread = Some(thread::spawn(move || {
107 | let mut error_message = String::new();
108 |
109 | // Resetting error message
110 | {
111 | let mut error_guard = error_tracker.lock().unwrap();
112 | *error_guard = error_message.clone();
113 | }
114 |
115 | 'inner: {
116 | // Creating the filter
117 | let mut euro_filter = EuroDataFilter::new(
118 | config.min_cutoff.load(Ordering::SeqCst),
119 | config.beta.load(Ordering::SeqCst),
120 | );
121 |
122 | // Creating the network to send data to OpenTrack
123 | let mut socket_network =
124 | match SocketNetwork::new(config.ip.clone(), config.port.clone()) {
125 | Ok(socket) => socket,
126 | Err(error) => {
127 | // If an error occurs, set the error message and break the block expression
128 | trace_error!(error);
129 | break 'inner;
130 | }
131 | };
132 |
133 | // Create a channel to communicate between threads
134 | let mut thr_cam = match ThreadedCamera::start_camera_thread(
135 | tx,
136 | camera_index,
137 | camera_name,
138 | ) {
139 | Ok(camera) => camera,
140 | Err(error) => {
141 | // If an error occurs, set the error message and break the block expression
142 | trace_error!(error);
143 | break 'inner;
144 | }
145 | };
146 |
147 | let mut head_pose = match ProcessHeadPose::new(120) {
148 | Ok(pose) => pose,
149 | Err(error) => {
150 | // If an error occurs, set the error message and break the block expression
151 | trace_error!(error);
152 | break 'inner;
153 | }
154 | };
155 |
156 | // Getting the first frame from the camera, if an error occurs, set the error message and use an empty frame
157 | let mut frame = match rx.recv() {
158 | Ok(result) => result,
159 | Err(error) => {
160 | error_message =
161 | format!("Unable to receive image data: {}", error);
162 | tracing::error!(error_message);
163 | opencv::core::Mat::default()
164 | }
165 | };
166 |
167 | // Contains x, y, z, yaw, pitch, roll
168 | let mut data;
169 |
170 | // Looping until headtracker_running is set to false ( ie. user clicks on the Stop button )
171 | while headtracker_running.load(Ordering::SeqCst) {
172 | let start_time = Instant::now();
173 |
174 | // Getting the frame from the camera, if an error occurs, use the previous frame
175 | frame = match rx.try_recv() {
176 | Ok(result) => result,
177 | Err(_) => frame.clone(),
178 | };
179 |
180 | // Getting the head pose from the frame
181 | let out = head_pose.single_iter(&frame);
182 |
183 | // If an error occurs, skip the loop
184 | match out {
185 | Ok(value) => {
186 | data = value;
187 | }
188 | Err(_) => {
189 | // println!("An error: {}; skipped.", e);
190 | // head_pose.face_box = [150., 150., 400., 400.];
191 | // head_pose.pts_3d =
192 | // vec![vec![1., 2., 3.], vec![4., 5., 6.], vec![7., 8., 9.]];
193 | // head_pose.face_box = [0., 0., 600., 600.];
194 | // headtracker_running.store(false, Ordering::SeqCst);
195 | continue;
196 | }
197 | };
198 |
199 | // Smoothing and Filtering the data
200 | data = euro_filter.filter_data(
201 | data,
202 | Some(config.min_cutoff.load(Ordering::SeqCst)),
203 | Some(config.beta.load(Ordering::SeqCst)),
204 | );
205 |
206 | // Sending the data to OpenTrack, if an error occurs, set the error message and break the loop
207 | match socket_network.send(data) {
208 | Ok(_) => {}
209 | Err(_) => {
210 | error_message = format!(
211 | "Unable to send data to {}:{}",
212 | &config.ip, &config.port
213 | );
214 | tracing::error!(error_message);
215 | break;
216 | }
217 | };
218 |
219 | // Calculating the delay time and sleeping for that amount of time, Used to set the fps
220 | let elapsed_time = start_time.elapsed();
221 | let delay_time = ((1000 / config.fps.load(Ordering::SeqCst))
222 | as f32
223 | - elapsed_time.as_millis() as f32)
224 | .max(0.);
225 | thread::sleep(Duration::from_millis(delay_time.round() as u64));
226 | }
227 |
228 | thr_cam.shutdown();
229 | }
230 |
231 | // Setting the error message
232 | let mut error_guard = error_tracker.lock().unwrap();
233 | *error_guard = String::from(error_message);
234 | headtracker_running.store(false, Ordering::SeqCst);
235 | }));
236 | } else {
237 | // If the thread is already running, stop it
238 | self.headtracker_running.store(false, Ordering::SeqCst);
239 |
240 | // Joining the thread
241 | match self.headtracker_thread.take() {
242 | Some(thread) => match thread.join() {
243 | Ok(_) => {}
244 | Err(e) => tracing::error!("Could not join spawned thread: {:?}", e),
245 | },
246 | None => tracing::error!("Called stop on non-running thread"),
247 | }
248 | }
249 | }
250 |
251 | // If camera is set visible, get the frame and show it in the GUI
252 | Message::Tick => {
253 | self.frame = match self.receiver.try_recv() {
254 | Ok(result) => result,
255 | Err(_) => self.frame.clone(),
256 | };
257 | }
258 |
259 | // Deals with the filter values
260 | Message::MinCutoffSliderChanged(value) => {
261 | if value == 0 {
262 | self.config.min_cutoff.store(0., Ordering::SeqCst)
263 | } else {
264 | self.config
265 | .min_cutoff
266 | .store(1. / ((value * value) as f32), Ordering::SeqCst)
267 | };
268 | self.save_config()
269 | }
270 | Message::BetaSliderChanged(value) => {
271 | if value == 0 {
272 | self.config.beta.store(0., Ordering::SeqCst)
273 | } else {
274 | self.config
275 | .beta
276 | .store(1. / ((value * value) as f32), Ordering::SeqCst)
277 | };
278 | self.save_config()
279 | }
280 | Message::FPSSliderChanged(fps) => {
281 | self.config.fps.store(fps, Ordering::SeqCst);
282 | self.save_config()
283 | }
284 | Message::InputIP(ip) => {
285 | self.config.ip = ip;
286 | self.save_config()
287 | }
288 | Message::InputPort(port) => {
289 | self.config.port = port;
290 | self.save_config()
291 | }
292 |
293 | Message::Camera(camera_name) => {
294 | self.config.selected_camera = camera_name;
295 |
296 | // If camera changes while running
297 | if self.headtracker_running.load(Ordering::SeqCst) {
298 | // Turn it back off and on again :)
299 | #[allow(unused_must_use)]
300 | {
301 | self.update(Message::Toggle);
302 | self.update(Message::Toggle);
303 | }
304 | }
305 |
306 | self.save_config()
307 | }
308 | Message::HideCamera(value) => {
309 | self.config.hide_camera = value;
310 | self.save_config()
311 | }
312 |
313 | Message::DefaultSettings => {
314 | self.config
315 | .min_cutoff
316 | .store(AppConfig::default().min_cutoff, Ordering::SeqCst);
317 | self.config
318 | .beta
319 | .store(AppConfig::default().beta, Ordering::SeqCst);
320 | self.config
321 | .fps
322 | .store(AppConfig::default().fps, Ordering::SeqCst);
323 | self.config.ip = AppConfig::default().ip;
324 | self.config.port = AppConfig::default().port;
325 | self.config.hide_camera = AppConfig::default().hide_camera;
326 |
327 | self.save_config();
328 | }
329 | Message::OpenURL(url) => {
330 | #[cfg(target_os = "windows")]
331 | let program = "explorer";
332 | #[cfg(target_os = "macos")]
333 | let program = "open";
334 | #[cfg(target_os = "linux")]
335 | let program = "xdg-open";
336 |
337 | match std::process::Command::new(program).arg(url).spawn() {
338 | Ok(_) => {}
339 | Err(e) => {
340 | tracing::error!("Unable to open the url : {:?}", e);
341 |
342 | let mut error_guard = self.error_tracker.lock().unwrap();
343 | *error_guard = String::from("Unable to open the url");
344 | }
345 | }
346 | }
347 |
348 | Message::OpenLogs => {
349 | #[cfg(target_os = "windows")]
350 | let program = "explorer";
351 | #[cfg(target_os = "macos")]
352 | let program = "open";
353 | #[cfg(target_os = "linux")]
354 | let program = "xdg-open";
355 |
356 | match std::process::Command::new(program)
357 | .arg(
358 | directories::ProjectDirs::from("rs", "", APP_NAME)
359 | .unwrap()
360 | .data_dir()
361 | .to_str()
362 | .unwrap(),
363 | )
364 | .spawn()
365 | {
366 | Ok(_) => {}
367 | Err(e) => {
368 | tracing::error!("Unable to open logs directory : {:?}", e);
369 |
370 | let mut error_guard = self.error_tracker.lock().unwrap();
371 | *error_guard = String::from("Unable to open logs directory");
372 | }
373 | }
374 | }
375 | Message::EventOccurred(event) => {
376 | // If the user request to close the window, stop the thread ( if running ) and exit the program
377 | if let Event::Window(_, window::Event::CloseRequested) = event {
378 | if self.headtracker_running.load(Ordering::SeqCst) {
379 | self.headtracker_running.store(false, Ordering::SeqCst);
380 | match self.headtracker_thread.take() {
381 | Some(thread) => match thread.join() {
382 | Ok(_) => {}
383 | Err(e) => {
384 | tracing::error!("Could not join spawned thread: {:?}", e);
385 | }
386 | },
387 | None => {
388 | tracing::error!("Called stop on non-running thread");
389 | }
390 | }
391 | }
392 | std::process::exit(0);
393 | }
394 |
395 | // TODO : Refresh the camera list when use clicks anywhere in the app, need better approach,
396 | if let Event::Mouse(mouse::Event::ButtonPressed(mouse::Button::Left)) = event {
397 | let mut error_guard = self.error_tracker.lock().unwrap();
398 | *error_guard = String::new();
399 |
400 | // Updating camera list
401 | match ThreadedCamera::get_available_cameras() {
402 | Ok(camera_list) => self.camera_list = camera_list,
403 | Err(e) => {
404 | tracing::error!("{}", e);
405 | *error_guard = e.to_string();
406 | }
407 | }
408 | }
409 | }
410 | }
411 | Command::none()
412 | }
413 |
414 | fn view(&self) -> Element {
415 | let body = run_page(self);
416 |
417 | Container::new(body)
418 | .width(Length::Fill)
419 | .height(Length::Fill)
420 | .center_x()
421 | .center_y()
422 | .into()
423 | }
424 | }
425 |
--------------------------------------------------------------------------------
/src/gui/mod.rs:
--------------------------------------------------------------------------------
1 | pub mod app;
2 | pub mod style;
3 | pub mod view;
4 |
--------------------------------------------------------------------------------
/src/gui/style.rs:
--------------------------------------------------------------------------------
/// Relative vertical weight of the main body area — used with
/// `Length::FillPortion` in `view.rs`.
pub const HEIGHT_BODY: u16 = 80;
/// Relative vertical weight of the footer area — used with
/// `Length::FillPortion` in `view.rs`.
pub const HEIGHT_FOOTER: u16 = 10;
3 |
--------------------------------------------------------------------------------
/src/gui/view.rs:
--------------------------------------------------------------------------------
1 | // The UI of the application
2 |
3 | use std::{borrow::Cow, sync::atomic::Ordering};
4 |
5 | use iced::{
6 | alignment::{self, Horizontal, Vertical},
7 | widget::{
8 | button, pick_list, slider, text, text_input, toggler, Column, Container, Row, Space, Text,
9 | },
10 | Alignment, Length, Renderer,
11 | };
12 | use opencv::{core::VectorToVec, imgcodecs};
13 |
14 | use crate::{consts::NO_VIDEO_IMG, enums::message::Message, structs::app::HeadTracker};
15 |
16 | use super::style::{HEIGHT_BODY, HEIGHT_FOOTER};
17 | use crate::consts::{APP_AUTHORS, APP_NAME, APP_REPOSITORY, APP_VERSION};
18 |
19 | pub fn run_page(headtracker: &HeadTracker) -> Column {
20 | // Convert the min_cutoff and beta values to u32
21 | let min_cutoff = {
22 | if (headtracker.config.min_cutoff.load(Ordering::SeqCst) - 0.).abs() < f32::EPSILON {
23 | 0
24 | } else {
25 | (1. / headtracker.config.min_cutoff.load(Ordering::SeqCst)).sqrt() as u32
26 | }
27 | };
28 |
29 | let beta = {
30 | if (headtracker.config.beta.load(Ordering::SeqCst) - 0.).abs() < f32::EPSILON {
31 | 0
32 | } else {
33 | (1. / headtracker.config.beta.load(Ordering::SeqCst)).sqrt() as u32
34 | }
35 | };
36 | let fps = headtracker.config.fps.load(Ordering::SeqCst);
37 |
38 | let ip = headtracker.config.ip.as_str();
39 | let port = headtracker.config.port.as_str();
40 | let hide_camera = headtracker.config.hide_camera;
41 |
42 | // Create the sliders
43 | let min_cutoff_slider =
44 | slider(0..=50, min_cutoff, Message::MinCutoffSliderChanged).step(1 as u32);
45 | let beta_slider = slider(0..=50, beta, Message::BetaSliderChanged).step(1 as u32);
46 | let fps_slider = slider(15..=120, fps, Message::FPSSliderChanged).step(1 as u32);
47 |
48 | // The main Start/Stop button
49 | let toggle_start = {
50 | let label = match headtracker.headtracker_running.load(Ordering::SeqCst) {
51 | true => "Stop",
52 | false => "Start",
53 | };
54 | button(
55 | text(label)
56 | .vertical_alignment(Vertical::Center)
57 | .horizontal_alignment(Horizontal::Center),
58 | )
59 | .height(Length::Fixed(40.))
60 | .width(Length::Fixed(180.))
61 | .on_press(Message::Toggle)
62 | };
63 |
64 | let sliders_row = Container::new(
65 | Column::new()
66 | .push(text("Filter Settings").size(15))
67 | .push(Space::with_height(Length::Fixed(20.)))
68 | .push(text("Speed").size(14))
69 | .push(Container::new(min_cutoff_slider).width(Length::FillPortion(2)))
70 | .push(Space::with_height(Length::Fixed(10.)))
71 | .push(text("Smooth").size(14))
72 | .push(Container::new(beta_slider).width(Length::FillPortion(2)))
73 | .push(Space::with_height(Length::Fixed(30.)))
74 | .push(text("FPS").size(15))
75 | .push(Container::new(fps_slider).width(Length::FillPortion(2)))
76 | .push(Space::with_height(Length::Fixed(30.)))
77 | .push(text("IP and Port").size(15))
78 | // ! IPV4 and V6 support for external devices, having only two inputs, ip and port
79 | .push(Container::new(
80 | Row::new()
81 | .spacing(5)
82 | .push(
83 | text_input("127.0.0.1", ip)
84 | .on_input(Message::InputIP)
85 | .width(Length::FillPortion(70)),
86 | )
87 | .push(text(" "))
88 | .push(
89 | text_input("4242", port)
90 | .on_input(Message::InputPort)
91 | .width(Length::FillPortion(15)),
92 | ),
93 | ))
94 | .push(Space::with_height(Length::Fixed(30.))),
95 | )
96 | .padding(40);
97 |
98 | // If camera is set to hidden, show a placeholder image
99 | let image = match hide_camera {
100 | true => NO_VIDEO_IMG.to_vec(),
101 | false => {
102 | if headtracker.headtracker_running.load(Ordering::SeqCst) {
103 | let frame = headtracker.frame.clone();
104 | let mut encoded_image = opencv::core::Vector::::new();
105 | let params = opencv::core::Vector::::new();
106 | match imgcodecs::imencode(".PNG", &frame, &mut encoded_image, ¶ms) {
107 | Ok(_) => {}
108 | Err(e) => {
109 | tracing::error!("Error encoding image: {}", e);
110 | }
111 | }
112 | encoded_image.to_vec()
113 | } else {
114 | NO_VIDEO_IMG.to_vec()
115 | }
116 | }
117 | };
118 |
119 | // Contains camera placeholder, available cameras list and the toggle button to hide the camera
120 | let camera_row = Container::new(
121 | Column::new()
122 | .push({
123 | iced::widget::image::viewer(iced::widget::image::Handle::from_memory(image))
124 | .width(Length::Fill)
125 | .height(Length::Fixed(200.))
126 | })
127 | .push(Space::with_height(Length::Fixed(32.)))
128 | .push(Container::new(
129 | Row::new()
130 | .push(
131 | pick_list(
132 | Cow::from(
133 | headtracker
134 | .camera_list
135 | .keys()
136 | .cloned()
137 | .collect::>(),
138 | ),
139 | Some(headtracker.config.selected_camera.clone()),
140 | Message::Camera,
141 | )
142 | .width(Length::FillPortion(50)),
143 | )
144 | .push(Space::with_width(Length::FillPortion(10)))
145 | .push(
146 | toggler("Hide Cam".to_string(), hide_camera, Message::HideCamera)
147 | .size(24)
148 | .spacing(2)
149 | .width(Length::FillPortion(40)),
150 | )
151 | .padding(1),
152 | )),
153 | )
154 | .padding(40)
155 | .center_x()
156 | .center_y();
157 |
158 | let start_button_row = Container::new(toggle_start)
159 | .width(Length::Fill)
160 | .align_x(Horizontal::Center);
161 |
162 | let controls_row = Container::new(
163 | Row::new()
164 | .push(Container::new(camera_row).width(Length::FillPortion(5)))
165 | .push(Container::new(sliders_row).width(Length::FillPortion(5))),
166 | );
167 |
168 | let body = Container::new(
169 | Column::new()
170 | .width(Length::Fill)
171 | .push(Space::with_height(Length::Fixed(40.)))
172 | .push(Container::new(
173 | Row::new()
174 | .push(Space::with_width(Length::FillPortion(2)))
175 | // If there is a new release, show a button to download it/update it
176 | .push(match &headtracker.release_info {
177 | Some(release_info) => Container::new(
178 | button(
179 | text(format!(" {} now available! ", release_info.tag_name))
180 | .size(15),
181 | )
182 | .on_press(Message::OpenURL(release_info.html_url.clone())),
183 | ),
184 | None => Container::new(Space::with_height(Length::Fixed(40.))),
185 | })
186 | .push(Space::with_width(Length::Fixed(34.)))
187 | .push(
188 | button(text(" Reset to Default ").size(15))
189 | .on_press(Message::DefaultSettings),
190 | )
191 | .push(Space::with_width(Length::Fixed(40.))),
192 | ))
193 | .push(controls_row.width(Length::FillPortion(50)))
194 | .push(start_button_row.width(Length::FillPortion(50)))
195 | .push(Space::with_height(Length::Fixed(20.)))
196 | .push(
197 | Container::new(
198 | Row::new()
199 | .push(Space::with_width(Length::FillPortion(40)))
200 | // If there is an error, show it
201 | .push(
202 | text(headtracker.error_tracker.clone().lock().unwrap())
203 | .size(15)
204 | .horizontal_alignment(Horizontal::Center)
205 | .width(Length::FillPortion(50)),
206 | )
207 | .push(Space::with_width(Length::FillPortion(40))),
208 | )
209 | .center_x(),
210 | ),
211 | )
212 | .height(Length::FillPortion(HEIGHT_BODY));
213 |
214 | let footer = footer();
215 |
216 | Column::new().spacing(10).push(body).push(footer)
217 | }
218 |
219 | // Shows app version and links to github and logs
220 | fn footer() -> Container<'static, Message, iced::Theme, Renderer> {
221 | let github_button = button(
222 | Text::new('\u{48}'.to_string())
223 | .font(iced::font::Font::with_name("Glyphter"))
224 | .size(12.)
225 | .horizontal_alignment(alignment::Horizontal::Center)
226 | .vertical_alignment(alignment::Vertical::Center),
227 | )
228 | .height(Length::Fixed(35.))
229 | .width(Length::Fixed(40.))
230 | .on_press(Message::OpenURL(String::from(APP_REPOSITORY)));
231 |
232 | let logs_button = button(
233 | Text::new('\u{66}'.to_string())
234 | .font(iced::font::Font::with_name("Glyphter"))
235 | .size(12.)
236 | .horizontal_alignment(alignment::Horizontal::Center)
237 | .vertical_alignment(alignment::Vertical::Center),
238 | )
239 | .height(Length::Fixed(35.))
240 | .width(Length::Fixed(40.))
241 | .on_press(Message::OpenLogs);
242 |
243 | let footer_row = Row::new()
244 | .align_items(Alignment::Center)
245 | .push(Text::new(format!(
246 | "{} v{} by {} ",
247 | APP_NAME, APP_VERSION, APP_AUTHORS
248 | )))
249 | .push(github_button)
250 | .push(Space::with_width(Length::Fixed(10.)))
251 | .push(logs_button);
252 |
253 | Container::new(footer_row)
254 | .width(Length::Fill)
255 | .height(Length::FillPortion(HEIGHT_FOOTER))
256 | .align_y(Vertical::Bottom)
257 | .align_x(Horizontal::Center)
258 | .padding(20)
259 | }
260 |
--------------------------------------------------------------------------------
/src/main.rs:
--------------------------------------------------------------------------------
1 | #![cfg_attr(not(debug_assertions), windows_subsystem = "windows")]
2 | #![allow(non_snake_case)]
3 |
4 | mod camera;
5 | mod consts;
6 | mod enums;
7 | mod face;
8 | mod filter;
9 | mod gui;
10 | mod network;
11 | mod process;
12 | mod structs;
13 | mod tddfa;
14 | mod utils;
15 |
16 | use crate::{
17 | consts::{APP_NAME, APP_VERSION, ICON, INTER_FONT},
18 | structs::app::HeadTracker,
19 | };
20 | use consts::ICONS_FONT;
21 | use iced::{
22 | window::{self, settings::PlatformSpecific, Level},
23 | Application, Settings, Size,
24 | };
25 | use image::ImageFormat;
26 |
27 | use std::{fs, path::Path};
28 |
29 | use anyhow::Result;
30 |
31 | fn main() -> Result<(), Box> {
32 | // ? Adding organization name
33 | let log_filepath = match directories::ProjectDirs::from("rs", "", APP_NAME) {
34 | Some(dirs) => dirs,
35 | None => {
36 | tracing::error!("Could not find project directories");
37 | std::process::exit(1);
38 | }
39 | };
40 | let log_filename = "StableView.log";
41 |
42 | // println!("{:?}", std::env::current_exe().unwrap());
43 |
44 | match fs::remove_file(log_filepath.data_dir().join(log_filename)) {
45 | Ok(_) => tracing::warn!("Removed old log file"),
46 | Err(_) => tracing::warn!("No old log file found"),
47 | }
48 |
49 | let file_appender = tracing_appender::rolling::never(
50 | // * Similar path is also used by confy https://github.com/rust-cli/confy/blob/master/src/lib.rs#L316
51 | match log_filepath.data_dir().to_str() {
52 | Some(path) => path,
53 | None => {
54 | tracing::error!("Could not find project directories");
55 | std::process::exit(1);
56 | }
57 | },
58 | log_filename,
59 | );
60 |
61 | let (non_blocking, _guard) = tracing_appender::non_blocking(file_appender);
62 |
63 | tracing_subscriber::fmt()
64 | .with_writer(non_blocking)
65 | .with_ansi(false)
66 | .with_max_level(tracing::Level::WARN)
67 | .init(); // ! Need to have only 1 log file which resets daily
68 |
69 | tracing::warn!("Version {} on {}", APP_VERSION, std::env::consts::OS);
70 | tracing::warn!(
71 | "The configuration file path is: {:#?}",
72 | match confy::get_configuration_file_path(APP_NAME, "config") {
73 | Ok(path) => path,
74 | Err(e) => {
75 | tracing::error!("Error getting config file path: {}", e);
76 | Path::new("Could not find config file path").to_path_buf()
77 | }
78 | }
79 | );
80 |
81 | let mut flags = HeadTracker::default();
82 | flags.config = flags.load_config();
83 |
84 | tracing::warn!("Config : {}", flags);
85 |
86 | let settings = Settings {
87 | id: None,
88 | window: window::Settings {
89 | size: Size::new(750., 620.), // start size
90 | position: window::Position::Centered,
91 | min_size: Some(Size::new(750., 620.)),
92 | max_size: None,
93 | resizable: true,
94 | decorations: true,
95 | transparent: false,
96 | icon: match window::icon::from_file_data(ICON, Some(ImageFormat::Ico)) {
97 | Ok(icon) => Some(icon),
98 | Err(_) => None,
99 | },
100 | visible: true,
101 | platform_specific: PlatformSpecific::default(),
102 | exit_on_close_request: true,
103 | level: Level::Normal,
104 |
105 | },
106 | flags,
107 | default_font: iced::font::Font::with_name("Inter-Regular"),
108 | fonts: vec![
109 | std::borrow::Cow::Borrowed(INTER_FONT),
110 | std::borrow::Cow::Borrowed(ICONS_FONT),
111 | ],
112 | default_text_size: iced::Pixels(13.),
113 | antialiasing: false,
114 | };
115 |
116 | if let Err(e) = HeadTracker::run(settings) {
117 | tracing::error!("{}", e);
118 | }
119 |
120 | Ok(())
121 | }
122 |
--------------------------------------------------------------------------------
/src/network.rs:
--------------------------------------------------------------------------------
1 | /// Deals with sending the data (x,y,depth,yaw,pitch,roll) to opentrack (https://github.com/opentrack/opentrack) using UDP socket
2 | use crate::structs::network::SocketNetwork;
3 | use anyhow::{Context, Result};
4 | use std::net::UdpSocket;
5 |
6 | impl SocketNetwork {
7 | pub fn new(ip: String, port: String) -> Result {
8 | tracing::info!("Sending data to {} on port {}", ip, port);
9 |
10 | let address = format!("{}:{}", ip, port);
11 |
12 | let socket_network = UdpSocket::bind("0.0.0.0:0")
13 | .with_context(|| format!("Unable to bind socket : {:?}:{:?}", ip, port))?;
14 |
15 | Ok(Self {
16 | address,
17 | socket_network,
18 | })
19 | }
20 |
21 | // TODO : Cleaning and possibly removing unsafe code
22 | pub fn send(&mut self, data: [f32; 6]) -> Result<()> {
23 | let data: [f64; 6] = [
24 | data[0] as f64,
25 | data[1] as f64,
26 | data[2] as f64,
27 | data[3] as f64,
28 | data[4] as f64,
29 | data[5] as f64,
30 | ];
31 |
32 | // Convert an array to f64 to array of u8
33 | let out =
34 | unsafe { std::slice::from_raw_parts(data.as_ptr() as *const u8, data.len() * 10) };
35 |
36 | // Send data
37 | self.socket_network.send_to(out, &self.address)?;
38 |
39 | Ok(())
40 | }
41 | }
42 |
#[test]
pub fn test_socket_network() -> Result<()> {
    // Smoke test: bind a local UDP sender and push one pose packet.
    let mut network = SocketNetwork::new("127.0.0.1".to_owned(), "4242".to_owned())?;
    network.send([1., 2., 3., 4., 5., 6.])?;

    Ok(())
}
50 |
--------------------------------------------------------------------------------
/src/process.rs:
--------------------------------------------------------------------------------
1 |
2 | /// Processing the head pose (filters, etc.) and generating the x,y,z of the head.
3 | use crate::enums::crop_policy::CropPolicy;
4 | use crate::structs::face::FaceDetect;
5 | use crate::structs::{pose::ProcessHeadPose, tddfa::Tddfa};
6 | use crate::utils::headpose::{calc_pose, gen_point2d};
7 | use anyhow::{Context, Result};
8 | use opencv::prelude::Mat;
9 | use opencv::prelude::MatTraitConst;
10 |
11 | impl ProcessHeadPose {
12 | pub fn new(image_size: i32) -> Result {
13 | let tddfa = Tddfa::new(image_size).context("Unable to create tddfa")?;
14 | let face_detector = FaceDetect::new().unwrap();
15 |
16 | Ok(Self {
17 | tddfa,
18 | face_detector,
19 | pts_3d: vec![vec![1., 2., 3.], vec![4., 5., 6.], vec![7., 8., 9.]],
20 | face_box: [150., 150., 400., 400.],
21 | first_iteration: true,
22 | param: [0.; 62],
23 | roi_box: [150., 150., 400., 400.],
24 | })
25 | }
26 |
27 | // Get the X,Y,Z coordinates of the head
28 | fn get_coordintes_and_depth(
29 | &self,
30 | pose: [f32; 3],
31 | mut distance: f32,
32 | _point2d: Vec>,
33 | roi_box: &[f32; 4],
34 | ) -> ([f32; 2], f32) {
35 | distance -= 56.;
36 | distance += (pose[0] * 0.2).abs();
37 |
38 | // let x = [point2d[0][0], point2d[1][0], point2d[2][0], point2d[3][0]];
39 | // let y = [point2d[0][1], point2d[1][1], point2d[2][1], point2d[3][1]];
40 |
41 | let mut centroid = [
42 | // x.iter().sum::() / (x.len()) as f32,
43 | // y.iter().sum::() / (y.len()) as f32,
44 | ((roi_box[2] + roi_box[0]) / 20.) - 40.,
45 | ((roi_box[3] + roi_box[1]) / 20.) - 15.,
46 | ];
47 | // * disbling the multiplying pose with distance (pose[0]*(distance/31), pose[1]*(distance/27)), it seems to causing jitting even when blinking eyes or smiling
48 | // centroid[0] += pose[0]; // * When very close to the camera, the head pose invariant seems to does't work, to miltgate the issue, we use this
49 | centroid[1] -= pose[1] * 0.15; // * 31 & 27 represent the distance where head pose invariant is fully solved, and we use this ratio to make it work for closer distance
50 | // if pose[2] > 0. {
51 | // centroid[0] += pose[2].abs()
52 | // } else {
53 | // centroid[0] -= pose[2].abs()
54 | // }
55 |
56 | (centroid, distance)
57 | }
58 |
59 | pub fn single_iter(&mut self, frame: &Mat) -> Result<[f32; 6]> {
60 | // ! A very tuff bug laying around somewhere here, resulting in out of ordinary roi box values when moving to camera border
61 |
62 | let mut return_data = [0.; 6];
63 |
64 | if self.first_iteration {
65 | (self.param, self.roi_box) =
66 | self.tddfa
67 | .run(frame, self.face_box, &self.pts_3d, CropPolicy::Box)?;
68 | self.pts_3d = self.tddfa.recon_vers(self.param, self.face_box);
69 |
70 | (self.param, self.roi_box) =
71 | self.tddfa
72 | .run(frame, self.face_box, &self.pts_3d, CropPolicy::Landmark)?;
73 | self.pts_3d = self.tddfa.recon_vers(self.param, self.face_box);
74 |
75 | self.first_iteration = false;
76 | } else {
77 | (self.param, self.roi_box) =
78 | self.tddfa
79 | .run(frame, self.face_box, &self.pts_3d, CropPolicy::Landmark)?;
80 | if (self.roi_box[2] - self.roi_box[0]).abs() * (self.roi_box[3] - self.roi_box[1]).abs()
81 | < 2020.
82 | {
83 | (self.param, self.roi_box) =
84 | self.tddfa
85 | .run(frame, self.face_box, &self.pts_3d, CropPolicy::Box)?;
86 | }
87 |
88 | // make sure the roi_box is not out of the frame
89 | if self.roi_box[0] < 0. {
90 | self.roi_box[0] = 0.;
91 | }
92 | if self.roi_box[1] < 0. {
93 | self.roi_box[1] = 0.;
94 | }
95 | if self.roi_box[2] > frame.size()?.width as f32 {
96 | self.roi_box[2] = frame.size()?.width as f32;
97 | }
98 | if self.roi_box[3] > frame.size()?.height as f32 {
99 | self.roi_box[3] = frame.size()?.height as f32;
100 | }
101 | self.pts_3d = self.tddfa.recon_vers(self.param, self.roi_box);
102 | }
103 | let (p, pose) = calc_pose(&self.param);
104 |
105 | let (point2d, distance) = gen_point2d(
106 | &p,
107 | vec![
108 | self.pts_3d[0][28..48].to_vec(),
109 | self.pts_3d[1][28..48].to_vec(),
110 | self.pts_3d[2][28..48].to_vec(),
111 | ],
112 | );
113 |
114 | let (centroid, distance) =
115 | self.get_coordintes_and_depth(pose, distance, point2d, &self.roi_box);
116 |
117 | // detect any faces, if there are no faces, return the previous values
118 | // let mut rng = rand::thread_rng();
119 | // if rng.gen::() > 0.9 {
120 | let face_detected = self.face_detector.detect(frame.clone()).unwrap();
121 |
122 | if face_detected[0] < 1. {
123 | return Ok(return_data);
124 | }
125 | self.face_box = [
126 | face_detected[0] - 50.,
127 | face_detected[1] - 50.,
128 | face_detected[0] + face_detected[2] + 50.,
129 | face_detected[1] + face_detected[3] + 50.,
130 | ];
131 |
132 | return_data = [
133 | centroid[0],
134 | -centroid[1],
135 | distance,
136 | pose[0],
137 | -pose[1],
138 | pose[2],
139 | ];
140 |
141 | Ok(return_data)
142 | }
143 | }
144 |
#[test]
#[ignore = "Can only test this offline since it requires webcam, run cargo test -- --ignored"]
#[allow(unused_variables)]
/// Interactive smoke test: runs the pipeline on live webcam frames and shows
/// them in a window until a key is pressed.
pub fn test_process_head_pose() -> Result<()> {
    use crate::structs::camera::ThreadedCamera;
    // use crate::utils::image::crop_img;
    use crate::utils::visualize::draw_landmark;
    use opencv::highgui;

    // Restored the stripped channel item type `<Mat>`.
    let (tx, rx) = crossbeam_channel::unbounded::<Mat>();
    let mut thr_cam = ThreadedCamera::start_camera_thread(tx, 0, "Test Camera".to_owned())?;

    let mut face_detector = FaceDetect::new().unwrap();
    let mut head_pose = ProcessHeadPose::new(120)?;

    let window = "video capture";
    highgui::named_window(window, highgui::WINDOW_AUTOSIZE)?;

    let mut frame = rx.recv()?;

    loop {
        // Take the newest frame if one is queued; otherwise reuse the last one.
        frame = match rx.try_recv() {
            Ok(result) => result,
            Err(_) => frame.clone(),
        };

        let data = head_pose.single_iter(&frame)?;

        // frame = draw_landmark(
        //     frame,
        //     vec![
        //         head_pose.pts_3d[0][28..48].to_vec(),
        //         head_pose.pts_3d[1][28..48].to_vec(),
        //         head_pose.pts_3d[2][28..48].to_vec(),
        //     ],
        //     head_pose.roi_box,
        //     (0., 255., 0.),
        //     1,
        // )?;

        // let detected_faces = face_detector.detect(frame.clone()).unwrap();

        // // draw point
        // imgproc::circle(
        //     &mut frame,
        //     opencv::core::Point::new(
        //         (detected_faces[0]).try_into().unwrap(),
        //         (detected_faces[1]).try_into().unwrap(),
        //     ),
        //     5,
        //     opencv::core::Scalar::from((0.0, 0.0, 255.0)),
        //     2,
        //     imgproc::LINE_4,
        //     0,
        // )?;

        // imgproc::circle(
        //     &mut frame,
        //     opencv::core::Point::new(
        //         (detected_faces[0] + detected_faces[2]).try_into().unwrap(),
        //         (detected_faces[1] + detected_faces[3]).try_into().unwrap(),
        //     ),
        //     5,
        //     opencv::core::Scalar::from((0.0, 0.0, 255.0)),
        //     2,
        //     imgproc::LINE_4,
        //     0,
        // )?;

        if frame.size()?.width > 0 {
            // let cropped_image = crop_img(&frame, &head_pose.roi_box)?;

            // Resizing the frame
            // let mut resized_frame = Mat::default();
            // imgproc::resize(
            //     &cropped_image,
            //     &mut resized_frame,
            //     Size {
            //         width: 120,
            //         height: 120,
            //     },
            //     0.0,
            //     0.0,
            //     imgproc::INTER_LINEAR, //*INTER_AREA, // https://stackoverflow.com/a/51042104 | Speed -> https://stackoverflow.com/a/44278268
            // )?; // ! Error handling here

            highgui::imshow(window, &frame)?;
        }
        // Exit on any real keypress (255 is "no key" on some backends).
        let key = highgui::wait_key(30)?;
        if key > 0 && key != 255 {
            break;
        }
    }
    thr_cam.shutdown();
    Ok(())
}
241 |
--------------------------------------------------------------------------------
/src/structs/app.rs:
--------------------------------------------------------------------------------
1 | // Importing Modules
2 | use std::{
3 | collections::HashMap,
4 | sync::{
5 | self,
6 | atomic::{AtomicBool, AtomicU32, Ordering},
7 | Arc, Mutex,
8 | },
9 | thread,
10 | };
11 |
12 | use crossbeam_channel::{unbounded, Receiver, Sender};
13 | use opencv::{core::MatTraitConst, imgcodecs, prelude::Mat};
14 |
15 | use super::{camera::ThreadedCamera, release::Release, state::AppConfig};
16 | use crate::consts::{APP_GITHUB_API, APP_VERSION, NO_VIDEO_IMG};
17 | use version_compare::{compare_to, Cmp};
18 |
// * Adding this to another struct file
/// A lock-free `f32` stored as its IEEE-754 bit pattern in an `AtomicU32`
/// (`f32` has no native atomic type; `to_bits`/`from_bits` round-trip the
/// value exactly). Previous locals were misleadingly named `as_u64` even
/// though they hold `u32` bit patterns.
pub struct AtomicF32 {
    storage: AtomicU32,
}
impl AtomicF32 {
    /// Wraps `value` in a new atomic cell.
    pub fn new(value: f32) -> Self {
        Self {
            storage: AtomicU32::new(value.to_bits()),
        }
    }
    /// Atomically replaces the stored value.
    pub fn store(&self, value: f32, ordering: Ordering) {
        self.storage.store(value.to_bits(), ordering)
    }
    /// Atomically reads the stored value.
    pub fn load(&self, ordering: Ordering) -> f32 {
        f32::from_bits(self.storage.load(ordering))
    }
}
39 |
40 | #[derive(Clone)]
41 | pub struct Config {
42 | pub min_cutoff: Arc,
43 | pub beta: Arc,
44 |
45 | pub ip: String,
46 | pub port: String,
47 |
48 | pub fps: Arc,
49 |
50 | pub selected_camera: String,
51 | pub hide_camera: bool,
52 | }
53 |
54 | // Contains configuration and state of the application and other data
55 | pub struct HeadTracker {
56 | pub config: Config,
57 |
58 | pub camera_list: HashMap,
59 |
60 | pub headtracker_thread: Option>,
61 | pub headtracker_running: sync::Arc,
62 |
63 | pub should_exit: bool,
64 | pub error_tracker: Arc>,
65 |
66 | pub sender: Sender,
67 | pub receiver: Receiver,
68 | pub frame: Mat,
69 |
70 | pub release_info: Option,
71 | pub version: String,
72 | }
73 |
74 | impl Default for Config {
75 | fn default() -> Self {
76 | Config {
77 | // ? Adding log directory path might lead to un-anonymous logs
78 | min_cutoff: Arc::new(AtomicF32::new(AppConfig::default().min_cutoff)),
79 | beta: Arc::new(AtomicF32::new(AppConfig::default().beta)),
80 |
81 | ip: AppConfig::default().ip,
82 | port: AppConfig::default().port,
83 |
84 | fps: Arc::new(AtomicU32::new(AppConfig::default().fps)),
85 |
86 | selected_camera: AppConfig::default().selected_camera, // ? Maybe checking for new cameras in main.rs
87 | hide_camera: AppConfig::default().hide_camera,
88 | }
89 | }
90 | }
91 |
92 | impl Default for HeadTracker {
93 | fn default() -> Self {
94 | // Setup channels for camera thread to headtracker thread
95 | let (sender, receiver) = unbounded::(); // ! bounded causes unwanted crashes bounded::(1);
96 |
97 | let frame = match Mat::from_slice(NO_VIDEO_IMG) {
98 | Ok(frame) => frame.try_clone().unwrap(),
99 | Err(e) => {
100 | tracing::error!("Error loading NO_VIDEO_IMG: {}", e);
101 | Mat::default()
102 | }
103 | };
104 | let frame = match imgcodecs::imdecode(&frame, 1) {
105 | Ok(frame) => frame,
106 | Err(e) => {
107 | tracing::error!("Error decoding NO_VIDEO_IMG: {}", e);
108 | Mat::default()
109 | }
110 | };
111 |
112 | // Check for new version on GitHub
113 | let client = reqwest::blocking::Client::new();
114 |
115 | let response_json = match client
116 | .get(APP_GITHUB_API)
117 | .header("User-Agent", "rust-app")
118 | .send()
119 | .and_then(|response| response.json::())
120 | {
121 | Ok(release_info) => {
122 | if compare_to(&release_info.tag_name[1..], APP_VERSION, Cmp::Gt) == Ok(true) {
123 | tracing::info!(
124 | "New version available: {} (current: {})",
125 | release_info.tag_name,
126 | APP_VERSION
127 | );
128 | Some(release_info)
129 | } else {
130 | None
131 | }
132 | }
133 |
134 | Err(e) => {
135 | tracing::info!("Unable to check for new version: {}", e);
136 | None
137 | }
138 | };
139 |
140 | HeadTracker {
141 | config: Config::default(),
142 |
143 | camera_list: match ThreadedCamera::get_available_cameras() {
144 | Ok(camera_list) => camera_list,
145 | Err(e) => {
146 | tracing::error!("{}", e);
147 | HashMap::new()
148 | }
149 | },
150 |
151 | headtracker_thread: None,
152 | headtracker_running: Arc::new(AtomicBool::new(false)),
153 |
154 | should_exit: false,
155 | error_tracker: Arc::new(Mutex::new(String::new())),
156 |
157 | version: APP_VERSION.to_string(),
158 | release_info: response_json,
159 |
160 | sender,
161 | receiver,
162 | frame,
163 | }
164 | }
165 | }
166 |
167 | impl std::fmt::Display for Config {
168 | fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
169 | write!(f, "(min_cutoff : {}, beta: {}, ip: {}, port: {}, fps: {}, selected_camera: {}, hide_camera: {})",
170 | self.min_cutoff.load(Ordering::SeqCst), self.beta.load(Ordering::SeqCst), self.ip,self.port, self.fps.load(Ordering::SeqCst), self.selected_camera.clone(), self.hide_camera)
171 | }
172 | }
173 |
174 | impl std::fmt::Display for HeadTracker {
175 | fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
176 | write!(f, "(config: {}, camera_list: {:?}, headtracker_running: {}, should_exit: {}, version: {})", self.config, self.camera_list, self.headtracker_running.load(Ordering::SeqCst), self.should_exit, self.version)
177 | }
178 | }
179 |
--------------------------------------------------------------------------------
/src/structs/camera.rs:
--------------------------------------------------------------------------------
1 | use std::{
2 | sync::{self, atomic::AtomicBool},
3 | thread,
4 | };
5 |
/// Handle to the background camera-capture thread.
pub struct ThreadedCamera {
    // NOTE(review): generic parameters were lost in extraction; `JoinHandle<()>`
    // and `Arc<AtomicBool>` are reconstructed from the imports above — confirm.
    pub cam_thread: Option<thread::JoinHandle<()>>, // Storing the thread
    pub keep_running: sync::Arc<AtomicBool>,        // Signal to stop the thread
}
10 |
--------------------------------------------------------------------------------
/src/structs/data.rs:
--------------------------------------------------------------------------------
1 | use serde::{Deserialize, Serialize};
2 |
3 | #[derive(Serialize, Deserialize)]
4 | pub struct Jsondata {
5 | pub mean: Vec,
6 | pub std: Vec,
7 | pub u_base: Vec>,
8 | pub w_shp_base: Vec>,
9 | pub w_exp_base: Vec>,
10 | }
11 |
--------------------------------------------------------------------------------
/src/structs/face.rs:
--------------------------------------------------------------------------------
1 | use onnxruntime::session::Session;
2 |
// Wrapper around the ONNX Runtime session used for face detection.
// NOTE(review): the assets bundle `blazeface-320.onnx` — confirm that is the
// model loaded into this session.
pub struct FaceDetect {
    pub face_detector: Session<'static>,
}
6 |
--------------------------------------------------------------------------------
/src/structs/mod.rs:
--------------------------------------------------------------------------------
1 | pub mod app;
2 | pub mod camera;
3 | pub mod data;
4 | pub mod network;
5 | pub mod pose;
6 | pub mod release;
7 | pub mod state;
8 | pub mod tddfa;
9 | pub mod face;
--------------------------------------------------------------------------------
/src/structs/network.rs:
--------------------------------------------------------------------------------
1 | use std::net::UdpSocket;
2 |
// UDP sender used to stream head-pose data.
pub struct SocketNetwork {
    pub address: String,           // target address — presumably "ip:port"; confirm against the constructor
    pub socket_network: UdpSocket, // locally bound UDP socket
}
7 |
--------------------------------------------------------------------------------
/src/structs/pose.rs:
--------------------------------------------------------------------------------
1 | use super::{face::FaceDetect, tddfa::Tddfa};
2 |
3 | pub struct ProcessHeadPose {
4 | pub tddfa: Tddfa,
5 | pub face_detector: FaceDetect,
6 | pub pts_3d: Vec>,
7 | pub face_box: [f32; 4],
8 | pub first_iteration: bool,
9 | pub param: [f32; 62],
10 | pub roi_box: [f32; 4],
11 | }
12 |
--------------------------------------------------------------------------------
/src/structs/release.rs:
--------------------------------------------------------------------------------
1 | // Contains new release information
2 |
3 | use serde::{Deserialize, Serialize};
4 |
// Deserialized from the release-feed JSON (field names match the GitHub
// releases API schema).
#[derive(Debug, Deserialize, Serialize)]
pub struct Release {
    pub name: String,     // human-readable release title
    pub tag_name: String, // version tag, e.g. "v1.2.3"; the leading character is stripped before comparing to APP_VERSION
    pub html_url: String, // web page for the release
}
11 |
--------------------------------------------------------------------------------
/src/structs/state.rs:
--------------------------------------------------------------------------------
1 | /// Saving state of the application
2 | use std::sync::{
3 | atomic::{AtomicU32, Ordering},
4 | Arc,
5 | };
6 |
7 | use crate::{
8 | consts::APP_NAME,
9 | structs::app::{AtomicF32, Config, HeadTracker},
10 | };
11 |
12 | use serde::{Deserialize, Serialize};
13 |
14 | use super::camera::ThreadedCamera;
15 |
// Application settings persisted with `confy`; the `Default` impl supplies
// values when the config file is missing or unreadable.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct AppConfig {
    pub ip: String,              // destination IP for the pose stream (default "127.0.0.1")
    pub port: String,            // destination port, stored as a string (default "4242")
    pub min_cutoff: f32,         // smoothing filter parameter (default 0.0025) — presumably One Euro; confirm in filter.rs
    pub beta: f32,               // smoothing filter parameter (default 0.01) — confirm in filter.rs
    pub fps: u32,                // capture/processing rate (default 60)
    pub selected_camera: String, // key into HeadTracker::camera_list
    pub hide_camera: bool,       // NOTE(review): presumably hides the GUI camera preview — confirm in gui code
}
26 |
27 | // Default values are used when the config file is not found or when there is an error loading the config file
28 | impl Default for AppConfig {
29 | fn default() -> Self {
30 | AppConfig {
31 | min_cutoff: 0.0025,
32 | beta: 0.01,
33 |
34 | ip: "127.0.0.1".to_string(),
35 | port: "4242".to_string(),
36 |
37 | fps: 60,
38 |
39 | selected_camera: match ThreadedCamera::get_available_cameras() {
40 | Ok(cameras) => match cameras.keys().next() {
41 | Some(key) => key.clone(),
42 | None => "No Device Found".to_string(),
43 | },
44 | Err(e) => {
45 | tracing::error!("{e}");
46 | "No Device Found".to_string()
47 | }
48 | },
49 |
50 | hide_camera: true,
51 | }
52 | }
53 | }
54 |
55 | impl HeadTracker {
56 | pub fn load_config(&mut self) -> Config {
57 | // ! Error occurs when config data types in file does match config data types in code
58 | let cfg: AppConfig = match confy::load(APP_NAME, "config") {
59 | Ok(cfg) => cfg,
60 | Err(e) => {
61 | tracing::error!("Error loading config: {}", e);
62 | AppConfig::default()
63 | }
64 | };
65 |
66 | let selected_camera = match self.camera_list.get(&cfg.selected_camera) {
67 | Some(_) => cfg.selected_camera,
68 | None => match self.camera_list.keys().next() {
69 | Some(key) => key.clone(),
70 | None => "No Device Found".to_string(),
71 | },
72 | };
73 |
74 | Config {
75 | min_cutoff: Arc::new(AtomicF32::new(cfg.min_cutoff)),
76 | beta: Arc::new(AtomicF32::new(cfg.beta)),
77 |
78 | ip: cfg.ip.to_string(),
79 | port: cfg.port.to_string(),
80 |
81 | fps: Arc::new(AtomicU32::new(cfg.fps)),
82 |
83 | selected_camera,
84 | hide_camera: cfg.hide_camera,
85 | }
86 | }
87 | pub fn save_config(&self) {
88 | let config = AppConfig {
89 | ip: self.config.ip.clone(),
90 | port: self.config.port.clone(),
91 | min_cutoff: self.config.min_cutoff.load(Ordering::SeqCst),
92 | beta: self.config.beta.load(Ordering::SeqCst),
93 | fps: self.config.fps.load(Ordering::SeqCst),
94 | selected_camera: self.config.selected_camera.clone(),
95 | hide_camera: self.config.hide_camera,
96 | };
97 |
98 | match confy::store(APP_NAME, "config", config) {
99 | Ok(_) => tracing::info!("Config saved"),
100 | Err(e) => tracing::error!("Error saving config: {}", e),
101 | }
102 | }
103 | }
104 |
--------------------------------------------------------------------------------
/src/structs/tddfa.rs:
--------------------------------------------------------------------------------
1 | use onnxruntime::{
2 | ndarray::{ArrayBase, Dim, OwnedRepr},
3 | session::Session,
4 | };
5 |
6 | pub struct Tddfa {
7 | pub landmark_model: Session<'static>,
8 | pub size: i32,
9 | pub mean_array: [f32; 62],
10 | pub std_array: [f32; 62],
11 | pub u_base_array: ArrayBase, Dim<[usize; 2]>>,
12 | pub w_shp_base_array: ArrayBase, Dim<[usize; 2]>>,
13 | pub w_exp_base_array: ArrayBase, Dim<[usize; 2]>>,
14 | }
15 |
--------------------------------------------------------------------------------
/src/tddfa.rs:
--------------------------------------------------------------------------------
1 | /// Model inference and generating the head pose
2 | /// Python source - https://github.com/cleardusk/3DDFA_V2/blob/master/TDDFA.py
3 | // Importing Modules
4 | use crate::{
5 | consts::{DATA, MODEL},
6 | enums::crop_policy::CropPolicy,
7 | structs::{data::Jsondata, tddfa::Tddfa},
8 | utils::{
9 | common::get_ndarray,
10 | image::crop_img,
11 | tddfa::{
12 | parse_param, parse_roi_box_from_bbox, parse_roi_box_from_landmark, similar_transform,
13 | },
14 | },
15 | };
16 |
17 | use onnxruntime::{
18 | environment::Environment,
19 | ndarray::{arr1, arr2, s, Array4, ArrayBase, Dim, Order, OwnedRepr},
20 | tensor::OrtOwnedTensor,
21 | GraphOptimizationLevel,
22 | };
23 | use std::ops::Deref;
24 |
25 | use anyhow::{anyhow, Result};
26 | use once_cell::sync::Lazy;
27 | use opencv::{
28 | core::{Size, Vec3b},
29 | imgproc,
30 | prelude::{Mat, MatTraitConstManual},
31 | };
32 |
33 | impl Tddfa {
34 | pub fn new(size: i32) -> Result {
35 | static ENVIRONMENT: Lazy = Lazy::new(|| {
36 | match Environment::builder()
37 | .with_name("Landmark Detection")
38 | .with_log_level(onnxruntime::LoggingLevel::Warning)
39 | .build()
40 | {
41 | Ok(environment) => environment,
42 | Err(error) => {
43 | tracing::error!("Unable to create environment : {:?}", error);
44 | std::process::exit(1);
45 | }
46 | }
47 | });
48 |
49 | let landmark_model = ENVIRONMENT
50 | .new_session_builder()?
51 | .with_optimization_level(GraphOptimizationLevel::All)?
52 | .with_number_threads(1)?
53 | .with_model_from_memory(MODEL)?;
54 |
55 | let data = serde_json::from_slice::(DATA)?;
56 |
57 | let mean_array: [f32; 62] = data.mean.as_slice().try_into()?;
58 | let std_array: [f32; 62] = data.std.as_slice().try_into()?;
59 |
60 | let u_base_array = get_ndarray(data.u_base, (204, 1));
61 | let w_shp_base_array = get_ndarray(data.w_shp_base, (204, 40));
62 | let w_exp_base_array = get_ndarray(data.w_exp_base, (204, 10));
63 |
64 | Ok(Self {
65 | landmark_model,
66 | size,
67 | mean_array,
68 | std_array,
69 | u_base_array,
70 | w_shp_base_array,
71 | w_exp_base_array,
72 | })
73 | }
74 |
75 | fn preprocess_input(
76 | &self,
77 | input_frame: &Mat,
78 | roi_box: &[f32; 4],
79 | ) -> Result, Dim<[usize; 4]>>>> {
80 | // let mut rgb_frame = Mat::default();
81 | // imgproc::cvt_color(&input_frame, &mut rgb_frame, imgproc::COLOR_BGR2RGB, 0)?;
82 |
83 | // Cropping the image
84 | let cropped_image = crop_img(input_frame, roi_box)?;
85 |
86 | // Resizing the frame
87 | let mut resized_frame = Mat::default();
88 | imgproc::resize(
89 | &cropped_image,
90 | &mut resized_frame,
91 | Size {
92 | width: self.size,
93 | height: self.size,
94 | },
95 | 0.0,
96 | 0.0,
97 | imgproc::INTER_LINEAR, //*INTER_AREA, // https://stackoverflow.com/a/51042104 | Speed -> https://stackoverflow.com/a/44278268
98 | )?; // ! Error handling here
99 |
100 | let vec = Mat::data_typed::(&resized_frame)?;
101 | // .?("Unable to convert the image to vector");
102 |
103 | let array = Array4::from_shape_fn(
104 | (1, 3, self.size as usize, self.size as usize),
105 | |(_, c, y, x)| {
106 | (f32::from(Vec3b::deref(&vec[x + y * self.size as usize])[c]) - 127.5) / 128.0
107 | },
108 | );
109 |
110 | Ok(vec![array])
111 | }
112 |
113 | // ? Many adding generick types of remove two face_box, ver input
114 | pub fn run(
115 | &mut self,
116 | input_frame: &Mat,
117 | face_box: [f32; 4],
118 | ver: &[Vec],
119 | crop_policy: CropPolicy,
120 | ) -> Result<([f32; 62], [f32; 4])> {
121 | let roi_box = match crop_policy {
122 | CropPolicy::Box => parse_roi_box_from_bbox(face_box),
123 | CropPolicy::Landmark => parse_roi_box_from_landmark(ver),
124 | };
125 |
126 | // println!("{:?}", roi_box);
127 | let model_input = self.preprocess_input(input_frame, &roi_box)?;
128 |
129 | // Inference
130 | let param: Vec> = self.landmark_model.run(model_input)?;
131 | let param: [f32; 62] = match param[0].as_slice() {
132 | Some(slice) => slice.try_into()?,
133 | None => {
134 | tracing::error!("Unable to convert the tensor to slice param");
135 | return Err(anyhow!("Unable to convert the tensor to slice param"));
136 | }
137 | };
138 |
139 | // Postprocessing - Rescaling the output by multiplying with standard deviation and adding mean
140 | let processed_param = arr1(¶m) * arr1(&self.std_array) + arr1(&self.mean_array);
141 | let processed_param: [f32; 62] = match processed_param.as_slice() {
142 | Some(slice) => slice.try_into()?,
143 | None => {
144 | tracing::error!("Unable to convert the tensor to slice processed_param");
145 | return Err(anyhow!(
146 | "Unable to convert the tensor to slice processed_param"
147 | ));
148 | }
149 | };
150 |
151 | Ok((processed_param, roi_box))
152 | }
153 |
154 | pub fn recon_vers(&self, param: [f32; 62], roi_box: [f32; 4]) -> Vec> {
155 | let (r, offset, alpha_shp, alpha_exp) = parse_param(¶m);
156 |
157 | let pts3d = &self.u_base_array
158 | + (&self.w_shp_base_array.dot(&arr2(&alpha_shp)))
159 | + (&self.w_exp_base_array.dot(&arr2(&alpha_exp)));
160 |
161 | let pts3d = match pts3d.to_shape(((3, 68), Order::ColumnMajor)) {
162 | Ok(pts3d) => pts3d,
163 | Err(_) => {
164 | tracing::error!("Unable to convert the tensor to shape");
165 | return similar_transform(vec![vec![0.0, 1.0, 2.0]; 3], roi_box, self.size as f32);
166 | }
167 | };
168 | let pts3d = arr2(&r).dot(&pts3d) + arr2(&offset);
169 |
170 | let vec_pts_3d = vec![
171 | pts3d.slice(s![0, ..]).to_vec(),
172 | pts3d.slice(s![1, ..]).to_vec(),
173 | pts3d.slice(s![2, ..]).to_vec(),
174 | ];
175 | similar_transform(vec_pts_3d, roi_box, self.size as f32)
176 | }
177 | }
178 |
// Smoke test: runs both crop policies end-to-end on a constant-colour frame.
#[test]
#[allow(unused_variables)]
pub fn test() -> Result<()> {
    use opencv::core::{Scalar, CV_8UC3};

    let size = 120;

    let mut bfm = Tddfa::new(size)?;

    // 120x120 3-channel frame filled with a constant colour.
    let frame = Mat::new_rows_cols_with_default(120, 120, CV_8UC3, Scalar::new(255., 0., 0., 0.))?;

    let face_box = [30., 30., 60., 60.];
    // First pass: crop derived from the detector-style bounding box.
    let (param, roi_box) = bfm.run(
        &frame,
        face_box,
        &[vec![1., 2., 3.], vec![4., 5., 6.], vec![7., 8., 9.]],
        CropPolicy::Box,
    )?;
    let pts_3d = bfm.recon_vers(param, roi_box);

    // Second pass: crop derived from the landmarks of the first pass.
    let (param, roi_box) = bfm.run(&frame, face_box, &pts_3d, CropPolicy::Landmark)?;

    // let roi_box = [150., 150., 400., 400.];
    // let param = [
    //     1., 2., 3., 4., 5., 6., 7., 8., 9., 10., 11., 12., 13., 14., 15., 16., 17., 18., 19., 20.,
    //     21., 22., 23., 24., 25., 26., 27., 28., 29., 30., 31., 32., 33., 34., 35., 36., 37., 38.,
    //     39., 40., 41., 42., 43., 44., 45., 46., 47., 48., 49., 50., 51., 52., 53., 54., 55., 56.,
    //     57., 58., 59., 60., 61.,
    // ];
    // let pts_3d = bfm.recon_vers(param, roi_box);

    Ok(())
}
212 |
--------------------------------------------------------------------------------
/src/utils/common.rs:
--------------------------------------------------------------------------------
1 | /// Common utility functions used by multiple modules
2 | // Importing Modules
3 | use crate::enums::extreme::Extreme;
4 | use onnxruntime::ndarray::{Array2, ArrayBase, Axis, Dim, OwnedRepr};
5 |
6 | // Get minimum or maximum value from a vector containing floats
7 | pub fn get_extreme_value(vec: &[f32], extreme: Extreme) -> f32 {
8 | let output = match extreme {
9 | Extreme::Min => vec.iter().min_by(|a, b| a.total_cmp(b)),
10 | Extreme::Max => vec.iter().max_by(|a, b| a.total_cmp(b)),
11 | };
12 |
13 | match output {
14 | Some(value) => *value,
15 | None => {
16 | tracing::error!("Unable to get extreme value. Default to 0");
17 | 0.
18 | }
19 | }
20 | }
21 |
22 | // Converting a vector to ndarray
23 | pub fn get_ndarray(
24 | vec: Vec>,
25 | shape: (usize, usize),
26 | ) -> ArrayBase, Dim<[usize; 2]>> {
27 | let mut array = Array2::::default(shape);
28 | for (i, mut row) in array.axis_iter_mut(Axis(0)).enumerate() {
29 | for (j, col) in row.iter_mut().enumerate() {
30 | *col = vec[i][j];
31 | }
32 | }
33 |
34 | array
35 | }
36 |
#[cfg(test)]
mod tests {
    use super::*;

    // Smoke test: a well-formed 3x2 input converts without panicking.
    #[test]
    fn test_get_ndarray() {
        let _result = get_ndarray(vec![vec![1., 2.], vec![2., 3.], vec![3., 4.]], (3, 2));
    }

    // Min/max extraction over a small slice.
    #[test]
    fn test_get_extreme() {
        assert_eq!(get_extreme_value(&[1., 2., 3.], Extreme::Max), 3.);
        assert_eq!(get_extreme_value(&[1., 2., 3.], Extreme::Min), 1.)
    }
}
52 |
--------------------------------------------------------------------------------
/src/utils/headpose.rs:
--------------------------------------------------------------------------------
1 | /// Utility function used by headpose module
2 | /// The code is mostly converted from python to rust with assistance from ChatGPT.
3 | /// Python source - https://github.com/cleardusk/3DDFA_V2/blob/fa8dfc479b46c218e7d375706c673d5823ddb464/utils/pose.py
4 | // Imporing Modules
5 | use crate::enums::extreme::Extreme;
6 | use crate::utils::common::{get_extreme_value, get_ndarray};
7 | use onnxruntime::ndarray::{arr2, s, Axis};
8 | use std::f32::consts::{FRAC_PI_2, PI};
9 |
/// Decomposes a 3x4 pose matrix into scale `s`, a row-normalised 3x3 rotation
/// `r` (third row completed via cross product) and translation `t3d`.
/// Restores the `::<f32>` turbofish lost in extraction and hoists the row
/// norms, which were previously recomputed three times per row.
fn p2s_rt(p: &[[f32; 4]]) -> (f32, [[f32; 3]; 3], [f32; 3]) {
    // Euclidean length of a 3-vector.
    fn norm(v: &[f32; 3]) -> f32 {
        v.iter().map(|&x| x * x).sum::<f32>().sqrt()
    }

    let t3d = [p[0][3], p[1][3], p[2][3]];
    let r1 = [p[0][0], p[0][1], p[0][2]];
    let r2 = [p[1][0], p[1][1], p[1][2]];

    // Average row length approximates the uniform scale factor.
    let s = (norm(&r1) + norm(&r2)) / 2.0;

    let n1 = norm(&r1);
    let r1 = [r1[0] / n1, r1[1] / n1, r1[2] / n1];

    let n2 = norm(&r2);
    let r2 = [r2[0] / n2, r2[1] / n2, r2[2] / n2];

    // r3 = r1 x r2 completes the basis.
    let r3 = [
        r1[1] * r2[2] - r1[2] * r2[1],
        r1[2] * r2[0] - r1[0] * r2[2],
        r1[0] * r2[1] - r1[1] * r2[0],
    ];

    (s, [r1, r2, r3], t3d)
}
40 |
// Converts a 3x3 rotation matrix to (x, y, z) angles in radians, with special
// handling when |r[2][0]| is close to 1 (degenerate asin branch).
fn matrix2angle(r: &[[f32; 3]]) -> (f32, f32, f32) {
    if r[2][0] > 0.998 {
        let y = -r[0][1].atan2(-r[0][2]);
        (FRAC_PI_2, y, 0.0)
    } else if r[2][0] < -0.998 {
        let y = r[0][1].atan2(r[0][2]);
        (-FRAC_PI_2, y, 0.0)
    } else {
        let x = r[2][0].asin();
        let cos_x = x.cos();
        let y = (r[2][1] / cos_x).atan2(r[2][2] / cos_x);
        let z = (r[1][0] / cos_x).atan2(r[0][0] / cos_x);
        (x, y, z)
    }
}
60 |
61 | pub fn calc_pose(param: &[f32; 62]) -> ([[f32; 4]; 3], [f32; 3]) {
62 | let p = [
63 | [param[0], param[1], param[2], param[3]],
64 | [param[4], param[5], param[6], param[7]],
65 | [param[8], param[9], param[10], param[11]],
66 | ];
67 |
68 | let (_, r, t3d) = p2s_rt(&p);
69 | let p = [
70 | [r[0][0], r[0][1], r[0][2], t3d[0]],
71 | [r[1][0], r[1][1], r[1][2], t3d[1]],
72 | [r[2][0], r[2][1], r[2][2], t3d[2]],
73 | ];
74 |
75 | let pose = matrix2angle(&r);
76 |
77 | let pose = [
78 | pose.0 * 180.0 / PI,
79 | pose.1 * 180.0 / PI,
80 | pose.2 * 180.0 / PI,
81 | ];
82 |
83 | (p, pose)
84 | }
85 |
// Builds the 10-point wireframe "camera box": a rear square at depth 0 and a
// front square at depth == front size (each square closed by repeating its
// first corner).
fn build_camera_box(rear_size: f32) -> Vec<[f32; 3]> {
    // Closed square of half-side `size` at depth `depth`.
    fn ring(size: f32, depth: f32) -> [[f32; 3]; 5] {
        [
            [-size, -size, depth],
            [-size, size, depth],
            [size, size, depth],
            [size, -size, depth],
            [-size, -size, depth],
        ]
    }

    let mut front = (4. / 3. * rear_size).ceil();
    // ? Subtracting by -1 because in python, the int conversion returns the greatest integer instead of rounding the values to integer
    if (rear_size.ceil() - rear_size).abs() > f32::EPSILON {
        front -= 1.;
    }

    let mut point_3d = ring(rear_size, 0.).to_vec();
    point_3d.extend_from_slice(&ring(front, front));
    point_3d
}
112 |
113 | fn calc_hypotenuse(pts: &[Vec]) -> f32 {
114 | let bbox = [
115 | get_extreme_value(&pts[0], Extreme::Min),
116 | get_extreme_value(&pts[1], Extreme::Min),
117 | get_extreme_value(&pts[0], Extreme::Max),
118 | get_extreme_value(&pts[1], Extreme::Max),
119 | ];
120 |
121 | let center = [(bbox[0] + bbox[2]) / 2.0, (bbox[1] + bbox[3]) / 2.0];
122 | let radius = f32::max(bbox[2] - bbox[0], bbox[3] - bbox[1]) / 2.0;
123 | let bbox = [
124 | center[0] - radius,
125 | center[1] - radius,
126 | center[0] + radius,
127 | center[1] + radius,
128 | ];
129 | let llength = (bbox[2] - bbox[0]).hypot(bbox[3] - bbox[1]);
130 | llength / 3.0
131 | }
132 |
133 | // TODO : Cleaning this up
134 | pub fn gen_point2d(p: &[[f32; 4]; 3], ver: Vec>) -> (Vec>, f32) {
135 | let llength = calc_hypotenuse(&ver);
136 | let point_3d = build_camera_box(llength);
137 |
138 | let point_3d_homo: Vec<[f32; 4]> = point_3d
139 | .into_iter()
140 | .map(|p| [p[0], p[1], p[2], 1.])
141 | .collect();
142 |
143 | let mut binding = arr2(&point_3d_homo).dot(&arr2(p).t());
144 | let mut point_2d = binding.slice_mut(s![.., ..2]);
145 |
146 | point_2d.slice_mut(s![.., 1]).map_inplace(|x| *x = -*x);
147 | let sliced_ver_mean = get_ndarray(ver, (3, 20))
148 | .slice_mut(s![..2, ..])
149 | .mean_axis(Axis(1))
150 | .expect("Unable to calculate sliced_ver_mean in gen_point2d");
151 |
152 | let sliced_point2d_mean = point_2d
153 | .to_owned()
154 | .slice_mut(s![..4, ..2])
155 | .mean_axis(Axis(0))
156 | .expect("Unable to calculate sliced_point2d_mean in gen_point2d");
157 |
158 | let point_2d = point_2d.slice_mut(s![.., ..2]).map_axis(Axis(1), |x| {
159 | (&x - &sliced_point2d_mean + &sliced_ver_mean).to_vec()
160 | });
161 |
162 | (point_2d.to_vec(), llength)
163 | }
164 |
#[cfg(test)]
mod tests {
    use super::*;

    // Scale / rotation / translation decomposition of a simple 3x4 matrix.
    #[test]
    fn test_p2s_rt() {
        let p = [
            [1.0, 2.0, 3.0, 4.0],
            [5.0, 6.0, 7.0, 8.0],
            [9.0, 10.0, 11.0, 12.0],
        ];
        let (s, r, t3d) = p2s_rt(&p);
        assert_eq!(s, 7.114_873);
        assert_eq!(
            r,
            [
                [0.267_261_24, 0.534_522_5, 0.801_783_7],
                [0.476_731_3, 0.572_077_6, 0.667_423_8],
                [-0.101_929_426, 0.203_858_87, -0.101_929_44]
            ]
        );
        assert_eq!(t3d, [4.0, 8.0, 12.0]);
    }
    // r[2][0] > 0.998 exercises the degenerate branch (x pinned to pi/2).
    #[test]
    fn test_matrix2angle() {
        let r = [[1.0, 2.0, 3.0], [4.0, 5.0, 6.0], [7.0, 8.0, 9.0]];
        let (x, y, z) = matrix2angle(&r);

        assert_eq!(x, 1.570_796_4);
        assert_eq!(y, -2.553_59);
        assert_eq!(z, 0.0);
    }

    #[test]
    fn test_calc_pose() {
        let param = [
            0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0, 13.0, 14.0, 15.0,
            16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0, 24.0, 25.0, 26.0, 27.0, 28.0, 29.0,
            30.0, 31.0, 32.0, 33.0, 34.0, 35.0, 36.0, 37.0, 38.0, 39.0, 40.0, 41.0, 42.0, 43.0,
            44.0, 45.0, 46.0, 47.0, 48.0, 49.0, 50.0, 51.0, 52.0, 53.0, 54.0, 55.0, 56.0, 57.0,
            58.0, 59.0, 60.0, 61.0,
        ];

        // `&param` was previously mangled to an HTML entity by extraction.
        let (p, updated_pose) = calc_pose(&param);

        assert_eq!(
            p,
            [
                [0.0, 0.447_213_6, 0.894_427_2, 3.0],
                [0.455_842_32, 0.569_802_9, 0.683_763_44, 7.0],
                [-0.203_858_88, 0.407_717_76, -0.203_858_88, 11.0]
            ]
        );
        assert_eq!(updated_pose, [-11.762_708, 116.565_04, 90.0]);
    }

    // Integer rear size: the -1 correction must NOT fire.
    #[test]
    fn test_build_camera_box() {
        let llength = 90.0;
        let point_3d = build_camera_box(llength);
        assert_eq!(
            point_3d,
            [
                [-llength, -llength, 0.],
                [-llength, llength, 0.],
                [llength, llength, 0.],
                [llength, -llength, 0.],
                [-llength, -llength, 0.],
                [-120., -120., 120.],
                [-120., 120., 120.],
                [120., 120., 120.],
                [120., -120., 120.],
                [-120., -120., 120.]
            ]
        );

        // Fractional rear sizes: the -1 correction applies to the front face.
        let llength = 90.1;
        let point_3d = build_camera_box(llength);
        assert_eq!(
            point_3d,
            [
                [-llength, -llength, 0.],
                [-llength, llength, 0.],
                [llength, llength, 0.],
                [llength, -llength, 0.],
                [-llength, -llength, 0.],
                [-120., -120., 120.],
                [-120., 120., 120.],
                [120., 120., 120.],
                [120., -120., 120.],
                [-120., -120., 120.]
            ]
        );

        let llength = 89.9;
        let point_3d = build_camera_box(llength);
        assert_eq!(
            point_3d,
            [
                [-llength, -llength, 0.],
                [-llength, llength, 0.],
                [llength, llength, 0.],
                [llength, -llength, 0.],
                [-llength, -llength, 0.],
                [-119., -119., 119.],
                [-119., 119., 119.],
                [119., 119., 119.],
                [119., -119., 119.],
                [-119., -119., 119.]
            ]
        )
    }

    #[test]
    fn test_calc_hypotenuse() {
        let pts = [
            vec![
                1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0, 13.0, 14.0, 15.0,
                16.0, 17.0, 18.0, 19.0, 20.0,
            ],
            vec![
                1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0, 13.0, 14.0, 15.0,
                16.0, 17.0, 18.0, 19.0, 20.0,
            ],
            vec![
                1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0, 13.0, 14.0, 15.0,
                16.0, 17.0, 18.0, 19.0, 20.0,
            ],
        ];
        let expected = 8.95;

        let result = calc_hypotenuse(&pts);

        assert!((result - expected).abs() < 0.01);
    }

    #[test]
    fn test_gen_point2d() {
        let p = [
            [1.0, 2.0, 3.0, 4.0],
            [5.0, 6.0, 7.0, 8.0],
            [9.0, 10.0, 11.0, 12.0],
        ];
        let ver = vec![
            vec![
                1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0, 13.0, 14.0, 15.0,
                16.0, 17.0, 18.0, 19.0, 20.0,
            ],
            vec![
                1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0, 13.0, 14.0, 15.0,
                16.0, 17.0, 18.0, 19.0, 20.0,
            ],
            vec![
                1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0, 13.0, 14.0, 15.0,
                16.0, 17.0, 18.0, 19.0, 20.0,
            ],
        ];

        let (point_2d, length) = gen_point2d(&p, ver);
        assert_eq!(
            point_2d,
            vec![
                vec![-16.370058, 109.023544],
                vec![19.456686, 1.5433149],
                vec![37.370056, -88.023544],
                vec![1.543314, 19.456684],
                vec![-16.370058, 109.023544],
                vec![10.5, 54.5],
                vec![54.5, -77.5],
                vec![76.5, -187.5],
                vec![32.5, -55.5],
                vec![10.5, 54.5]
            ]
        );
        assert_eq!(length, 8.956_686);
    }
}
342 |
--------------------------------------------------------------------------------
/src/utils/image.rs:
--------------------------------------------------------------------------------
1 | /// Utility function for processing image
2 | /// Python source - https://github.com/cleardusk/3DDFA/blob/d5c1f6a647a89070b1f9ea4e88c910b743a1a87a/utils/inference.py#L20
3 | use opencv::core::{Mat, Rect};
4 | use opencv::prelude::MatTraitConst;
5 |
6 | pub fn crop_img(img: &Mat, roi_box: &[f32; 4]) -> Result {
7 | let h = img.size()?.height;
8 | let w = img.size()?.width;
9 |
10 | let sx = roi_box[0].round() as i32;
11 | let sy = roi_box[1].round() as i32;
12 | let ex = roi_box[2].round() as i32;
13 | let ey = roi_box[3].round() as i32;
14 |
15 | let dh = ey - sy;
16 | let dw = ex - sx;
17 |
18 | let (sx, _) = if sx < 0 { (0, -sx) } else { (sx, 0) };
19 | let (ex, _) = if ex > w { (w, dw - (ex - w)) } else { (ex, dw) };
20 | let (sy, _) = if sy < 0 { (0, -sy) } else { (sy, 0) };
21 | let (ey, _) = if ey > h { (h, dh - (ey - h)) } else { (ey, dh) };
22 |
23 | let width = ex - sx;
24 | let height = ey - sy;
25 | // if width < 0 {
26 | // width = 1;
27 | // }
28 | // if height < 0 {
29 | // height = 1;
30 | // }
31 | // if sy > h - 1 {
32 | // sy = h - 1;
33 | // }
34 | // if sx > w - 1 {
35 | // sx = w - 1;
36 | // }
37 | // println!("{} {} {} {}", sx, sy, width, height);
38 | let roi = Rect::new(sx, sy, width, height);
39 | // Ok(Mat::roi(img, roi)?.clone_pointee()) // ! Need to deal with this, when camera disconnects while running, error occures here
40 | let result = Mat::roi(img, roi);
41 |
42 | match result {
43 | Ok(mat) => Ok(mat.try_clone().unwrap()),
44 | Err(e) => {
45 | tracing::error!("Error cropping image: {}", e);
46 | Ok(Mat::default())
47 | }
48 | }
49 | }
50 |
// Verifies in-bounds crops match the requested size and out-of-bounds crops
// are clamped to the frame edges.
#[test]
fn test_crop_img() -> Result<(), opencv::Error> {
    use opencv::{
        core::{Scalar, CV_8UC3},
        prelude::MatTraitConst,
    };

    let frame = Mat::new_rows_cols_with_default(120, 120, CV_8UC3, Scalar::new(255., 0., 0., 0.))?;
    let roi_box = [50., 60., 100., 120.];
    let result = crop_img(&frame, &roi_box)?;

    // Fully inside the 120x120 frame: output matches the requested box.
    assert_eq!(result.rows() as f32, roi_box[3] - roi_box[1]);
    assert_eq!(result.cols() as f32, roi_box[2] - roi_box[0]);

    let roi_box = [50., 60., 400., 400.];
    let result = crop_img(&frame, &roi_box)?;

    // Box extends past the frame: edges clamp to 120, giving 60x70.
    assert_eq!(result.rows() as f32, 60.);
    assert_eq!(result.cols() as f32, 70.);

    Ok(())
}
73 |
--------------------------------------------------------------------------------
/src/utils/mod.rs:
--------------------------------------------------------------------------------
// Shared utility modules.
pub mod common;   // ndarray conversion and min/max extraction helpers
pub mod headpose; // pose matrix decomposition and angle computation
pub mod image;    // image cropping helpers
pub mod tddfa;    // 3DMM parameter parsing and ROI helpers
pub mod visualize;
6 |
--------------------------------------------------------------------------------
/src/utils/tddfa.rs:
--------------------------------------------------------------------------------
1 | /// Utility function used by tddfa module
2 | /// The code is mostly converted from python to rust with assistance from ChatGPT.
3 | /// Python source - https://github.com/cleardusk/3DDFA_V2/blob/master/utils/tddfa_util.py
4 | /// https://github.com/cleardusk/3DDFA_V2/blob/master/utils/functions.py#L65
5 | use crate::enums::extreme::Extreme;
6 | use crate::utils::common::get_extreme_value;
7 |
8 | pub fn parse_param(
9 | param: &[f32; 62],
10 | ) -> ([[f32; 3]; 3], [[f32; 1]; 3], [[f32; 1]; 40], [[f32; 1]; 10]) {
11 | // TODO: Can use type alias/defininations to improve code redability.
12 | let n = param.len();
13 |
14 | let (trans_dim, shape_dim, _) = match n {
15 | 62 => (12, 40, 10),
16 | 72 => (12, 40, 20),
17 | 141 => (12, 100, 29),
18 | invalid_size => {
19 | tracing::error!("Undefined templated param parsing rule : {invalid_size}");
20 | panic!()
21 | }
22 | };
23 |
24 | let r_ = [
25 | [param[0], param[1], param[2], param[3]],
26 | [param[4], param[5], param[6], param[7]],
27 | [param[8], param[9], param[10], param[11]],
28 | ];
29 |
30 | let r = [
31 | [r_[0][0], r_[0][1], r_[0][2]],
32 | [r_[1][0], r_[1][1], r_[1][2]],
33 | [r_[2][0], r_[2][1], r_[2][2]],
34 | ];
35 |
36 | let offset = [[r_[0][3]], [r_[1][3]], [r_[2][3]]];
37 |
38 | let mut alpha_shp = [[0.0; 1]; 40];
39 | for i in 0..40 {
40 | alpha_shp[i][0] = param[trans_dim + i];
41 | }
42 |
43 | let mut alpha_exp = [[0.0; 1]; 10];
44 | for i in 0..10 {
45 | alpha_exp[i][0] = param[trans_dim + shape_dim + i];
46 | }
47 |
48 | (r, offset, alpha_shp, alpha_exp)
49 | }
50 |
51 | pub fn similar_transform(mut pts3d: Vec>, roi_box: [f32; 4], size: f32) -> Vec> {
52 | pts3d[0].iter_mut().for_each(|p| *p -= 1.0);
53 | pts3d[2].iter_mut().for_each(|p| *p -= 1.0);
54 | pts3d[1].iter_mut().for_each(|p| *p = size - *p);
55 |
56 | let sx = roi_box[0];
57 | let sy = roi_box[1];
58 | let ex = roi_box[2];
59 | let ey = roi_box[3];
60 |
61 | let scale_x = (ex - sx) / size;
62 | let scale_y = (ey - sy) / size;
63 | pts3d[0]
64 | .iter_mut()
65 | .for_each(|p| *p = (*p).mul_add(scale_x, sx));
66 | pts3d[1]
67 | .iter_mut()
68 | .for_each(|p| *p = (*p).mul_add(scale_y, sy));
69 |
70 | let s = (scale_x + scale_y) / 2.0;
71 | pts3d[2].iter_mut().for_each(|p| *p *= s);
72 |
73 | let min_z = get_extreme_value(&pts3d[2], Extreme::Min);
74 |
75 | pts3d[2].iter_mut().for_each(|p| *p -= min_z);
76 |
77 | pts3d
78 | }
79 |
80 | pub fn parse_roi_box_from_landmark(pts: &[Vec]) -> [f32; 4] {
81 | let bbox = [
82 | get_extreme_value(&pts[0], Extreme::Min),
83 | get_extreme_value(&pts[1], Extreme::Min),
84 | get_extreme_value(&pts[0], Extreme::Max),
85 | get_extreme_value(&pts[1], Extreme::Max),
86 | ];
87 |
88 | let center = [(bbox[0] + bbox[2]) / 2., (bbox[1] + bbox[3]) / 2.];
89 | let radius = f32::max(bbox[2] - bbox[0], bbox[3] - bbox[1]) / 2.;
90 | let bbox = [
91 | center[0] - radius,
92 | center[1] - radius,
93 | center[0] + radius,
94 | center[1] + radius,
95 | ];
96 |
97 | let llength = ((bbox[2] - bbox[0]).powi(2) + (bbox[3] - bbox[1]).powi(2)).sqrt();
98 |
99 | let center_x = (bbox[2] + bbox[0]) / 2.;
100 | let center_y = (bbox[3] + bbox[1]) / 2.;
101 |
102 | let mut roi_box = [0.0; 4];
103 | roi_box[0] = center_x - llength / 2.;
104 | roi_box[1] = center_y - llength / 2.;
105 | roi_box[2] = roi_box[0] + llength;
106 | roi_box[3] = roi_box[1] + llength;
107 |
108 | roi_box
109 | }
110 |
// Converts a face-detector bounding box into the square crop region the
// landmark model expects: centred slightly below the box centre (0.14 of the
// size) and scaled up by 1.58.
pub fn parse_roi_box_from_bbox(bbox: [f32; 4]) -> [f32; 4] {
    let [left, top, right, bottom] = bbox;

    let old_size = (right - left + bottom - top) / 2.;
    let center_x = right - (right - left) / 2.;
    // Shift the vertical centre down by 14% of the box size.
    let center_y = old_size.mul_add(0.14, bottom - (bottom - top) / 2.);
    let size = (old_size * 1.58).round();

    let half = size / 2.;
    let start_x = center_x - half;
    let start_y = center_y - half;
    [start_x, start_y, start_x + size, start_y + size]
}
129 |
130 | #[cfg(test)]
131 | mod tests {
132 | use super::*;
133 |
134 | #[test]
135 | fn test_parse_param() {
136 | let param = [
137 | 0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0, 13.0, 14.0, 15.0,
138 | 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0, 24.0, 25.0, 26.0, 27.0, 28.0, 29.0,
139 | 30.0, 31.0, 32.0, 33.0, 34.0, 35.0, 36.0, 37.0, 38.0, 39.0, 40.0, 41.0, 42.0, 43.0,
140 | 44.0, 45.0, 46.0, 47.0, 48.0, 49.0, 50.0, 51.0, 52.0, 53.0, 54.0, 55.0, 56.0, 57.0,
141 | 58.0, 59.0, 60.0, 61.0,
142 | ];
143 |
144 | let result = parse_param(¶m);
145 |
146 | let expected = (
147 | [[0.0, 1.0, 2.0], [4.0, 5.0, 6.0], [8.0, 9.0, 10.0]],
148 | [[3.0], [7.0], [11.0]],
149 | [
150 | [12.0],
151 | [13.0],
152 | [14.0],
153 | [15.0],
154 | [16.0],
155 | [17.0],
156 | [18.0],
157 | [19.0],
158 | [20.0],
159 | [21.0],
160 | [22.0],
161 | [23.0],
162 | [24.0],
163 | [25.0],
164 | [26.0],
165 | [27.0],
166 | [28.0],
167 | [29.0],
168 | [30.0],
169 | [31.0],
170 | [32.0],
171 | [33.0],
172 | [34.0],
173 | [35.0],
174 | [36.0],
175 | [37.0],
176 | [38.0],
177 | [39.0],
178 | [40.0],
179 | [41.0],
180 | [42.0],
181 | [43.0],
182 | [44.0],
183 | [45.0],
184 | [46.0],
185 | [47.0],
186 | [48.0],
187 | [49.0],
188 | [50.0],
189 | [51.0],
190 | ],
191 | [
192 | [52.0],
193 | [53.0],
194 | [54.0],
195 | [55.0],
196 | [56.0],
197 | [57.0],
198 | [58.0],
199 | [59.0],
200 | [60.0],
201 | [61.0],
202 | ],
203 | );
204 |
205 | assert_eq!(result, expected);
206 | }
207 |
208 | #[test]
209 | fn test_similar_transform() {
210 | let pts3d = vec![
211 | vec![0.0, 1.0, 2.0],
212 | vec![3.0, 4.0, 5.0],
213 | vec![6.0, 7.0, 8.0],
214 | ];
215 | let roi_box = [1., 2., 3., 4.];
216 | let size = 120.;
217 |
218 | let result = similar_transform(pts3d, roi_box, size);
219 |
220 | assert_eq!(
221 | result,
222 | vec![
223 | vec![0.983_333_35, 1.0, 1.016_666_7],
224 | vec![3.95, 3.933_333_4, 3.916_666_7],
225 | vec![0.0, 0.016_666_673, 0.03333334]
226 | ]
227 | );
228 | }
229 |
230 | #[test]
231 | fn test_parse_roi_box_from_landmark() {
232 | let pts = vec![vec![1., 2., 3.], vec![4., 5., 6.], vec![7., 8., 9.]];
233 | let result = parse_roi_box_from_landmark(&pts);
234 |
235 | assert_eq!(result, [0.585_786_46, 3.585_786_3, 3.414_213_7, 6.414_213]);
236 | }
237 |
238 | #[test]
239 | fn test_parse_roi_box_from_bbox() {
240 | let bbox = [1., 2., 3., 4.];
241 | let roi_box = parse_roi_box_from_bbox(bbox);
242 |
243 | assert_eq!(roi_box, [0.5, 1.78, 3.5, 4.779_999_7]);
244 | }
245 | }
246 |
--------------------------------------------------------------------------------
/src/utils/visualize.rs:
--------------------------------------------------------------------------------
1 | use anyhow::Result;
2 | use opencv::{
3 | core::{Mat, Point, Point2i, Scalar},
4 | imgproc::{circle, line, LINE_8},
5 | };
6 |
7 | /// Draws facial landmarks and the face bounding box onto a copy of `frame`.
7 | ///
7 | /// `pts_3d` holds parallel coordinate rows: `pts_3d[0]` are x values and
7 | /// `pts_3d[1]` y values (a z row may be present but is not drawn).
7 | /// `face_box` is `[left, top, right, bottom]`; `color` carries the landmark
7 | /// colour channels passed straight through to OpenCV's `Scalar` (typically
7 | /// BGR order); `size` is the landmark circle radius.
7 | ///
7 | /// Landmarks are drawn filled when there are at most 106 points (a sparse
7 | /// set) and as 1-px outlines otherwise (a dense set). The face box is always
7 | /// drawn with `Scalar(0, 0, 255)` 1-px lines.
7 | ///
7 | /// Returns the annotated copy; the input `frame` is left untouched.
7 | #[allow(dead_code)]
8 | pub fn draw_landmark(
9 |     frame: Mat,
10 |     pts_3d: Vec<Vec<f32>>,
11 |     face_box: [f32; 4],
12 |     color: (f64, f64, f64),
13 |     size: i32,
14 | ) -> Result<Mat> {
15 |     let mut img = frame.clone();
16 |
17 |     // Sparse sets get filled dots (thickness -1); dense sets get thin
17 |     // outlines so the underlying face stays visible. The original dense
17 |     // branch stepped by a constant `sep = 1`, i.e. it visited every index,
17 |     // so both branches share one loop here.
17 |     let n = pts_3d[0].len();
18 |     let thickness = if n <= 106 { -1 } else { 1 };
19 |     for i in 0..n {
20 |         circle(
21 |             &mut img,
22 |             Point::new(pts_3d[0][i] as i32, pts_3d[1][i] as i32),
23 |             size,
24 |             Scalar::new(color.0, color.1, color.2, 0.),
25 |             thickness,
26 |             LINE_8,
27 |             0,
28 |         )?;
29 |     }
30 |
31 |     // Face box corners in clockwise order; consecutive pairs (wrapping) form
31 |     // the four edges of the rectangle.
31 |     let left = face_box[0].round() as i32;
32 |     let top = face_box[1].round() as i32;
33 |     let right = face_box[2].round() as i32;
34 |     let bottom = face_box[3].round() as i32;
35 |     let corners = [
36 |         Point2i::new(left, top),
37 |         Point2i::new(right, top),
38 |         Point2i::new(right, bottom),
39 |         Point2i::new(left, bottom),
40 |     ];
41 |     for i in 0..corners.len() {
42 |         line(
43 |             &mut img,
44 |             corners[i],
45 |             corners[(i + 1) % corners.len()],
46 |             Scalar::new(0., 0., 255., 0.),
47 |             1,
48 |             LINE_8,
49 |             0,
50 |         )?;
51 |     }
52 |
53 |     Ok(img)
54 | }
91 |
--------------------------------------------------------------------------------
/wix/License.rtf:
--------------------------------------------------------------------------------
1 | {\rtf1\ansi\deff0\nouicompat{\fonttbl{\f0\fnil\fcharset0 Arial;}{\f1\fnil\fcharset0 Courier New;}}
2 | {\*\generator Riched20 10.0.15063}\viewkind4\uc1
3 | \pard\sa180\fs24\lang9 Copyright (c) 2024 Shubhamai\par
4 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:\par
5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.\par
6 | \f1 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\f0\par
7 | }
8 |
9 |
--------------------------------------------------------------------------------
/wix/main.wxs:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
17 |
18 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
42 |
43 |
53 |
54 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
73 |
74 |
79 |
80 |
81 |
82 |
83 |
91 |
92 |
93 |
99 |
100 |
101 |
102 |
108 |
109 |
110 |
111 |
117 |
118 |
119 |
120 |
126 |
127 |
128 |
129 |
135 |
136 |
137 |
138 |
144 |
145 |
146 |
147 |
153 |
154 |
155 |
156 |
157 |
158 |
159 |
160 |
163 |
164 |
165 |
166 |
167 |
168 |
169 |
175 |
176 |
177 |
178 |
179 |
180 |
181 |
182 |
188 |
189 |
190 |
191 |
192 |
193 |
202 |
206 |
207 |
208 |
209 |
210 |
211 |
212 |
213 |
214 |
215 |
221 |
222 |
223 |
224 |
225 |
233 |
234 |
235 |
236 |
237 |
245 |
246 |
247 |
248 |
249 |
250 |
251 |
252 |
257 |
258 |
259 |
260 |
261 |
262 |
263 |
264 |
265 |
266 |
271 |
272 |
273 |
274 |
275 |
276 |
280 |
281 |
282 |
283 |
290 |
291 |
292 |
293 |
300 |
301 |
302 |
303 |
304 |
--------------------------------------------------------------------------------