├── .github ├── FUNDING.yml ├── ISSUE_TEMPLATE │ ├── BUG_REPORT.md │ ├── FEATURE_REQUEST.md │ └── config.yml ├── pull_request_template.md └── workflows │ ├── ci-linux.yml │ ├── ci-macos.yml │ └── markdown-link-check.yml ├── .gitignore ├── .swiftformat ├── .swiftlint.yml ├── CODEOWNERS ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── LICENSE ├── Makefile ├── Mintfile ├── Package.swift ├── README.md ├── Sources ├── Assimp │ ├── AiCamera.swift │ ├── AiFace.swift │ ├── AiLight.swift │ ├── AiMatKey.swift │ ├── AiMaterial.swift │ ├── AiMaterialProperty.swift │ ├── AiMesh.swift │ ├── AiNode.swift │ ├── AiPostProcessStep.swift │ ├── AiScene.swift │ ├── AiShadingMode.swift │ ├── AiTexture.swift │ ├── AiTextureType.swift │ ├── Assimp.swift │ ├── CArray.swift │ ├── String+aiString.swift │ └── simd+aiVector.swift └── CAssimp │ ├── module.modulemap │ └── shims.h ├── Tests └── AssimpTests │ ├── AssimpTests.swift │ ├── InternalTests.swift │ └── Resource.swift └── renovate.json /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | github: [ctreffs] 2 | custom: ['https://www.paypal.com/donate?hosted_button_id=GCG3K54SKRALQ'] 3 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/BUG_REPORT.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: 🐛 Bug Report 3 | about: Something isn't working as expected, create a report to help us improve 4 | labels: bug 5 | --- 6 | 7 | 17 | 18 | ### Bug Description 19 | 20 | *A clear and concise description of what the bug is. 21 | Replace this paragraph with a short description of the incorrect behavior. 22 | (If this is a regression, please note the last version of the package that exhibited the correct behavior in addition to your current version.)* 23 | 24 | ### Information 25 | 26 | - **Package version:** What tag or branch of this package are you using? e.g. tag `1.2.3` or branch `main` 27 | - **Platform version:** Please tell us the version number of your operating system. e.g. `macOS 11.2.3` or `Ubuntu 20.04` 28 | - **Swift version:** Paste the output of `swift --version` here. 29 | 30 | ### Checklist 31 | 32 | - [ ] If possible, I've reproduced the issue using the `main`/`master` branch of this package. 33 | - [ ] I've searched for existing issues under the issues tab. 34 | - [ ] The bug is reproducible 35 | 36 | ### Steps to Reproduce 37 | 38 | *Steps to reproduce the behavior:* 39 | 40 | 1. Go to '...' 41 | 2. '....' 42 | 43 | *Replace this paragraph with an explanation of how to reproduce the incorrect behavior. 44 | Include a simple code example, if possible.* 45 | 46 | ### Expected behavior 47 | 48 | *A clear and concise description of what you expected to happen. 49 | Describe what you expect to happen.* 50 | 51 | ### Actual behavior 52 | 53 | *Describe or copy/paste the behavior you observe.* 54 | 55 | ### Screenshots 56 | 57 | If applicable, add screenshots to help explain your problem. 
58 | 59 | ### Additional context 60 | 61 | *Add any other context about the problem here.* 62 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/FEATURE_REQUEST.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: 💡 Feature Request 3 | about: A suggestion for a new feature or idea for this project 4 | labels: enhancement 5 | --- 6 | 7 | 13 | 14 | ### Feature request 15 | 16 | *Replace this paragraph with a description of your proposed feature. 17 | A clear and concise description of what the idea or problem is you want to solve. 18 | Please be sure to describe some concrete use cases for the new feature -- be as specific as possible. 19 | Provide links to existing issues or external references/discussions, if appropriate.* 20 | 21 | ### Describe the solution you'd like 22 | 23 | *A clear and concise description of what you want to happen.* 24 | 25 | ### Describe alternatives you've considered 26 | 27 | *A clear and concise description of any alternative solutions or features you've considered.* 28 | 29 | ### Additional context 30 | 31 | *Add any other context or screenshots about the feature request here.* 32 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | blank_issues_enabled: false 2 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | 11 | 12 | ### Description 13 | 14 | *Replace this paragraph with a description of your changes and rationale. 15 | Provide links to an existing issue or external references/discussions, if appropriate.* 16 | 17 | ### Detailed Design 18 | 19 | *Include any additional information about the design here. At minimum, describe a synopsis of any public API additions.* 20 | 21 | ```swift 22 | /// The new feature implemented by this pull request. 23 | public struct Example: Collection { 24 | } 25 | ``` 26 | 27 | ### Documentation 28 | 29 | *How has the new feature been documented? 30 | Have the relevant portions of the guides in the Documentation folder been updated in addition to symbol-level documentation?* 31 | 32 | ### Testing 33 | 34 | *How is the new feature tested? 35 | Please ensure CI is not broken* 36 | 37 | ### Performance 38 | 39 | *How did you verify the new feature performs as expected?* 40 | 41 | ### Source Impact 42 | 43 | *What is the impact of this change on existing users of this package? Does it deprecate or remove any existing API?* 44 | 45 | ### Checklist 46 | 47 | - [ ] I've read the [Contribution Guidelines](https://github.com/ctreffs/SwiftAssimp/blob/master/CONTRIBUTING.md) 48 | - [ ] I've followed the coding style of the rest of the project. 49 | - [ ] I've added tests covering all new code paths my change adds to the project (to the extent possible). 50 | - [ ] I've added benchmarks covering new functionality (if appropriate). 51 | - [ ] I've verified that my change does not break any existing tests or introduce unexpected benchmark regressions. 52 | - [ ] I've updated the documentation (if appropriate). 
53 | -------------------------------------------------------------------------------- /.github/workflows/ci-linux.yml: -------------------------------------------------------------------------------- 1 | name: Linux 2 | 3 | on: 4 | push: 5 | branches: [ master ] 6 | pull_request: 7 | branches: [ master ] 8 | 9 | jobs: 10 | linux-test-build-release: 11 | runs-on: ubuntu-latest 12 | strategy: 13 | matrix: 14 | swift: ["latest"] 15 | container: 16 | image: swift:${{ matrix.swift }} 17 | steps: 18 | - name: Checkout 19 | uses: actions/checkout@v3.6.0 20 | 21 | - name: Restore APT cache 22 | uses: actions/cache@v3.3.1 23 | with: 24 | path: /var/cache/apt 25 | key: ${{ runner.os }}-apt 26 | 27 | - name: Restore cached SPM dependencies 28 | uses: actions/cache@v3.3.1 29 | with: 30 | path: .build 31 | key: swiftpm-${{ runner.os }}-${{ inputs.swift-version }}-${{ hashFiles('Package.swift') }} 32 | restore-keys: | 33 | swiftpm-${{ runner.os }}-${{ inputs.swift-version }}- 34 | swiftpm-${{ runner.os }}- 35 | 36 | - name: Install dependencies 37 | run: | 38 | # update dependency source list to more modern linux distribution 39 | # echo -e "deb http://archive.ubuntu.com/ubuntu focal main restricted universe multiverse" >> /etc/apt/sources.list 40 | apt update 41 | apt-get install -y -qq pkg-config libassimp-dev 42 | 43 | - name: Test 44 | run: swift test 45 | 46 | - name: Build Release 47 | run: swift build -c release 48 | -------------------------------------------------------------------------------- /.github/workflows/ci-macos.yml: -------------------------------------------------------------------------------- 1 | name: macOS 2 | 3 | on: 4 | push: 5 | branches: [ master ] 6 | pull_request: 7 | branches: [ master ] 8 | 9 | jobs: 10 | macos-test-build-release-xcode: 11 | runs-on: macos-13 12 | strategy: 13 | matrix: 14 | xcode: ["14.3.1", "15.0"] 15 | steps: 16 | - name: Checkout 17 | uses: actions/checkout@v3.6.0 18 | - name: Select Xcode ${{ matrix.xcode }} 19 | run: sudo xcode-select -s /Applications/Xcode_${{ matrix.xcode }}.app 20 | - name: Restore cached Homebrew dependencies 21 | uses: actions/cache@v3.3.1 22 | with: 23 | path: | 24 | ~/Library/Caches/Homebrew/assimp--* 25 | ~/Library/Caches/Homebrew/downloads/*--assimp-* 26 | key: ${{ runner.os }}-brew-assimp-${{ hashFiles('.github/brew-formulae') }} 27 | restore-keys: ${{ runner.os }}-brew-assimp- 28 | - name: Restore cached SPM dependencies 29 | uses: actions/cache@v3.3.1 30 | with: 31 | path: | 32 | .build/repositories 33 | key: ${{ runner.os }}-spm-repos-${{ hashFiles('**/Package.resolved') }} 34 | restore-keys: | 35 | ${{ runner.os }}-spm-repos- 36 | - name: Install dependencies 37 | run: | 38 | brew install assimp 39 | - name: Test 40 | run: make test 41 | env: 42 | DEVELOPER_DIR: /Applications/Xcode_${{ matrix.xcode }}.app/Contents/Developer 43 | - name: Upload test artifacts 44 | if: failure() 45 | uses: actions/upload-artifact@v3.1.2 46 | with: 47 | name: test-artifacts-${{ matrix.xcode }}-${{ github.run_id }} 48 | path: | 49 | .build/**/*.json 50 | .build/**/*.xctest 51 | - name: Build Release 52 | run: make build-release 53 | env: 54 | DEVELOPER_DIR: /Applications/Xcode_${{ matrix.xcode }}.app/Contents/Developer 55 | - name: Upload build artifacts 56 | if: failure() 57 | uses: actions/upload-artifact@v3.1.2 58 | with: 59 | name: build-artifacts-${{ matrix.xcode }}-${{ github.run_id }} 60 | path: | 61 | *.lcov 62 | .build/*.yaml 63 | .build/**/*.a 64 | .build/**/*.so 65 | .build/**/*.dylib 66 | .build/**/*.dSYM 67 | .build/**/*.json 
68 | -------------------------------------------------------------------------------- /.github/workflows/markdown-link-check.yml: -------------------------------------------------------------------------------- 1 | name: Check markdown links 2 | 3 | on: push 4 | 5 | jobs: 6 | markdown-link-check: 7 | runs-on: ubuntu-latest 8 | steps: 9 | - name: Checkout 10 | uses: actions/checkout@master 11 | - name: markdown-link-check 12 | uses: gaurav-nelson/github-action-markdown-link-check@master 13 | 14 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | /.build 3 | /Packages 4 | /*.xcodeproj 5 | .swiftpm/xcode -------------------------------------------------------------------------------- /.swiftformat: -------------------------------------------------------------------------------- 1 | --exclude .build,.github,.swiftpm,Scripts 2 | --extensionacl on-declarations 3 | --ifdef no-indent 4 | --maxwidth 220 5 | --semicolons never 6 | --stripunusedargs unnamed-only 7 | --header ignore 8 | -------------------------------------------------------------------------------- /.swiftlint.yml: -------------------------------------------------------------------------------- 1 | included: 2 | - Package.swift 3 | - Sources 4 | - Tests 5 | - Apps 6 | excluded: 7 | - Scripts 8 | - .build 9 | - .swiftpm 10 | - .github -------------------------------------------------------------------------------- /CODEOWNERS: -------------------------------------------------------------------------------- 1 | # This file is a list of the people responsible for ensuring that contributions 2 | # to this projected are reviewed, either by themselves or by someone else. 3 | # They are also the gatekeepers for their part of this project, with the final 4 | # word on what goes in or not. 5 | # The code owners file uses a .gitignore-like syntax to specify which parts of 6 | # the codebase is associated with an owner. See 7 | # 8 | # for details. 9 | # The following lines are used by GitHub to automatically recommend reviewers. 10 | # Each line is a file pattern followed by one or more owners. 11 | 12 | * @ctreffs 13 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | We as members, contributors, and leaders pledge to make participation in our 6 | community a harassment-free experience for everyone, regardless of age, body 7 | size, visible or invisible disability, ethnicity, sex characteristics, gender 8 | identity and expression, level of experience, education, socio-economic status, 9 | nationality, personal appearance, race, religion, or sexual identity 10 | and orientation. 11 | 12 | We pledge to act and interact in ways that contribute to an open, welcoming, 13 | diverse, inclusive, and healthy community. 
14 | 15 | ## Our Standards 16 | 17 | Examples of behavior that contributes to a positive environment for our 18 | community include: 19 | 20 | * Demonstrating empathy and kindness toward other people 21 | * Being respectful of differing opinions, viewpoints, and experiences 22 | * Giving and gracefully accepting constructive feedback 23 | * Accepting responsibility and apologizing to those affected by our mistakes, 24 | and learning from the experience 25 | * Focusing on what is best not just for us as individuals, but for the 26 | overall community 27 | 28 | Examples of unacceptable behavior include: 29 | 30 | * The use of sexualized language or imagery, and sexual attention or 31 | advances of any kind 32 | * Trolling, insulting or derogatory comments, and personal or political attacks 33 | * Public or private harassment 34 | * Publishing others' private information, such as a physical or email 35 | address, without their explicit permission 36 | * Other conduct which could reasonably be considered inappropriate in a 37 | professional setting 38 | 39 | ## Enforcement Responsibilities 40 | 41 | Community leaders are responsible for clarifying and enforcing our standards of 42 | acceptable behavior and will take appropriate and fair corrective action in 43 | response to any behavior that they deem inappropriate, threatening, offensive, 44 | or harmful. 45 | 46 | Community leaders have the right and responsibility to remove, edit, or reject 47 | comments, commits, code, wiki edits, issues, and other contributions that are 48 | not aligned to this Code of Conduct, and will communicate reasons for moderation 49 | decisions when appropriate. 50 | 51 | ## Scope 52 | 53 | This Code of Conduct applies within all community spaces, and also applies when 54 | an individual is officially representing the community in public spaces. 55 | Examples of representing our community include using an official e-mail address, 56 | posting via an official social media account, or acting as an appointed 57 | representative at an online or offline event. 58 | 59 | ## Enforcement 60 | 61 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 62 | reported to the community leaders responsible for enforcement 63 | e.g. via [content abuse report][ref-report-abuse]. 64 | All complaints will be reviewed and investigated promptly and fairly. 65 | 66 | All community leaders are obligated to respect the privacy and security of the 67 | reporter of any incident. 68 | 69 | ## Enforcement Guidelines 70 | 71 | Community leaders will follow these Community Impact Guidelines in determining 72 | the consequences for any action they deem in violation of this Code of Conduct: 73 | 74 | ### 1. Correction 75 | 76 | **Community Impact**: Use of inappropriate language or other behavior deemed 77 | unprofessional or unwelcome in the community. 78 | 79 | **Consequence**: A private, written warning from community leaders, providing 80 | clarity around the nature of the violation and an explanation of why the 81 | behavior was inappropriate. A public apology may be requested. 82 | 83 | ### 2. Warning 84 | 85 | **Community Impact**: A violation through a single incident or series 86 | of actions. 87 | 88 | **Consequence**: A warning with consequences for continued behavior. No 89 | interaction with the people involved, including unsolicited interaction with 90 | those enforcing the Code of Conduct, for a specified period of time. 
This 91 | includes avoiding interactions in community spaces as well as external channels 92 | like social media. Violating these terms may lead to a temporary or 93 | permanent ban. 94 | 95 | ### 3. Temporary Ban 96 | 97 | **Community Impact**: A serious violation of community standards, including 98 | sustained inappropriate behavior. 99 | 100 | **Consequence**: A temporary ban from any sort of interaction or public 101 | communication with the community for a specified period of time. No public or 102 | private interaction with the people involved, including unsolicited interaction 103 | with those enforcing the Code of Conduct, is allowed during this period. 104 | Violating these terms may lead to a permanent ban. 105 | 106 | ### 4. Permanent Ban 107 | 108 | **Community Impact**: Demonstrating a pattern of violation of community 109 | standards, including sustained inappropriate behavior, harassment of an 110 | individual, or aggression toward or disparagement of classes of individuals. 111 | 112 | **Consequence**: A permanent ban from any sort of public interaction within 113 | the community. 114 | 115 | ## Attribution 116 | 117 | This Code of Conduct is adapted from the [Contributor Covenant][ref-homepage-cc], 118 | version 2.0, available at 119 | . 120 | 121 | Community Impact Guidelines were inspired by [Mozilla's code of conduct 122 | enforcement ladder](https://github.com/mozilla/diversity). 123 | 124 | For answers to common questions about this code of conduct, see the FAQ at 125 | . 126 | Translations are available at 127 | . 128 | 129 | 130 | 131 | [ref-homepage-cc]: https://www.contributor-covenant.org 132 | [ref-report-abuse]: https://docs.github.com/communities/maintaining-your-safety-on-github/reporting-abuse-or-spam#reporting-an-issue-or-pull-request 133 | [ref-gh-coc]: https://docs.github.com/en/communities/setting-up-your-project-for-healthy-contributions/adding-a-code-of-conduct-to-your-project 134 | [ref-gh-abuse]: https://docs.github.com/en/communities/moderating-comments-and-conversations/managing-how-contributors-report-abuse-in-your-organizations-repository 135 | [ref-coc-guide]: https://opensource.guide/code-of-conduct/ 136 | 137 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # 💁 Contributing to this project 2 | 3 | 4 | > First off, thank you for considering contributing to this project. 5 | > It’s [people like you][ref-contributors] that keep this project alive and make it great! 6 | > Thank you! 🙏💜🎉👍 7 | 8 | The following is a set of **guidelines for contributing** to this project. 9 | Use your best judgment and feel free to propose changes to this document in a pull request. 10 | 11 | **Working on your first Pull Request?** You can learn how from this *free* series [How to Contribute to an Open Source Project on GitHub](https://egghead.io/courses/how-to-contribute-to-an-open-source-project-on-github) 12 | 13 | ### 💡 Your contribution - the sky is the limit 🌈 14 | 15 | This is an open source project and we love to receive contributions from our community — [**you**][ref-contributors]! 16 | 17 | There are many ways to contribute, from writing __tutorials__ or __blog posts__, improving the [__documentation__][ref-documentation], submitting [__bug reports__][ref-issues-new] and [__enhancement__][ref-pull-request-new] or 18 | [__writing code__][ref-pull-request-new] which can be incorporated into the repository itself. 
19 | 20 | When contributing to this project, please feel free to discuss the changes and ideas you wish to contribute with the repository owners before making a change by opening a [new issue][ref-issues-new] and add the **feature request** tag to that issue. 21 | 22 | Note that we have a [code of conduct][ref-code-of-conduct], please follow it in all your interactions with the project. 23 | 24 | ### 🐞 You want to report a bug or file an issue? 25 | 26 | 1. Ensure that it was **not already reported** and is being worked on by checking [open issues][ref-issues]. 27 | 2. Create a [new issue][ref-issues-new] with a **clear and descriptive title** 28 | 3. Write a **detailed comment** with as much relevant information as possible including 29 | - *how to reproduce* the bug 30 | - a *code sample* or an *executable test case* demonstrating the expected behavior that is not occurring 31 | - any *files that could help* trace it down (i.e. logs) 32 | 33 | ### 🩹 You wrote a patch that fixes an issue? 34 | 35 | 1. Open a [new pull request (PR)][ref-pull-request-new] with the patch. 36 | 2. Ensure the PR description clearly describes the problem and solution. 37 | 3. Link the relevant **issue** if applicable ([how to link issues in PRs][ref-pull-request-how-to]). 38 | 4. Ensure that [**no tests are failing**][ref-gh-actions] and **coding conventions** are met 39 | 5. Submit the patch and await review. 40 | 41 | ### 🎁 You want to suggest or contribute a new feature? 42 | 43 | That's great, thank you! You rock 🤘 44 | 45 | If you want to dive deep and help out with development on this project, then first get the project [installed locally][ref-readme]. 46 | After that is done we suggest you have a look at tickets in our [issue tracker][ref-issues]. 47 | You can start by looking through the beginner or help-wanted issues: 48 | - [__Good first issues__][ref-issues-first] are issues which should only require a few lines of code, and a test or two. 49 | - [__Help wanted issues__][ref-issues-help] are issues which should be a bit more involved than beginner issues. 50 | These are meant to be a great way to get a smooth start and won't put you in front of the most complex parts of the system. 51 | 52 | If you are up to more challenging tasks with a bigger scope, then there are a set of tickets with a __feature__, __enhancement__ or __improvement__ tag. 53 | These tickets have a general overview and description of the work required to finish. 54 | If you want to start somewhere, this would be a good place to start. 55 | That said, these aren't necessarily the easiest tickets. 56 | 57 | For any new contributions please consider these guidelines: 58 | 59 | 1. Open a [new pull request (PR)][ref-pull-request-new] with a **clear and descriptive title** 60 | 2. Write a **detailed comment** with as much relevant information as possible including: 61 | - What your feature is intended to do? 62 | - How it can be used? 63 | - What alternatives where considered, if any? 64 | - Has this feature impact on performance or stability of the project? 65 | 66 | #### Your contribution responsibilities 67 | 68 | Don't be intimidated by these responsibilities, they are easy to meet if you take your time to develop your feature 😌 69 | 70 | - [x] Create issues for any major changes and enhancements that you wish to make. Discuss things transparently and get community feedback. 71 | - [x] Ensure (cross-)platform compatibility for every change that's accepted. 
An addition should not reduce the number of platforms that the project supports. 72 | - [x] Ensure **coding conventions** are met. Lint your code with the project's default tools. Project wide commands are available through the [Makefile][ref-makefile] in the repository root. 73 | - [x] Add tests for your feature that prove it's working as expected. Code coverage should not drop below its previous value. 74 | - [x] Ensure none of the existing tests are failing after adding your changes. 75 | - [x] Document your public API code and ensure to add code comments where necessary. 76 | 77 | 78 | ### ⚙️ How to set up the environment 79 | 80 | Please consult the [README][ref-readme] for installation instructions. 81 | 82 | 83 | 84 | [ref-code-of-conduct]: https://github.com/ctreffs/SwiftAssimp/blob/master/CODE_OF_CONDUCT.md 85 | [ref-contributors]: https://github.com/ctreffs/SwiftAssimp/graphs/contributors 86 | [ref-documentation]: https://github.com/ctreffs/SwiftAssimp/wiki 87 | [ref-gh-actions]: https://github.com/ctreffs/SwiftAssimp/actions 88 | [ref-issues-first]: https://github.com/ctreffs/SwiftAssimp/issues?q=is%3Aopen+is%3Aissue+label%3A"good+first+issue" 89 | [ref-issues-help]: https://github.com/ctreffs/SwiftAssimp/issues?q=is%3Aopen+is%3Aissue+label%3A"help+wanted" 90 | [ref-issues-new]: https://github.com/ctreffs/SwiftAssimp/issues/new/choose 91 | [ref-issues]: https://github.com/ctreffs/SwiftAssimp/issues 92 | [ref-pull-request-how-to]: https://docs.github.com/github/writing-on-github/autolinked-references-and-urls 93 | [ref-pull-request-new]: https://github.com/ctreffs/SwiftAssimp/compare 94 | [ref-readme]: https://github.com/ctreffs/SwiftAssimp/blob/master/README.md 95 | [ref-makefile]: https://github.com/ctreffs/SwiftAssimp/blob/master/Makefile 96 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2019-2020, Christian Treffs 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | * Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | * Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | * Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | SWIFT_PACKAGE_VERSION := $(shell swift package tools-version) 2 | SOURCES_DIR="${PWD}/Sources" 3 | FILEHEADER="\n{file}\nSwiftAssimp\n\nCopyright © 2019-{year} Christian Treffs. All rights reserved.\nLicensed under BSD 3-Clause License. See LICENSE file for details." 4 | 5 | .PHONY: clean 6 | clean: 7 | swift package clean 8 | 9 | # Lint fix and format code. 10 | .PHONY: lint-fix 11 | lint-fix: 12 | mint run swiftlint --fix --quiet 13 | mint run swiftformat ${SOURCES_DIR} --swiftversion ${SWIFT_PACKAGE_VERSION} --header ${FILEHEADER} 14 | 15 | .PHONY: pre-push 16 | pre-push: lint-fix 17 | 18 | # Build debug version 19 | .PHONY: build-debug 20 | build-debug: 21 | swift build -c debug 22 | 23 | # Build release version 24 | .PHONY: build-release 25 | build-release: 26 | swift build -c release --skip-update 27 | 28 | .PHONY: test 29 | test: 30 | swift test 31 | 32 | # Reset the complete cache/build directory and Package.resolved files 33 | .PHONY: reset 34 | swift package reset 35 | -rm Package.resolved 36 | -rm rdf *.xcworkspace/xcshareddata/swiftpm/Package.resolved 37 | -rm -rdf .swiftpm/xcode/* 38 | 39 | .PHONY: resolve 40 | resolve: 41 | swift package resolve 42 | 43 | .PHONY: open-proj-xcode 44 | open-proj-xcode: 45 | open -b com.apple.dt.Xcode Package.swift 46 | 47 | .PHONY: open-proj-vscode 48 | open-proj-vscode: 49 | code . 
50 | 51 | PHONY: setup-brew 52 | setup-brew: 53 | @which -s brew || /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install.sh)" 54 | @brew update 55 | 56 | .PHONY: setup-project 57 | setup-project: setup-brew 58 | brew install mint 59 | mint bootstrap 60 | 61 | .PHONY: install-dependencies-macOS 62 | install-dependencies-macOS: setup-project 63 | brew install assimp 64 | 65 | .PHONY: pkg-config-assimp 66 | pkg-config-assimp: 67 | pkg-config --libs --cflags assimp 68 | 69 | .PHONY: brew-assimp-version 70 | brew-assimp-version: 71 | @echo `brew info assimp | head -1 | awk '{ print $$3; }'` 72 | 73 | .PHONY: copyMacPkgConfig500 74 | copyMacPkgConfig500: 75 | sudo cp ${PWD}/assimp5.0.0.mac.pc /usr/local/lib/pkgconfig/assimp.pc 76 | 77 | .PHONY: copyMacPkgConfig501 78 | copyMacPkgConfig501: 79 | sudo cp ${PWD}/assimp5.0.1.mac.pc /usr/local/lib/pkgconfig/assimp.pc 80 | -------------------------------------------------------------------------------- /Mintfile: -------------------------------------------------------------------------------- 1 | realm/SwiftLint@0.52.4 2 | nicklockwood/SwiftFormat@0.52.3 -------------------------------------------------------------------------------- /Package.swift: -------------------------------------------------------------------------------- 1 | // swift-tools-version:5.3 2 | import PackageDescription 3 | 4 | let package = Package( 5 | name: "Assimp", 6 | products: [ 7 | .library(name: "Assimp", 8 | type: .static, 9 | targets: ["Assimp"]) 10 | ], 11 | targets: [ 12 | .target(name: "Assimp", 13 | dependencies: ["CAssimp"]), 14 | .testTarget(name: "AssimpTests", dependencies: ["Assimp"]), 15 | .systemLibrary(name: "CAssimp", 16 | path: "Sources/CAssimp", 17 | pkgConfig: "assimp", 18 | providers: [ 19 | .brew(["assimp"]), 20 | .apt(["libassimp-dev"]) 21 | ]) 22 | ] 23 | ) 24 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Swift Assimp 2 | 3 | [![](https://img.shields.io/endpoint?url=https%3A%2F%2Fswiftpackageindex.com%2Fapi%2Fpackages%2Fctreffs%2FSwiftAssimp%2Fbadge%3Ftype%3Dswift-versions)](https://swiftpackageindex.com/ctreffs/SwiftAssimp) 4 | [![](https://img.shields.io/endpoint?url=https%3A%2F%2Fswiftpackageindex.com%2Fapi%2Fpackages%2Fctreffs%2FSwiftAssimp%2Fbadge%3Ftype%3Dplatforms)](https://swiftpackageindex.com/ctreffs/SwiftAssimp) 5 | [![macOS](https://github.com/ctreffs/SwiftAssimp/actions/workflows/ci-macos.yml/badge.svg)](https://github.com/ctreffs/SwiftAssimp/actions/workflows/ci-macos.yml) 6 | [![Linux](https://github.com/ctreffs/SwiftAssimp/actions/workflows/ci-linux.yml/badge.svg)](https://github.com/ctreffs/SwiftAssimp/actions/workflows/ci-linux.yml) 7 | [![license](https://img.shields.io/badge/license-BSD3-brightgreen.svg)](LICENSE) 8 | 9 | 10 | This is a **thin** Swift wrapper around the popular and excellent [**Open Asset Import Library**](https://github.com/assimp/assimp) library. 11 | It provides a **swifty** and **typesafe** API. 12 | 13 | > Open Asset Import Library (short name: Assimp) is a portable Open Source library to import various well-known 3D model formats in a uniform manner. The most recent version also knows how to export 3d files and is therefore suitable as a general-purpose 3D model converter. 14 | > Loads 40+ 3D file formats into one unified and clean data structure. 
15 | > ~ [www.assimp.org](https://github.com/assimp/assimp) 16 | 17 | ## 🚀 Getting Started 18 | 19 | These instructions will get your copy of the project up and running on your local machine and provide a code example. 20 | 21 | ### 📋 Prerequisites 22 | 23 | * [Swift Package Manager (SPM)](https://github.com/apple/swift-package-manager) 24 | * [Open Asset Import Library (Assimp)](https://github.com/assimp/assimp) 25 | * [SwiftEnv](https://swiftenv.fuller.li/) for Swift version management - (optional) 26 | * [Swiftlint](https://github.com/realm/SwiftLint) for linting - (optional) 27 | 28 | ### 💻 Installing 29 | 30 | Swift Assimp is available for all platforms that support [Swift 5.3](https://swift.org/) and higher and the [Swift Package Manager (SPM)](https://github.com/apple/swift-package-manager). 31 | 32 | Extend your `Package.swift` file with the following lines, or use them to create a new project. 33 | 34 | For package manifests using the Swift 5.3+ toolchain use: 35 | 36 | ```swift 37 | // swift-tools-version:5.3 38 | import PackageDescription 39 | 40 | let package = Package( 41 | name: "YourPackageName", 42 | dependencies: [ 43 | .package(name: "Assimp", url: "https://github.com/ctreffs/SwiftAssimp.git", from: "2.1.0") 44 | ], 45 | targets: [ 46 | .target( 47 | name: "YourTargetName", 48 | dependencies: ["Assimp"]) 49 | ] 50 | ) 51 | 52 | ``` 53 | 54 | Since it's a system library wrapper, you need to install the assimp library (>=5.0.0) either via 55 | 56 | ```sh 57 | brew install assimp 58 | ``` 59 | 60 | or 61 | 62 | ```sh 63 | apt-get install libassimp-dev 64 | ``` 65 | 66 | depending on your platform. 67 | 68 | 69 | ## 📝 Code Example 70 | 71 | 72 | ```swift 73 | import Assimp 74 | 75 | let scene: AiScene = try AiScene(file: <#path to model file#>, 76 | flags: [.removeRedundantMaterials, .genSmoothNormals]) 77 | 78 | // get meshes 79 | let meshes: [AiMesh] = scene.meshes 80 | 81 | // get materials 82 | let materials: [AiMaterial] = scene.materials 83 | 84 | // get the root node of the scene graph 85 | let rootNode = scene.rootNode 86 | 87 | ``` 88 | 89 | See the unit tests for more examples. 90 | 91 | ## 💁 Help needed 92 | 93 | This project is in an early stage and needs a lot of love. 94 | If you are interested in contributing, please feel free to do so! 95 | 96 | Things that need to be done are, among others: 97 | 98 | - [ ] Wrap more assimp functions and types 99 | - [ ] Support for [Cocoapods](https://cocoapods.org) packaging 100 | - [ ] Support for [Carthage](https://github.com/Carthage/Carthage) packaging 101 | - [ ] Write some additional tests to improve coverage 102 | 103 | ## 🏷️ Versioning 104 | 105 | We use [SemVer](http://semver.org/) for versioning. For the versions available, see the [tags on this repository](https://github.com/ctreffs/SwiftAssimp/tags). 106 | 107 | ## ✍️ Authors 108 | 109 | * [Christian Treffs](https://github.com/ctreffs) 110 | 111 | See also the list of [contributors](https://github.com/ctreffs/SwiftAssimp/contributors) who participated in this project. 112 | 113 | ## 🔏 Licenses 114 | 115 | This project is licensed under the 3-Clause BSD License - see the [LICENSE](LICENSE) file for details. 116 | 117 | * assimp licensed under [3-clause BSD license](https://github.com/assimp/assimp/blob/master/LICENSE) 118 | 119 | 120 | ## 🙏 Original code 121 | 122 | Since Swift Assimp is merely a wrapper around [**assimp**](https://github.com/assimp/assimp), it obviously depends on it. 123 | Support them if you can!
124 | 125 | ### Open Asset Import Library (assimp) 126 | 127 | ##### From [assimp/assimp/Readme.md](https://github.com/assimp/assimp/blob/master/Readme.md): 128 | 129 | A library to import and export various 3d-model-formats including scene-post-processing to generate missing render data. 130 | 131 | One-off donations via PayPal: 132 |
[![PayPal](https://www.paypalobjects.com/en_US/i/btn/btn_donate_LG.gif)](https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=4JRJVPXC4QJM4) 133 | 134 | ## ☮️ Alternatives 135 | 136 | * [dmsurti/AssimpKit](https://github.com/dmsurti/AssimpKit) 137 | * [eugenebokhan/AssetImportKit](https://github.com/eugenebokhan/AssetImportKit) 138 | * [troughton/CAssimp](https://github.com/troughton/CAssimp) 139 | -------------------------------------------------------------------------------- /Sources/Assimp/AiCamera.swift: -------------------------------------------------------------------------------- 1 | // 2 | // AiCamera.swift 3 | // SwiftAssimp 4 | // 5 | // Copyright © 2019-2023 Christian Treffs. All rights reserved. 6 | // Licensed under BSD 3-Clause License. See LICENSE file for details. 7 | 8 | @_implementationOnly import CAssimp 9 | 10 | public struct AiCamera { 11 | init(_ camera: aiCamera) { 12 | name = String(camera.mName) 13 | position = Vec3(camera.mPosition) 14 | up = Vec3(camera.mUp) 15 | lookAt = Vec3(camera.mLookAt) 16 | horizontalFOV = camera.mHorizontalFOV 17 | clipPlaneNear = camera.mClipPlaneNear 18 | clipPlaneFar = camera.mClipPlaneFar 19 | aspect = camera.mAspect 20 | orthographicWidth = camera.mOrthographicWidth 21 | } 22 | 23 | init?(_ camera: aiCamera?) { 24 | guard let camera = camera else { 25 | return nil 26 | } 27 | 28 | self.init(camera) 29 | } 30 | 31 | /// The name of the camera. 32 | /// 33 | /// There must be a node in the scenegraph with the same name. 34 | /// This node specifies the position of the camera in the scene 35 | /// hierarchy and can be animated. 36 | public var name: String? 37 | 38 | /// Position of the camera relative to the coordinate space 39 | /// defined by the corresponding node. 40 | /// 41 | /// The default value is 0|0|0. 42 | public var position: Vec3 43 | 44 | /// 'Up' - vector of the camera coordinate system relative to 45 | /// the coordinate space defined by the corresponding node. 46 | /// 47 | /// The 'right' vector of the camera coordinate system is 48 | /// the cross product of the up and lookAt vectors. 49 | /// The default value is 0|1|0. The vector 50 | /// may be normalized, but it needn't. 51 | public var up: Vec3 52 | 53 | /// 'LookAt' - vector of the camera coordinate system relative to 54 | /// the coordinate space defined by the corresponding node. 55 | /// 56 | /// This is the viewing direction of the user. 57 | /// The default value is 0|0|1. The vector 58 | /// may be normalized, but it needn't. 59 | public var lookAt: Vec3 60 | 61 | /// Horizontal field of view angle, in radians. 62 | /// 63 | /// The field of view angle is the angle between the center 64 | /// line of the screen and the left or right border. 65 | /// The default value is 1/4PI. 66 | public var horizontalFOV: Float 67 | 68 | /// Distance of the near clipping plane from the camera. 69 | /// 70 | /// The value may not be 0.f (for arithmetic reasons to prevent 71 | /// a division through zero). The default value is 0.1f. 72 | public var clipPlaneNear: Float 73 | 74 | /// Distance of the far clipping plane from the camera. 75 | /// 76 | /// The far clipping plane must, of course, be further away than the 77 | /// near clipping plane. The default value is 1000.f. The ratio 78 | /// between the near and the far plane should not be too 79 | /// large (between 1000-10000 should be ok) to avoid floating-point 80 | /// inaccuracies which could lead to z-fighting. 81 | public var clipPlaneFar: Float 82 | 83 | /// Screen aspect ratio. 
84 | /// 85 | /// This is the ration between the width and the height of the 86 | /// screen. Typical values are 4/3, 1/2 or 1/1. This value is 87 | /// 0 if the aspect ratio is not defined in the source file. 88 | /// 0 is also the default value. 89 | public var aspect: Float 90 | 91 | /// Half horizontal orthographic width, in scene units. 92 | /// 93 | /// The orthographic width specifies the half width of the 94 | /// orthographic view box. If non-zero the camera is 95 | /// orthographic and the mAspect should define to the 96 | /// ratio between the orthographic width and height 97 | /// and mHorizontalFOV should be set to 0. 98 | /// The default value is 0 (not orthographic). 99 | public var orthographicWidth: Float 100 | } 101 | -------------------------------------------------------------------------------- /Sources/Assimp/AiFace.swift: -------------------------------------------------------------------------------- 1 | // 2 | // AiFace.swift 3 | // SwiftAssimp 4 | // 5 | // Copyright © 2019-2023 Christian Treffs. All rights reserved. 6 | // Licensed under BSD 3-Clause License. See LICENSE file for details. 7 | 8 | @_implementationOnly import CAssimp 9 | 10 | /// The default face winding order is counter clockwise (CCW). 11 | public struct AiFace { 12 | init(_ face: aiFace) { 13 | numIndices = Int(face.mNumIndices) 14 | indices = Array(UnsafeBufferPointer(start: face.mIndices, count: numIndices)) 15 | } 16 | 17 | /// Number of indices defining this face. 18 | /// 19 | /// The maximum value for this member is #AI_MAX_FACE_INDICES. 20 | public var numIndices: Int 21 | 22 | /// Pointer to the indices array. 23 | /// Size of the array is given in numIndices. 24 | public var indices: [UInt32] 25 | } 26 | -------------------------------------------------------------------------------- /Sources/Assimp/AiLight.swift: -------------------------------------------------------------------------------- 1 | // 2 | // AiLight.swift 3 | // SwiftAssimp 4 | // 5 | // Copyright © 2019-2023 Christian Treffs. All rights reserved. 6 | // Licensed under BSD 3-Clause License. See LICENSE file for details. 7 | 8 | @_implementationOnly import CAssimp 9 | 10 | public struct AiLight { 11 | init(_ light: aiLight) { 12 | name = String(light.mName) 13 | type = AiLightSourceType(light.mType) 14 | position = Vec3(light.mPosition) 15 | direction = Vec3(light.mDirection) 16 | up = Vec3(light.mUp) 17 | attenuationConstant = light.mAttenuationConstant 18 | attenuationLinear = light.mAttenuationLinear 19 | attenuationQuadratic = light.mAttenuationQuadratic 20 | colorDiffuse = Vec3(light.mColorDiffuse) 21 | colorSpecular = Vec3(light.mColorSpecular) 22 | colorAmbient = Vec3(light.mColorAmbient) 23 | angleInnerCone = light.mAngleInnerCone 24 | angleOuterCone = light.mAngleOuterCone 25 | size = Vec2(light.mSize) 26 | } 27 | 28 | init?(_ aiLight: aiLight?) { 29 | guard let aiLight = aiLight else { 30 | return nil 31 | } 32 | self.init(aiLight) 33 | } 34 | 35 | /// The name of the light source. 36 | /// 37 | /// There must be a node in the scene-graph with the same name. 38 | /// This node specifies the position of the light in the scene 39 | /// hierarchy and can be animated. 40 | public var name: String? 41 | 42 | /// The type of the light source. 43 | public var type: AiLightSourceType 44 | 45 | /// Position of the light source in space. Relative to the 46 | /// transformation of the node corresponding to the light. 47 | /// 48 | /// The position is undefined for directional lights. 
49 | public var position: Vec3 50 | 51 | /// Direction of the light source in space. Relative to the 52 | /// transformation of the node corresponding to the light. 53 | /// 54 | /// The direction is undefined for point lights. The vector 55 | /// may be normalized, but it needn't. 56 | public var direction: Vec3 57 | 58 | /// Up direction of the light source in space. Relative to the 59 | /// transformation of the node corresponding to the light. 60 | /// 61 | /// The direction is undefined for point lights. The vector 62 | /// may be normalized, but it needn't. 63 | public var up: Vec3 64 | 65 | /// Constant light attenuation factor. 66 | /// 67 | /// The intensity of the light source at a given distance 'd' from 68 | /// the light's position is 69 | /// @code 70 | /// Atten = 1/( att0 + att1 * d + att2 * d*d) 71 | /// @endcode 72 | /// This member corresponds to the att0 variable in the equation. 73 | /// Naturally undefined for directional lights. 74 | public var attenuationConstant: Float 75 | 76 | /// Linear light attenuation factor. 77 | /// 78 | /// The intensity of the light source at a given distance 'd' from 79 | /// the light's position is 80 | /// @code 81 | /// Atten = 1/( att0 + att1 * d + att2 * d*d) 82 | /// @endcode 83 | /// This member corresponds to the att1 variable in the equation. 84 | /// Naturally undefined for directional lights. 85 | public var attenuationLinear: Float 86 | 87 | /// Quadratic light attenuation factor. 88 | /// 89 | /// The intensity of the light source at a given distance 'd' from 90 | /// the light's position is 91 | /// @code 92 | /// Atten = 1/( att0 + att1 * d + att2 * d*d) 93 | /// @endcode 94 | /// This member corresponds to the att2 variable in the equation. 95 | /// Naturally undefined for directional lights. 96 | public var attenuationQuadratic: Float 97 | 98 | /// Diffuse color of the light source 99 | /// 100 | /// The diffuse light color is multiplied with the diffuse 101 | /// material color to obtain the final color that contributes 102 | /// to the diffuse shading term. 103 | public var colorDiffuse: Vec3 104 | 105 | /// Specular color of the light source 106 | /// 107 | /// The specular light color is multiplied with the specular 108 | /// material color to obtain the final color that contributes 109 | /// to the specular shading term. 110 | public var colorSpecular: Vec3 111 | 112 | /// Ambient color of the light source 113 | /// 114 | /// The ambient light color is multiplied with the ambient 115 | /// material color to obtain the final color that contributes 116 | /// to the ambient shading term. Most renderers will ignore 117 | /// this value it, is just a remaining of the fixed-function pipeline 118 | /// that is still supported by quite many file formats. 119 | public var colorAmbient: Vec3 120 | 121 | /// Inner angle of a spot light's light cone. 122 | /// 123 | /// The spot light has maximum influence on objects inside this 124 | /// angle. The angle is given in radians. It is 2PI for point 125 | /// lights and undefined for directional lights. 126 | public var angleInnerCone: Float 127 | 128 | /// Outer angle of a spot light's light cone. 129 | /// 130 | /// The spot light does not affect objects outside this angle. 131 | /// The angle is given in radians. It is 2PI for point lights and 132 | /// undefined for directional lights. The outer angle must be 133 | /// greater than or equal to the inner angle. 
134 | /// It is assumed that the application uses a smooth 135 | /// interpolation between the inner and the outer cone of the 136 | /// spot light. 137 | public var angleOuterCone: Float 138 | 139 | /// Size of area light source. 140 | public var size: Vec2 141 | } 142 | 143 | public enum AiLightSourceType { 144 | case undefined 145 | 146 | /// A directional light source has a well-defined direction 147 | /// but is infinitely far away. That's quite a good 148 | /// approximation for sun light. 149 | case directional 150 | 151 | /// A point light source has a well-defined position 152 | /// in space but no direction - it emits light in all 153 | /// directions. A normal bulb is a point light. 154 | case point 155 | 156 | /// A spot light source emits light in a specific 157 | /// angle. It has a position and a direction it is pointing to. 158 | /// A good example for a spot light is a light spot in 159 | /// sport arenas. 160 | case spot 161 | 162 | /// The generic light level of the world, including the bounces 163 | /// of all other light sources. 164 | /// Typically, there's at most one ambient light in a scene. 165 | /// This light type doesn't have a valid position, direction, or 166 | /// other properties, just a color. 167 | case ambient 168 | 169 | /// An area light is a rectangle with predefined size that uniformly 170 | /// emits light from one of its sides. The position is center of the 171 | /// rectangle and direction is its normal vector. 172 | case area 173 | 174 | init(_ aiLightSourceType: aiLightSourceType) { 175 | switch aiLightSourceType { 176 | case aiLightSource_UNDEFINED: 177 | self = .undefined 178 | 179 | case aiLightSource_DIRECTIONAL: 180 | self = .directional 181 | 182 | case aiLightSource_POINT: 183 | self = .point 184 | 185 | case aiLightSource_SPOT: 186 | self = .spot 187 | 188 | case aiLightSource_AMBIENT: 189 | self = .ambient 190 | 191 | case aiLightSource_AREA: 192 | self = .area 193 | 194 | default: 195 | self = .undefined 196 | } 197 | } 198 | } 199 | -------------------------------------------------------------------------------- /Sources/Assimp/AiMatKey.swift: -------------------------------------------------------------------------------- 1 | // 2 | // AiMatKey.swift 3 | // SwiftAssimp 4 | // 5 | // Copyright © 2019-2023 Christian Treffs. All rights reserved. 6 | // Licensed under BSD 3-Clause License. See LICENSE file for details. 
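// AiMatKey identifies a single material property the way assimp's AI_MATKEY_* macros do: a base key name (for example "$clr.diffuse") combined with a texture type and a texture index, joined into `rawValue` as "base,textureType,textureIndex".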
7 | 8 | public struct AiMatKey: RawRepresentable { 9 | public let baseName: String 10 | public let texType: UInt32 11 | public let texIndex: UInt32 12 | public let rawValue: String 13 | 14 | init(base: Base, texType: AiTextureType = .none, texIndex: Int = 0) { 15 | rawValue = "\(base.rawValue),\(texType.rawValue),\(texIndex)" 16 | baseName = base.rawValue 17 | self.texType = texType.rawValue 18 | self.texIndex = UInt32(texIndex) 19 | } 20 | 21 | public init?(rawValue: String) { 22 | self.rawValue = rawValue 23 | baseName = "" 24 | texType = 0 25 | texIndex = 0 26 | } 27 | } 28 | 29 | extension AiMatKey: Equatable {} 30 | 31 | // swiftlint:disable identifier_name 32 | 33 | extension AiMatKey { 34 | enum Base: String { 35 | case BLEND_FUNC_BASE = "$mat.blend" 36 | case BUMPSCALING_BASE = "$mat.bumpscaling" 37 | case COLOR_AMBIENT_BASE = "$clr.ambient" 38 | case COLOR_DIFFUSE_BASE = "$clr.diffuse" 39 | case COLOR_EMISSIVE_BASE = "$clr.emissive" 40 | case COLOR_REFLECTIVE_BASE = "$clr.reflective" 41 | case COLOR_SPECULAR_BASE = "$clr.specular" 42 | case COLOR_TRANSPARENT_BASE = "$clr.transparent" 43 | case ENABLE_WIREFRAME_BASE = "$mat.wireframe" 44 | case GLOBAL_BACKGROUND_IMAGE_BASE = "?bg.global" 45 | case MAPPING_BASE = "$tex.mapping" 46 | case MAPPINGMODE_U_BASE = "$tex.mapmodeu" 47 | case MAPPINGMODE_V_BASE = "$tex.mapmodev" 48 | case NAME_BASE = "?mat.name" 49 | case OPACITY_BASE = "$mat.opacity" 50 | case REFLECTIVITY_BASE = "$mat.reflectivity" 51 | case REFRACTI_BASE = "$mat.refracti" 52 | case SHADING_MODEL_BASE = "$mat.shadingm" 53 | case SHININESS_BASE = "$mat.shininess" 54 | case SHININESS_STRENGTH_BASE = "$mat.shinpercent" 55 | case TEXBLEND_BASE = "$tex.blend" 56 | case TEXFLAGS_BASE = "$tex.flags" 57 | case TEXMAP_AXIS_BASE = "$tex.mapaxis" 58 | case TEXOP_BASE = "$tex.op" 59 | case TEXTURE_BASE = "$tex.file" 60 | case TWOSIDED_BASE = "$mat.twosided" 61 | case UVTRANSFORM_BASE = "$tex.uvtrafo" 62 | case UVWSRC_BASE = "$tex.uvwsrc" 63 | 64 | case GLTF_TEXTURE_TEXCOORD_BASE = "$tex.file.texCoord" 65 | case GLTF_MAPPINGNAME_BASE = "$tex.mappingname" 66 | case GLTF_MAPPINGID_BASE = "$tex.mappingid" 67 | case GLTF_MAPPINGFILTER_MAG_BASE = "$tex.mappingfiltermag" 68 | case GLTF_MAPPINGFILTER_MIN_BASE = "$tex.mappingfiltermin" 69 | case GLTF_SCALE_BASE = "$tex.scale" 70 | case GLTF_STRENGTH_BASE = "$tex.strength" 71 | 72 | case GLTF_PBRMETALLICROUGHNESS_BASE_COLOR_FACTOR = "$mat.gltf.pbrMetallicRoughness.baseColorFactor" 73 | case GLTF_PBRMETALLICROUGHNESS_METALLIC_FACTOR = "$mat.gltf.pbrMetallicRoughness.metallicFactor" 74 | case GLTF_PBRMETALLICROUGHNESS_ROUGHNESS_FACTOR = "$mat.gltf.pbrMetallicRoughness.roughnessFactor" 75 | case GLTF_ALPHAMODE = "$mat.gltf.alphaMode" 76 | case GLTF_ALPHACUTOFF = "$mat.gltf.alphaCutoff" 77 | case GLTF_PBRSPECULARGLOSSINESS = "$mat.gltf.pbrSpecularGlossiness" 78 | case GLTF_PBRSPECULARGLOSSINESS_GLOSSINESS_FACTOR = "$mat.gltf.pbrMetallicRoughness.glossinessFactor" 79 | case GLTF_UNLIT = "$mat.gltf.unlit" 80 | } 81 | } 82 | 83 | /// https://assimp-docs.readthedocs.io/en/latest/usage/use_the_lib.html#constants 84 | /// https://assimp-docs.readthedocs.io/en/latest/_sources/usage/use_the_lib.rst.txt 85 | extension AiMatKey { 86 | /// One of the aiBlendMode enumerated values. 87 | /// Defines how the final color value in the screen buffer is computed from the given color at that 88 | /// position and the newly computed color from the material. 89 | /// Simply said, alpha blending settings. 
90 | public static let BLEND_FUNC: AiMatKey = .init(base: .BLEND_FUNC_BASE) 91 | public static let BUMPSCALING: AiMatKey = .init(base: .BUMPSCALING_BASE) 92 | 93 | /// Ambient color of the material. This is typically scaled by the amount of ambient light. 94 | public static let COLOR_AMBIENT: AiMatKey = .init(base: .COLOR_AMBIENT_BASE) 95 | 96 | /// Diffuse color of the material. This is typically scaled by the amount of incoming diffuse light (e.g. using gouraud shading) 97 | public static let COLOR_DIFFUSE: AiMatKey = .init(base: .COLOR_DIFFUSE_BASE) 98 | 99 | /// Emissive color of the material. 100 | /// This is the amount of light emitted by the object. 101 | /// In real time applications it will usually not affect surrounding objects, 102 | /// but raytracing applications may wish to treat emissive objects as light sources. 103 | public static let COLOR_EMISSIVE: AiMatKey = .init(base: .COLOR_EMISSIVE_BASE) 104 | 105 | /// Defines the reflective color of the material. 106 | /// This is typically scaled by the amount of incoming light from the direction of mirror reflection. 107 | /// Usually combined with an environment lightmap of some kind for real-time applications. 108 | public static let COLOR_REFLECTIVE: AiMatKey = .init(base: .COLOR_REFLECTIVE_BASE) 109 | 110 | /// Specular color of the material. This is typically scaled by the amount of incoming specular light (e.g. using phong shading) 111 | public static let COLOR_SPECULAR: AiMatKey = .init(base: .COLOR_SPECULAR_BASE) 112 | 113 | /// Defines the transparent color of the material, this is the color to be multiplied with the color of translucent 114 | /// light to construct the final 'destination color' for a particular position in the screen buffer. T 115 | public static let COLOR_TRANSPARENT: AiMatKey = .init(base: .COLOR_TRANSPARENT_BASE) 116 | 117 | /// Specifies whether wireframe rendering must be turned on for the material. 0 for false, !0 for true. 118 | public static let ENABLE_WIREFRAME: AiMatKey = .init(base: .ENABLE_WIREFRAME_BASE) 119 | 120 | public static let GLOBAL_BACKGROUND_IMAGE: AiMatKey = .init(base: .GLOBAL_BACKGROUND_IMAGE_BASE) 121 | 122 | /// Defines how the input mapping coordinates for sampling the n'th texture on the stack 't' are computed. 123 | /// Usually explicit UV coordinates are provided, but some model file formats might also be using basic shapes, 124 | /// such as spheres or cylinders, to project textures onto meshes. 125 | public static let MAPPING: AiMatKey = .init(base: .MAPPING_BASE) 126 | 127 | /// Any of the aiTextureMapMode enumerated values. 128 | /// Defines the texture wrapping mode on the x axis for sampling the n'th texture on the stack 't'. 129 | /// 'Wrapping' occurs whenever UVs lie outside the 0..1 range. 130 | public static let MAPPINGMODE_U: AiMatKey = .init(base: .MAPPINGMODE_U_BASE) 131 | 132 | /// Wrap mode on the v axis. See MAPPINGMODE_U. 133 | public static let MAPPINGMODE_V: AiMatKey = .init(base: .MAPPINGMODE_V_BASE) 134 | 135 | /// The name of the material, if available. 136 | /// 137 | /// Ignored by aiProcess_RemoveRedundantMaterials. Materials are considered equal even if their names are different. 138 | public static let NAME: AiMatKey = .init(base: .NAME_BASE) 139 | 140 | /// Defines the opacity of the material in a range between 0..1. 141 | public static let OPACITY: AiMatKey = .init(base: .OPACITY_BASE) 142 | 143 | /// Scales the reflective color of the material. 
144 | public static let REFLECTIVITY: AiMatKey = .init(base: .REFLECTIVITY_BASE) 145 | 146 | /// Defines the Index Of Refraction for the material. That's not supported by most file formats. 147 | public static let REFRACTI: AiMatKey = .init(base: .REFRACTI_BASE) 148 | 149 | /// One of the aiShadingMode enumerated values. 150 | /// Defines the library shading model to use for (real time) rendering to approximate the original look of the material as closely as possible. 151 | public static let SHADING_MODEL: AiMatKey = .init(base: .SHADING_MODEL_BASE) 152 | 153 | /// Defines the shininess of a phong-shaded material. 154 | /// This is actually the exponent of the phong specular equation. 155 | public static let SHININESS: AiMatKey = .init(base: .SHININESS_BASE) 156 | 157 | /// Scales the specular color of the material. 158 | public static let SHININESS_STRENGTH: AiMatKey = .init(base: .SHININESS_STRENGTH_BASE) 159 | 160 | /// Defines the strength of the n'th texture on the stack 't'. 161 | /// All color components (rgb) are multiplied with this factor before any further processing is done. 162 | public static let TEXBLEND: AiMatKey = .init(base: .TEXBLEND_BASE) 163 | 164 | /// Defines miscellaneous flags for the n'th texture on the stack 't'. 165 | /// This is a bitwise combination of the aiTextureFlags enumerated values. 166 | public static let TEXFLAGS: AiMatKey = .init(base: .TEXFLAGS_BASE) 167 | 168 | /// Defines the base axis to compute the mapping coordinates for the n'th texture on the stack 't' from. 169 | /// This is not required for UV-mapped textures. For instance, if MAPPING(t,n) is aiTextureMapping_SPHERE, U and V 170 | /// would map to longitude and latitude of a sphere around the given axis. The axis is given in local mesh space. 171 | public static let TEXMAP_AXIS: AiMatKey = .init(base: .TEXMAP_AXIS_BASE) 172 | 173 | /// One of the aiTextureOp enumerated values. 174 | /// Defines the arithmetic operation to be used to combine the n'th texture on the stack 't' with the n-1'th. 175 | /// TEXOP(t,0) refers to the blend operation between the base color for this stack (e.g. COLOR_DIFFUSE for the diffuse stack) and the first texture. 176 | public static let TEXOP: AiMatKey = .init(base: .TEXOP_BASE) 177 | 178 | /// Defines the path to the n'th texture on the stack 't', where 'n' is any value >= 0 and 't' is one of the aiTextureType enumerated values. 179 | public static let TEXTURE: AiMatKey = .init(base: .TEXTURE_BASE) 180 | 181 | /// Specifies whether meshes using this material must be rendered without backface culling. 0 for false, !0 for true. 182 | public static let TWOSIDED: AiMatKey = .init(base: .TWOSIDED_BASE) 183 | 184 | public static let UVTRANSFORM: AiMatKey = .init(base: .UVTRANSFORM_BASE) 185 | 186 | /// Defines the UV channel to be used as input mapping coordinates for sampling the n'th texture on the stack 't'.
187 | /// All meshes assigned to this material share the same UV channel setup.
188 | public static let UVWSRC: AiMatKey = .init(base: .UVWSRC_BASE)
189 |
190 | public static let GLTF_PBRMETALLICROUGHNESS_BASE_COLOR_FACTOR: AiMatKey = .init(base: .GLTF_PBRMETALLICROUGHNESS_BASE_COLOR_FACTOR)
191 | public static let GLTF_PBRMETALLICROUGHNESS_METALLIC_FACTOR: AiMatKey = .init(base: .GLTF_PBRMETALLICROUGHNESS_METALLIC_FACTOR)
192 | public static let GLTF_PBRMETALLICROUGHNESS_ROUGHNESS_FACTOR: AiMatKey = .init(base: .GLTF_PBRMETALLICROUGHNESS_ROUGHNESS_FACTOR)
193 | public static let GLTF_PBRMETALLICROUGHNESS_BASE_COLOR_TEXTURE: AiMatKey = .init(base: .TEXTURE_BASE, texType: .diffuse, texIndex: 1)
194 | public static let GLTF_PBRMETALLICROUGHNESS_METALLICROUGHNESS_TEXTURE: AiMatKey = .init(base: .TEXTURE_BASE, texType: .unknown, texIndex: 0)
195 | public static let GLTF_ALPHAMODE: AiMatKey = .init(base: .GLTF_ALPHAMODE)
196 | public static let GLTF_ALPHACUTOFF: AiMatKey = .init(base: .GLTF_ALPHACUTOFF)
197 | public static let GLTF_PBRSPECULARGLOSSINESS: AiMatKey = .init(base: .GLTF_PBRSPECULARGLOSSINESS)
198 | public static let GLTF_PBRSPECULARGLOSSINESS_GLOSSINESS_FACTOR: AiMatKey = .init(base: .GLTF_PBRSPECULARGLOSSINESS_GLOSSINESS_FACTOR)
199 | public static let GLTF_UNLIT: AiMatKey = .init(base: .GLTF_UNLIT)
200 |
201 | public static func BLEND_FUNC(_ texType: AiTextureType, _ texIndex: Int) -> AiMatKey { .init(base: .BLEND_FUNC_BASE, texType: texType, texIndex: texIndex) }
202 | public static func BUMPSCALING(_ texType: AiTextureType, _ texIndex: Int) -> AiMatKey { .init(base: .BUMPSCALING_BASE, texType: texType, texIndex: texIndex) }
203 | public static func COLOR_AMBIENT(_ texType: AiTextureType, _ texIndex: Int) -> AiMatKey { .init(base: .COLOR_AMBIENT_BASE, texType: texType, texIndex: texIndex) }
204 | public static func COLOR_DIFFUSE(_ texType: AiTextureType, _ texIndex: Int) -> AiMatKey { .init(base: .COLOR_DIFFUSE_BASE, texType: texType, texIndex: texIndex) }
205 | public static func COLOR_EMISSIVE(_ texType: AiTextureType, _ texIndex: Int) -> AiMatKey { .init(base: .COLOR_EMISSIVE_BASE, texType: texType, texIndex: texIndex) }
206 | public static func COLOR_REFLECTIVE(_ texType: AiTextureType, _ texIndex: Int) -> AiMatKey { .init(base: .COLOR_REFLECTIVE_BASE, texType: texType, texIndex: texIndex) }
207 | public static func COLOR_SPECULAR(_ texType: AiTextureType, _ texIndex: Int) -> AiMatKey { .init(base: .COLOR_SPECULAR_BASE, texType: texType, texIndex: texIndex) }
208 | public static func COLOR_TRANSPARENT(_ texType: AiTextureType, _ texIndex: Int) -> AiMatKey { .init(base: .COLOR_TRANSPARENT_BASE, texType: texType, texIndex: texIndex) }
209 | public static func ENABLE_WIREFRAME(_ texType: AiTextureType, _ texIndex: Int) -> AiMatKey { .init(base: .ENABLE_WIREFRAME_BASE, texType: texType, texIndex: texIndex) }
210 | public static func GLOBAL_BACKGROUND_IMAGE(_ texType: AiTextureType, _ texIndex: Int) -> AiMatKey { .init(base: .GLOBAL_BACKGROUND_IMAGE_BASE, texType: texType, texIndex: texIndex) }
211 | public static func MAPPING(_ texType: AiTextureType, _ texIndex: Int) -> AiMatKey { .init(base: .MAPPING_BASE, texType: texType, texIndex: texIndex) }
212 | public static func MAPPINGMODE_U(_ texType: AiTextureType, _ texIndex: Int) -> AiMatKey { .init(base: .MAPPINGMODE_U_BASE, texType: texType, texIndex: texIndex) }
213 | public static func MAPPINGMODE_V(_ texType: AiTextureType, _ texIndex: Int) -> AiMatKey { .init(base: .MAPPINGMODE_V_BASE, texType: texType, texIndex: texIndex) }
214 | public static func NAME(_ texType: AiTextureType, _ texIndex: Int) -> AiMatKey { .init(base: .NAME_BASE, texType: texType, texIndex: texIndex) } 215 | public static func OPACITY(_ texType: AiTextureType, _ texIndex: Int) -> AiMatKey { .init(base: .OPACITY_BASE, texType: texType, texIndex: texIndex) } 216 | public static func REFLECTIVITY(_ texType: AiTextureType, _ texIndex: Int) -> AiMatKey { .init(base: .REFLECTIVITY_BASE, texType: texType, texIndex: texIndex) } 217 | public static func REFRACTI(_ texType: AiTextureType, _ texIndex: Int) -> AiMatKey { .init(base: .REFRACTI_BASE, texType: texType, texIndex: texIndex) } 218 | public static func SHADING_MODEL(_ texType: AiTextureType, _ texIndex: Int) -> AiMatKey { .init(base: .SHADING_MODEL_BASE, texType: texType, texIndex: texIndex) } 219 | public static func SHININESS(_ texType: AiTextureType, _ texIndex: Int) -> AiMatKey { .init(base: .SHININESS_BASE, texType: texType, texIndex: texIndex) } 220 | public static func SHININESS_STRENGTH(_ texType: AiTextureType, _ texIndex: Int) -> AiMatKey { .init(base: .SHININESS_STRENGTH_BASE, texType: texType, texIndex: texIndex) } 221 | public static func TEXBLEND(_ texType: AiTextureType, _ texIndex: Int) -> AiMatKey { .init(base: .TEXBLEND_BASE, texType: texType, texIndex: texIndex) } 222 | public static func TEXFLAGS(_ texType: AiTextureType, _ texIndex: Int) -> AiMatKey { .init(base: .TEXFLAGS_BASE, texType: texType, texIndex: texIndex) } 223 | public static func TEXMAP_AXIS(_ texType: AiTextureType, _ texIndex: Int) -> AiMatKey { .init(base: .TEXMAP_AXIS_BASE, texType: texType, texIndex: texIndex) } 224 | public static func TEXOP(_ texType: AiTextureType, _ texIndex: Int) -> AiMatKey { .init(base: .TEXOP_BASE, texType: texType, texIndex: texIndex) } 225 | 226 | /// Defines the path to the n'th texture on the stack 't', where 'n' is any value >= 0 and 't' is one of the aiTextureType enumerated values. 227 | public static func TEXTURE(_ texType: AiTextureType, _ texIndex: Int) -> AiMatKey { .init(base: .TEXTURE_BASE, texType: texType, texIndex: texIndex) } 228 | public static func TWOSIDED(_ texType: AiTextureType, _ texIndex: Int) -> AiMatKey { .init(base: .TWOSIDED_BASE, texType: texType, texIndex: texIndex) } 229 | public static func UVTRANSFORM(_ texType: AiTextureType, _ texIndex: Int) -> AiMatKey { .init(base: .UVTRANSFORM_BASE, texType: texType, texIndex: texIndex) } 230 | public static func UVWSRC(_ texType: AiTextureType, _ texIndex: Int) -> AiMatKey { .init(base: .UVWSRC_BASE, texType: texType, texIndex: texIndex) } 231 | } 232 | -------------------------------------------------------------------------------- /Sources/Assimp/AiMaterial.swift: -------------------------------------------------------------------------------- 1 | // 2 | // AiMaterial.swift 3 | // SwiftAssimp 4 | // 5 | // Copyright © 2019-2023 Christian Treffs. All rights reserved. 6 | // Licensed under BSD 3-Clause License. See LICENSE file for details. 
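// ---------------------------------------------------------------------------
// Editor's note: a minimal usage sketch, not part of the original sources.
// It illustrates how the AiMatKey constants and the texture-scoped factory
// helpers defined in AiMatKey.swift above combine with the AiMaterial
// accessors defined in this file. Assumption: the loaded AiScene exposes a
// `materials` array; everything else uses only APIs shown in this package.
func describeMaterial(of mesh: AiMesh, in scene: AiScene) {
    let material = scene.materials[mesh.materialIndex]

    // Non-texture keys use the plain static constants.
    let name = material.getMaterialString(.NAME)
    let diffuse = material.getMaterialColor(.COLOR_DIFFUSE)
    let opacity = material.getMaterialFloatArray(.OPACITY)?.first

    // Texture-scoped keys are built with the factory functions, e.g. UVWSRC(.diffuse, 0).
    let diffuseTextureCount = material.getMaterialTextureCount(texType: .diffuse)
    let firstDiffusePath = material.getMaterialTexture(texType: .diffuse, texIndex: 0)
    let uvChannel = material.getMaterialProperty(.UVWSRC(.diffuse, 0))?.int.first

    print(name ?? "unnamed", diffuse as Any, opacity as Any,
          diffuseTextureCount, firstDiffusePath ?? "-", uvChannel ?? 0)
}
// ---------------------------------------------------------------------------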
7 | 8 | @_implementationOnly import CAssimp 9 | 10 | // Ref: https://github.com/helix-toolkit/helix-toolkit/blob/master/Source/HelixToolkit.SharpDX.Assimp.Shared/ImporterPartial_Material.cs 11 | public struct AiMaterial { 12 | let material: aiMaterial 13 | 14 | init(_ material: aiMaterial) { 15 | self.material = material 16 | let numProperties = Int(material.mNumProperties) 17 | self.numProperties = numProperties 18 | let numAllocated = Int(material.mNumAllocated) 19 | self.numAllocated = numAllocated 20 | properties = { 21 | guard numProperties > 0 else { 22 | return [] 23 | } 24 | return [AiMaterialProperty](unsafeUninitializedCapacity: numProperties) { buffer, written in 25 | for idx in 0 ..< numProperties { 26 | if let prop = material.mProperties[idx] { 27 | buffer[idx] = AiMaterialProperty(prop.pointee) 28 | written += 1 29 | } 30 | } 31 | } 32 | }() 33 | } 34 | 35 | init?(_ mat: aiMaterial?) { 36 | guard let mat = mat else { 37 | return nil 38 | } 39 | self.init(mat) 40 | } 41 | 42 | /// Number of properties in the data base 43 | public var numProperties: Int 44 | 45 | /// Storage allocated 46 | public var numAllocated: Int 47 | 48 | /// List of all material properties loaded. 49 | public var properties: [AiMaterialProperty] 50 | 51 | public lazy var typedProperties: [AiMaterialPropertyIdentifiable] = properties.compactMap { prop -> AiMaterialPropertyIdentifiable? in 52 | switch prop.type { 53 | case .string: 54 | return AiMaterialPropertyString(prop) 55 | 56 | case .float: 57 | return AiMaterialPropertyFloat(prop) 58 | 59 | case .int: 60 | return AiMaterialPropertyInt(prop) 61 | 62 | case .buffer: 63 | return AiMaterialPropertyBuffer(prop) 64 | 65 | case .double: 66 | return AiMaterialPropertyDouble(prop) 67 | 68 | default: 69 | return nil 70 | } 71 | } 72 | 73 | /* 74 | - aiGetMaterialProperty 75 | - aiGetMaterialTextureCount 76 | - aiGetMaterialTexture 77 | - aiGetMaterialString 78 | - aiGetMaterialColor 79 | 80 | - aiGetMaterialFloat 81 | - aiGetMaterialFloatArray 82 | - aiGetMaterialInteger 83 | - aiGetMaterialIntegerArray 84 | - aiGetMaterialUVTransform 85 | - aiGetMaterialXXX 86 | */ 87 | public func getMaterialProperty(_ key: AiMatKey) -> AiMaterialProperty? { 88 | withUnsafePointer(to: material) { matPtr -> AiMaterialProperty? in 89 | let matPropPtr = UnsafeMutablePointer?>.allocate(capacity: MemoryLayout.stride) 90 | defer { 91 | matPropPtr.deinitialize(count: 1) 92 | matPropPtr.deallocate() 93 | } 94 | 95 | let result = aiGetMaterialProperty(matPtr, 96 | key.baseName, 97 | key.texType, 98 | key.texIndex, 99 | matPropPtr) 100 | 101 | guard result == aiReturn_SUCCESS, let property = matPropPtr.pointee?.pointee else { 102 | return nil 103 | } 104 | return AiMaterialProperty(property) 105 | } 106 | } 107 | 108 | /// Get the number of textures for a particular texture type. 109 | public func getMaterialTextureCount(texType: AiTextureType) -> Int { 110 | withUnsafePointer(to: material) { 111 | Int(aiGetMaterialTextureCount($0, texType.type)) 112 | } 113 | } 114 | 115 | public func getMaterialTexture(texType: AiTextureType, texIndex: Int) -> String? { 116 | withUnsafePointer(to: material) { (matPtr: UnsafePointer) -> String? 
in 117 | var path = aiString() 118 | // NOTE: the properties do not seem to be working 119 | var mapping: aiTextureMapping = aiTextureMapping_UV 120 | var uvIndex: UInt32 = 0 121 | var blend: ai_real = 0.0 122 | var texOp: aiTextureOp = aiTextureOp_Multiply 123 | var mapmode: [aiTextureMapMode] = [aiTextureMapMode_Wrap, aiTextureMapMode_Wrap] 124 | var flags: UInt32 = 0 125 | let result = aiGetMaterialTexture(matPtr, 126 | texType.type, 127 | UInt32(texIndex), 128 | &path, 129 | &mapping, 130 | &uvIndex, 131 | &blend, 132 | &texOp, 133 | &mapmode, 134 | &flags) 135 | 136 | guard result == aiReturn_SUCCESS else { 137 | return nil 138 | } 139 | 140 | return String(path) 141 | } 142 | } 143 | 144 | public func getMaterialString(_ key: AiMatKey) -> String? { 145 | withUnsafePointer(to: material) { matPtr -> String? in 146 | var string = aiString() 147 | let result = aiGetMaterialString(matPtr, 148 | key.baseName, 149 | key.texType, 150 | key.texIndex, 151 | &string) 152 | 153 | guard result == aiReturn_SUCCESS else { 154 | return nil 155 | } 156 | 157 | return String(string) 158 | } 159 | } 160 | 161 | public func getMaterialColor(_ key: AiMatKey) -> SIMD4? { 162 | withUnsafePointer(to: material) { matPtr in 163 | var color = aiColor4D() 164 | let result = aiGetMaterialColor(matPtr, 165 | key.baseName, 166 | key.texType, 167 | key.texIndex, 168 | &color) 169 | guard result == aiReturn_SUCCESS else { 170 | return nil 171 | } 172 | return SIMD4(color.r, color.g, color.b, color.a) 173 | } 174 | } 175 | 176 | public func getMaterialFloatArray(_ key: AiMatKey) -> [AiReal]? { 177 | withUnsafePointer(to: material) { matPtr in 178 | let count = MemoryLayout.stride / MemoryLayout.stride 179 | return [ai_real](unsafeUninitializedCapacity: count) { buffer, written in 180 | var pMax: UInt32 = 0 181 | let result = aiGetMaterialFloatArray(matPtr, 182 | key.baseName, 183 | key.texType, 184 | key.texIndex, 185 | buffer.baseAddress!, 186 | &pMax) 187 | guard result == aiReturn_SUCCESS else { 188 | return 189 | } 190 | 191 | written = Int(pMax) 192 | } 193 | } 194 | } 195 | 196 | public func getMaterialIntegerArray(_ key: AiMatKey) -> [Int32] { 197 | withUnsafePointer(to: material) { matPtr in 198 | [Int32](unsafeUninitializedCapacity: 4) { buffer, written in 199 | var pMax: UInt32 = 0 200 | let result = aiGetMaterialIntegerArray(matPtr, 201 | key.baseName, 202 | key.texType, 203 | key.texIndex, 204 | buffer.baseAddress!, 205 | &pMax) 206 | 207 | guard result == aiReturn_SUCCESS, pMax > 0 else { 208 | return 209 | } 210 | 211 | written = Int(pMax) 212 | } 213 | } 214 | } 215 | } 216 | 217 | extension AiMaterial { 218 | @inlinable public var name: String? { getMaterialString(.NAME) } 219 | 220 | @inlinable public var shadingModel: AiShadingMode? { 221 | guard let int = getMaterialProperty(.SHADING_MODEL)?.int.first else { 222 | return nil 223 | } 224 | return AiShadingMode(rawValue: UInt32(int)) 225 | } 226 | 227 | @inlinable public var cullBackfaces: Bool? { 228 | guard let int = getMaterialProperty(.TWOSIDED)?.int.first else { 229 | return nil 230 | } 231 | 232 | return !(int == 1) 233 | } 234 | 235 | public var blendMode: AiBlendMode? { 236 | guard let int = getMaterialProperty(.BLEND_FUNC)?.int.first else { 237 | return nil 238 | } 239 | 240 | return AiBlendMode(aiBlendMode(UInt32(int))) 241 | } 242 | } 243 | 244 | /// Defines alpha-blend flags. 245 | /// 246 | /// If you're familiar with OpenGL or D3D, these flags aren't new to you. 
247 | /// They define *how* the final color value of a pixel is computed, basing 248 | /// on the previous color at that pixel and the new color value from the 249 | /// material. 250 | /// The blend formula is: 251 | /// ``` 252 | /// SourceColor * SourceBlend + DestColor * DestBlend 253 | /// ``` 254 | /// where DestColor is the previous color in the frame-buffer at this 255 | /// position and SourceColor is the material color before the transparency 256 | /// calculation.
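/// Example (editor's sketch, not part of the original sources): the mode of an
/// imported material can be queried through the `AiMaterial.blendMode` accessor above:
/// ```
/// if material.blendMode == .additive {
///     // select an additive blend state in the renderer
/// }
/// ```
///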
257 | /// This corresponds to the #AI_MATKEY_BLEND_FUNC property. 258 | /// 259 | public enum AiBlendMode { 260 | /// Default blend mode 261 | /// 262 | /// Formula: 263 | /// ``` 264 | /// SourceColor*SourceAlpha + DestColor*(1-SourceAlpha) 265 | /// ``` 266 | case `default` 267 | 268 | /// Additive blending 269 | /// 270 | /// Formula: 271 | /// ``` 272 | /// SourceColor*1 + DestColor*1 273 | /// ``` 274 | case additive 275 | 276 | init?(_ blendMode: aiBlendMode) { 277 | switch blendMode { 278 | case aiBlendMode_Default: 279 | self = .default 280 | 281 | case aiBlendMode_Additive: 282 | self = .additive 283 | 284 | default: 285 | return nil 286 | } 287 | } 288 | } 289 | -------------------------------------------------------------------------------- /Sources/Assimp/AiMaterialProperty.swift: -------------------------------------------------------------------------------- 1 | // 2 | // AiMaterialProperty.swift 3 | // SwiftAssimp 4 | // 5 | // Copyright © 2019-2023 Christian Treffs. All rights reserved. 6 | // Licensed under BSD 3-Clause License. See LICENSE file for details. 7 | 8 | @_implementationOnly import CAssimp 9 | 10 | public protocol AiMaterialPropertyIdentifiable { 11 | /// Specifies the name of the property (key) Keys are generally case insensitive. 12 | var key: String { get } 13 | 14 | /// Textures: Specifies the index of the texture. 15 | /// For non-texture properties, this member is always 0. 16 | var index: Int { get } 17 | 18 | /// Textures: Specifies their exact usage semantic. 19 | /// For non-texture properties, this member is always 0 (or, better-said, #aiTextureType_NONE). 20 | var semantic: AiTextureType { get } 21 | 22 | /// Type information for the property. 23 | /// 24 | /// Defines the data layout inside the data buffer. This is used 25 | /// by the library internally to perform debug checks and to 26 | /// utilize proper type conversions. 27 | /// 28 | /// (It's probably a hacky solution, but it works.) 29 | var type: AiMaterialProperty.TypeInfo { get } 30 | 31 | init(_ property: AiMaterialProperty) 32 | } 33 | 34 | public struct AiMaterialProperty: AiMaterialPropertyIdentifiable { 35 | public struct TypeInfo: RawRepresentable, Equatable, CustomDebugStringConvertible { 36 | public let rawValue: UInt32 37 | 38 | public init(rawValue: UInt32) { 39 | self.rawValue = rawValue 40 | } 41 | 42 | public static let float = TypeInfo(rawValue: aiPTI_Float.rawValue) 43 | public static let double = TypeInfo(rawValue: aiPTI_Double.rawValue) 44 | public static let string = TypeInfo(rawValue: aiPTI_String.rawValue) 45 | public static let int = TypeInfo(rawValue: aiPTI_Integer.rawValue) 46 | public static let buffer = TypeInfo(rawValue: aiPTI_Buffer.rawValue) 47 | 48 | public var debugDescription: String { 49 | switch self { 50 | case .float: 51 | return "float" 52 | case .double: 53 | return "double" 54 | case .string: 55 | return "string" 56 | case .int: 57 | return "int" 58 | case .buffer: 59 | return "buffer" 60 | default: 61 | return "unknown: \(rawValue)" 62 | } 63 | } 64 | } 65 | 66 | var property: aiMaterialProperty 67 | 68 | init(_ aiMaterialProperty: aiMaterialProperty) { 69 | property = aiMaterialProperty 70 | } 71 | 72 | public init(_ property: AiMaterialProperty) { 73 | self.property = property.property 74 | } 75 | 76 | /// Specifies the name of the property (key) Keys are generally case insensitive. 77 | public var key: String { 78 | String(property.mKey) ?? "" 79 | } 80 | 81 | /// Textures: Specifies the index of the texture. 
82 | /// For non-texture properties, this member is always 0. 83 | public var index: Int { 84 | Int(property.mIndex) 85 | } 86 | 87 | /// Textures: Specifies their exact usage semantic. 88 | /// For non-texture properties, this member is always 0 (or, better-said, #aiTextureType_NONE). 89 | public var semantic: AiTextureType { 90 | AiTextureType(rawValue: property.mSemantic) 91 | } 92 | 93 | /// Type information for the property. 94 | /// 95 | /// Defines the data layout inside the data buffer. This is used 96 | /// by the library internally to perform debug checks and to 97 | /// utilize proper type conversions. 98 | /// 99 | /// (It's probably a hacky solution, but it works.) 100 | public var type: TypeInfo { 101 | TypeInfo(rawValue: property.mType.rawValue) 102 | } 103 | 104 | /// Size of the buffer mData is pointing to, in bytes. 105 | /// 106 | /// This value may not be 0. 107 | public var dataLength: Int { 108 | Int(property.mDataLength) 109 | } 110 | 111 | /// Binary buffer to hold the property's value. 112 | /// The size of the buffer is always mDataLength. 113 | public var dataBuffer: UnsafeBufferPointer { 114 | UnsafeBufferPointer(start: property.mData, 115 | count: dataLength) 116 | } 117 | 118 | public var string: String? { 119 | guard type == .string, dataLength > 0, let ptr = property.mData else { 120 | return nil 121 | } 122 | // FIXME: we cut out the array length field and the terminating NULL of the aiString - this is not nice! 123 | let bytes = ptr.advanced(by: MemoryLayout.stride) 124 | 125 | return String(bytes: bytes, length: dataLength - 1 - MemoryLayout.stride) 126 | } 127 | 128 | func getString(pMat: UnsafePointer) -> String? { 129 | var pOut = aiString() 130 | 131 | let result = aiGetMaterialString(pMat, 132 | key.withCString { $0 }, 133 | property.mType.rawValue, 134 | property.mIndex, 135 | &pOut) 136 | 137 | guard result == aiReturn_SUCCESS else { 138 | return nil 139 | } 140 | return String(pOut) 141 | } 142 | 143 | public var double: [Double] { 144 | guard type == .double, dataLength > 0, let ptr = property.mData else { 145 | return [] 146 | } 147 | 148 | return (0 ..< dataLength).map { Double(ptr[$0]) } 149 | } 150 | 151 | public var float: [Float32] { 152 | guard type == .float, dataLength > 0, let ptr = property.mData else { 153 | return [] 154 | } 155 | 156 | return (0 ..< dataLength).map { Float32(ptr[$0]) } 157 | } 158 | 159 | public var int: [Int32] { 160 | guard type == .int, dataLength > 0, let ptr = property.mData else { 161 | return [] 162 | } 163 | 164 | return (0 ..< dataLength).map { Int32(ptr[$0]) } 165 | } 166 | } 167 | 168 | extension AiMaterialProperty: CustomDebugStringConvertible { 169 | public var debugDescription: String { 170 | """ 171 | 178 | """ 179 | } 180 | } 181 | 182 | extension AiMaterialProperty: Equatable { 183 | public static func == (lhs: AiMaterialProperty, rhs: AiMaterialProperty) -> Bool { 184 | lhs.key == rhs.key && 185 | lhs.index == rhs.index && 186 | lhs.semantic == rhs.semantic && 187 | lhs.type == rhs.type && 188 | lhs.dataLength == rhs.dataLength 189 | } 190 | } 191 | 192 | public struct AiMaterialPropertyString: AiMaterialPropertyIdentifiable, CustomDebugStringConvertible { 193 | public let key: String 194 | public let index: Int 195 | public let semantic: AiTextureType 196 | public let type: AiMaterialProperty.TypeInfo 197 | public let string: String 198 | 199 | public init(_ property: AiMaterialProperty) { 200 | key = property.key 201 | index = property.index 202 | semantic = property.semantic 203 | type = 
property.type 204 | string = property.string ?? "" 205 | } 206 | 207 | public var debugDescription: String { 208 | """ 209 | 216 | """ 217 | } 218 | } 219 | 220 | public struct AiMaterialPropertyBuffer: AiMaterialPropertyIdentifiable, CustomDebugStringConvertible { 221 | public let key: String 222 | public let index: Int 223 | public let semantic: AiTextureType 224 | public let type: AiMaterialProperty.TypeInfo 225 | public let buffer: UnsafeBufferPointer 226 | public let length: Int 227 | 228 | public init(_ property: AiMaterialProperty) { 229 | key = property.key 230 | index = property.index 231 | semantic = property.semantic 232 | type = property.type 233 | buffer = property.dataBuffer 234 | length = property.dataLength 235 | } 236 | 237 | public var debugDescription: String { 238 | """ 239 | 246 | """ 247 | } 248 | } 249 | 250 | public struct AiMaterialPropertyDouble: AiMaterialPropertyIdentifiable, CustomDebugStringConvertible { 251 | public let key: String 252 | public let index: Int 253 | public let semantic: AiTextureType 254 | public let type: AiMaterialProperty.TypeInfo 255 | public let doubles: [Double] 256 | 257 | public init(_ property: AiMaterialProperty) { 258 | key = property.key 259 | index = property.index 260 | semantic = property.semantic 261 | type = property.type 262 | doubles = property.double 263 | } 264 | 265 | public var debugDescription: String { 266 | """ 267 | 274 | """ 275 | } 276 | } 277 | 278 | public struct AiMaterialPropertyFloat: AiMaterialPropertyIdentifiable, CustomDebugStringConvertible { 279 | public let key: String 280 | public let index: Int 281 | public let semantic: AiTextureType 282 | public let type: AiMaterialProperty.TypeInfo 283 | public let floats: [Float32] 284 | 285 | public init(_ property: AiMaterialProperty) { 286 | key = property.key 287 | index = property.index 288 | semantic = property.semantic 289 | type = property.type 290 | floats = property.float 291 | } 292 | 293 | public var debugDescription: String { 294 | """ 295 | 302 | """ 303 | } 304 | } 305 | 306 | public struct AiMaterialPropertyInt: AiMaterialPropertyIdentifiable, CustomDebugStringConvertible { 307 | public let key: String 308 | public let index: Int 309 | public let semantic: AiTextureType 310 | public let type: AiMaterialProperty.TypeInfo 311 | public let ints: [Int32] 312 | 313 | public init(_ property: AiMaterialProperty) { 314 | key = property.key 315 | index = property.index 316 | semantic = property.semantic 317 | type = property.type 318 | ints = property.int 319 | } 320 | 321 | public var debugDescription: String { 322 | """ 323 | 330 | """ 331 | } 332 | } 333 | -------------------------------------------------------------------------------- /Sources/Assimp/AiMesh.swift: -------------------------------------------------------------------------------- 1 | // 2 | // AiMesh.swift 3 | // SwiftAssimp 4 | // 5 | // Copyright © 2019-2023 Christian Treffs. All rights reserved. 6 | // Licensed under BSD 3-Clause License. See LICENSE file for details. 
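// ---------------------------------------------------------------------------
// Editor's note: a minimal sketch, not part of the original sources. It shows
// one way to consume the typed property wrappers defined in
// AiMaterialProperty.swift above (AiMaterialPropertyString, -Float, -Int, ...).
// `typedProperties` is a lazy var on the AiMaterial struct, hence the mutable copy.
func dumpProperties(of material: AiMaterial) {
    var material = material
    for property in material.typedProperties {
        switch property {
        case let string as AiMaterialPropertyString:
            print(string.key, "=", string.string)
        case let floats as AiMaterialPropertyFloat:
            print(floats.key, "=", floats.floats)
        case let ints as AiMaterialPropertyInt:
            print(ints.key, "=", ints.ints)
        default:
            // Buffers, doubles and anything unrecognized fall through here.
            print(property.key, "(type: \(property.type))")
        }
    }
}
// ---------------------------------------------------------------------------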
7 | 8 | @_implementationOnly import CAssimp 9 | 10 | public final class AiMesh { 11 | public struct PrimitiveType: OptionSet { 12 | public let rawValue: UInt32 13 | 14 | public init(rawValue: UInt32) { 15 | self.rawValue = rawValue 16 | } 17 | 18 | public static let point = PrimitiveType(rawValue: aiPrimitiveType_POINT.rawValue) 19 | public static let line = PrimitiveType(rawValue: aiPrimitiveType_LINE.rawValue) 20 | public static let triangle = PrimitiveType(rawValue: aiPrimitiveType_TRIANGLE.rawValue) 21 | public static let polygon = PrimitiveType(rawValue: aiPrimitiveType_POLYGON.rawValue) 22 | } 23 | 24 | let mesh: aiMesh 25 | 26 | init(_ mesh: aiMesh) { 27 | self.mesh = mesh 28 | primitiveTypes = PrimitiveType(rawValue: mesh.mPrimitiveTypes) 29 | numVertices = Int(mesh.mNumVertices) 30 | numFaces = Int(mesh.mNumFaces) 31 | numBones = Int(mesh.mNumBones) 32 | materialIndex = Int(mesh.mMaterialIndex) 33 | name = String(mesh.mName) 34 | numAnimMeshes = Int(mesh.mNumAnimMeshes) 35 | method = mesh.mMethod 36 | } 37 | 38 | convenience init?(_ mesh: aiMesh?) { 39 | guard let mesh = mesh else { 40 | return nil 41 | } 42 | 43 | self.init(mesh) 44 | } 45 | 46 | /// Bitwise combination of the members of the #aiPrimitiveType enum. 47 | /// This specifies which types of primitives are present in the mesh. 48 | /// 49 | /// The "SortByPrimitiveType"-Step can be used to make sure the output meshes consist of one primitive type each. 50 | public var primitiveTypes: PrimitiveType 51 | 52 | /// The number of vertices in this mesh. This is also the size of all of the per-vertex data arrays. 53 | /// The maximum value for this member is #AI_MAX_VERTICES. 54 | public var numVertices: Int 55 | 56 | /// The number of primitives (triangles, polygons, lines) in this mesh. 57 | /// This is also the size of the mFaces array. 58 | /// The maximum value for this member is #AI_MAX_FACES. 59 | public var numFaces: Int 60 | 61 | /// Vertex positions. This array is always present in a mesh. 62 | /// The array is numVertices * 3 in size. 63 | public lazy var vertices = withUnsafeVertices([AiReal].init) 64 | 65 | public func withUnsafeVertices(_ body: (UnsafeBufferPointer) throws -> R) rethrows -> R { 66 | let count = numVertices * 3 67 | return try mesh.mVertices.withMemoryRebound(to: AiReal.self, capacity: count) { 68 | try body(UnsafeBufferPointer(start: $0, count: count)) 69 | } 70 | } 71 | 72 | /// Vertex normals. 73 | /// The array contains normalized vectors, NULL if not present. 74 | /// The array is mNumVertices * 3 in size. 75 | /// 76 | /// Normals are undefined for point and line primitives. 77 | /// A mesh consisting of points and lines only may not have normal vectors. 78 | /// Meshes with mixed primitive types (i.e. lines and triangles) may have normals, 79 | /// but the normals for vertices that are only referenced by point or line primitives 80 | /// are undefined and set to QNaN (WARN: qNaN compares to inequal to *everything*, even to qNaN itself. 81 | public lazy var normals = withUnsafeNormals([AiReal].init) 82 | 83 | public func withUnsafeNormals(_ body: (UnsafeBufferPointer) throws -> R) rethrows -> R { 84 | let count = numVertices * 3 85 | return try mesh.mNormals.withMemoryRebound(to: AiReal.self, capacity: count) { 86 | try body(UnsafeBufferPointer(start: $0, count: count)) 87 | } 88 | } 89 | 90 | /// Vertex tangents. 91 | /// The tangent of a vertex points in the direction of the positive X texture axis. 92 | /// The array contains normalized vectors, NULL if not present. 
93 | /// The array is mNumVertices * 3 in size. 94 | /// 95 | /// A mesh consisting of points and lines only may not have normal vectors. 96 | /// Meshes with mixed primitive types (i.e. lines and triangles) may have normals, 97 | /// but the normals for vertices that are only referenced by point or line primitives 98 | /// are undefined and set to qNaN. 99 | /// See the #mNormals member for a detailed discussion of qNaNs. 100 | public lazy var tangents = withUnsafeTangents([AiReal].init) 101 | 102 | public func withUnsafeTangents(_ body: (UnsafeBufferPointer) throws -> R) rethrows -> R { 103 | let count = numVertices * 3 104 | return try mesh.mTangents.withMemoryRebound(to: AiReal.self, capacity: count) { 105 | try body(UnsafeBufferPointer(start: $0, count: count)) 106 | } 107 | } 108 | 109 | /// Vertex bitangents. 110 | /// The bitangent of a vertex points in the direction of the positive Y texture axis. 111 | /// The array contains normalized vectors, NULL if not present. 112 | /// The array is mNumVertices * 3 in size. 113 | public lazy var bitangents = withUnsafeBitangents([AiReal].init) 114 | 115 | public func withUnsafeBitangents(_ body: (UnsafeBufferPointer) throws -> R) rethrows -> R { 116 | let count = numVertices * 3 117 | return try mesh.mBitangents.withMemoryRebound(to: AiReal.self, capacity: count) { 118 | try body(UnsafeBufferPointer(start: $0, count: count)) 119 | } 120 | } 121 | 122 | public typealias Channels = (T, T, T, T, T, T, T, T) 123 | 124 | /// Vertex color sets. 125 | /// 126 | /// A mesh may contain 0 to #AI_MAX_NUMBER_OF_COLOR_SETS vertex colors per vertex. 127 | /// NULL if not present. 128 | /// Each array is numVertices * 4 in size if present. 129 | /// Returns RGBA colors. 130 | public lazy var colors: Channels<[AiReal]?> = { 131 | typealias CVertexColorSet = (UnsafeMutablePointer?, 132 | UnsafeMutablePointer?, 133 | UnsafeMutablePointer?, 134 | UnsafeMutablePointer?, 135 | UnsafeMutablePointer?, 136 | UnsafeMutablePointer?, 137 | UnsafeMutablePointer?, 138 | UnsafeMutablePointer?) 139 | 140 | let maxColorsPerSet = numVertices * 4 // aiColor4D(RGBA) * numVertices 141 | func colorSet(at keyPath: KeyPath?>) -> [AiReal]? { 142 | guard let baseAddress = mesh.mColors[keyPath: keyPath] else { 143 | return nil 144 | } 145 | 146 | return baseAddress.withMemoryRebound(to: AiReal.self, capacity: maxColorsPerSet) { pColorSet in 147 | [AiReal](UnsafeBufferPointer(start: pColorSet, count: maxColorsPerSet)) 148 | } 149 | } 150 | 151 | return ( 152 | colorSet(at: \.0), 153 | colorSet(at: \.1), 154 | colorSet(at: \.2), 155 | colorSet(at: \.3), 156 | colorSet(at: \.4), 157 | colorSet(at: \.5), 158 | colorSet(at: \.6), 159 | colorSet(at: \.7) 160 | ) 161 | }() 162 | 163 | /// Vertex texture coords, also known as UV channels. 164 | /// 165 | /// A mesh may contain 0 to AI_MAX_NUMBER_OF_TEXTURECOORDS per vertex. 166 | /// NULL if not present. 167 | /// The array is numVertices * 3 in size. 168 | public lazy var texCoords: Channels<[AiReal]?> = { 169 | typealias CVertexUVChannels = (UnsafeMutablePointer?, 170 | UnsafeMutablePointer?, 171 | UnsafeMutablePointer?, 172 | UnsafeMutablePointer?, 173 | UnsafeMutablePointer?, 174 | UnsafeMutablePointer?, 175 | UnsafeMutablePointer?, 176 | UnsafeMutablePointer?) 177 | 178 | let maxTexCoordsPerChannel = numVertices * 3 // aiVector3D * numVertices 179 | 180 | func uvChannel(at keyPath: KeyPath?>) -> [AiReal]? 
{ 181 | guard let baseAddress = mesh.mTextureCoords[keyPath: keyPath] else { 182 | return nil 183 | } 184 | 185 | return baseAddress.withMemoryRebound(to: AiReal.self, capacity: maxTexCoordsPerChannel) { 186 | [AiReal](UnsafeBufferPointer(start: $0, count: maxTexCoordsPerChannel)) 187 | } 188 | } 189 | 190 | return Channels( 191 | uvChannel(at: \.0), 192 | uvChannel(at: \.1), 193 | uvChannel(at: \.2), 194 | uvChannel(at: \.3), 195 | uvChannel(at: \.4), 196 | uvChannel(at: \.5), 197 | uvChannel(at: \.6), 198 | uvChannel(at: \.7) 199 | ) 200 | }() 201 | 202 | public lazy var texCoordsPacked: Channels<[AiReal]?> = { 203 | func packChannel(uv: KeyPath, Int>, tex: KeyPath, [AiReal]?>) -> [AiReal]? { 204 | let numComps: Int = numUVComponents[keyPath: uv] 205 | guard let uvs = self.texCoords[keyPath: tex] else { 206 | return nil 207 | } 208 | switch numComps { 209 | case 1: // u 210 | return stride(from: 0, to: uvs.count, by: 3).map { uvs[$0] } 211 | 212 | case 2: // uv 213 | return stride(from: 0, to: uvs.count, by: 3).flatMap { uvs[$0 ... $0 + 1] } 214 | 215 | case 3: // uvw 216 | return uvs 217 | 218 | default: 219 | return nil 220 | } 221 | } 222 | 223 | return Channels( 224 | packChannel(uv: \.0, tex: \.0), 225 | packChannel(uv: \.1, tex: \.1), 226 | packChannel(uv: \.2, tex: \.2), 227 | packChannel(uv: \.3, tex: \.3), 228 | packChannel(uv: \.4, tex: \.4), 229 | packChannel(uv: \.5, tex: \.5), 230 | packChannel(uv: \.6, tex: \.6), 231 | packChannel(uv: \.7, tex: \.7) 232 | ) 233 | }() 234 | 235 | /// Specifies the number of components for a given UV channel. 236 | /// Up to three channels are supported (UVW, for accessing volume or cube maps). 237 | /// 238 | /// If the value is 2 for a given channel n, the component p.z of mTextureCoords[n][p] is set to 0.0f. 239 | /// If the value is 1 for a given channel, p.y is set to 0.0f, too. 240 | /// 4D coords are not supported 241 | public lazy var numUVComponents = Channels(Int(mesh.mNumUVComponents.0), 242 | Int(mesh.mNumUVComponents.1), 243 | Int(mesh.mNumUVComponents.2), 244 | Int(mesh.mNumUVComponents.3), 245 | Int(mesh.mNumUVComponents.4), 246 | Int(mesh.mNumUVComponents.5), 247 | Int(mesh.mNumUVComponents.6), 248 | Int(mesh.mNumUVComponents.7)) 249 | 250 | /// The faces the mesh is constructed from. 251 | /// Each face refers to a number of vertices by their indices. 252 | /// This array is always present in a mesh, its size is given in mNumFaces. 253 | /// 254 | /// If the #AI_SCENE_FLAGS_NON_VERBOSE_FORMAT is NOT set each face references an unique set of vertices. 255 | public lazy var faces: [AiFace] = UnsafeBufferPointer(start: mesh.mFaces, count: numFaces).compactMap(AiFace.init) 256 | 257 | /// The number of bones this mesh contains. 258 | /// Can be 0, in which case the mBones array is NULL. 259 | public var numBones: Int 260 | 261 | /// The material used by this mesh. 262 | /// 263 | /// A mesh uses only a single material. 264 | /// If an imported model uses multiple materials, the import splits up the mesh. 265 | /// Use this value as index into the scene's material list. 266 | public var materialIndex: Int 267 | 268 | /// Name of the mesh. Meshes can be named, but this is not a requirement and leaving this field empty is totally fine. 269 | /// 270 | /// There are mainly three uses for mesh names: 271 | /// - some formats name nodes and meshes independently. 272 | /// - importers tend to split meshes up to meet the one-material-per-mesh requirement. 
273 | /// Assigning the same (dummy) name to each of the result meshes aids the caller at recovering the original mesh partitioning. 274 | /// - Vertex animations refer to meshes by their names. 275 | /// 276 | public var name: String? 277 | 278 | /// The number of attachment meshes. 279 | /// 280 | /// **Note:** Currently only works with Collada loader. 281 | public var numAnimMeshes: Int 282 | 283 | /// Method of morphing when animeshes are specified. 284 | public var method: UInt32 285 | } 286 | -------------------------------------------------------------------------------- /Sources/Assimp/AiNode.swift: -------------------------------------------------------------------------------- 1 | // 2 | // AiNode.swift 3 | // SwiftAssimp 4 | // 5 | // Copyright © 2019-2023 Christian Treffs. All rights reserved. 6 | // Licensed under BSD 3-Clause License. See LICENSE file for details. 7 | 8 | @_implementationOnly import CAssimp 9 | 10 | public struct AiNode { 11 | let node: aiNode 12 | 13 | init(_ node: aiNode) { 14 | self.node = node 15 | name = String(node.mName) 16 | transformation = AiMatrix4x4(node.mTransformation) 17 | let numMeshes = Int(node.mNumMeshes) 18 | self.numMeshes = numMeshes 19 | let numChildren = Int(node.mNumChildren) 20 | self.numChildren = numChildren 21 | meshes = { 22 | guard numMeshes > 0 else { 23 | return [] 24 | } 25 | 26 | return (0 ..< numMeshes) 27 | .compactMap { node.mMeshes[$0] } 28 | .map { Int($0) } 29 | }() 30 | 31 | if numChildren > 0 { 32 | children = UnsafeBufferPointer(start: node.mChildren, count: numChildren).compactMap { AiNode($0?.pointee) } 33 | } else { 34 | children = [] 35 | } 36 | 37 | if let meta = node.mMetaData { 38 | metaData = AiMetadata(meta.pointee) 39 | } else { 40 | metaData = nil 41 | } 42 | } 43 | 44 | init?(_ node: aiNode?) { 45 | guard let node = node else { 46 | return nil 47 | } 48 | self.init(node) 49 | } 50 | 51 | /// The name of the node. 52 | /// 53 | /// The name might be empty (length of zero) but all nodes which need to be referenced by either bones or animations are named. 54 | /// Multiple nodes may have the same name, except for nodes which are referenced by bones (see #aiBone and #aiMesh::mBones). 55 | /// Their names *must* be unique. 56 | /// 57 | /// Cameras and lights reference a specific node by name - if there are multiple nodes with this name, they are assigned to each of them. 58 | /// There are no limitations with regard to the characters contained in the name string as it is usually taken directly from the source file. 59 | /// 60 | /// Implementations should be able to handle tokens such as whitespace, tabs, line feeds, quotation marks, ampersands etc. 61 | /// 62 | /// Sometimes assimp introduces new nodes not present in the source file into the hierarchy (usually out of necessity because sometimes the source hierarchy format is simply not compatible). 63 | /// 64 | /// Their names are surrounded by 65 | /// `<>` 66 | /// e.g. 67 | /// `` 68 | public var name: String? 69 | 70 | /// The transformation relative to the node's parent. 71 | public var transformation: AiMatrix4x4 72 | 73 | /// Parent node. 74 | /// 75 | /// NULL if this node is the root node. 76 | public var parent: AiNode? { 77 | guard let parent = node.mParent?.pointee else { 78 | return nil 79 | } 80 | return AiNode(parent) 81 | } 82 | 83 | /// The number of meshes of this node. 84 | public var numMeshes: Int 85 | 86 | /// The number of child nodes of this node. 87 | public var numChildren: Int 88 | 89 | /// The meshes of this node. 
90 | /// Each entry is an index into the mesh list of the #aiScene. 91 | public var meshes: [Int] 92 | 93 | /// The child nodes of this node. 94 | /// 95 | /// NULL if mNumChildren is 0. 96 | public var children: [AiNode] 97 | 98 | /// Metadata associated with this node or NULL if there is no metadata. 99 | /// Whether any metadata is generated depends on the source file format. 100 | public var metaData: AiMetadata? 101 | } 102 | 103 | extension AiNode: CustomDebugStringConvertible { 104 | public var debugDescription: String { 105 | """ 106 | \n\(children.map { "\t" + $0.debugDescription }.joined()) 107 | """ 108 | } 109 | } 110 | 111 | /// Container for holding metadata. 112 | /// Metadata is a key-value store using string keys and values. 113 | public struct AiMetadata { 114 | init(_ meta: aiMetadata) { 115 | numProperties = Int(meta.mNumProperties) 116 | keys = UnsafeBufferPointer(start: meta.mKeys, count: numProperties).compactMap(String.init) 117 | values = UnsafeBufferPointer(start: meta.mValues, count: numProperties).compactMap(Entry.init) 118 | } 119 | 120 | /// Length of the mKeys and mValues arrays, respectively 121 | public var numProperties: Int 122 | 123 | /// Arrays of keys, may not be NULL. 124 | /// Entries in this array may not be NULL as well. 125 | public var keys: [String] 126 | 127 | /// Arrays of values, may not be NULL. 128 | /// Entries in this array may be NULL if the corresponding property key has no assigned value. 129 | public var values: [Entry] 130 | 131 | public var metadata: [String: Entry] { 132 | [String: Entry](uniqueKeysWithValues: (0 ..< numProperties).map { (keys[$0], values[$0]) }) 133 | } 134 | 135 | public enum Entry { 136 | case bool(Bool) 137 | case int32(Int32) 138 | case uint64(UInt64) 139 | case float(Float) 140 | case double(Double) 141 | case string(String) 142 | case vec3(Vec3) 143 | case metadata(AiMetadata) 144 | 145 | init?(_ entry: aiMetadataEntry) { 146 | guard let pData = entry.mData else { 147 | return nil 148 | } 149 | 150 | switch entry.mType { 151 | case AI_BOOL: 152 | self = .bool(pData.bindMemory(to: Bool.self, capacity: 1).pointee) 153 | 154 | case AI_INT32: 155 | self = .int32(pData.bindMemory(to: Int32.self, capacity: 1).pointee) 156 | 157 | case AI_UINT64: 158 | self = .uint64(pData.bindMemory(to: UInt64.self, capacity: 1).pointee) 159 | 160 | case AI_FLOAT: 161 | self = .float(pData.bindMemory(to: Float.self, capacity: 1).pointee) 162 | 163 | case AI_DOUBLE: 164 | self = .double(pData.bindMemory(to: Double.self, capacity: 1).pointee) 165 | 166 | case AI_AISTRING: 167 | guard let string = String(pData.bindMemory(to: aiString.self, capacity: 1).pointee) else { 168 | return nil 169 | } 170 | self = .string(string) 171 | 172 | case AI_AIVECTOR3D: 173 | self = .vec3(Vec3(pData.bindMemory(to: aiVector3D.self, capacity: 1).pointee)) 174 | 175 | case AI_AIMETADATA: 176 | self = .metadata(AiMetadata(pData.bindMemory(to: aiMetadata.self, capacity: 1).pointee)) 177 | 178 | case AI_META_MAX: 179 | return nil 180 | 181 | case FORCE_32BIT: 182 | return nil 183 | 184 | default: 185 | return nil 186 | } 187 | } 188 | } 189 | } 190 | -------------------------------------------------------------------------------- /Sources/Assimp/AiPostProcessStep.swift: -------------------------------------------------------------------------------- 1 | // 2 | // AiPostProcessStep.swift 3 | // SwiftAssimp 4 | // 5 | // Copyright © 2019-2023 Christian Treffs. All rights reserved. 6 | // Licensed under BSD 3-Clause License. See LICENSE file for details. 
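// ---------------------------------------------------------------------------
// Editor's note: a minimal sketch, not part of the original sources. It shows a
// typical depth-first traversal of the scene graph using only the AiNode and
// AiMetadata API defined in AiNode.swift above.
func walk(_ node: AiNode, depth: Int = 0) {
    let indent = String(repeating: "  ", count: depth)
    print("\(indent)\(node.name ?? "(unnamed)"): mesh indices \(node.meshes)")

    // Metadata, if present, is a simple key-value store.
    if let metadata = node.metaData?.metadata {
        for (key, value) in metadata {
            print("\(indent)  \(key) = \(value)")
        }
    }

    for child in node.children {
        walk(child, depth: depth + 1)
    }
}
// ---------------------------------------------------------------------------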
7 | 8 | @_implementationOnly import CAssimp 9 | 10 | public struct AiPostProcessStep: OptionSet { 11 | public var rawValue: UInt32 12 | 13 | public init(rawValue: UInt32) { 14 | self.rawValue = rawValue 15 | } 16 | 17 | /// Calculates the tangents and bitangents for the imported meshes. 18 | /// 19 | /// Does nothing if a mesh does not have normals. 20 | /// You might want this post processing step to be executed if you plan to use tangent space calculations 21 | /// such as normal mapping applied to the meshes. 22 | /// There's an importer property, #AI_CONFIG_PP_CT_MAX_SMOOTHING_ANGLE, 23 | /// which allows you to specify a maximum smoothing angle for the algorithm. 24 | /// However, usually you'll want to leave it at the default value. 25 | public static let calcTangentSpace = AiPostProcessStep(rawValue: aiProcess_CalcTangentSpace.rawValue) 26 | 27 | /// This step removes bones losslessly or according to some threshold. 28 | /// 29 | /// In some cases (i.e. formats that require it) exporters are forced to assign dummy bone weights 30 | /// to otherwise static meshes assigned to animated meshes. 31 | /// Full, weight-based skinning is expensive while animating nodes is extremely cheap, 32 | /// so this step is offered to clean up the data in that regard. 33 | /// 34 | /// Use #AI_CONFIG_PP_DB_THRESHOLD to control this. 35 | /// Use #AI_CONFIG_PP_DB_ALL_OR_NONE if you want bones removed if and only if all bones within the scene qualify for removal. 36 | public static let debone = AiPostProcessStep(rawValue: aiProcess_Debone.rawValue) 37 | 38 | /// This step searches all meshes for degenerate primitives and converts them to proper lines or points. 39 | /// 40 | /// A face is 'degenerate' if one or more of its points are identical. 41 | /// To have the degenerate stuff not only detected and collapsed but removed, 42 | /// try one of the following procedures: 43 | /// 1. (if you support lines and points for rendering but don't want the degenerates) Specify the #aiProcess_FindDegenerates flag. 44 | /// Set the #AI_CONFIG_PP_FD_REMOVE importer property to 1. 45 | /// This will cause the step to remove degenerate triangles from the import as soon as they're detected. 46 | /// They won't pass any further pipeline steps. 47 | /// 48 | /// 2.(if you don't support lines and points at all) Specify the #aiProcess_FindDegenerates flag. 49 | /// Specify the #aiProcess_SortByPType flag. T 50 | /// his moves line and point primitives to separate meshes. Set the #AI_CONFIG_PP_SBP_REMOVE importer property to 51 | /// `aiPrimitiveType_POINTS | aiPrimitiveType_LINES` to cause SortByPType to reject point and line meshes from the scene. 52 | /// 53 | /// Degenerate polygons are not necessarily evil and that's why they're not removed by default. 54 | /// There are several file formats which don't support lines or points, and some exporters bypass 55 | /// the format specification and write them as degenerate triangles instead. 56 | public static let findDegenerates = AiPostProcessStep(rawValue: aiProcess_FindDegenerates.rawValue) 57 | 58 | /// This step searches for duplicate meshes and replaces them with references to the first mesh. 59 | /// 60 | /// This step takes a while, so don't use it if speed is a concern. 61 | /// Its main purpose is to workaround the fact that many export file formats don't support instanced meshes, 62 | /// so exporters need to duplicate meshes. 63 | /// This step removes the duplicates again. 
64 | /// Please note that Assimp does not currently support per-node material assignment to meshes, 65 | /// which means that identical meshes with different materials are currently *not* joined, 66 | /// although this is planned for future versions. 67 | public static let findInstances = AiPostProcessStep(rawValue: aiProcess_FindInstances.rawValue) 68 | 69 | /// This step searches all meshes for invalid data, such as zeroed normal vectors 70 | /// or invalid UV coords and removes/fixes them. 71 | /// This is intended to get rid of some common exporter errors. 72 | /// 73 | /// This is especially useful for normals. 74 | /// If they are invalid, and the step recognizes this, they will be removed and can later be recomputed, 75 | /// i.e. by the #aiProcess_GenSmoothNormals flag. 76 | /// The step will also remove meshes that are infinitely small and reduce animation tracks consisting of 77 | /// hundreds if redundant keys to a single key. 78 | /// The AI_CONFIG_PP_FID_ANIM_ACCURACY config property decides the accuracy of the check for duplicate animation tracks. 79 | public static let findInvalidData = AiPostProcessStep(rawValue: aiProcess_FindInvalidData.rawValue) 80 | 81 | /// This step tries to determine which meshes have normal vectors that are facing inwards and inverts them. 82 | /// 83 | /// The algorithm is simple but effective: the bounding box of all vertices + their normals is compared against the 84 | /// volume of the bounding box of all vertices without their normals. 85 | /// This works well for most objects, problems might occur with planar surfaces. 86 | /// However, the step tries to filter such cases. 87 | /// The step inverts all in-facing normals. 88 | /// Generally it is recommended to enable this step, although the result is not always correct. 89 | public static let fixInfacingNormals = AiPostProcessStep(rawValue: aiProcess_FixInfacingNormals.rawValue) 90 | 91 | /// This step flips all UV coordinates along the y-axis and adjusts material settings and bitangents accordingly. 92 | /// 93 | /// You'll probably want to consider this flag if you use Direct3D for rendering. 94 | /// The #aiProcess_ConvertToLeftHanded flag supersedes this setting and bundles 95 | /// all conversions typically required for D3D-based applications. 96 | public static let flipUVs = AiPostProcessStep(rawValue: aiProcess_FlipUVs.rawValue) 97 | 98 | /// This step adjusts the output face winding order to be CW. 99 | /// 100 | /// The default face winding order is counter clockwise (CCW). 101 | public static let flipWindingOrder = AiPostProcessStep(rawValue: aiProcess_FlipWindingOrder.rawValue) 102 | 103 | /// Generates normals for all faces of all meshes. 104 | /// 105 | /// This is ignored if normals are already there at the time this flag is evaluated. 106 | /// Model importers try to load them from the source file, so they're usually already there. 107 | /// Face normals are shared between all points of a single face, so a single point can have multiple normals, 108 | /// which forces the library to duplicate vertices in some cases. 109 | /// #aiProcess_JoinIdenticalVertices is *senseless* then. 110 | /// This flag may not be specified together with #aiProcess_GenSmoothNormals. 111 | public static let genNormals = AiPostProcessStep(rawValue: aiProcess_GenNormals.rawValue) 112 | 113 | /// Generates smooth normals for all vertices in the mesh. 114 | /// This is ignored if normals are already there at the time this flag is evaluated. 
115 | /// Model importers try to load them from the source file, so they're usually already there. 116 | /// This flag may not be specified together with #aiProcess_GenNormals. 117 | /// There's a importer property, #AI_CONFIG_PP_GSN_MAX_SMOOTHING_ANGLE which allows you to specify 118 | /// an angle maximum for the normal smoothing algorithm. 119 | /// Normals exceeding this limit are not smoothed, resulting in a 'hard' seam between two faces. 120 | /// Using a decent angle here (e.g. 80 degrees) results in very good visual appearance. 121 | public static let genSmoothNormals = AiPostProcessStep(rawValue: aiProcess_GenSmoothNormals.rawValue) 122 | 123 | /// This step converts non-UV mappings (such as spherical or cylindrical mapping) to proper texture coordinate channels. 124 | /// 125 | /// Most applications will support UV mapping only, so you will probably want to specify this step in every case. 126 | /// Note that Assimp is not always able to match the original mapping implementation of the 3D app which produced 127 | /// a model perfectly. 128 | /// It's always better to let the modelling app compute the UV channels 129 | /// - 3ds max, Maya, Blender, LightWave, and Modo do this for example. 130 | public static let genUVCoords = AiPostProcessStep(rawValue: aiProcess_GenUVCoords.rawValue) 131 | 132 | /// This step will perform a global scale of the model. 133 | /// 134 | /// Some importers are providing a mechanism to define a scaling unit for the model. 135 | /// This post processing step can be used to do so. 136 | /// Use #AI_CONFIG_GLOBAL_SCALE_FACTOR_KEY to control this. 137 | public static let globalScale = AiPostProcessStep(rawValue: aiProcess_GlobalScale.rawValue) 138 | 139 | /// Reorders triangles for better vertex cache locality. 140 | /// 141 | /// The step tries to improve the ACMR (average post-transform vertex cache miss ratio) for all meshes. 142 | /// The implementation runs in O(n) and is roughly based on the 'tipsify' algorithm (see this paper). 143 | /// 144 | /// If you intend to render huge models in hardware, this step might be of interest to you. 145 | /// The #AI_CONFIG_PP_ICL_PTCACHE_SIZE importer property can be used to fine-tune the cache optimization. 146 | public static let improveCacheLocality = AiPostProcessStep(rawValue: aiProcess_ImproveCacheLocality.rawValue) 147 | 148 | /// Identifies and joins identical vertex data sets within all imported meshes. 149 | /// 150 | /// After this step is run, each mesh contains unique vertices, so a vertex may be used by multiple faces. 151 | /// You usually want to use this post processing step. 152 | /// If your application deals with indexed geometry, this step is compulsory or you'll just waste rendering time. 153 | /// If this flag is not specified, no vertices are referenced by more than one face and no index buffer is required for rendering. 154 | public static let joinIdenticalVertices = AiPostProcessStep(rawValue: aiProcess_JoinIdenticalVertices.rawValue) 155 | 156 | /// Limits the number of bones simultaneously affecting a single vertex to a maximum value. 157 | /// 158 | /// If any vertex is affected by more than the maximum number of bones, the least important vertex weights 159 | /// are removed and the remaining vertex weights are renormalized so that the weights still sum up to 1. 
160 | /// 161 | /// The default bone weight limit is 4 (defined as #AI_LMW_MAX_WEIGHTS in config.h), 162 | /// but you can use the #AI_CONFIG_PP_LBW_MAX_WEIGHTS importer property to 163 | /// supply your own limit to the post processing step. 164 | /// If you intend to perform the skinning in hardware, this post processing step might be of interest to you. 165 | public static let limitBoneWeights = AiPostProcessStep(rawValue: aiProcess_LimitBoneWeights.rawValue) 166 | 167 | /// Converts all the imported data to a left-handed coordinate space. 168 | /// 169 | /// By default the data is returned in a right-handed coordinate space (which OpenGL prefers). 170 | /// In this space, +X points to the right, +Z points towards the viewer, and +Y points upwards. 171 | /// In the DirectX coordinate space +X points to the right, +Y points upwards, and +Z points away from the viewer. 172 | /// 173 | /// You'll probably want to consider this flag if you use Direct3D for rendering. 174 | /// The #aiProcess_ConvertToLeftHanded flag supersedes this setting and bundles all conversions typically required for D3D-based applications. 175 | public static let makeLeftHanded = AiPostProcessStep(rawValue: aiProcess_MakeLeftHanded.rawValue) 176 | 177 | /// A postprocessing step to optimize the scene hierarchy. 178 | /// 179 | /// Nodes without animations, bones, lights or cameras assigned are collapsed and joined. 180 | /// Node names can be lost during this step. 181 | /// If you use special 'tag nodes' to pass additional information through your content pipeline, 182 | /// use the #AI_CONFIG_PP_OG_EXCLUDE_LIST importer property to specify a list of node names you want to be kept. 183 | /// Nodes matching one of the names in this list won't be touched or modified. 184 | /// 185 | /// Use this flag with caution. 186 | /// Most simple files will be collapsed to a single node, so complex hierarchies are usually completely lost. 187 | /// This is not useful for editor environments, but probably a very effective optimization if you 188 | /// just want to get the model data, convert it to your own format, and render it as fast as possible. 189 | /// This flag is designed to be used with #aiProcess_OptimizeMeshes for best results. 190 | /// 191 | /// **Note** 192 | /// 'Crappy' scenes with thousands of extremely small meshes packed in deeply nested nodes exist for almost all file formats. 193 | /// #aiProcess_OptimizeMeshes in combination with #aiProcess_OptimizeGraph usually fixes them all and makes them renderable. 194 | public static let optimizeGraph = AiPostProcessStep(rawValue: aiProcess_OptimizeGraph.rawValue) 195 | 196 | /// A postprocessing step to reduce the number of meshes. 197 | /// 198 | /// This will, in fact, reduce the number of draw calls. 199 | /// This is a very effective optimization and is recommended to be used together with #aiProcess_OptimizeGraph, if possible. 200 | /// The flag is fully compatible with both #aiProcess_SplitLargeMeshes and #aiProcess_SortByPType. 201 | public static let optimizeMeshes = AiPostProcessStep(rawValue: aiProcess_OptimizeMeshes.rawValue) 202 | 203 | /// Removes the node graph and pre-transforms all vertices with the local transformation matrices of their nodes. 204 | /// 205 | /// The output scene still contains nodes, however there is only a root node with children, 206 | /// each one referencing only one mesh, and each mesh referencing one material. 
207 | /// For rendering, you can simply render all meshes in order - you don't need to pay attention to 208 | /// local transformations and the node hierarchy. 209 | /// Animations are removed during this step. 210 | /// This step is intended for applications without a scenegraph. 211 | /// The step CAN cause some problems: if e.g. a mesh of the asset contains normals and another, 212 | /// using the same material index, does not, they will be brought together, 213 | /// but the first meshes's part of the normal list is zeroed. 214 | /// However, these artifacts are rare. 215 | /// **Note** 216 | /// The #AI_CONFIG_PP_PTV_NORMALIZE configuration property can be set to normalize the scene's spatial dimension to the -1...1 range. 217 | public static let preTransformVertices = AiPostProcessStep(rawValue: aiProcess_PreTransformVertices.rawValue) 218 | 219 | /// Removes some parts of the data structure (animations, materials, light sources, cameras, textures, vertex components). 220 | /// 221 | /// The components to be removed are specified in a separate importer property, #AI_CONFIG_PP_RVC_FLAGS. 222 | /// This is quite useful if you don't need all parts of the output structure. 223 | /// Vertex colors are rarely used today for example... 224 | /// Calling this step to remove unneeded data from the pipeline as early as possible results in increased performance 225 | /// and a more optimized output data structure. 226 | /// This step is also useful if you want to force Assimp to recompute normals or tangents. 227 | /// The corresponding steps don't recompute them if they're already there (loaded from the source asset). 228 | /// By using this step you can make sure they are NOT there. 229 | /// 230 | /// This flag is a poor one, mainly because its purpose is usually misunderstood. 231 | /// 232 | /// Consider the following case: a 3D model has been exported from a CAD app, and it has per-face vertex colors. 233 | /// Vertex positions can't be shared, thus the #aiProcess_JoinIdenticalVertices step fails to optimize 234 | /// the data because of these nasty little vertex colors. 235 | /// Most apps don't even process them, so it's all for nothing. 236 | /// By using this step, unneeded components are excluded as early as possible thus opening more room for internal optimizations. 237 | public static let removeComponent = AiPostProcessStep(rawValue: aiProcess_RemoveComponent.rawValue) 238 | 239 | /// Searches for redundant/unreferenced materials and removes them. 240 | /// 241 | /// This is especially useful in combination with the #aiProcess_PreTransformVertices and #aiProcess_OptimizeMeshes flags. 242 | /// Both join small meshes with equal characteristics, but they can't do their work if two meshes have different materials. 243 | /// Because several material settings are lost during Assimp's import filters, 244 | /// (and because many exporters don't check for redundant materials), 245 | /// huge models often have materials which are are defined several times with exactly the same settings. 246 | /// 247 | /// Several material settings not contributing to the final appearance of a surface are ignored in all comparisons 248 | /// (e.g. the material name). 249 | /// So, if you're passing additional information through the content pipeline (probably using *magic* material names), 250 | /// don't specify this flag. 251 | /// Alternatively take a look at the #AI_CONFIG_PP_RRM_EXCLUDE_LIST importer property. 
252 | public static let removeRedundantMaterials = AiPostProcessStep(rawValue: aiProcess_RemoveRedundantMaterials.rawValue) 253 | 254 | /// This step splits meshes with more than one primitive type into homogeneous sub-meshes. 255 | 256 | /// The step is executed after the triangulation step. 257 | /// After the step returns, just one bit is set in aiMesh::mPrimitiveTypes. 258 | /// This is especially useful for real-time rendering where point and line primitives are often ignored or rendered separately. 259 | /// You can use the #AI_CONFIG_PP_SBP_REMOVE importer property to specify which primitive types you need. 260 | /// This can be used to easily exclude lines and points, which are rarely used, from the import. 261 | public static let sortByPType = AiPostProcessStep(rawValue: aiProcess_SortByPType.rawValue) 262 | 263 | /// This step splits meshes with many bones into sub-meshes so that each sub-mesh has at most a given number of bones. 264 | public static let splitByBoneCount = AiPostProcessStep(rawValue: aiProcess_SplitByBoneCount.rawValue) 265 | 266 | /// Splits large meshes into smaller sub-meshes. 267 | 268 | /// This is quite useful for real-time rendering, where the number of triangles which can be 269 | /// maximally processed in a single draw-call is limited by the video driver/hardware. 270 | /// The maximum vertex buffer is usually limited too. 271 | /// Both requirements can be met with this step: you may specify both a triangle and vertex limit for a single mesh. 272 | /// The split limits can (and should!) be set through the #AI_CONFIG_PP_SLM_VERTEX_LIMIT and #AI_CONFIG_PP_SLM_TRIANGLE_LIMIT 273 | /// importer properties. 274 | /// The default values are #AI_SLM_DEFAULT_MAX_VERTICES and #AI_SLM_DEFAULT_MAX_TRIANGLES. 275 | /// Note that splitting is generally a time-consuming task, but only if there's something to split. 276 | /// The use of this step is recommended for most users. 277 | public static let splitLargeMeshes = AiPostProcessStep(rawValue: aiProcess_SplitLargeMeshes.rawValue) 278 | 279 | /// This step applies per-texture UV transformations and bakes them into stand-alone texture coordinate channels. 280 | 281 | /// UV transformations are specified per-texture - see the #AI_MATKEY_UVTRANSFORM material key for more information. 282 | /// This step processes all textures with transformed input UV coordinates and generates a new (pre-transformed) 283 | /// UV channel which replaces the old channel. 284 | /// Most applications won't support UV transformations, so you will probably want to specify this step. 285 | 286 | /// **Note** 287 | /// UV transformations are usually implemented in real-time apps by transforming texture coordinates 288 | /// at the vertex shader stage with a 3x3 (homogeneous) transformation matrix. 289 | public static let transformUVCoords = AiPostProcessStep(rawValue: aiProcess_TransformUVCoords.rawValue) 290 | 291 | /// Triangulates all faces of all meshes. 292 | 293 | /// By default the imported mesh data might contain faces with more than 3 indices. 294 | /// For rendering you'll usually want all faces to be triangles. 295 | /// This post processing step splits up faces with more than 3 indices into triangles. 296 | /// Line and point primitives are *not* modified!
If you want 'triangles only' with no other kinds of primitives, 297 | /// try the following solution: specify both #aiProcess_Triangulate and #aiProcess_SortByPType, then ignore all 298 | /// point and line meshes when you process Assimp's output. 299 | public static let triangulate = AiPostProcessStep(rawValue: aiProcess_Triangulate.rawValue) 300 | 301 | /// Validates the imported scene data structure. 302 | /// This makes sure that all indices are valid, all animations and bones are linked correctly, 303 | /// all material references are correct, etc. 304 | 305 | /// It is recommended that you capture Assimp's log output if you use this flag, so you can easily find out 306 | /// what's wrong if a file fails the validation. 307 | /// The validator is quite strict and will find *all* inconsistencies in the data structure... 308 | /// It is recommended that plugin developers use it to debug their loaders. 309 | /// There are two types of validation failures: 310 | /// Error: There's something wrong with the imported data. 311 | /// Further postprocessing is not possible and the data is not usable at all. 312 | /// The import fails. #Importer::GetErrorString() or #aiGetErrorString() carry the error message around. 313 | /// Warning: There are some minor issues (e.g. 1000000 animation keyframes with the same time), 314 | /// but further postprocessing and use of the data structure is still safe. 315 | /// Warning details are written to the log file, and #AI_SCENE_FLAGS_VALIDATION_WARNING is set in #aiScene::mFlags. 316 | 317 | /// This post-processing step is not time-consuming. 318 | /// Its use is not compulsory, but recommended. 319 | public static let validateDataStructure = AiPostProcessStep(rawValue: aiProcess_ValidateDataStructure.rawValue) 320 | } 321 | -------------------------------------------------------------------------------- /Sources/Assimp/AiScene.swift: -------------------------------------------------------------------------------- 1 | // 2 | // AiScene.swift 3 | // SwiftAssimp 4 | // 5 | // Copyright © 2019-2023 Christian Treffs. All rights reserved. 6 | // Licensed under BSD 3-Clause License. See LICENSE file for details.
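//
// A minimal usage sketch (the file path is a placeholder; the flag combination mirrors the
// recommendations documented in AiPostProcessStep.swift above and the array-literal syntax
// used in the tests):
//
//     let scene = try AiScene(file: "path/to/model.obj",
//                             flags: [.triangulate, .sortByPType, .optimizeMeshes, .optimizeGraph])
//
// Pairing `.triangulate` with `.sortByPType` yields triangle-only meshes, while `.optimizeMeshes`
// together with `.optimizeGraph` collapses small meshes and redundant nodes, as described above.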
7 | 8 | @_implementationOnly import CAssimp 9 | 10 | public final class AiScene { 11 | public enum Error: Swift.Error { 12 | case importFailed(String) 13 | case importIncomplete(String) 14 | case noRootNode 15 | } 16 | 17 | public struct Flags: OptionSet { 18 | public var rawValue: Int32 19 | 20 | public init(rawValue: Int32) { 21 | self.rawValue = rawValue 22 | } 23 | 24 | public static let incomplete = Flags(rawValue: AI_SCENE_FLAGS_INCOMPLETE) 25 | public static let validated = Flags(rawValue: AI_SCENE_FLAGS_VALIDATED) 26 | public static let validationWarning = Flags(rawValue: AI_SCENE_FLAGS_VALIDATION_WARNING) 27 | public static let nonVerboseFormat = Flags(rawValue: AI_SCENE_FLAGS_NON_VERBOSE_FORMAT) 28 | public static let terrain = Flags(rawValue: AI_SCENE_FLAGS_TERRAIN) 29 | public static let allowShared = Flags(rawValue: AI_SCENE_FLAGS_ALLOW_SHARED) 30 | } 31 | 32 | let scene: aiScene 33 | 34 | public init(file filePath: String, flags: AiPostProcessStep = []) throws { 35 | guard let scenePtr = aiImportFile(filePath, flags.rawValue) else { 36 | throw Error.importFailed(String(cString: aiGetErrorString())) 37 | } 38 | scene = scenePtr.pointee 39 | let flags = Flags(rawValue: Int32(scene.mFlags)) 40 | 41 | if flags.contains(.incomplete) { 42 | throw Error.importIncomplete(filePath) 43 | } 44 | self.flags = flags 45 | 46 | let numMeshes = Int(scene.mNumMeshes) 47 | self.numMeshes = numMeshes 48 | let numMaterials = Int(scene.mNumMaterials) 49 | self.numMaterials = numMaterials 50 | let numAnimations = Int(scene.mNumAnimations) 51 | self.numAnimations = numAnimations 52 | let numTextures = Int(scene.mNumTextures) 53 | self.numTextures = numTextures 54 | let numLights = Int(scene.mNumLights) 55 | self.numLights = numLights 56 | let numCameras = Int(scene.mNumCameras) 57 | self.numCameras = numCameras 58 | 59 | hasMeshes = scene.mMeshes != nil && numMeshes > 0 60 | hasMaterials = scene.mMaterials != nil && numMaterials > 0 61 | hasLights = scene.mLights != nil && numLights > 0 62 | hasTextures = scene.mTextures != nil && numTextures > 0 63 | hasCameras = scene.mCameras != nil && numCameras > 0 64 | hasAnimations = scene.mAnimations != nil && numAnimations > 0 65 | 66 | guard let node = scene.mRootNode?.pointee else { 67 | throw Error.noRootNode 68 | } 69 | 70 | rootNode = AiNode(node) 71 | } 72 | 73 | deinit { 74 | withUnsafePointer(to: scene) { 75 | aiReleaseImport($0) 76 | } 77 | } 78 | 79 | /// Check whether the scene contains meshes 80 | /// Unless no special scene flags are set this will always be true. 81 | public var hasMeshes: Bool 82 | 83 | /// Check whether the scene contains materials 84 | /// Unless no special scene flags are set this will always be true. 85 | public var hasMaterials: Bool 86 | 87 | /// Check whether the scene contains lights 88 | public var hasLights: Bool 89 | 90 | /// Check whether the scene contains embedded textures 91 | public var hasTextures: Bool 92 | 93 | /// Check whether the scene contains cameras 94 | public var hasCameras: Bool 95 | 96 | /// Check whether the scene contains animations 97 | public var hasAnimations: Bool 98 | 99 | /// Any combination of the AI_SCENE_FLAGS_XXX flags. 100 | /// 101 | /// By default this value is 0, no flags are set. 102 | /// Most applications will want to reject all scenes with the AI_SCENE_FLAGS_INCOMPLETE bit set. 103 | public var flags: Flags 104 | 105 | /// The root node of the hierarchy. 
106 | /// 107 | /// There will always be at least the root node if the import was successful (and no special flags have been set). 108 | /// Presence of further nodes depends on the format and content of the imported file. 109 | public var rootNode: AiNode 110 | 111 | /// The number of meshes in the scene. 112 | public var numMeshes: Int 113 | 114 | /// The array of meshes. 115 | /// Use the indices given in the aiNode structure to access this array. 116 | /// The array is mNumMeshes in size. 117 | /// 118 | /// If the AI_SCENE_FLAGS_INCOMPLETE flag is not set there will always be at least ONE material. 119 | public lazy var meshes: [AiMesh] = UnsafeBufferPointer(start: scene.mMeshes, count: numMeshes).compactMap { AiMesh($0?.pointee) } 120 | 121 | /// The number of materials in the scene. 122 | public var numMaterials: Int 123 | 124 | /// The array of materials. 125 | /// Use the index given in each aiMesh structure to access this array. 126 | /// The array is mNumMaterials in size. 127 | /// 128 | /// If the AI_SCENE_FLAGS_INCOMPLETE flag is not set there will always be at least ONE material. 129 | /// 130 | /// 131 | public lazy var materials: [AiMaterial] = UnsafeBufferPointer(start: scene.mMaterials, count: numMaterials).compactMap { AiMaterial($0?.pointee) } 132 | 133 | /// The number of animations in the scene. 134 | public var numAnimations: Int 135 | 136 | /// The array of animations. 137 | /// All animations imported from the given file are listed here. 138 | /// The array is mNumAnimations in size. 139 | // public var animations: [aiAnimation] { 140 | // guard numAnimations > 0 else { 141 | // return [] 142 | // } 143 | 144 | // let animations = (0.. [AiMesh] { 185 | node.meshes.map { meshes[$0] } 186 | } 187 | } 188 | -------------------------------------------------------------------------------- /Sources/Assimp/AiShadingMode.swift: -------------------------------------------------------------------------------- 1 | // 2 | // AiShadingMode.swift 3 | // SwiftAssimp 4 | // 5 | // Copyright © 2019-2023 Christian Treffs. All rights reserved. 6 | // Licensed under BSD 3-Clause License. See LICENSE file for details. 7 | 8 | @_implementationOnly import CAssimp 9 | 10 | public struct AiShadingMode: RawRepresentable { 11 | public let rawValue: UInt32 12 | 13 | public init(rawValue: UInt32) { 14 | self.rawValue = rawValue 15 | } 16 | 17 | init(_ shadingMode: aiShadingMode) { 18 | rawValue = shadingMode.rawValue 19 | } 20 | 21 | /** Flat shading. Shading is done on per-face base, 22 | * diffuse only. Also known as 'faceted shading'. 23 | */ 24 | public static let flat = AiShadingMode(aiShadingMode_Flat) 25 | 26 | /** Simple Gouraud shading. 27 | */ 28 | public static let gouraud = AiShadingMode(aiShadingMode_Gouraud) 29 | 30 | /** Phong-Shading - 31 | */ 32 | public static let phong = AiShadingMode(aiShadingMode_Phong) 33 | 34 | /** Phong-Blinn-Shading 35 | */ 36 | public static let blinn = AiShadingMode(aiShadingMode_Blinn) 37 | 38 | /** Toon-Shading per pixel 39 | * 40 | * Also known as 'comic' shader. 
41 | */ 42 | public static let toon = AiShadingMode(aiShadingMode_Toon) 43 | 44 | /** OrenNayar-Shading per pixel 45 | * 46 | * Extension to standard Lambertian shading, taking the 47 | * roughness of the material into account 48 | */ 49 | public static let orenNayar = AiShadingMode(aiShadingMode_OrenNayar) 50 | 51 | /** Minnaert-Shading per pixel 52 | * 53 | * Extension to standard Lambertian shading, taking the 54 | * "darkness" of the material into account 55 | */ 56 | public static let minnaert = AiShadingMode(aiShadingMode_Minnaert) 57 | 58 | /** CookTorrance-Shading per pixel 59 | * 60 | * Special shader for metallic surfaces. 61 | */ 62 | public static let cookTorrance = AiShadingMode(aiShadingMode_CookTorrance) 63 | 64 | /** No shading at all. Constant light influence of 1.0. 65 | */ 66 | public static let noShading = AiShadingMode(aiShadingMode_NoShading) 67 | 68 | /** Fresnel shading 69 | */ 70 | public static let fresnel = AiShadingMode(aiShadingMode_Fresnel) 71 | } 72 | 73 | extension AiShadingMode: Equatable {} 74 | extension AiShadingMode: CustomDebugStringConvertible { 75 | public var debugDescription: String { 76 | switch self { 77 | case .flat: 78 | return "flat" 79 | case .blinn: 80 | return "blinn" 81 | case .cookTorrance: 82 | return "cookTorrance" 83 | case .fresnel: 84 | return "fresnel" 85 | case .gouraud: 86 | return "gouraud" 87 | case .minnaert: 88 | return "minnaert" 89 | case .noShading: 90 | return "noShading" 91 | case .orenNayar: 92 | return "orenNayar" 93 | case .phong: 94 | return "phong" 95 | case .toon: 96 | return "toon" 97 | default: 98 | return "AiShadingMode(\(rawValue))" 99 | } 100 | } 101 | } 102 | -------------------------------------------------------------------------------- /Sources/Assimp/AiTexture.swift: -------------------------------------------------------------------------------- 1 | // 2 | // AiTexture.swift 3 | // SwiftAssimp 4 | // 5 | // Copyright © 2019-2023 Christian Treffs. All rights reserved. 6 | // Licensed under BSD 3-Clause License. See LICENSE file for details. 7 | 8 | @_implementationOnly import CAssimp 9 | 10 | /// Helper structure to describe an embedded texture 11 | /// 12 | /// Normally textures are contained in external files but some file formats embed them directly in the model file. 13 | /// There are two types of embedded textures: 14 | /// 1. Uncompressed textures. 15 | /// The color data is given in an uncompressed format. 16 | /// 2. Compressed textures stored in a file format like png or jpg. 17 | /// The raw file bytes are given so the application must utilize an image decoder (e.g. DevIL) to get access to the actual color data. 18 | /// 19 | /// Embedded textures are referenced from materials using strings like "*0", "*1", etc. as the texture paths (a single asterisk character followed by the zero-based index of the texture in the aiScene::mTextures array). 
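///
/// A minimal usage sketch (assumes a loaded `AiScene` whose `textures` array is populated,
/// as exercised by the tests in this package):
///
///     var texture = scene.textures[0]
///     if texture.isCompressed {
///         // `width` is the byte size of the compressed blob; `achFormatHint` names the format, e.g. "jpg".
///         let fileBytes = texture.textureData // decode with an image library of your choice
///     } else {
///         // Uncompressed: `width * height` texels, 4 bytes each.
///         let texels = texture.textureData
///     }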
20 | public struct AiTexture { 21 | let texture: aiTexture 22 | 23 | init(_ texture: aiTexture) { 24 | self.texture = texture 25 | filename = String(texture.mFilename) 26 | achFormatHint = CArray<CChar>.read(texture.achFormatHint) { body in 27 | guard let baseAddress = body.baseAddress else { 28 | return "" 29 | } 30 | return String(cString: baseAddress) 31 | } 32 | let width = Int(texture.mWidth) 33 | self.width = width 34 | let height = Int(texture.mHeight) 35 | self.height = height 36 | numPixels = { 37 | if height == 0 { 38 | let sizeInBytes = width 39 | return sizeInBytes / MemoryLayout<aiTexel>.stride 40 | } else { 41 | return width * height 42 | } 43 | }() 44 | } 45 | 46 | init?(_ aiTexture: aiTexture?) { 47 | guard let aiTexture = aiTexture else { 48 | return nil 49 | } 50 | 51 | self.init(aiTexture) 52 | } 53 | 54 | /// The texture's original filename. 55 | /// 56 | /// Used to get the texture reference. 57 | public var filename: String? 58 | 59 | /// A hint from the loader to make it easier for applications 60 | /// to determine the type of embedded textures. 61 | /// 62 | /// If mHeight != 0 this member shows how the data is packed. The hint consists of 63 | /// two parts: channel order and channel bitness (count of the bits for every 64 | /// color channel). For simple parsing by the viewer it's better to not omit 65 | /// absent color channels and just use 0 for their bitness. For example: 66 | /// 1. The image contains RGBA with 8 bits per channel: achFormatHint == "rgba8888"; 67 | /// 2. The image contains ARGB with 8 bits per channel: achFormatHint == "argb8888"; 68 | /// 3. The image contains RGB with 5 bits for the R and B channels and 6 bits for the G channel: achFormatHint == "rgba5650"; 69 | /// 4. A one-color image with a B channel and 1 bit for it: achFormatHint == "rgba0010"; 70 | /// If mHeight == 0 then achFormatHint is set to '\\0\\0\\0\\0' if the loader has no additional 71 | /// information about the texture file format used OR the 72 | /// file extension of the format without a trailing dot. If there 73 | /// are multiple file extensions for a format, the shortest 74 | /// extension is chosen (JPEG maps to 'jpg', not to 'jpeg'). 75 | /// E.g. 'dds\\0', 'pcx\\0', 'jpg\\0'. All characters are lower-case. 76 | /// The fourth character will always be '\\0'. 77 | public var achFormatHint: String 78 | 79 | /// Width of the texture, in pixels 80 | /// 81 | /// If mHeight is zero the texture is compressed in a format like JPEG. 82 | /// In this case mWidth specifies the size of the memory area pcData is pointing to, in bytes. 83 | public var width: Int 84 | 85 | /// Height of the texture, in pixels 86 | /// 87 | /// If this value is zero, pcData points to a compressed texture in any format (e.g. JPEG). 88 | public var height: Int 89 | 90 | @inline(__always) 91 | public var isCompressed: Bool { height == 0 } 92 | 93 | /// Number of pixels in the texture. 94 | public var numPixels: Int 95 | 96 | /// Data of the texture. 97 | /// 98 | /// Points to an array of mWidth * mHeight aiTexel's. 99 | /// The format of the texture data is always ARGB8888 to make the implementation for users of the library as easy as possible. 100 | /// If mHeight == 0 this is a pointer to a memory buffer of size mWidth containing the compressed texture data. 101 | /// Texel layout is BGRA.
102 | public lazy var textureData: [UInt8] = withUnsafeTextureData([UInt8].init) 103 | 104 | public mutating func withUnsafeTextureData<R>(_ body: (UnsafeBufferPointer<UInt8>) throws -> R) rethrows -> R { 105 | let count = numPixels * 4 // 4 bytes per aiTexel (BGRA) 106 | return try texture.pcData.withMemoryRebound(to: UInt8.self, capacity: count) { pBytes in 107 | try body(UnsafeBufferPointer(start: pBytes, count: count)) 108 | } 109 | } 110 | } 111 | -------------------------------------------------------------------------------- /Sources/Assimp/AiTextureType.swift: -------------------------------------------------------------------------------- 1 | // 2 | // AiTextureType.swift 3 | // SwiftAssimp 4 | // 5 | // Copyright © 2019-2023 Christian Treffs. All rights reserved. 6 | // Licensed under BSD 3-Clause License. See LICENSE file for details. 7 | 8 | @_implementationOnly import CAssimp 9 | 10 | public struct AiTextureType: RawRepresentable { 11 | public let rawValue: UInt32 12 | 13 | public init(rawValue: UInt32) { 14 | self.rawValue = rawValue 15 | } 16 | 17 | init(_ textureType: aiTextureType) { 18 | self.init(rawValue: textureType.rawValue) 19 | } 20 | 21 | var type: aiTextureType { aiTextureType(rawValue: rawValue) } 22 | 23 | /// Dummy value. 24 | /// 25 | /// No texture, but the value to be used as 'texture semantic' (#aiMaterialProperty::mSemantic) 26 | /// for all material properties *not* related to textures. 27 | public static let none = AiTextureType(aiTextureType_NONE) 28 | 29 | /// The texture is combined with the result of the diffuse lighting equation. 30 | public static let diffuse = AiTextureType(aiTextureType_DIFFUSE) 31 | 32 | /// The texture is combined with the result of the specular lighting equation. 33 | public static let specular = AiTextureType(aiTextureType_SPECULAR) 34 | 35 | /// The texture is combined with the result of the ambient lighting equation. 36 | public static let ambient = AiTextureType(aiTextureType_AMBIENT) 37 | 38 | /// The texture is added to the result of the lighting calculation. 39 | /// It isn't influenced by incoming light. 40 | public static let emissive = AiTextureType(aiTextureType_EMISSIVE) 41 | 42 | /// The texture is a height map. 43 | /// 44 | /// By convention, higher gray-scale values stand for higher elevations from the base height. 45 | public static let height = AiTextureType(aiTextureType_HEIGHT) 46 | 47 | /// The texture is a (tangent space) normal-map. 48 | /// 49 | /// Again, there are several conventions for tangent-space normal maps. 50 | /// Assimp does (intentionally) not distinguish here. 51 | public static let normals = AiTextureType(aiTextureType_NORMALS) 52 | 53 | /// The texture defines the glossiness of the material. 54 | /// 55 | /// The glossiness is in fact the exponent of the specular (phong) lighting equation. 56 | /// Usually there is a conversion function defined to map the linear color values in the texture to a suitable exponent. 57 | /// Have fun. 58 | public static let shininess = AiTextureType(aiTextureType_SHININESS) 59 | 60 | /// The texture defines per-pixel opacity. 61 | /// 62 | /// Usually 'white' means opaque and 'black' means 'transparency'. 63 | /// Or quite the opposite. 64 | /// Have fun. 65 | public static let opacity = AiTextureType(aiTextureType_OPACITY) 66 | 67 | /// Displacement texture 68 | /// 69 | /// The exact purpose and format is application-dependent. 70 | /// Higher color values stand for higher vertex displacements.
71 | public static let displacement = AiTextureType(aiTextureType_DISPLACEMENT) 72 | 73 | /// Lightmap texture (aka Ambient Occlusion) 74 | /// 75 | /// Both 'Lightmaps' and dedicated 'ambient occlusion maps' are covered by this material property. 76 | /// The texture contains a scaling value for the final color value of a pixel. 77 | /// Its intensity is not affected by incoming light. 78 | public static let lightmap = AiTextureType(aiTextureType_LIGHTMAP) 79 | 80 | /// Reflection texture 81 | /// 82 | /// Contains the color of a perfect mirror reflection. 83 | /// Rarely used, almost never for real-time applications. 84 | public static let reflection = AiTextureType(aiTextureType_REFLECTION) 85 | 86 | /// PBR Materials 87 | /// 88 | /// PBR definitions from maya and other modelling packages now use this standard. 89 | /// This was originally introduced around 2012. 90 | /// Support for this is in game engines like Godot, Unreal or Unity3D. 91 | /// Modelling packages which use this are very common now. 92 | 93 | public static let baseColor = AiTextureType(aiTextureType_BASE_COLOR) 94 | 95 | public static let normalCamera = AiTextureType(aiTextureType_NORMAL_CAMERA) 96 | 97 | public static let emissionColor = AiTextureType(aiTextureType_EMISSION_COLOR) 98 | 99 | public static let metalness = AiTextureType(aiTextureType_METALNESS) 100 | 101 | public static let diffuseRoughness = AiTextureType(aiTextureType_DIFFUSE_ROUGHNESS) 102 | 103 | public static let ambientOcclusion = AiTextureType(aiTextureType_AMBIENT_OCCLUSION) 104 | 105 | /// Unknown texture 106 | /// 107 | /// A texture reference that does not match any of the definitions above is considered to be 'unknown'. 108 | /// It is still imported, but is excluded from any further postprocessing. 109 | public static let unknown = AiTextureType(aiTextureType_UNKNOWN) 110 | } 111 | 112 | extension AiTextureType: Equatable {} 113 | -------------------------------------------------------------------------------- /Sources/Assimp/Assimp.swift: -------------------------------------------------------------------------------- 1 | // 2 | // Assimp.swift 3 | // SwiftAssimp 4 | // 5 | // Copyright © 2019-2023 Christian Treffs. All rights reserved. 6 | // Licensed under BSD 3-Clause License. See LICENSE file for details. 7 | 8 | @_implementationOnly import CAssimp 9 | 10 | public enum Assimp { 11 | public static func canImportFileExtension(_ fileExtension: String) -> Bool { aiGetImporterDesc(fileExtension.lowercased()) != nil } 12 | 13 | public static func getImporterDescriptor(for fileExtension: String) -> AiImporterDesc? 
{ AiImporterDesc(aiGetImporterDesc(fileExtension.lowercased())?.pointee) } 14 | 15 | public static func importFormats() -> [AiImporterDesc] { 16 | let count = aiGetImportFormatCount() 17 | 18 | guard count > 0 else { 19 | return [] 20 | } 21 | 22 | return (0 ..< count) 23 | .compactMap { AiImporterDesc(aiGetImportFormatDescription($0)?.pointee) } 24 | } 25 | 26 | public static func importFileExtensions() -> [String] { 27 | importFormats().flatMap(\.fileExtensions).sorted() 28 | } 29 | 30 | public static func exportFormats() -> [AiExporterDesc] { 31 | let count = aiGetExportFormatCount() 32 | 33 | guard count > 0 else { 34 | return [] 35 | } 36 | 37 | return (0 ..< count) 38 | .compactMap { AiExporterDesc(aiGetExportFormatDescription($0)?.pointee) } 39 | } 40 | 41 | public static func exportFileExtensions() -> [String] { 42 | exportFormats().map(\.fileExtension).sorted() 43 | } 44 | } 45 | 46 | public struct AiImporterDesc: Equatable { 47 | public let name: String 48 | public let author: String 49 | public let maintainer: String 50 | public let comments: String 51 | public let flags: AiImporterFlags 52 | public let major: Range<Int> 53 | public let minor: Range<Int> 54 | public let fileExtensions: [String] 55 | 56 | init(_ desc: aiImporterDesc) { 57 | name = String(cString: desc.mName) 58 | author = String(cString: desc.mAuthor) 59 | maintainer = String(cString: desc.mMaintainer) 60 | comments = String(cString: desc.mComments) 61 | flags = AiImporterFlags(rawValue: desc.mFlags) 62 | major = Range(uncheckedBounds: (lower: Int(desc.mMinMajor), upper: Int(desc.mMaxMajor))) 63 | minor = Range(uncheckedBounds: (lower: Int(desc.mMinMinor), upper: Int(desc.mMaxMinor))) 64 | fileExtensions = String(cString: desc.mFileExtensions).split(separator: " ").map(String.init) 65 | } 66 | 67 | init?(_ desc: aiImporterDesc?) { 68 | guard let desc = desc else { 69 | return nil 70 | } 71 | self.init(desc) 72 | } 73 | } 74 | 75 | public struct AiImporterFlags: Equatable, RawRepresentable { 76 | public let rawValue: UInt32 77 | 78 | public init(rawValue: UInt32) { 79 | self.rawValue = rawValue 80 | } 81 | 82 | public static let supportTextFlavour = AiImporterFlags(rawValue: aiImporterFlags_SupportTextFlavour.rawValue) 83 | public static let supportBinaryFlavour = AiImporterFlags(rawValue: aiImporterFlags_SupportBinaryFlavour.rawValue) 84 | public static let supportCompressedFlavour = AiImporterFlags(rawValue: aiImporterFlags_SupportCompressedFlavour.rawValue) 85 | public static let limitedSupport = AiImporterFlags(rawValue: aiImporterFlags_LimitedSupport.rawValue) 86 | public static let experimental = AiImporterFlags(rawValue: aiImporterFlags_Experimental.rawValue) 87 | } 88 | 89 | public struct AiExporterDesc: Equatable { 90 | public let id: String 91 | public let description: String 92 | public let fileExtension: String 93 | 94 | init(_ desc: aiExportFormatDesc) { 95 | id = String(cString: desc.id) 96 | description = String(cString: desc.description) 97 | fileExtension = String(cString: desc.fileExtension) 98 | } 99 | 100 | init?(_ desc: aiExportFormatDesc?) { 101 | guard let desc = desc else { 102 | return nil 103 | } 104 | self.init(desc) 105 | } 106 | } 107 | -------------------------------------------------------------------------------- /Sources/Assimp/CArray.swift: -------------------------------------------------------------------------------- 1 | // 2 | // CArray.swift 3 | // SwiftAssimp 4 | // 5 | // Copyright © 2019-2023 Christian Treffs. All rights reserved. 6 | // Licensed under BSD 3-Clause License.
See LICENSE file for details. 7 | 8 | enum CArray<T> { 9 | @discardableResult 10 | @_transparent 11 | static func write<C, O>(_ cArray: inout C, _ body: (UnsafeMutableBufferPointer<T>) throws -> O) rethrows -> O { 12 | try withUnsafeMutablePointer(to: &cArray) { 13 | try body(UnsafeMutableBufferPointer(start: UnsafeMutableRawPointer($0).assumingMemoryBound(to: T.self), 14 | count: MemoryLayout<C>.stride / MemoryLayout<T>.stride)) 15 | } 16 | } 17 | 18 | @discardableResult 19 | @_transparent 20 | static func read<C, O>(_ cArray: C, _ body: (UnsafeBufferPointer<T>) throws -> O) rethrows -> O { 21 | try withUnsafePointer(to: cArray) { 22 | try body(UnsafeBufferPointer(start: UnsafeRawPointer($0).assumingMemoryBound(to: T.self), 23 | count: MemoryLayout<C>.stride / MemoryLayout<T>.stride)) 24 | } 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /Sources/Assimp/String+aiString.swift: -------------------------------------------------------------------------------- 1 | // 2 | // String+aiString.swift 3 | // SwiftAssimp 4 | // 5 | // Copyright © 2019-2023 Christian Treffs. All rights reserved. 6 | // Licensed under BSD 3-Clause License. See LICENSE file for details. 7 | 8 | @_implementationOnly import CAssimp 9 | 10 | extension String { 11 | init?(_ aiString: aiString) { 12 | let cStringBuffer: UnsafePointer<CChar>? = withUnsafeBytes(of: aiString.data) { bytesPtr -> UnsafePointer<CChar>? in 13 | if aiString.length <= 0 { 14 | return nil 15 | } 16 | guard let boundMemory: UnsafePointer<CChar> = bytesPtr.baseAddress?.bindMemory(to: CChar.self, 17 | capacity: Int(aiString.length)) 18 | else { 19 | return nil 20 | } 21 | 22 | let stringBuffer = UnsafeBufferPointer(start: boundMemory, 23 | count: Int(aiString.length)) 24 | 25 | return stringBuffer.baseAddress 26 | } 27 | 28 | guard let cStringBufferStart = cStringBuffer else { 29 | return nil 30 | } 31 | 32 | self.init(cString: cStringBufferStart) 33 | } 34 | 35 | init?(bytes: UnsafeMutablePointer<CChar>, length: Int) { 36 | let bufferPtr = UnsafeMutableBufferPointer(start: bytes, 37 | count: length) 38 | 39 | let codeUnits: [UTF8.CodeUnit] = bufferPtr 40 | // .map { $0 > 0 ? $0 : Int8(0x20) } // this replaces all invalid characters with blank space 41 | .map { UTF8.CodeUnit($0) } 42 | 43 | self.init(decoding: codeUnits, as: UTF8.self) 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /Sources/Assimp/simd+aiVector.swift: -------------------------------------------------------------------------------- 1 | // 2 | // simd+aiVector.swift 3 | // SwiftAssimp 4 | // 5 | // Copyright © 2019-2023 Christian Treffs. All rights reserved. 6 | // Licensed under BSD 3-Clause License. See LICENSE file for details.
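//
// A minimal sketch of the conversions defined below (internal use only, since CAssimp is
// imported with @_implementationOnly; the values are arbitrary):
//
//     let v: SIMD3<Float> = aiVector3D(x: 1, y: 2, z: 3).vector // via the computed property
//     let w = SIMD3<Float>(aiVector3D(x: 4, y: 5, z: 6))        // via the constrained initializer
//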
7 | 8 | @_implementationOnly import CAssimp 9 | 10 | extension aiVector3D { 11 | @_transparent var vector: SIMD3<Float> { 12 | SIMD3(x, y, z) 13 | } 14 | } 15 | 16 | extension aiVector2D { 17 | @_transparent var vector: SIMD2<Float> { 18 | SIMD2(x, y) 19 | } 20 | } 21 | 22 | extension SIMD3 where Scalar == Float { 23 | @_transparent init(_ aiVector3D: aiVector3D) { 24 | self.init(aiVector3D.x, aiVector3D.y, aiVector3D.z) 25 | } 26 | 27 | @_transparent init(_ aiColor3D: aiColor3D) { 28 | self.init(aiColor3D.r, aiColor3D.g, aiColor3D.b) 29 | } 30 | } 31 | 32 | extension SIMD2 where Scalar == Float { 33 | @_transparent init(_ aiVector2D: aiVector2D) { 34 | self.init(aiVector2D.x, aiVector2D.y) 35 | } 36 | } 37 | 38 | public typealias Vec2 = SIMD2<Float> 39 | public typealias Vec3 = SIMD3<Float> 40 | 41 | public typealias AiReal = Float 42 | 43 | public struct AiMatrix4x4 { 44 | public let a1: AiReal 45 | public let a2: AiReal 46 | public let a3: AiReal 47 | public let a4: AiReal 48 | public let b1: AiReal 49 | public let b2: AiReal 50 | public let b3: AiReal 51 | public let b4: AiReal 52 | public let c1: AiReal 53 | public let c2: AiReal 54 | public let c3: AiReal 55 | public let c4: AiReal 56 | public let d1: AiReal 57 | public let d2: AiReal 58 | public let d3: AiReal 59 | public let d4: AiReal 60 | 61 | init(_ m: aiMatrix4x4) { 62 | a1 = m.a1 63 | a2 = m.a2 64 | a3 = m.a3 65 | a4 = m.a4 66 | b1 = m.b1 67 | b2 = m.b2 68 | b3 = m.b3 69 | b4 = m.b4 70 | c1 = m.c1 71 | c2 = m.c2 72 | c3 = m.c3 73 | c4 = m.c4 74 | d1 = m.d1 75 | d2 = m.d2 76 | d3 = m.d3 77 | d4 = m.d4 78 | } 79 | } 80 | -------------------------------------------------------------------------------- /Sources/CAssimp/module.modulemap: -------------------------------------------------------------------------------- 1 | module CAssimp [system] { 2 | umbrella header "shims.h" 3 | link "assimp" 4 | export * 5 | } 6 | -------------------------------------------------------------------------------- /Sources/CAssimp/shims.h: -------------------------------------------------------------------------------- 1 | #import 2 | #import 3 | #import 4 | #import 5 | #import 6 | #import 7 | #import 8 | #import 9 | #import 10 | #import 11 | #import 12 | #import 13 | #import 14 | #import 15 | #import 16 | #import 17 | #import 18 | #import 19 | #import 20 | #import 21 | #import 22 | #import 23 | #import 24 | #import 25 | #import 26 | #import 27 | -------------------------------------------------------------------------------- /Tests/AssimpTests/AssimpTests.swift: -------------------------------------------------------------------------------- 1 | import XCTest 2 | import Assimp 3 | 4 | final class AssimpTests: XCTestCase { 5 | 6 | func testFailingInitializer() { 7 | XCTAssertThrowsError(try AiScene(file: "")) 8 | } 9 | 10 | func testImportFormats() { 11 | XCTAssertTrue(Assimp.canImportFileExtension("obj")) 12 | XCTAssertTrue(Assimp.canImportFileExtension("dae")) 13 | XCTAssertTrue(Assimp.canImportFileExtension("gltf")) 14 | XCTAssertFalse(Assimp.canImportFileExtension("txt")) 15 | XCTAssertFalse(Assimp.canImportFileExtension("psd")) 16 | 17 | XCTAssertGreaterThanOrEqual(Assimp.importFileExtensions().count, 70) 18 | 19 | print(Assimp.importFileExtensions()) 20 | } 21 | 22 | func testExportFormats() { 23 | XCTAssertGreaterThanOrEqual(Assimp.exportFileExtensions().count, 20) 24 | } 25 | 26 | func testLoadAiSceneDAE() throws { 27 | 28 | let fileURL = try Resource.load(.duck_dae) 29 | 30 | var scene: AiScene!
31 | XCTAssertNoThrow(scene = try AiScene(file: fileURL.path, flags: [.removeRedundantMaterials, 32 | .genSmoothNormals, 33 | .calcTangentSpace])) 34 | 35 | XCTAssertEqual(scene.flags, []) 36 | XCTAssertEqual(scene.numMeshes, 1) 37 | XCTAssertEqual(scene.numMaterials, 1) 38 | XCTAssertEqual(scene.numAnimations, 0) 39 | XCTAssertEqual(scene.numCameras, 1) 40 | XCTAssertEqual(scene.numLights, 1) 41 | XCTAssertEqual(scene.numTextures, 0) 42 | 43 | // Scene Graph 44 | 45 | XCTAssertEqual(scene.rootNode.numMeshes, 0) 46 | XCTAssertEqual(scene.rootNode.meshes.count, 0) 47 | XCTAssertEqual(scene.rootNode.numChildren, 3) 48 | XCTAssertEqual(scene.rootNode.children.count, 3) 49 | XCTAssertEqual(scene.rootNode.name, "VisualSceneNode") 50 | XCTAssertEqual(scene.rootNode.children[0].name, "LOD3sp") 51 | XCTAssertEqual(scene.rootNode.children[0].meshes, [0]) 52 | XCTAssertEqual(scene.rootNode.children[0].numMeshes, 1) 53 | XCTAssertEqual(scene.rootNode.children[0].numChildren, 0) 54 | XCTAssertEqual(scene.rootNode.children[1].name, "camera1") 55 | XCTAssertEqual(scene.rootNode.children[1].meshes, []) 56 | XCTAssertEqual(scene.rootNode.children[1].numMeshes, 0) 57 | XCTAssertEqual(scene.rootNode.children[1].numChildren, 0) 58 | XCTAssertEqual(scene.rootNode.children[2].name, "directionalLight1") 59 | XCTAssertEqual(scene.rootNode.children[2].meshes, []) 60 | XCTAssertEqual(scene.rootNode.children[2].numMeshes, 0) 61 | XCTAssertEqual(scene.rootNode.children[2].numChildren, 0) 62 | 63 | // Mesh 64 | 65 | XCTAssertEqual(scene.meshes[0].name, "LOD3spShape-lib") 66 | XCTAssertEqual(scene.meshes[0].primitiveTypes, [.triangle, .polygon]) 67 | XCTAssertEqual(scene.meshes[0].numVertices, 8500) 68 | XCTAssertEqual(scene.meshes[0].vertices[0...2], [-23.9364, 11.5353, 30.6125]) 69 | XCTAssertEqual(scene.meshes[0].vertices.count, 25500) 70 | XCTAssertEqual(scene.meshes[0].numFaces, 2144) 71 | XCTAssertEqual(scene.meshes[0].numBones, 0) 72 | XCTAssertEqual(scene.meshes[0].numAnimMeshes, 0) 73 | XCTAssertEqual(scene.meshes[0].tangents.count, 25500) 74 | XCTAssertEqual(scene.meshes[0].bitangents.count, 25500) 75 | 76 | // Faces 77 | 78 | XCTAssertEqual(scene.meshes[0].numFaces, 2144) 79 | XCTAssertEqual(scene.meshes[0].faces.count, 2144) 80 | XCTAssertEqual(scene.meshes[0].faces[0].numIndices, 4) 81 | XCTAssertEqual(scene.meshes[0].faces[0].indices, [0, 1, 2, 3]) 82 | 83 | // Materials 84 | 85 | XCTAssertEqual(scene.materials[0].numProperties, 19) 86 | XCTAssertEqual(scene.materials[0].numAllocated, 20) 87 | XCTAssertEqual(scene.materials[0].properties[0].key, "?mat.name") 88 | 89 | // Textures 90 | 91 | XCTAssertEqual(scene.textures.count, 0) 92 | XCTAssertEqual(scene.meshes[0].numUVComponents.0, 2) 93 | XCTAssertEqual(scene.meshes[0].texCoords.0?.count, 25500) 94 | XCTAssertEqual(scene.meshes[0].texCoords.0?[0...2], [0.866606, 0.398924, 0.0]) 95 | XCTAssertEqual(scene.meshes[0].texCoords.0?[3...5], [0.871384, 0.397619, 0.0]) 96 | XCTAssertEqual(scene.meshes[0].texCoordsPacked.0?[0...1], [0.866606, 0.398924]) 97 | XCTAssertEqual(scene.meshes[0].texCoordsPacked.0?[2...3], [0.871384, 0.397619]) 98 | 99 | // Lights 100 | 101 | XCTAssertEqual(scene.lights[0].name, "directionalLight1") 102 | 103 | // Cameras 104 | 105 | XCTAssertEqual(scene.cameras.count, 1) 106 | 107 | // print(scene.materials.map { $0.debugDescription }) 108 | 109 | XCTAssertEqual(scene.materials[0].getMaterialColor(.COLOR_DIFFUSE), SIMD4(1.0, 1.0, 1.0, 1.0)) 110 | XCTAssertEqual(scene.materials[0].getMaterialString(.TEXTURE(.diffuse, 0)), 
"./duckCM.tga") 111 | } 112 | 113 | func testLoadAiSceneObj() throws { 114 | 115 | let fileURL = try Resource.load(.box_obj) 116 | 117 | let scene: AiScene = try AiScene(file: fileURL.path) 118 | 119 | XCTAssertEqual(scene.flags, []) 120 | XCTAssertEqual(scene.numMeshes, 1) 121 | XCTAssertEqual(scene.numMaterials, 2) 122 | XCTAssertEqual(scene.numAnimations, 0) 123 | XCTAssertEqual(scene.numCameras, 0) 124 | XCTAssertEqual(scene.numLights, 0) 125 | XCTAssertEqual(scene.numTextures, 0) 126 | 127 | // Scene Graph 128 | 129 | XCTAssertEqual(scene.rootNode.numMeshes, 0) 130 | XCTAssertEqual(scene.rootNode.meshes.count, 0) 131 | XCTAssertEqual(scene.rootNode.numChildren, 1) 132 | XCTAssertEqual(scene.rootNode.children.count, 1) 133 | XCTAssertEqual(scene.rootNode.name, "models_OBJ_box.obj.box.obj") 134 | XCTAssertEqual(scene.rootNode.children[0].name, "1") 135 | XCTAssertEqual(scene.rootNode.children[0].meshes, [0]) 136 | XCTAssertEqual(scene.rootNode.children[0].numMeshes, 1) 137 | XCTAssertEqual(scene.rootNode.children[0].numChildren, 0) 138 | 139 | // Mesh 140 | 141 | XCTAssertEqual(scene.meshes[0].name, "1") 142 | XCTAssertEqual(scene.meshes[0].primitiveTypes, [.polygon]) 143 | XCTAssertEqual(scene.meshes[0].numVertices, 8 * 3) 144 | XCTAssertEqual(scene.meshes[0].vertices[0...2], [-0.5, 0.5, 0.5]) 145 | XCTAssertEqual(scene.meshes[0].numFaces, 6) 146 | XCTAssertEqual(scene.meshes[0].numBones, 0) 147 | XCTAssertEqual(scene.meshes[0].numAnimMeshes, 0) 148 | 149 | // Faces 150 | 151 | XCTAssertEqual(scene.meshes[0].numFaces, 6) 152 | XCTAssertEqual(scene.meshes[0].faces.count, 6) 153 | XCTAssertEqual(scene.meshes[0].faces[0].numIndices, 4) 154 | XCTAssertEqual(scene.meshes[0].faces[0].indices, [0, 1, 2, 3]) 155 | 156 | // Materials 157 | 158 | XCTAssertGreaterThanOrEqual(scene.materials[0].numProperties, 12) 159 | XCTAssertEqual(scene.materials[0].numAllocated, 20) 160 | XCTAssertEqual(scene.materials[0].properties[0].key, "?mat.name") 161 | 162 | // Textures 163 | 164 | XCTAssertEqual(scene.textures.count, 0) 165 | XCTAssertEqual(scene.meshes[0].numUVComponents.0, 0) 166 | XCTAssertEqual(scene.meshes[0].texCoords.0?.count, nil) 167 | 168 | // Lights 169 | 170 | XCTAssertEqual(scene.lights.count, 0) 171 | 172 | // Cameras 173 | 174 | XCTAssertEqual(scene.cameras.count, 0) 175 | } 176 | 177 | func testLoadAiScene3DS() throws { 178 | let fileURL = try Resource.load(.cubeDiffuseTextured_3ds) 179 | 180 | let scene: AiScene = try AiScene(file: fileURL.path) 181 | 182 | XCTAssertEqual(scene.flags, []) 183 | XCTAssertEqual(scene.numMeshes, 1) 184 | XCTAssertEqual(scene.numMaterials, 1) 185 | XCTAssertEqual(scene.numAnimations, 0) 186 | XCTAssertEqual(scene.numCameras, 0) 187 | XCTAssertEqual(scene.numLights, 0) 188 | XCTAssertEqual(scene.numTextures, 0) 189 | 190 | // Scene Graph 191 | 192 | XCTAssertEqual(scene.rootNode.numMeshes, 0) 193 | XCTAssertEqual(scene.rootNode.meshes.count, 0) 194 | XCTAssertEqual(scene.rootNode.numChildren, 1) 195 | XCTAssertEqual(scene.rootNode.children.count, 1) 196 | XCTAssertEqual(scene.rootNode.name, "<3DSRoot>") 197 | XCTAssertEqual(scene.rootNode.children[0].name, "Quader01") 198 | XCTAssertEqual(scene.rootNode.children[0].meshes, [0]) 199 | XCTAssertEqual(scene.rootNode.children[0].numMeshes, 1) 200 | XCTAssertEqual(scene.rootNode.children[0].numChildren, 0) 201 | 202 | // Mesh 203 | 204 | XCTAssertEqual(scene.meshes[0].name, "0") 205 | XCTAssertEqual(scene.meshes[0].primitiveTypes, [.triangle]) 206 | XCTAssertEqual(scene.meshes[0].numVertices, 36) 207 | 
XCTAssertEqual(scene.meshes[0].vertices[0...2], [-25.0, -25.0, 0.0]) 208 | XCTAssertEqual(scene.meshes[0].numFaces, 12) 209 | XCTAssertEqual(scene.meshes[0].numBones, 0) 210 | XCTAssertEqual(scene.meshes[0].numAnimMeshes, 0) 211 | XCTAssertEqual(scene.meshes[0].vertices[0], -25.0) 212 | XCTAssertEqual(scene.meshes[0].vertices[1], -25.0) 213 | XCTAssertEqual(scene.meshes[0].vertices[2], 0.0) 214 | 215 | XCTAssertEqual(scene.meshes[0].vertices[105], -25.0) 216 | XCTAssertEqual(scene.meshes[0].vertices[106], 25.0) 217 | XCTAssertEqual(scene.meshes[0].vertices[107], 0.0) 218 | 219 | // Faces 220 | 221 | XCTAssertEqual(scene.meshes[0].numFaces, 12) 222 | XCTAssertEqual(scene.meshes[0].faces.count, 12) 223 | XCTAssertEqual(scene.meshes[0].faces[0].numIndices, 3) 224 | XCTAssertEqual(scene.meshes[0].faces[0].indices, [0, 1, 2]) 225 | 226 | // Materials 227 | 228 | XCTAssertEqual(scene.materials[0].numProperties, 13) 229 | XCTAssertEqual(scene.materials[0].numAllocated, 20) 230 | XCTAssertEqual(scene.materials[0].properties[0].key, "?mat.name") 231 | 232 | // Textures 233 | 234 | XCTAssertEqual(scene.textures.count, 0) 235 | XCTAssertEqual(scene.meshes[0].numUVComponents.0, 2) 236 | XCTAssertEqual(scene.meshes[0].texCoords.0?.count, 108) 237 | XCTAssertEqual(scene.meshes[0].texCoords.0?[0...2], [0.6936096, 0.30822724, 0.0]) 238 | 239 | // Lights 240 | 241 | XCTAssertEqual(scene.lights.count, 0) 242 | 243 | // Cameras 244 | 245 | XCTAssertEqual(scene.cameras.count, 0) 246 | 247 | XCTAssertEqual(scene.materials[0].getMaterialColor(.COLOR_DIFFUSE), SIMD4(0.5882353, 0.5882353, 0.5882353, 1.0)) 248 | XCTAssertEqual(scene.materials[0].getMaterialString(.TEXTURE(.diffuse, 0)), "TEST.PNG") 249 | 250 | } 251 | 252 | func testLoadAiSceneGLB() throws { 253 | let fileURL = try Resource.load(.damagedHelmet_glb) 254 | 255 | let scene: AiScene = try AiScene(file: fileURL.path) 256 | 257 | XCTAssertEqual(scene.flags, []) 258 | XCTAssertEqual(scene.numMeshes, 1) 259 | XCTAssertEqual(scene.numMaterials, 2) 260 | XCTAssertEqual(scene.numAnimations, 0) 261 | XCTAssertEqual(scene.numCameras, 0) 262 | XCTAssertEqual(scene.numLights, 0) 263 | XCTAssertEqual(scene.numTextures, 5) 264 | 265 | // Scene Graph 266 | 267 | XCTAssertEqual(scene.rootNode.numMeshes, 1) 268 | XCTAssertEqual(scene.rootNode.meshes.count, 1) 269 | XCTAssertEqual(scene.rootNode.numChildren, 0) 270 | XCTAssertEqual(scene.rootNode.children.count, 0) 271 | XCTAssertEqual(scene.rootNode.name, "node_damagedHelmet_-6514") 272 | // Mesh 273 | 274 | XCTAssertEqual(scene.meshes[0].name, "mesh_helmet_LP_13930damagedHelmet") 275 | XCTAssertEqual(scene.meshes[0].primitiveTypes, [.triangle]) 276 | XCTAssertEqual(scene.meshes[0].numVertices, 14556) 277 | XCTAssertEqual(scene.meshes[0].vertices[0...2], [-0.61199456, -0.030940875, 0.48309004]) 278 | XCTAssertEqual(scene.meshes[0].numFaces, 15452) 279 | XCTAssertEqual(scene.meshes[0].numBones, 0) 280 | XCTAssertEqual(scene.meshes[0].numAnimMeshes, 0) 281 | XCTAssertEqual(scene.meshes[0].vertices[0], -0.61199456) 282 | XCTAssertEqual(scene.meshes[0].vertices[1], -0.030940875) 283 | XCTAssertEqual(scene.meshes[0].vertices[2], 0.48309004) 284 | 285 | XCTAssertEqual(scene.meshes[0].vertices[105], -0.5812146) 286 | XCTAssertEqual(scene.meshes[0].vertices[106], -0.029344887) 287 | XCTAssertEqual(scene.meshes[0].vertices[107], 0.391574) 288 | 289 | // Faces 290 | 291 | XCTAssertEqual(scene.meshes[0].numFaces, 15452) 292 | XCTAssertEqual(scene.meshes[0].faces.count, 15452) 293 | 
XCTAssertEqual(scene.meshes[0].faces[0].numIndices, 3) 294 | XCTAssertEqual(scene.meshes[0].faces[0].indices, [0, 1, 2]) 295 | 296 | // Materials 297 | 298 | XCTAssertGreaterThanOrEqual(scene.materials[0].numProperties, 50) 299 | XCTAssertEqual(scene.materials[0].numAllocated, 80) 300 | XCTAssertEqual(scene.materials[0].properties[0].key, "?mat.name") 301 | 302 | // Textures 303 | 304 | XCTAssertEqual(scene.textures.count, scene.numTextures) 305 | XCTAssertEqual(scene.meshes[0].numUVComponents.0, 2) 306 | XCTAssertEqual(scene.meshes[0].texCoords.0?.count, 43668) 307 | XCTAssertEqual(scene.meshes[0].texCoords.0?[0...2], [0.704686, -0.24560404, 0.0]) 308 | 309 | XCTAssertEqual(scene.textures[0].filename, nil) 310 | XCTAssertEqual(scene.textures[0].achFormatHint, "jpg") 311 | XCTAssertEqual(scene.textures[0].width, 935629) 312 | XCTAssertEqual(scene.textures[0].height, 0) 313 | XCTAssertEqual(scene.textures[0].isCompressed, true) 314 | XCTAssertEqual(scene.textures[0].numPixels, 233907) 315 | XCTAssertEqual(scene.textures[0].textureData.count, 935628) 316 | 317 | XCTAssertEqual(scene.textures[0].textureData[0], 255) 318 | XCTAssertEqual(scene.textures[0].textureData[1], 216) 319 | XCTAssertEqual(scene.textures[0].textureData[2], 255) 320 | XCTAssertEqual(scene.textures[0].textureData[3], 224) 321 | XCTAssertEqual(scene.textures[0].textureData[0], 255) // b 255 322 | XCTAssertEqual(scene.textures[0].textureData[1], 216) // g 216 323 | XCTAssertEqual(scene.textures[0].textureData[2], 255) // r 255 324 | XCTAssertEqual(scene.textures[0].textureData[3], 224) // a 224 325 | XCTAssertEqual(scene.textures[0].textureData[0], 255) // r 255 326 | XCTAssertEqual(scene.textures[0].textureData[1], 216) // g 216 327 | XCTAssertEqual(scene.textures[0].textureData[2], 255) // b 255 328 | XCTAssertEqual(scene.textures[0].textureData[3], 224) // a 224 329 | 330 | XCTAssertEqual(scene.textures[1].filename, nil) 331 | XCTAssertEqual(scene.textures[1].achFormatHint, "jpg") 332 | XCTAssertEqual(scene.textures[1].width, 1300661) 333 | XCTAssertEqual(scene.textures[1].height, 0) 334 | XCTAssertEqual(scene.textures[1].isCompressed, true) 335 | XCTAssertEqual(scene.textures[1].numPixels, 325165) 336 | XCTAssertEqual(scene.textures[1].textureData.count, 1300660) 337 | XCTAssertEqual(scene.textures[1].textureData[0], 255) 338 | XCTAssertEqual(scene.textures[1].textureData[1], 216) 339 | XCTAssertEqual(scene.textures[1].textureData[2], 255) 340 | XCTAssertEqual(scene.textures[1].textureData[3], 224) 341 | XCTAssertEqual(scene.textures[1].textureData[0], 255) // b 255 342 | XCTAssertEqual(scene.textures[1].textureData[1], 216) // g 216 343 | XCTAssertEqual(scene.textures[1].textureData[2], 255) // r 255 344 | XCTAssertEqual(scene.textures[1].textureData[3], 224) // a 224 345 | XCTAssertEqual(scene.textures[1].textureData[0], 255) // r 255 346 | XCTAssertEqual(scene.textures[1].textureData[1], 216) // g 216 347 | XCTAssertEqual(scene.textures[1].textureData[2], 255) // b 255 348 | XCTAssertEqual(scene.textures[1].textureData[3], 224) // a 224 349 | 350 | // Lights 351 | 352 | XCTAssertEqual(scene.lights.count, 0) 353 | 354 | // Cameras 355 | 356 | XCTAssertEqual(scene.cameras.count, 0) 357 | } 358 | } 359 | -------------------------------------------------------------------------------- /Tests/AssimpTests/InternalTests.swift: -------------------------------------------------------------------------------- 1 | import XCTest 2 | @testable import Assimp 3 | @_implementationOnly import CAssimp 4 | 5 | final class 
InternalTests: XCTestCase { 6 | func testVersion() { 7 | XCTAssertEqual(aiGetVersionMajor(), 5) 8 | XCTAssertNotNil(aiGetVersionMinor()) 9 | XCTAssertNotNil(aiGetVersionRevision()) 10 | } 11 | 12 | func testVec3fFromAiVector3D() { 13 | let vec3f = aiVector3D(x: 1.2, y: 3.4, z: 5.6).vector 14 | XCTAssertEqual(vec3f, SIMD3(1.2, 3.4, 5.6)) 15 | XCTAssertEqual(SIMD3(aiVector3D(x: 5.6, y: 3.4, z: 1.2)), SIMD3(5.6, 3.4, 1.2)) 16 | } 17 | 18 | func testVec2fFromAiVector2D() { 19 | let vec2f = aiVector2D(x: 1.2, y: 3.4).vector 20 | XCTAssertEqual(vec2f, SIMD2(1.2, 3.4)) 21 | XCTAssertEqual(SIMD2(aiVector2D(x: 5.6, y: 3.4)), SIMD2(5.6, 3.4)) 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /Tests/AssimpTests/Resource.swift: -------------------------------------------------------------------------------- 1 | // 2 | // Resource.swift 3 | // 4 | // 5 | // Created by Christian Treffs on 21.06.19. 6 | // 7 | 8 | import Foundation 9 | #if os(Linux) 10 | import FoundationNetworking 11 | #endif 12 | 13 | enum Resource: String { 14 | case bunny_obj = "https://graphics.stanford.edu/~mdfisher/Data/Meshes/bunny.obj" 15 | case duck_dae = "https://raw.githubusercontent.com/assimp/assimp/master/test/models/Collada/duck.dae" 16 | case box_obj = "https://raw.githubusercontent.com/assimp/assimp/master/test/models/OBJ/box.obj" 17 | case nier_gltf = "https://gitlab.com/ctreffs/assets/raw/master/models/nier/scene.gltf" 18 | case busterDrone_gltf = "https://gitlab.com/ctreffs/assets/raw/master/models/buster_drone/scene.gltf" 19 | case airplane_usdz = "https://developer.apple.com/augmented-reality/quick-look/models/biplane/toy_biplane.usdz" 20 | case boxTextured_gltf = "https://raw.githubusercontent.com/assimp/assimp/master/test/models/glTF2/BoxTextured-glTF/BoxTextured.gltf" 21 | case cubeDiffuseTextured_3ds = "https://github.com/assimp/assimp/raw/master/test/models/3DS/cube_with_diffuse_texture.3DS" 22 | case damagedHelmet_glb = "https://github.com/KhronosGroup/glTF-Sample-Models/raw/master/2.0/DamagedHelmet/glTF-Binary/DamagedHelmet.glb" 23 | 24 | private static let fm = FileManager.default 25 | 26 | enum Error: Swift.Error { 27 | case invalidURL(String) 28 | } 29 | 30 | static func resourcesDir() -> URL { 31 | #if os(Linux) 32 | // linux does not have .allBundles yet. 
33 | let bundle = Bundle.main 34 | #else 35 | guard let bundle = Bundle.allBundles.first(where: { $0.bundlePath.contains("Tests") }) else { 36 | fatalError("no test bundle found") 37 | } 38 | #endif 39 | var resourcesURL: URL = bundle.bundleURL 40 | resourcesURL.deleteLastPathComponent() 41 | return resourcesURL 42 | } 43 | static func load(_ resource: Resource) throws -> URL { 44 | guard let remoteURL: URL = URL(string: resource.rawValue) else { 45 | throw Error.invalidURL(resource.rawValue) 46 | } 47 | 48 | var name: String = remoteURL.pathComponents.reversed().prefix(3).reversed().joined(separator: "_") 49 | name.append(".") 50 | name.append(remoteURL.lastPathComponent) 51 | 52 | let localFile: URL = resourcesDir().appendingPathComponent(name) 53 | if !fm.fileExists(atPath: localFile.path) { 54 | let data = try Data(contentsOf: remoteURL) 55 | try data.write(to: localFile) 56 | print("⬇ Downloaded '\(localFile.path)' ⬇") 57 | } 58 | 59 | return localFile 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /renovate.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://docs.renovatebot.com/renovate-schema.json", 3 | "extends": [ 4 | "config:base" 5 | ] 6 | } 7 | --------------------------------------------------------------------------------
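Example: an end-to-end usage sketch of the package API shown above. The function name, parameters, and flag set are illustrative placeholders; only calls exercised elsewhere in this repository are used.

import Assimp

func loadAndDump(path: String, fileExtension: String) throws {
    // Bail out early if the linked Assimp build cannot import this file type.
    guard Assimp.canImportFileExtension(fileExtension) else { return }

    // Import with a typical set of post-process steps (see AiPostProcessStep.swift).
    let scene = try AiScene(file: path,
                            flags: [.triangulate, .sortByPType, .genSmoothNormals])

    // Resolve each child node's mesh indices against the scene-wide mesh array.
    for child in scene.rootNode.children {
        for meshIndex in child.meshes {
            let mesh = scene.meshes[Int(meshIndex)] // Int(_:) keeps this independent of the index's integer type
            print(mesh.name, mesh.numVertices, mesh.numFaces)
        }
    }
}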