├── .circleci └── config.yml ├── .gitignore ├── Cargo.lock ├── Cargo.toml ├── LICENSE-APACHE ├── LICENSE-MIT ├── Makefile ├── README.md ├── extract-grammar ├── .gitignore ├── .prettierrc.js ├── CONCEPT.md ├── jest.config.js ├── package-lock.json ├── package.json └── src │ ├── __tests__ │ ├── full.grammar │ ├── implemented.grammar │ ├── multiple_definitions.grammar │ ├── parseBnf.test.js │ ├── rhs_function.grammar │ ├── rhs_function_single_param.grammar │ ├── rhs_mixed_function.grammar │ ├── rhs_multiple_multiline.grammar │ ├── rhs_multiple_singleline.grammar │ ├── rhs_zero_or_more.grammar │ ├── rhs_zero_or_one.grammar │ ├── section_1.grammar │ └── simple.grammar │ ├── bnf_grammar.pegjs │ ├── generatePb2.js │ ├── generateSolidityContracts.js │ ├── generateSolidty.js │ ├── generateV0FieldMapping.js │ ├── generateV0Intermediate.js │ ├── index.js │ ├── manual.js │ ├── mapperHelpers.js │ ├── params.js │ ├── parseBnf.js │ └── parseBnfCli.js ├── rlay_ontology ├── .gitignore ├── Cargo.toml ├── build.rs ├── build_features.sh ├── rust-toolchain ├── src │ ├── bin.rs │ ├── intermediate.json │ ├── lib.rs │ └── ontology │ │ ├── mod.rs │ │ ├── rlay.ontology.macros.rs │ │ └── web3.rs └── tests │ ├── core.rs │ ├── protobuf_format.rs │ └── web3.rs ├── rlay_ontology_build ├── Cargo.toml └── src │ ├── compact.rs │ ├── core.rs │ ├── entities.rs │ ├── intermediate.rs │ ├── lib.rs │ ├── v0.rs │ └── web3.rs ├── rlay_ontology_js ├── .gitignore ├── Cargo.toml ├── prepare-pkg.js ├── src │ └── lib.rs └── test.js ├── rust-toolchain ├── rustfmt.toml └── update_grammar.sh /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | # Javascript Node CircleCI 2.0 configuration file 2 | # 3 | # Check https://circleci.com/docs/2.0/language-javascript/ for more details 4 | # 5 | version: 2 6 | jobs: 7 | build: 8 | docker: 9 | # specify the version you desire here 10 | - image: circleci/node:8.10.0 11 | 12 | # environment: 13 | # CARGO_WEB_VERSION: xxx 14 | 15 | working_directory: ~/repo 16 | 17 | steps: 18 | - checkout 19 | 20 | # Download and cache dependencies 21 | - restore_cache: 22 | keys: 23 | - v1-dependencies-{{ checksum "Cargo.lock" }} 24 | # fallback to using the latest cache if no exact match is found 25 | - v1-dependencies- 26 | - restore_cache: 27 | keys: 28 | - cargo-dependencies-{{ checksum "Cargo.lock" }} 29 | - restore_cache: 30 | keys: 31 | - cargo-binaries-{{ checksum "Makefile" }} 32 | 33 | - run: npm install 34 | - run: 35 | name: Install rustup 36 | command: curl https://sh.rustup.rs -sSf | sh /dev/stdin --default-toolchain $(cat rust-toolchain) -y --verbose 37 | - run: echo 'export PATH=$HOME/.cargo/bin:$PWD/bin:$PATH' >> $BASH_ENV 38 | - run: 39 | name: Install different rustup components 40 | command: | 41 | rustup component add clippy 42 | rustup component add rustfmt 43 | - run: 44 | name: Install wasm-pack 45 | command: cargo install --git https://github.com/hobofan/wasm-pack.git --branch feat/multiple_targets 46 | - run: 47 | name: Fetch dependencies (for cache) 48 | command: cargo fetch 49 | 50 | - save_cache: 51 | paths: 52 | - ~/.cargo/registry 53 | key: cargo-dependencies-{{ checksum "Cargo.lock" }} 54 | - save_cache: 55 | paths: 56 | - bin 57 | key: cargo-binaries-{{ checksum "Makefile" }} 58 | 59 | # run tests! 
60 | - run: 61 | name: Test crate rlay_ontology (web3_compat) 62 | command: cd rlay_ontology && cargo test --features web3_compat 63 | - run: 64 | name: Test crate rlay_ontology_js / @rlay/ontology 65 | working_directory: rlay_ontology_js 66 | command: | 67 | wasm-pack build --target nodejs 68 | node prepare-pkg.js 69 | node test.js 70 | 71 | - run: 72 | name: Clippy lint 73 | command: | 74 | cargo fmt --package rlay_ontology -- --check 75 | cd rlay_ontology && cargo clippy 76 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | bin 2 | .crates.toml 3 | 4 | /target/ 5 | rlay_ontology_build/target 6 | **/*.rs.bk 7 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | members = [ 3 | "rlay_ontology", 4 | "rlay_ontology_js" 5 | ] 6 | 7 | exclude = [ 8 | "rlay_ontology_nostd_test" 9 | ] 10 | -------------------------------------------------------------------------------- /LICENSE-APACHE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 
47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | -------------------------------------------------------------------------------- /LICENSE-MIT: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2018 Project T Ltd. 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | 23 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | BINARY_DIR = bin 2 | 3 | CLIPPY_VERSION = 0.0.206 4 | CLIPPY_TOOLCHAIN = $(shell cat rust-toolchain-clippy) 5 | 6 | CARGO_WEB_VERSION = 0.6.8 7 | CARGO_WEB_TOOLCHAIN = $(shell cat rlay_ontology_stdweb/rust-toolchain) 8 | 9 | toolchain-clippy: 10 | rustup install $(CLIPPY_TOOLCHAIN) 11 | 12 | toolchain-cargo-web: 13 | rustup install $(CARGO_WEB_TOOLCHAIN) 14 | rustup target add --toolchain $(CARGO_WEB_TOOLCHAIN) wasm32-unknown-unknown 15 | 16 | toolchains: toolchain-clippy toolchain-cargo-web 17 | 18 | $(BINARY_DIR)/cargo-clippy: 19 | cargo +$(CLIPPY_TOOLCHAIN) install clippy --root . --version $(CLIPPY_VERSION) 20 | 21 | $(BINARY_DIR)/cargo-web: 22 | cargo +$(CARGO_WEB_TOOLCHAIN) install cargo-web --root . --version $(CARGO_WEB_VERSION) 23 | 24 | cargo-binaries-clippy: $(BINARY_DIR)/cargo-clippy 25 | 26 | cargo-binaries-cargo-web: $(BINARY_DIR)/cargo-web 27 | 28 | cargo-binaries: cargo-binaries-clippy cargo-binaries-cargo-web 29 | 30 | lint: cargo-binaries-clippy 31 | # rustup run $(CLIPPY_TOOLCHAIN) bin/cargo-clippy 32 | cd rlay_ontology && rustup run $(CLIPPY_TOOLCHAIN) ../bin/cargo-clippy 33 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ### Rlay decentralized ontology format 2 | 3 | Implementation of the Rlay decentralized ontology format, for use in the Ɍlay protocol, a Decentralized Information Network. 4 | 5 | The goal of the format is to bring the expressive power of an ontology format like OWL to decentralized applications and protocols by making it content-addressable. 6 | 7 | ## Contributing & Contact 8 | 9 | We are very open to contributions! Feel free to open a [GitHub issue][github-issues] or a Pull Request. 10 | 11 | If you want to get in contact, you can find us here: 12 | 13 | - [Matrix chat room][matrix-chat] - development-focused chat 14 | - [Telegram channel][telegram-chat] - general Rlay discussion 15 | 16 | > Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as below, without any additional terms or conditions. 17 | 18 | ## License 19 | 20 | Licensed under either of 21 | 22 | * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0) 23 | * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT) 24 | 25 | at your option.
26 | 27 | [matrix-chat]: https://matrix.to/#/#rlay:matrix.org 28 | [github-issues]: https://github.com/rlay-project/rlay-ontology/issues 29 | [telegram-chat]: https://t.me/rlay_official 30 | -------------------------------------------------------------------------------- /extract-grammar/.gitignore: -------------------------------------------------------------------------------- 1 | node_modules/ 2 | build/ -------------------------------------------------------------------------------- /extract-grammar/.prettierrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | trailingComma: 'es5', 3 | singleQuote: true, 4 | }; 5 | -------------------------------------------------------------------------------- /extract-grammar/CONCEPT.md: -------------------------------------------------------------------------------- 1 | Related to: https://www.w3.org/TR/owl2-syntax 2 | 3 | - Declarations as specified in the document are superfluous, as we base our concept around content-addressing 4 | - Instead of declarations, entities are defined by their EntityAxiom (e.g. ClassAxiom) 5 | - Regarding the EntityAxiom, care needs to be taken with the immutability properties of the individual variants (e.g. EquivalentClasses can mutate the meaning of an existing ontology) 6 | - Annotations are then also folded into the declarations via Axioms 7 | -------------------------------------------------------------------------------- /extract-grammar/jest.config.js: -------------------------------------------------------------------------------- 1 | // For a detailed explanation regarding each configuration property, visit: 2 | // https://jestjs.io/docs/en/configuration.html 3 | 4 | module.exports = { 5 | // All imported modules in your tests should be mocked automatically 6 | // automock: false, 7 | 8 | // Stop running tests after the first failure 9 | // bail: false, 10 | 11 | // Respect "browser" field in package.json when resolving modules 12 | // browser: false, 13 | 14 | // The directory where Jest should store its cached dependency information 15 | // cacheDirectory: "/var/folders/k8/4q34s__14fsgd5z94lv4n6m00000gn/T/jest_dx", 16 | 17 | // Automatically clear mock calls and instances between every test 18 | clearMocks: true, 19 | 20 | // Indicates whether the coverage information should be collected while executing the test 21 | // collectCoverage: false, 22 | 23 | // An array of glob patterns indicating a set of files for which coverage information should be collected 24 | // collectCoverageFrom: null, 25 | 26 | // The directory where Jest should output its coverage files 27 | // coverageDirectory: null, 28 | 29 | // An array of regexp pattern strings used to skip coverage collection 30 | // coveragePathIgnorePatterns: [ 31 | // "/node_modules/" 32 | // ], 33 | 34 | // A list of reporter names that Jest uses when writing coverage reports 35 | // coverageReporters: [ 36 | // "json", 37 | // "text", 38 | // "lcov", 39 | // "clover" 40 | // ], 41 | 42 | // An object that configures minimum threshold enforcement for coverage results 43 | // coverageThreshold: null, 44 | 45 | // Make calling deprecated APIs throw helpful error messages 46 | // errorOnDeprecated: false, 47 | 48 | // Force coverage collection from ignored files using an array of glob patterns 49 | // forceCoverageMatch: [], 50 | 51 | // A path to a module which exports an async function that is triggered once before all test suites 52 | // globalSetup: null, 53 | 54 | // A path to a module which exports
an async function that is triggered once after all test suites 55 | // globalTeardown: null, 56 | 57 | // A set of global variables that need to be available in all test environments 58 | // globals: {}, 59 | 60 | // An array of directory names to be searched recursively up from the requiring module's location 61 | // moduleDirectories: [ 62 | // "node_modules" 63 | // ], 64 | 65 | // An array of file extensions your modules use 66 | // moduleFileExtensions: [ 67 | // "js", 68 | // "json", 69 | // "jsx", 70 | // "node" 71 | // ], 72 | 73 | // A map from regular expressions to module names that allow to stub out resources with a single module 74 | // moduleNameMapper: {}, 75 | 76 | // An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader 77 | // modulePathIgnorePatterns: [], 78 | 79 | // Activates notifications for test results 80 | // notify: false, 81 | 82 | // An enum that specifies notification mode. Requires { notify: true } 83 | // notifyMode: "always", 84 | 85 | // A preset that is used as a base for Jest's configuration 86 | // preset: null, 87 | 88 | // Run tests from one or more projects 89 | // projects: null, 90 | 91 | // Use this configuration option to add custom reporters to Jest 92 | // reporters: undefined, 93 | 94 | // Automatically reset mock state between every test 95 | // resetMocks: false, 96 | 97 | // Reset the module registry before running each individual test 98 | // resetModules: false, 99 | 100 | // A path to a custom resolver 101 | // resolver: null, 102 | 103 | // Automatically restore mock state between every test 104 | // restoreMocks: false, 105 | 106 | // The root directory that Jest should scan for tests and modules within 107 | // rootDir: null, 108 | 109 | // A list of paths to directories that Jest should use to search for files in 110 | // roots: [ 111 | // "" 112 | // ], 113 | 114 | // Allows you to use a custom runner instead of Jest's default test runner 115 | // runner: "jest-runner", 116 | 117 | // The paths to modules that run some code to configure or set up the testing environment before each test 118 | // setupFiles: [], 119 | 120 | // The path to a module that runs some code to configure or set up the testing framework before each test 121 | // setupTestFrameworkScriptFile: null, 122 | 123 | // A list of paths to snapshot serializer modules Jest should use for snapshot testing 124 | // snapshotSerializers: [], 125 | 126 | // The test environment that will be used for testing 127 | testEnvironment: "node", 128 | 129 | // Options that will be passed to the testEnvironment 130 | // testEnvironmentOptions: {}, 131 | 132 | // Adds a location field to test results 133 | // testLocationInResults: false, 134 | 135 | // The glob patterns Jest uses to detect test files 136 | // testMatch: [ 137 | // "**/__tests__/**/*.js?(x)", 138 | // "**/?(*.)+(spec|test).js?(x)" 139 | // ], 140 | 141 | // An array of regexp pattern strings that are matched against all test paths, matched tests are skipped 142 | // testPathIgnorePatterns: [ 143 | // "/node_modules/" 144 | // ], 145 | 146 | // The regexp pattern Jest uses to detect test files 147 | // testRegex: "", 148 | 149 | // This option allows the use of a custom results processor 150 | // testResultsProcessor: null, 151 | 152 | // This option allows use of a custom test runner 153 | // testRunner: "jasmine2", 154 | 155 | // This option sets the URL for the jsdom environment. 
It is reflected in properties such as location.href 156 | // testURL: "about:blank", 157 | 158 | // Setting this value to "fake" allows the use of fake timers for functions such as "setTimeout" 159 | // timers: "real", 160 | 161 | // A map from regular expressions to paths to transformers 162 | // transform: null, 163 | 164 | // An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation 165 | // transformIgnorePatterns: [ 166 | // "/node_modules/" 167 | // ], 168 | 169 | // An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them 170 | // unmockedModulePathPatterns: undefined, 171 | 172 | // Indicates whether each individual test should be reported during the run 173 | // verbose: null, 174 | 175 | // An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode 176 | // watchPathIgnorePatterns: [], 177 | 178 | // Whether to use watchman for file crawling 179 | // watchman: true, 180 | }; 181 | -------------------------------------------------------------------------------- /extract-grammar/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "js-bnf-parser-toasty", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "build": "mkdir -p build && npm run build:parsed && npm run build:intermediate && npm run build:solidity && npm run build:protobuf && npm run build:mapping", 8 | "build:parsed": "node src/parseBnfCli.js", 9 | "build:intermediate": "node src/index.js", 10 | "build:solidity": "node src/generateSolidty.js", 11 | "build:protobuf": "node src/generatePb2.js", 12 | "build:mapping": "node src/generateV0FieldMapping.js", 13 | "test": "jest" 14 | }, 15 | "author": "", 16 | "license": "ISC", 17 | "dependencies": { 18 | "pegjs": "^0.10.0", 19 | "varint": "^5.0.0" 20 | }, 21 | "devDependencies": { 22 | "jest": "^23.4.2" 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /extract-grammar/src/__tests__/full.grammar: -------------------------------------------------------------------------------- 1 | AnnotationSubject := IRI | AnonymousIndividual 2 | AnnotationValue := AnonymousIndividual | IRI | Literal 3 | axiomAnnotations := { Annotation } 4 | 5 | Annotation := 'Annotation' '(' annotationAnnotations AnnotationProperty AnnotationValue ')' 6 | annotationAnnotations := { Annotation } 7 | 8 | AnnotationAxiom := AnnotationAssertion | SubAnnotationPropertyOf | AnnotationPropertyDomain | AnnotationPropertyRange 9 | 10 | AnnotationAssertion := 'AnnotationAssertion' '(' axiomAnnotations AnnotationProperty AnnotationSubject AnnotationValue ')' 11 | 12 | SubAnnotationPropertyOf := 'SubAnnotationPropertyOf' '(' axiomAnnotations subAnnotationProperty superAnnotationProperty ')' 13 | subAnnotationProperty := AnnotationProperty 14 | superAnnotationProperty := AnnotationProperty 15 | 16 | AnnotationPropertyDomain := 'AnnotationPropertyDomain' '(' axiomAnnotations AnnotationProperty IRI ')' 17 | 18 | AnnotationPropertyRange := 'AnnotationPropertyRange' '(' axiomAnnotations AnnotationProperty IRI ')' 19 | 20 | Class := IRI 21 | 22 | Datatype := IRI 23 | 24 | ObjectProperty := IRI 25 | 26 | DataProperty := IRI 27 | 28 | AnnotationProperty := IRI 29 | 30 | Individual := NamedIndividual | AnonymousIndividual 31 | 32 | NamedIndividual := IRI 33 | 34 | AnonymousIndividual 
:= nodeID 35 | 36 | Literal := typedLiteral | stringLiteralNoLanguage | stringLiteralWithLanguage 37 | typedLiteral := lexicalForm '^^' Datatype 38 | lexicalForm := quotedString 39 | stringLiteralNoLanguage := quotedString 40 | stringLiteralWithLanguage := quotedString languageTag 41 | 42 | 43 | 44 | ObjectPropertyExpression := ObjectProperty | InverseObjectProperty 45 | 46 | InverseObjectProperty := 'ObjectInverseOf' '(' ObjectProperty ')' 47 | 48 | DataPropertyExpression := DataProperty 49 | 50 | 51 | 52 | DataRange := 53 | Datatype | 54 | DataIntersectionOf | 55 | DataUnionOf | 56 | DataComplementOf | 57 | DataOneOf | 58 | DatatypeRestriction 59 | 60 | DataIntersectionOf := 'DataIntersectionOf' '(' DataRange DataRange { DataRange } ')' 61 | 62 | DataUnionOf := 'DataUnionOf' '(' DataRange DataRange { DataRange } ')' 63 | 64 | DataComplementOf := 'DataComplementOf' '(' DataRange ')' 65 | 66 | DataOneOf := 'DataOneOf' '(' Literal { Literal } ')' 67 | 68 | DatatypeRestriction := 'DatatypeRestriction' '(' Datatype constrainingFacet restrictionValue { constrainingFacet restrictionValue } ')' 69 | constrainingFacet := IRI 70 | restrictionValue := Literal 71 | 72 | 73 | 74 | ClassExpression := 75 | Class | 76 | ObjectIntersectionOf | ObjectUnionOf | ObjectComplementOf | ObjectOneOf | 77 | ObjectSomeValuesFrom | ObjectAllValuesFrom | ObjectHasValue | ObjectHasSelf | 78 | ObjectMinCardinality | ObjectMaxCardinality | ObjectExactCardinality | 79 | DataSomeValuesFrom | DataAllValuesFrom | DataHasValue | 80 | DataMinCardinality | DataMaxCardinality | DataExactCardinality 81 | 82 | ObjectIntersectionOf := 'ObjectIntersectionOf' '(' ClassExpression ClassExpression { ClassExpression } ')' 83 | 84 | ObjectUnionOf := 'ObjectUnionOf' '(' ClassExpression ClassExpression { ClassExpression } ')' 85 | 86 | ObjectComplementOf := 'ObjectComplementOf' '(' ClassExpression ')' 87 | 88 | ObjectOneOf := 'ObjectOneOf' '(' Individual { Individual }')' 89 | 90 | ObjectSomeValuesFrom := 'ObjectSomeValuesFrom' '(' ObjectPropertyExpression ClassExpression ')' 91 | 92 | ObjectAllValuesFrom := 'ObjectAllValuesFrom' '(' ObjectPropertyExpression ClassExpression ')' 93 | 94 | ObjectHasValue := 'ObjectHasValue' '(' ObjectPropertyExpression Individual ')' 95 | 96 | ObjectHasSelf := 'ObjectHasSelf' '(' ObjectPropertyExpression ')' 97 | 98 | ObjectMinCardinality := 'ObjectMinCardinality' '(' nonNegativeInteger ObjectPropertyExpression [ ClassExpression ] ')' 99 | 100 | ObjectMaxCardinality := 'ObjectMaxCardinality' '(' nonNegativeInteger ObjectPropertyExpression [ ClassExpression ] ')' 101 | 102 | ObjectExactCardinality := 'ObjectExactCardinality' '(' nonNegativeInteger ObjectPropertyExpression [ ClassExpression ] ')' 103 | 104 | DataSomeValuesFrom := 'DataSomeValuesFrom' '(' DataPropertyExpression { DataPropertyExpression } DataRange ')' 105 | 106 | DataAllValuesFrom := 'DataAllValuesFrom' '(' DataPropertyExpression { DataPropertyExpression } DataRange ')' 107 | 108 | DataHasValue := 'DataHasValue' '(' DataPropertyExpression Literal ')' 109 | 110 | DataMinCardinality := 'DataMinCardinality' '(' nonNegativeInteger DataPropertyExpression [ DataRange ] ')' 111 | 112 | DataMaxCardinality := 'DataMaxCardinality' '(' nonNegativeInteger DataPropertyExpression [ DataRange ] ')' 113 | 114 | DataExactCardinality := 'DataExactCardinality' '(' nonNegativeInteger DataPropertyExpression [ DataRange ] ')' 115 | 116 | 117 | 118 | Axiom := Declaration | ClassAxiom | ObjectPropertyAxiom | DataPropertyAxiom | DatatypeDefinition | HasKey | Assertion 
| AnnotationAxiom 119 | 120 | 121 | 122 | ClassAxiom := SubClassOf | EquivalentClasses | DisjointClasses | DisjointUnion 123 | 124 | SubClassOf := 'SubClassOf' '(' axiomAnnotations subClassExpression superClassExpression ')' 125 | subClassExpression := ClassExpression 126 | superClassExpression := ClassExpression 127 | 128 | EquivalentClasses := 'EquivalentClasses' '(' axiomAnnotations ClassExpression ClassExpression { ClassExpression } ')' 129 | 130 | DisjointClasses := 'DisjointClasses' '(' axiomAnnotations ClassExpression ClassExpression { ClassExpression } ')' 131 | 132 | DisjointUnion := 'DisjointUnion' '(' axiomAnnotations Class disjointClassExpressions ')' 133 | disjointClassExpressions := ClassExpression ClassExpression { ClassExpression } 134 | 135 | 136 | 137 | ObjectPropertyAxiom := 138 | SubObjectPropertyOf | EquivalentObjectProperties | 139 | DisjointObjectProperties | InverseObjectProperties | 140 | ObjectPropertyDomain | ObjectPropertyRange | 141 | FunctionalObjectProperty | InverseFunctionalObjectProperty | 142 | ReflexiveObjectProperty | IrreflexiveObjectProperty | 143 | SymmetricObjectProperty | AsymmetricObjectProperty | 144 | TransitiveObjectProperty 145 | 146 | SubObjectPropertyOf := 'SubObjectPropertyOf' '(' axiomAnnotations subObjectPropertyExpression superObjectPropertyExpression ')' 147 | subObjectPropertyExpression := ObjectPropertyExpression | propertyExpressionChain 148 | propertyExpressionChain := 'ObjectPropertyChain' '(' ObjectPropertyExpression ObjectPropertyExpression { ObjectPropertyExpression } ')' 149 | superObjectPropertyExpression := ObjectPropertyExpression 150 | 151 | EquivalentObjectProperties := 'EquivalentObjectProperties' '(' axiomAnnotations ObjectPropertyExpression ObjectPropertyExpression { ObjectPropertyExpression } ')' 152 | 153 | DisjointObjectProperties := 'DisjointObjectProperties' '(' axiomAnnotations ObjectPropertyExpression ObjectPropertyExpression { ObjectPropertyExpression } ')' 154 | 155 | ObjectPropertyDomain := 'ObjectPropertyDomain' '(' axiomAnnotations ObjectPropertyExpression ClassExpression ')' 156 | 157 | ObjectPropertyRange := 'ObjectPropertyRange' '(' axiomAnnotations ObjectPropertyExpression ClassExpression ')' 158 | 159 | InverseObjectProperties := 'InverseObjectProperties' '(' axiomAnnotations ObjectPropertyExpression ObjectPropertyExpression ')' 160 | 161 | FunctionalObjectProperty := 'FunctionalObjectProperty' '(' axiomAnnotations ObjectPropertyExpression ')' 162 | 163 | InverseFunctionalObjectProperty := 'InverseFunctionalObjectProperty' '(' axiomAnnotations ObjectPropertyExpression ')' 164 | 165 | ReflexiveObjectProperty := 'ReflexiveObjectProperty' '(' axiomAnnotations ObjectPropertyExpression ')' 166 | 167 | IrreflexiveObjectProperty := 'IrreflexiveObjectProperty' '(' axiomAnnotations ObjectPropertyExpression ')' 168 | 169 | SymmetricObjectProperty := 'SymmetricObjectProperty' '(' axiomAnnotations ObjectPropertyExpression ')' 170 | 171 | AsymmetricObjectProperty := 'AsymmetricObjectProperty' '(' axiomAnnotations ObjectPropertyExpression ')' 172 | 173 | TransitiveObjectProperty := 'TransitiveObjectProperty' '(' axiomAnnotations ObjectPropertyExpression ')' 174 | 175 | 176 | 177 | DataPropertyAxiom := 178 | SubDataPropertyOf | EquivalentDataProperties | DisjointDataProperties | 179 | DataPropertyDomain | DataPropertyRange | FunctionalDataProperty 180 | 181 | SubDataPropertyOf := 'SubDataPropertyOf' '(' axiomAnnotations subDataPropertyExpression superDataPropertyExpression ')' 182 | subDataPropertyExpression := 
DataPropertyExpression 183 | superDataPropertyExpression := DataPropertyExpression 184 | 185 | EquivalentDataProperties := 'EquivalentDataProperties' '(' axiomAnnotations DataPropertyExpression DataPropertyExpression { DataPropertyExpression } ')' 186 | 187 | DisjointDataProperties := 'DisjointDataProperties' '(' axiomAnnotations DataPropertyExpression DataPropertyExpression { DataPropertyExpression } ')' 188 | 189 | DataPropertyDomain := 'DataPropertyDomain' '(' axiomAnnotations DataPropertyExpression ClassExpression ')' 190 | 191 | DataPropertyRange := 'DataPropertyRange' '(' axiomAnnotations DataPropertyExpression DataRange ')' 192 | 193 | FunctionalDataProperty := 'FunctionalDataProperty' '(' axiomAnnotations DataPropertyExpression ')' 194 | 195 | 196 | 197 | DatatypeDefinition := 'DatatypeDefinition' '(' axiomAnnotations Datatype DataRange ')' 198 | 199 | 200 | 201 | HasKey := 'HasKey' '(' axiomAnnotations ClassExpression '(' { ObjectPropertyExpression } ')' '(' { DataPropertyExpression } ')' ')' 202 | 203 | 204 | 205 | Assertion := 206 | SameIndividual | DifferentIndividuals | ClassAssertion | 207 | ObjectPropertyAssertion | NegativeObjectPropertyAssertion | 208 | DataPropertyAssertion | NegativeDataPropertyAssertion 209 | 210 | sourceIndividual := Individual 211 | targetIndividual := Individual 212 | targetValue := Literal 213 | 214 | SameIndividual := 'SameIndividual' '(' axiomAnnotations Individual Individual { Individual } ')' 215 | 216 | DifferentIndividuals := 'DifferentIndividuals' '(' axiomAnnotations Individual Individual { Individual } ')' 217 | 218 | ClassAssertion := 'ClassAssertion' '(' axiomAnnotations ClassExpression Individual ')' 219 | 220 | ObjectPropertyAssertion := 'ObjectPropertyAssertion' '(' axiomAnnotations ObjectPropertyExpression sourceIndividual targetIndividual ')' 221 | 222 | NegativeObjectPropertyAssertion := 'NegativeObjectPropertyAssertion' '(' axiomAnnotations ObjectPropertyExpression sourceIndividual targetIndividual ')' 223 | 224 | DataPropertyAssertion := 'DataPropertyAssertion' '(' axiomAnnotations DataPropertyExpression sourceIndividual targetValue ')' 225 | 226 | NegativeDataPropertyAssertion := 'NegativeDataPropertyAssertion' '(' axiomAnnotations DataPropertyExpression sourceIndividual targetValue ')' 227 | -------------------------------------------------------------------------------- /extract-grammar/src/__tests__/implemented.grammar: -------------------------------------------------------------------------------- 1 | AnnotationSubject := IRI | AnonymousIndividual 2 | AnnotationValue := AnonymousIndividual | IRI | Literal 3 | axiomAnnotations := { Annotation } 4 | 5 | Annotation := 'Annotation' '(' annotationAnnotations AnnotationProperty AnnotationValue ')' 6 | annotationAnnotations := { Annotation } 7 | 8 | AnnotationAxiom := AnnotationAssertion | SubAnnotationPropertyOf | AnnotationPropertyDomain | AnnotationPropertyRange 9 | 10 | AnnotationAssertion := 'AnnotationAssertion' '(' axiomAnnotations AnnotationProperty AnnotationSubject AnnotationValue ')' 11 | 12 | SubAnnotationPropertyOf := 'SubAnnotationPropertyOf' '(' axiomAnnotations subAnnotationProperty superAnnotationProperty ')' 13 | subAnnotationProperty := AnnotationProperty 14 | superAnnotationProperty := AnnotationProperty 15 | 16 | AnnotationPropertyDomain := 'AnnotationPropertyDomain' '(' axiomAnnotations AnnotationProperty IRI ')' 17 | 18 | AnnotationPropertyRange := 'AnnotationPropertyRange' '(' axiomAnnotations AnnotationProperty IRI ')' 19 | 20 | Class := IRI 21 | 22 | 
Datatype := IRI 23 | 24 | ObjectProperty := IRI 25 | 26 | DataProperty := IRI 27 | 28 | AnnotationProperty := IRI 29 | 30 | Individual := NamedIndividual | AnonymousIndividual 31 | 32 | NamedIndividual := IRI 33 | 34 | AnonymousIndividual := nodeID 35 | 36 | Literal := typedLiteral | stringLiteralNoLanguage | stringLiteralWithLanguage 37 | lexicalForm := quotedString 38 | stringLiteralNoLanguage := quotedString 39 | 40 | 41 | 42 | ObjectPropertyExpression := ObjectProperty | InverseObjectProperty 43 | 44 | InverseObjectProperty := 'ObjectInverseOf' '(' ObjectProperty ')' 45 | 46 | DataPropertyExpression := DataProperty 47 | 48 | 49 | 50 | DataRange := 51 | Datatype | 52 | DataIntersectionOf | 53 | DataUnionOf | 54 | DataComplementOf | 55 | DataOneOf | 56 | DatatypeRestriction 57 | 58 | DataIntersectionOf := 'DataIntersectionOf' '(' DataRange DataRange { DataRange } ')' 59 | 60 | DataUnionOf := 'DataUnionOf' '(' DataRange DataRange { DataRange } ')' 61 | 62 | DataComplementOf := 'DataComplementOf' '(' DataRange ')' 63 | 64 | DataOneOf := 'DataOneOf' '(' Literal { Literal } ')' 65 | 66 | constrainingFacet := IRI 67 | restrictionValue := Literal 68 | 69 | 70 | 71 | ClassExpression := 72 | Class | 73 | ObjectIntersectionOf | ObjectUnionOf | ObjectComplementOf | ObjectOneOf | 74 | ObjectSomeValuesFrom | ObjectAllValuesFrom | ObjectHasValue | ObjectHasSelf | 75 | ObjectMinCardinality | ObjectMaxCardinality | ObjectExactCardinality | 76 | DataSomeValuesFrom | DataAllValuesFrom | DataHasValue | 77 | DataMinCardinality | DataMaxCardinality | DataExactCardinality 78 | 79 | ObjectIntersectionOf := 'ObjectIntersectionOf' '(' ClassExpression ClassExpression { ClassExpression } ')' 80 | 81 | ObjectUnionOf := 'ObjectUnionOf' '(' ClassExpression ClassExpression { ClassExpression } ')' 82 | 83 | ObjectComplementOf := 'ObjectComplementOf' '(' ClassExpression ')' 84 | 85 | ObjectOneOf := 'ObjectOneOf' '(' Individual { Individual }')' 86 | 87 | ObjectSomeValuesFrom := 'ObjectSomeValuesFrom' '(' ObjectPropertyExpression ClassExpression ')' 88 | 89 | ObjectAllValuesFrom := 'ObjectAllValuesFrom' '(' ObjectPropertyExpression ClassExpression ')' 90 | 91 | ObjectHasValue := 'ObjectHasValue' '(' ObjectPropertyExpression Individual ')' 92 | 93 | ObjectHasSelf := 'ObjectHasSelf' '(' ObjectPropertyExpression ')' 94 | 95 | ObjectMinCardinality := 'ObjectMinCardinality' '(' nonNegativeInteger ObjectPropertyExpression [ ClassExpression ] ')' 96 | 97 | ObjectMaxCardinality := 'ObjectMaxCardinality' '(' nonNegativeInteger ObjectPropertyExpression [ ClassExpression ] ')' 98 | 99 | ObjectExactCardinality := 'ObjectExactCardinality' '(' nonNegativeInteger ObjectPropertyExpression [ ClassExpression ] ')' 100 | 101 | DataSomeValuesFrom := 'DataSomeValuesFrom' '(' DataPropertyExpression { DataPropertyExpression } DataRange ')' 102 | 103 | DataAllValuesFrom := 'DataAllValuesFrom' '(' DataPropertyExpression { DataPropertyExpression } DataRange ')' 104 | 105 | DataHasValue := 'DataHasValue' '(' DataPropertyExpression Literal ')' 106 | 107 | DataMinCardinality := 'DataMinCardinality' '(' nonNegativeInteger DataPropertyExpression [ DataRange ] ')' 108 | 109 | DataMaxCardinality := 'DataMaxCardinality' '(' nonNegativeInteger DataPropertyExpression [ DataRange ] ')' 110 | 111 | DataExactCardinality := 'DataExactCardinality' '(' nonNegativeInteger DataPropertyExpression [ DataRange ] ')' 112 | 113 | 114 | 115 | Axiom := Declaration | ClassAxiom | ObjectPropertyAxiom | DataPropertyAxiom | DatatypeDefinition | HasKey | Assertion | 
AnnotationAxiom 116 | 117 | 118 | 119 | ClassAxiom := SubClassOf | EquivalentClasses | DisjointClasses | DisjointUnion 120 | 121 | SubClassOf := 'SubClassOf' '(' axiomAnnotations subClassExpression superClassExpression ')' 122 | subClassExpression := ClassExpression 123 | superClassExpression := ClassExpression 124 | 125 | EquivalentClasses := 'EquivalentClasses' '(' axiomAnnotations ClassExpression ClassExpression { ClassExpression } ')' 126 | 127 | DisjointClasses := 'DisjointClasses' '(' axiomAnnotations ClassExpression ClassExpression { ClassExpression } ')' 128 | 129 | DisjointUnion := 'DisjointUnion' '(' axiomAnnotations Class disjointClassExpressions ')' 130 | 131 | 132 | 133 | ObjectPropertyAxiom := 134 | SubObjectPropertyOf | EquivalentObjectProperties | 135 | DisjointObjectProperties | InverseObjectProperties | 136 | ObjectPropertyDomain | ObjectPropertyRange | 137 | FunctionalObjectProperty | InverseFunctionalObjectProperty | 138 | ReflexiveObjectProperty | IrreflexiveObjectProperty | 139 | SymmetricObjectProperty | AsymmetricObjectProperty | 140 | TransitiveObjectProperty 141 | 142 | SubObjectPropertyOf := 'SubObjectPropertyOf' '(' axiomAnnotations subObjectPropertyExpression superObjectPropertyExpression ')' 143 | subObjectPropertyExpression := ObjectPropertyExpression | propertyExpressionChain 144 | propertyExpressionChain := 'ObjectPropertyChain' '(' ObjectPropertyExpression ObjectPropertyExpression { ObjectPropertyExpression } ')' 145 | superObjectPropertyExpression := ObjectPropertyExpression 146 | 147 | EquivalentObjectProperties := 'EquivalentObjectProperties' '(' axiomAnnotations ObjectPropertyExpression ObjectPropertyExpression { ObjectPropertyExpression } ')' 148 | 149 | DisjointObjectProperties := 'DisjointObjectProperties' '(' axiomAnnotations ObjectPropertyExpression ObjectPropertyExpression { ObjectPropertyExpression } ')' 150 | 151 | ObjectPropertyDomain := 'ObjectPropertyDomain' '(' axiomAnnotations ObjectPropertyExpression ClassExpression ')' 152 | 153 | ObjectPropertyRange := 'ObjectPropertyRange' '(' axiomAnnotations ObjectPropertyExpression ClassExpression ')' 154 | 155 | InverseObjectProperties := 'InverseObjectProperties' '(' axiomAnnotations ObjectPropertyExpression ObjectPropertyExpression ')' 156 | 157 | FunctionalObjectProperty := 'FunctionalObjectProperty' '(' axiomAnnotations ObjectPropertyExpression ')' 158 | 159 | InverseFunctionalObjectProperty := 'InverseFunctionalObjectProperty' '(' axiomAnnotations ObjectPropertyExpression ')' 160 | 161 | ReflexiveObjectProperty := 'ReflexiveObjectProperty' '(' axiomAnnotations ObjectPropertyExpression ')' 162 | 163 | IrreflexiveObjectProperty := 'IrreflexiveObjectProperty' '(' axiomAnnotations ObjectPropertyExpression ')' 164 | 165 | SymmetricObjectProperty := 'SymmetricObjectProperty' '(' axiomAnnotations ObjectPropertyExpression ')' 166 | 167 | AsymmetricObjectProperty := 'AsymmetricObjectProperty' '(' axiomAnnotations ObjectPropertyExpression ')' 168 | 169 | TransitiveObjectProperty := 'TransitiveObjectProperty' '(' axiomAnnotations ObjectPropertyExpression ')' 170 | 171 | 172 | 173 | DataPropertyAxiom := 174 | SubDataPropertyOf | EquivalentDataProperties | DisjointDataProperties | 175 | DataPropertyDomain | DataPropertyRange | FunctionalDataProperty 176 | 177 | SubDataPropertyOf := 'SubDataPropertyOf' '(' axiomAnnotations subDataPropertyExpression superDataPropertyExpression ')' 178 | subDataPropertyExpression := DataPropertyExpression 179 | superDataPropertyExpression := DataPropertyExpression 180 | 181 | 
EquivalentDataProperties := 'EquivalentDataProperties' '(' axiomAnnotations DataPropertyExpression DataPropertyExpression { DataPropertyExpression } ')' 182 | 183 | DisjointDataProperties := 'DisjointDataProperties' '(' axiomAnnotations DataPropertyExpression DataPropertyExpression { DataPropertyExpression } ')' 184 | 185 | DataPropertyDomain := 'DataPropertyDomain' '(' axiomAnnotations DataPropertyExpression ClassExpression ')' 186 | 187 | DataPropertyRange := 'DataPropertyRange' '(' axiomAnnotations DataPropertyExpression DataRange ')' 188 | 189 | FunctionalDataProperty := 'FunctionalDataProperty' '(' axiomAnnotations DataPropertyExpression ')' 190 | 191 | 192 | 193 | DatatypeDefinition := 'DatatypeDefinition' '(' axiomAnnotations Datatype DataRange ')' 194 | 195 | 196 | Assertion := 197 | SameIndividual | DifferentIndividuals | ClassAssertion | 198 | ObjectPropertyAssertion | NegativeObjectPropertyAssertion | 199 | DataPropertyAssertion | NegativeDataPropertyAssertion 200 | 201 | sourceIndividual := Individual 202 | targetIndividual := Individual 203 | targetValue := Literal 204 | 205 | SameIndividual := 'SameIndividual' '(' axiomAnnotations Individual Individual { Individual } ')' 206 | 207 | DifferentIndividuals := 'DifferentIndividuals' '(' axiomAnnotations Individual Individual { Individual } ')' 208 | 209 | ClassAssertion := 'ClassAssertion' '(' axiomAnnotations ClassExpression Individual ')' 210 | 211 | ObjectPropertyAssertion := 'ObjectPropertyAssertion' '(' axiomAnnotations ObjectPropertyExpression sourceIndividual targetIndividual ')' 212 | 213 | NegativeObjectPropertyAssertion := 'NegativeObjectPropertyAssertion' '(' axiomAnnotations ObjectPropertyExpression sourceIndividual targetIndividual ')' 214 | 215 | DataPropertyAssertion := 'DataPropertyAssertion' '(' axiomAnnotations DataPropertyExpression sourceIndividual targetValue ')' 216 | 217 | NegativeDataPropertyAssertion := 'NegativeDataPropertyAssertion' '(' axiomAnnotations DataPropertyExpression sourceIndividual targetValue ')' 218 | -------------------------------------------------------------------------------- /extract-grammar/src/__tests__/multiple_definitions.grammar: -------------------------------------------------------------------------------- 1 | sourceIndividual := Individual 2 | targetIndividual := Individual 3 | targetValue := Literal 4 | -------------------------------------------------------------------------------- /extract-grammar/src/__tests__/parseBnf.test.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs'); 2 | const path = require('path'); 3 | 4 | const parser = require('../parseBnf'); 5 | 6 | const readGrammar = filename => { 7 | return fs.readFileSync(path.join(__dirname, `${filename}.grammar`), 'utf8'); 8 | }; 9 | 10 | describe('parseBnf', () => { 11 | test('simple definition', () => { 12 | const grammar = readGrammar('simple'); 13 | 14 | const parsed = parser.parseGrammar(grammar)[0]; 15 | expect(parsed.lhs).toBe('LHS'); 16 | expect(parsed.rhs).toBe('RHS'); 17 | }); 18 | 19 | test('multiple definitions', () => { 20 | const grammar = readGrammar('multiple_definitions'); 21 | 22 | const parsed = parser.parseGrammar(grammar); 23 | expect(parsed.length).toBe(3); 24 | }); 25 | 26 | test('RHS alternatives singleline', () => { 27 | const grammar = readGrammar('rhs_multiple_singleline'); 28 | 29 | const parsed = parser.parseGrammar(grammar)[0]; 30 | expect(parsed.lhs).toBe('ClassAxiom'); 31 | expect(Array.isArray(parsed.rhs)).toBe(true); 32 | 
expect(parsed.rhs.length).toBe(4); 33 | }); 34 | 35 | test('RHS alternatives multiline', () => { 36 | const grammar = readGrammar('rhs_multiple_multiline'); 37 | 38 | const parsed = parser.parseGrammar(grammar)[0]; 39 | expect(parsed.lhs).toBe('Assertion'); 40 | expect(Array.isArray(parsed.rhs)).toBe(true); 41 | }); 42 | 43 | test('RHS function with single param', () => { 44 | const grammar = readGrammar('rhs_function_single_param'); 45 | 46 | const parsed = parser.parseGrammar(grammar)[0]; 47 | expect(parsed.lhs).toBe('InverseObjectProperty'); 48 | expect(parsed.rhs.type).toBe('function'); 49 | expect(parsed.rhs.name).toBe('ObjectInverseOf'); 50 | expect(parsed.rhs.params[0]).toBe('ObjectProperty'); 51 | }); 52 | 53 | test('RHS function with multiple params', () => { 54 | const grammar = readGrammar('rhs_function'); 55 | 56 | const parsed = parser.parseGrammar(grammar)[0]; 57 | expect(parsed.lhs).toBe('TransitiveObjectProperty'); 58 | expect(parsed.rhs.type).toBe('function'); 59 | expect(parsed.rhs.name).toBe('TransitiveObjectProperty'); 60 | expect(parsed.rhs.params.length).toBe(2); 61 | }); 62 | 63 | test('RHS function with mixed params', () => { 64 | const grammar = readGrammar('rhs_mixed_function'); 65 | 66 | const parsed = parser.parseGrammar(grammar)[0]; 67 | expect(parsed.lhs).toBe('DisjointDataProperties'); 68 | expect(parsed.rhs.type).toBe('function'); 69 | expect(parsed.rhs.name).toBe('DisjointDataProperties'); 70 | expect(parsed.rhs.params.length).toBe(4); 71 | }); 72 | 73 | test('RHS zero or more', () => { 74 | const grammar = readGrammar('rhs_zero_or_more'); 75 | 76 | const parsed = parser.parseGrammar(grammar)[0]; 77 | expect(parsed.lhs).toBe('axiomAnnotations'); 78 | expect(parsed.rhs.type).toBe('zeroOrMore'); 79 | expect(parsed.rhs.name).toBe('Annotation'); 80 | }); 81 | 82 | test('RHS zero or one', () => { 83 | const grammar = readGrammar('rhs_zero_or_one'); 84 | 85 | const parsed = parser.parseGrammar(grammar)[0]; 86 | expect(parsed.lhs).toBe('axiomAnnotations'); 87 | expect(parsed.rhs.type).toBe('zeroOrOne'); 88 | expect(parsed.rhs.name).toBe('Annotation'); 89 | }); 90 | 91 | describe('examples', () => { 92 | test('section_1', () => { 93 | const grammar = readGrammar('section_1'); 94 | parser.parseGrammar(grammar); 95 | }); 96 | 97 | test.skip('full', () => { 98 | const grammar = readGrammar('full'); 99 | parser.parseGrammar(grammar); 100 | }); 101 | 102 | test('implemented', () => { 103 | const grammar = readGrammar('implemented'); 104 | parser.parseGrammar(grammar); 105 | }); 106 | }); 107 | }); 108 | -------------------------------------------------------------------------------- /extract-grammar/src/__tests__/rhs_function.grammar: -------------------------------------------------------------------------------- 1 | TransitiveObjectProperty := 'TransitiveObjectProperty' '(' axiomAnnotations ObjectPropertyExpression ')' 2 | -------------------------------------------------------------------------------- /extract-grammar/src/__tests__/rhs_function_single_param.grammar: -------------------------------------------------------------------------------- 1 | InverseObjectProperty := 'ObjectInverseOf' '(' ObjectProperty ')' 2 | -------------------------------------------------------------------------------- /extract-grammar/src/__tests__/rhs_mixed_function.grammar: -------------------------------------------------------------------------------- 1 | DisjointDataProperties := 'DisjointDataProperties' '(' axiomAnnotations DataPropertyExpression DataPropertyExpression { 
DataPropertyExpression } ')' 2 | -------------------------------------------------------------------------------- /extract-grammar/src/__tests__/rhs_multiple_multiline.grammar: -------------------------------------------------------------------------------- 1 | Assertion := 2 | SameIndividual | DifferentIndividuals | ClassAssertion | 3 | ObjectPropertyAssertion | NegativeObjectPropertyAssertion | 4 | DataPropertyAssertion | NegativeDataPropertyAssertion 5 | -------------------------------------------------------------------------------- /extract-grammar/src/__tests__/rhs_multiple_singleline.grammar: -------------------------------------------------------------------------------- 1 | ClassAxiom := SubClassOf | EquivalentClasses | DisjointClasses | DisjointUnion 2 | -------------------------------------------------------------------------------- /extract-grammar/src/__tests__/rhs_zero_or_more.grammar: -------------------------------------------------------------------------------- 1 | axiomAnnotations := { Annotation } 2 | -------------------------------------------------------------------------------- /extract-grammar/src/__tests__/rhs_zero_or_one.grammar: -------------------------------------------------------------------------------- 1 | axiomAnnotations := [ Annotation ] 2 | -------------------------------------------------------------------------------- /extract-grammar/src/__tests__/section_1.grammar: -------------------------------------------------------------------------------- 1 | Assertion := 2 | SameIndividual | DifferentIndividuals | ClassAssertion | 3 | ObjectPropertyAssertion | NegativeObjectPropertyAssertion | 4 | DataPropertyAssertion | NegativeDataPropertyAssertion 5 | 6 | sourceIndividual := Individual 7 | targetIndividual := Individual 8 | targetValue := Literal 9 | 10 | SameIndividual := 'SameIndividual' '(' axiomAnnotations Individual Individual { Individual } ')' 11 | 12 | DifferentIndividuals := 'DifferentIndividuals' '(' axiomAnnotations Individual Individual { Individual } ')' 13 | 14 | ClassAssertion := 'ClassAssertion' '(' axiomAnnotations ClassExpression Individual ')' 15 | 16 | ObjectPropertyAssertion := 'ObjectPropertyAssertion' '(' axiomAnnotations ObjectPropertyExpression sourceIndividual targetIndividual ')' 17 | 18 | NegativeObjectPropertyAssertion := 'NegativeObjectPropertyAssertion' '(' axiomAnnotations ObjectPropertyExpression sourceIndividual targetIndividual ')' 19 | 20 | DataPropertyAssertion := 'DataPropertyAssertion' '(' axiomAnnotations DataPropertyExpression sourceIndividual targetValue ')' 21 | 22 | NegativeDataPropertyAssertion := 'NegativeDataPropertyAssertion' '(' axiomAnnotations DataPropertyExpression sourceIndividual targetValue ')' 23 | -------------------------------------------------------------------------------- /extract-grammar/src/__tests__/simple.grammar: -------------------------------------------------------------------------------- 1 | LHS := RHS 2 | -------------------------------------------------------------------------------- /extract-grammar/src/bnf_grammar.pegjs: -------------------------------------------------------------------------------- 1 | start 2 | = Grammar 3 | 4 | Grammar 5 | = Definition* 6 | 7 | Divider 8 | = ":=" / "::=" 9 | 10 | OpenParens 11 | = "'('" 12 | 13 | CloseParens 14 | = "')'" 15 | 16 | Definition 17 | = lhs:LHS _ Divider _ rhs:RHS _ { return { "lhs": lhs, "rhs": rhs } } 18 | 19 | LHS 20 | = Identifier 21 | 22 | RHS 23 | = Alternatives / Function 24 | 25 | NIdentifier 26 | = ZeroOrMore / ZeroOrOne / 
Identifier 27 | 28 | Identifier 29 | = [a-zA-Z]+ { return text(); } 30 | 31 | ZeroOrMore 32 | = "{" _ ident:Identifier _ "}" { 33 | return { 34 | type: "zeroOrMore", 35 | name: ident, 36 | }; 37 | } 38 | 39 | ZeroOrOne 40 | = "[" _ ident:Identifier _ "]" { 41 | return { 42 | type: "zeroOrOne", 43 | name: ident, 44 | }; 45 | } 46 | 47 | IdentifierList 48 | = idents:(NIdentifier _)+ { 49 | return idents.map(match => match[0]); 50 | } 51 | 52 | Literal 53 | = "'" ident:Identifier "'" { return ident; } 54 | 55 | Alternatives 56 | = alternatives:Alternative+ { 57 | if (alternatives.length === 1) { 58 | return alternatives[0]; 59 | } 60 | return alternatives; 61 | } 62 | 63 | Alternative 64 | = ident:(NIdentifier)+ AlternativeDivider? { return ident[0]; } 65 | 66 | AlternativeDivider 67 | = _ "|" _? 68 | 69 | Function 70 | = fnName:Literal _ OpenParens _ params:IdentifierList CloseParens { 71 | return { 72 | type: 'function', 73 | name: fnName, 74 | params: params 75 | }; 76 | } 77 | 78 | _ "whitespace" 79 | = [ \t\n\r]* 80 | 81 | -------------------------------------------------------------------------------- /extract-grammar/src/generatePb2.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs'); 2 | 3 | const grammar = require('../build/intermediate.json'); 4 | 5 | const addHeader = protobuf => { 6 | protobuf.file += 'syntax = "proto2";\n'; 7 | protobuf.file += '\n'; 8 | protobuf.file += 'package rlay.ontology;\n'; 9 | protobuf.file += '\n'; 10 | }; 11 | 12 | const addMessages = (protobuf, grammar) => { 13 | const { kinds } = grammar; 14 | kinds.forEach(kind => { 15 | // message header 16 | protobuf.file += `message ${kind.name} {\n`; 17 | 18 | kind.fields.forEach((field, i) => { 19 | // padding 20 | protobuf.file += ' '; 21 | if (field.kind.endsWith('[]')) { 22 | protobuf.file += `repeated`; 23 | } else if (field.required) { 24 | protobuf.file += `required`; 25 | } else { 26 | protobuf.file += `optional`; 27 | } 28 | protobuf.file += ` bytes ${field.name} = ${i + 1};\n`; 29 | }); 30 | 31 | // message closing 32 | protobuf.file += '}\n'; 33 | protobuf.file += '\n'; 34 | }); 35 | }; 36 | 37 | const main = () => { 38 | const protobuf = { file: '' }; 39 | 40 | addHeader(protobuf); 41 | addMessages(protobuf, grammar); 42 | 43 | const protobufFile = protobuf.file; 44 | console.log(protobufFile); 45 | fs.writeFile('build/ontology_pb2.proto', protobufFile, function(err) { 46 | if (err) throw err; 47 | }); 48 | }; 49 | 50 | main(); 51 | -------------------------------------------------------------------------------- /extract-grammar/src/generateSolidityContracts.js: -------------------------------------------------------------------------------- 1 | const varint = require('varint'); 2 | 3 | const libName = 'OntologyStorageLib'; 4 | const indentLevel = i => Array(i * 4 + 1).join(' '); 5 | 6 | const storageMapName = kind => { 7 | return `${kind.name.toLowerCase()}_hash_map`; 8 | }; 9 | 10 | const storageDelegateName = kind => { 11 | return `${kind.name.toLowerCase()}_storage`; 12 | }; 13 | 14 | const codecName = kind => { 15 | return `${kind.name}Codec`; 16 | }; 17 | 18 | const codecClassName = kind => { 19 | return `${codecName(kind)}.${kind.name}`; 20 | }; 21 | 22 | // returns a rendered string of kind params for use in function signatures 23 | const kindParamsWithType = kind => { 24 | return kind.fields 25 | .map(field => `${solidityTypeForParamKind(field.kind)} _${field.name}`) 26 | .join(', '); 27 | }; 28 | 29 | const kindParams = kind => { 
30 | return kind.fields.map(field => `_${field.name}`).join(', '); 31 | }; 32 | 33 | const solidityTypeForParamKind = paramKind => { 34 | if (paramKind.endsWith('[]')) { 35 | return 'bytes[]'; 36 | } else { 37 | return 'bytes'; 38 | } 39 | }; 40 | 41 | const paramsToInstance = kind => { 42 | let line = ''; 43 | 44 | line += indentLevel(2); 45 | line += `${codecClassName(kind)} memory _instance = ${codecClassName(kind)}(`; 46 | line += kindParams(kind); 47 | line += ');\n'; 48 | 49 | return line; 50 | }; 51 | 52 | const addCidConstant = (protobuf, kind, i) => { 53 | const base = 0xc000; 54 | const cidPrefixNumber = base + i; 55 | 56 | const bytes = Buffer.from(varint.encode(cidPrefixNumber)); 57 | // TODO: per-kind value 58 | protobuf.file += `${indentLevel(1)}bytes6 constant cidPrefix${ 59 | kind.name 60 | } = 0x01${bytes.toString('hex')}1b20;\n`; 61 | }; 62 | 63 | const addFunctionHashKind = (protobuf, kind) => { 64 | protobuf.file += indentLevel(1); 65 | protobuf.file += `function hash${kind.name}(${codecClassName( 66 | kind 67 | )} memory _instance)`; 68 | protobuf.file += ` public view returns (bytes32) {\n`; 69 | 70 | protobuf.file += `${indentLevel(2)}bytes memory enc = ${codecName( 71 | kind 72 | )}.encode(_instance);\n`; 73 | protobuf.file += `${indentLevel(2)}bytes32 hash = keccak256(enc);\n`; 74 | protobuf.file += '\n'; 75 | protobuf.file += `${indentLevel(2)}return hash;\n`; 76 | 77 | protobuf.file += `${indentLevel(1)}}\n`; 78 | }; 79 | 80 | const addFunctionCalculateHashKind = (protobuf, kind) => { 81 | protobuf.file += indentLevel(1); 82 | protobuf.file += `function calculateHash${kind.name}(`; 83 | protobuf.file += kindParamsWithType(kind); 84 | protobuf.file += ') public view returns (bytes32) {\n'; 85 | 86 | protobuf.file += paramsToInstance(kind); 87 | 88 | protobuf.file += `${indentLevel(2)}return hash${kind.name}(_instance);\n`; 89 | 90 | protobuf.file += `${indentLevel(1)}}\n`; 91 | }; 92 | 93 | const addFunctionCalculateCidKind = (protobuf, kind) => { 94 | protobuf.file += indentLevel(1); 95 | protobuf.file += `function calculateCid${kind.name}(`; 96 | protobuf.file += kindParamsWithType(kind); 97 | protobuf.file += ') public view returns (bytes _cid) {\n'; 98 | 99 | protobuf.file += paramsToInstance(kind); 100 | 101 | protobuf.file += `${indentLevel(2)}bytes32 _hash = hash${ 102 | kind.name 103 | }(_instance);\n`; 104 | protobuf.file += `${indentLevel(2)}return cid.wrapInCid(cidPrefix${ 105 | kind.name 106 | }, _hash);\n`; 107 | 108 | protobuf.file += `${indentLevel(1)}}\n`; 109 | }; 110 | 111 | const addStorageField = (protobuf, kind) => { 112 | protobuf.file += indentLevel(1); 113 | protobuf.file += `mapping (bytes32 => ${kind.name}Codec.${kind.name})`; 114 | protobuf.file += ` private ${storageMapName(kind)};\n`; 115 | }; 116 | 117 | const addKindStorageDelegateField = (protobuf, kind) => { 118 | protobuf.file += indentLevel(1); 119 | protobuf.file += `I${kind.name}Storage public ${storageDelegateName( 120 | kind 121 | )};\n`; 122 | }; 123 | 124 | const addStoredEvent = (protobuf, kind) => { 125 | protobuf.file += indentLevel(1); 126 | protobuf.file += `event ${kind.name}Stored(bytes _cid);\n`; 127 | }; 128 | 129 | const addFunctionStoreKind = (protobuf, kind) => { 130 | protobuf.file += indentLevel(1); 131 | protobuf.file += `function store${kind.name}(`; 132 | protobuf.file += kindParamsWithType(kind); 133 | protobuf.file += ') public returns (bytes) {\n'; 134 | 135 | protobuf.file += paramsToInstance(kind); 136 | 137 | protobuf.file += `${indentLevel(2)}bytes32 
hash = hash${ 138 | kind.name 139 | }(_instance);\n`; 140 | 141 | protobuf.file += '\n'; 142 | protobuf.file += `${indentLevel(2)}${storageMapName( 143 | kind 144 | )}[hash] = _instance;\n`; 145 | protobuf.file += '\n'; 146 | 147 | protobuf.file += `${indentLevel( 148 | 2 149 | )}bytes memory _cid = cid.wrapInCid(cidPrefix${kind.name}, hash);\n`; 150 | 151 | protobuf.file += `${indentLevel(2)}emit ${kind.name}Stored(_cid);\n`; 152 | 153 | protobuf.file += `${indentLevel(2)}return _cid;\n`; 154 | 155 | protobuf.file += `${indentLevel(1)}}\n`; 156 | }; 157 | 158 | const addFunctionDelegateStoreKind = (protobuf, kind) => { 159 | protobuf.file += indentLevel(1); 160 | protobuf.file += `function store${kind.name}(`; 161 | protobuf.file += kindParamsWithType(kind); 162 | protobuf.file += ') public returns (bytes) {\n'; 163 | 164 | protobuf.file += `${indentLevel(2)}bytes memory _cid = ${storageDelegateName( 165 | kind 166 | )}.store${kind.name}(${kindParams(kind)});\n`; 167 | 168 | protobuf.file += `${indentLevel(2)}emit ${kind.name}Stored(_cid);\n`; 169 | 170 | protobuf.file += `${indentLevel(2)}return _cid;\n`; 171 | 172 | protobuf.file += `${indentLevel(1)}}\n`; 173 | }; 174 | 175 | const addFunctionInterfaceStoreKind = (protobuf, kind) => { 176 | protobuf.file += indentLevel(1); 177 | protobuf.file += `function store${kind.name}(`; 178 | protobuf.file += kindParamsWithType(kind); 179 | protobuf.file += ') public returns (bytes);\n'; 180 | }; 181 | 182 | const addFunctionRetrieveKind = (protobuf, kind) => { 183 | protobuf.file += indentLevel(1); 184 | protobuf.file += `function retrieve${kind.name}(bytes _cid)`; 185 | protobuf.file += ' external view returns ('; 186 | protobuf.file += kindParamsWithType(kind); 187 | protobuf.file += ') {\n'; 188 | 189 | protobuf.file += `${indentLevel(2)}bytes32 _hash = cid.unwrapCid(_cid);\n`; 190 | protobuf.file += `${indentLevel(2)}${codecClassName( 191 | kind 192 | )} memory _instance = ${storageMapName(kind)}[_hash];\n`; 193 | 194 | protobuf.file += indentLevel(2); 195 | protobuf.file += 'return ('; 196 | protobuf.file += kind.fields 197 | .map(field => `_instance.${field.name}`) 198 | .join(', '); 199 | protobuf.file += ');\n'; 200 | 201 | protobuf.file += `${indentLevel(1)}}\n`; 202 | }; 203 | 204 | const addFunctionDelegateRetrieveKind = (protobuf, kind) => { 205 | protobuf.file += indentLevel(1); 206 | protobuf.file += `function retrieve${kind.name}(bytes _cid)`; 207 | protobuf.file += ' external view returns ('; 208 | protobuf.file += kindParamsWithType(kind); 209 | protobuf.file += ') {\n'; 210 | 211 | protobuf.file += indentLevel(2); 212 | protobuf.file += `return ${storageDelegateName(kind)}.retrieve${ 213 | kind.name 214 | }(_cid);\n`; 215 | 216 | protobuf.file += `${indentLevel(1)}}\n`; 217 | }; 218 | 219 | const addFunctionInterfaceRetrieveKind = (protobuf, kind) => { 220 | protobuf.file += indentLevel(1); 221 | protobuf.file += `function retrieve${kind.name}(bytes _cid)`; 222 | protobuf.file += ' external view returns ('; 223 | protobuf.file += kindParamsWithType(kind); 224 | protobuf.file += ');\n'; 225 | }; 226 | 227 | const addOntologyStorageContract = (protobuf, grammar) => { 228 | const contractName = 'OntologyStorage'; 229 | 230 | const addConstructor = (protobuf, grammar) => { 231 | protobuf.file += `${indentLevel(1)}constructor(`; 232 | protobuf.file += 'address[] _storage_delegate_addrs'; 233 | protobuf.file += ') {\n'; 234 | 235 | grammar.kinds.forEach((kind, i) => { 236 | protobuf.file += 
`${indentLevel(2)}${storageDelegateName(kind)} = I${ 237 | kind.name 238 | }Storage(_storage_delegate_addrs[${i}]);\n`; 239 | }); 240 | 241 | protobuf.file += `${indentLevel(1)}}\n`; 242 | }; 243 | 244 | const addKindSection = (protobuf, kind) => { 245 | addFunctionDelegateStoreKind(protobuf, kind); 246 | protobuf.file += '\n'; 247 | addFunctionDelegateRetrieveKind(protobuf, kind); 248 | protobuf.file += '\n'; 249 | }; 250 | 251 | protobuf.file += `contract ${contractName} is ${grammar.kinds 252 | .map(n => `I${n.name}Storage`) 253 | .join(', ')}{\n`; 254 | grammar.kinds.forEach(kind => addKindStorageDelegateField(protobuf, kind)); 255 | protobuf.file += '\n'; 256 | grammar.kinds.forEach(kind => addStoredEvent(protobuf, kind)); 257 | protobuf.file += '\n'; 258 | addConstructor(protobuf, grammar); 259 | protobuf.file += '\n'; 260 | grammar.kinds.forEach(kind => addKindSection(protobuf, kind)); 261 | protobuf.file += '}\n'; 262 | protobuf.file += '\n'; 263 | }; 264 | 265 | const addKindStorageContract = (protobuf, kind, i) => { 266 | const contractName = `${kind.name}Storage`; 267 | 268 | protobuf.file += `contract ${contractName} is I${contractName} {\n`; 269 | addStorageField(protobuf, kind); 270 | protobuf.file += '\n'; 271 | addCidConstant(protobuf, kind, i); 272 | protobuf.file += '\n'; 273 | addStoredEvent(protobuf, kind); 274 | protobuf.file += '\n'; 275 | addFunctionStoreKind(protobuf, kind); 276 | protobuf.file += '\n'; 277 | addFunctionRetrieveKind(protobuf, kind); 278 | protobuf.file += '\n'; 279 | addFunctionHashKind(protobuf, kind); 280 | protobuf.file += '\n'; 281 | addFunctionCalculateCidKind(protobuf, kind); 282 | protobuf.file += '}\n'; 283 | protobuf.file += '\n'; 284 | }; 285 | 286 | const addKindStorageContracts = (protobuf, grammar) => { 287 | grammar.kinds.forEach((kind, i) => addKindStorageContract(protobuf, kind, i)); 288 | }; 289 | 290 | const addKindStorageInterfaceContract = (protobuf, kind) => { 291 | const contractName = `I${kind.name}Storage`; 292 | 293 | protobuf.file += `interface ${contractName} {\n`; 294 | addFunctionInterfaceStoreKind(protobuf, kind); 295 | protobuf.file += '\n'; 296 | addFunctionInterfaceRetrieveKind(protobuf, kind); 297 | protobuf.file += '\n'; 298 | protobuf.file += '}\n'; 299 | protobuf.file += '\n'; 300 | }; 301 | 302 | const addKindStorageInterfaceContracts = (protobuf, grammar) => { 303 | grammar.kinds.forEach(kind => 304 | addKindStorageInterfaceContract(protobuf, kind) 305 | ); 306 | }; 307 | 308 | module.exports = { 309 | addOntologyStorageContract, 310 | addKindStorageContracts, 311 | addKindStorageInterfaceContracts, 312 | }; 313 | -------------------------------------------------------------------------------- /extract-grammar/src/generateSolidty.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs'); 2 | 3 | const grammar = require('../build/intermediate.json'); 4 | const addOntologyStorageContract = require('./generateSolidityContracts.js') 5 | .addOntologyStorageContract; 6 | const addOntologyStorageLibraryContract = require('./generateSolidityContracts.js') 7 | .addOntologyStorageLibraryContract; 8 | const addKindStorageInterfaceContracts = require('./generateSolidityContracts.js') 9 | .addKindStorageInterfaceContracts; 10 | const addKindStorageContracts = require('./generateSolidityContracts.js') 11 | .addKindStorageContracts; 12 | 13 | const indent = i => Array(i + 1).join(' '); 14 | const indentLevel = i => Array(i * 4 + 1).join(' '); 15 | 16 | const addHeader = 
protobuf => { 17 | protobuf.file += 'pragma solidity ^0.4.21;\n'; 18 | protobuf.file += 'pragma experimental ABIEncoderV2;\n'; 19 | protobuf.file += '\n'; 20 | protobuf.file += 'import "./cid.sol";\n'; 21 | protobuf.file += 'import "./pb_mod.sol";\n'; 22 | protobuf.file += '\n'; 23 | }; 24 | 25 | const main = () => { 26 | const solidity = { file: '' }; 27 | 28 | addHeader(solidity); 29 | addKindStorageInterfaceContracts(solidity, grammar); 30 | addKindStorageContracts(solidity, grammar); 31 | addOntologyStorageContract(solidity, grammar); 32 | 33 | const solidityFile = solidity.file; 34 | console.log(solidityFile); 35 | fs.writeFile('build/OntologyStorage.sol', solidityFile, function(err) { 36 | if (err) throw err; 37 | }); 38 | }; 39 | 40 | main(); 41 | -------------------------------------------------------------------------------- /extract-grammar/src/generateV0FieldMapping.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs'); 2 | 3 | const grammar = require('../build/intermediate.json'); 4 | 5 | const main = () => { 6 | const fields = {}; 7 | grammar.kinds.forEach(kind => { 8 | kind.fields.forEach(field => { 9 | if (!fields[field.name]) { 10 | fields[field.name] = 0; 11 | } 12 | fields[field.name] += 1; 13 | }) 14 | }); 15 | 16 | const fieldEntries = Array.from(Object.entries(fields)); 17 | fieldEntries.sort((a, b) => b[1] - a[1]); 18 | 19 | let mapping = {}; 20 | fieldEntries.forEach(([fieldName, count], i) => { 21 | mapping[i] = fieldName; 22 | }); 23 | const mappingContents = JSON.stringify(mapping, null, 4); 24 | console.log(mappingContents); 25 | fs.writeFile('build/v0_field_mapping.json', mappingContents, function(err) { 26 | if (err) throw err; 27 | }); 28 | }; 29 | 30 | main(); 31 | -------------------------------------------------------------------------------- /extract-grammar/src/generateV0Intermediate.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs'); 2 | const varint = require('varint'); 3 | 4 | const parsed = require('../build/grammar.json').parsed; 5 | const paramsConfig = require('./params.js'); 6 | 7 | const checkDependencies = parsedGrammar => { 8 | const lhsIdentifiers = parsedGrammar.map(definition => definition.lhs); 9 | 10 | const checkRHSArray = alternatives => { 11 | alternatives.forEach(alternative => { 12 | const exists = lhsIdentifiers.includes(alternative); 13 | if (!exists) { 14 | console.warn('RHS alternative', alternative, 'does not exist'); 15 | } 16 | }); 17 | }; 18 | 19 | parsedGrammar.forEach(definition => { 20 | if (Array.isArray(definition.rhs)) { 21 | checkRHSArray(definition.rhs); 22 | } else if (definition.rhs.type === 'function') { 23 | checkRHSArray(definition.rhs.params); 24 | } else if (definition.rhs.type === 'zeroOrMore') { 25 | checkRHSArray([definition.rhs.data]); 26 | } else { 27 | console.warn(`Checking of RHS for ${definition.lhs} not implemented yet`); 28 | } 29 | }); 30 | }; 31 | 32 | const kindFieldsFromFunction = funcExpression => { 33 | const mappers = paramsConfig.mappers; 34 | let mapper = mappers[funcExpression.name]; 35 | if (!mapper) { 36 | console.error(`No function param mapper found for ${funcExpression.name}`); 37 | return []; 38 | } 39 | mapper = mapper.functionParams; 40 | if (!mapper) { 41 | console.error(`No function param mapper found for ${funcExpression.name}`); 42 | return []; 43 | } 44 | 45 | return mapper(funcExpression.params); 46 | }; 47 | 48 | const kindFieldsFromAxiom = (kind, axiom) 
=> { 49 | let isAxiomApplicable = true; 50 | 51 | const mappers = paramsConfig.mappers; 52 | const mapper = mappers[axiom.rhs.name]; 53 | if (!mapper) { 54 | isAxiomApplicable = true; 55 | } else { 56 | const expressionCheck = mapper.checkExpressionKind; 57 | if (!expressionCheck) { 58 | isAxiomApplicable = true; 59 | } else { 60 | isAxiomApplicable = expressionCheck(kind.expressionKind); 61 | } 62 | } 63 | 64 | if (!isAxiomApplicable) { 65 | return []; 66 | } 67 | return kindFieldsFromFunction(axiom.rhs); 68 | }; 69 | 70 | const buildExpression = expression => { 71 | const kind = { 72 | name: expression.lhs, 73 | fields: [], 74 | }; 75 | 76 | if ( 77 | expression.rhs === 'IRI' || 78 | (Array.isArray(expression.rhs) && expression.rhs[0] === 'IRI') 79 | ) { 80 | // Base declarations don't get a IRI 81 | } else if (expression.rhs.type === 'function') { 82 | kind.fields = kind.fields.concat(kindFieldsFromFunction(expression.rhs)); 83 | } else { 84 | console.log( 85 | `Not implemented yet - buildExpression for ${JSON.stringify(expression)}` 86 | ); 87 | } 88 | 89 | return kind; 90 | }; 91 | 92 | const decorateKindWithAxiom = (kind, axiom) => { 93 | kind.fields = kind.fields.concat(kindFieldsFromAxiom(kind, axiom)); 94 | }; 95 | 96 | const restrictGrammar = parsedGammar => { 97 | const restrictDefinition = definition => { 98 | const mappers = paramsConfig.restrictGrammar; 99 | const mapper = mappers[definition.lhs]; 100 | if (!mapper) { 101 | return definition; 102 | } 103 | return mapper(definition); 104 | }; 105 | 106 | return parsedGammar.map(restrictDefinition); 107 | }; 108 | 109 | // Build all kinds for expressions in one group, e.g. 'ClassExpression' 110 | const buildExpressionsForGroup = (grammar, groupName) => { 111 | const groupExpressionKinds = []; 112 | const expressionsInGroup = grammar.find(n => n.lhs === groupName); 113 | 114 | const rhs = Array.isArray(expressionsInGroup.rhs) ? 
expressionsInGroup.rhs : [expressionsInGroup.rhs]; 115 | rhs.forEach(expressionIdent => { 116 | const expression = grammar.find(n => n.lhs === expressionIdent); 117 | const kind = buildExpression(expression); 118 | kind.expressionKind = groupName; 119 | 120 | groupExpressionKinds.push(kind); 121 | }); 122 | 123 | const axioms = grammar.find(n => n.lhs === 'Axiom'); 124 | groupExpressionKinds.forEach(kind => { 125 | axioms.rhs.forEach(axiomGroupIdent => { 126 | const axiomGroup = grammar.find(n => n.lhs === axiomGroupIdent); 127 | axiomGroup.rhs.forEach(axiomIdent => { 128 | const axiom = grammar.find(n => n.lhs === axiomIdent); 129 | 130 | decorateKindWithAxiom(kind, axiom); 131 | }); 132 | }); 133 | }); 134 | 135 | return groupExpressionKinds; 136 | }; 137 | 138 | const buildGrammar = parsedGrammar => { 139 | let kinds = []; 140 | 141 | const grammar = restrictGrammar(parsedGrammar); 142 | 143 | const classExpressionKinds = buildExpressionsForGroup( 144 | grammar, 145 | 'ClassExpression' 146 | ); 147 | const objectPropertyExpressionKinds = buildExpressionsForGroup( 148 | grammar, 'ObjectPropertyExpression' 149 | ); 150 | const dataPropertyExpressionKinds = buildExpressionsForGroup( 151 | grammar, 'DataPropertyExpression' 152 | ); 153 | // Though they are named AnnotationAxiom, they are more like expressions 154 | const annotationAxiomKinds = buildExpressionsForGroup( 155 | grammar, 156 | 'AnnotationAxiom' 157 | ); 158 | 159 | const otherKinds = []; 160 | const annotationDeclaration = grammar.find(n => n.lhs === 'Annotation'); 161 | otherKinds.push(buildExpression(annotationDeclaration)); 162 | // TODO: proper 163 | otherKinds.push({ 164 | name: 'Individual', 165 | fields: [ 166 | { 167 | name: 'annotations', 168 | kind: 'Annotation[]', 169 | }, 170 | { 171 | name: 'class_assertions', 172 | kind: 'ClassAssertion[]', 173 | }, 174 | { 175 | name: 'negative_class_assertions', 176 | kind: 'NegativeClassAssertion[]', 177 | }, 178 | { 179 | name: 'object_property_assertions', 180 | kind: 'ObjectPropertyAssertion[]', 181 | }, 182 | { 183 | name: 'negative_object_property_assertions', 184 | kind: 'NegativeObjectPropertyAssertion[]', 185 | }, 186 | { 187 | name: 'data_property_assertions', 188 | kind: 'DataPropertyAssertion[]', 189 | }, 190 | { 191 | name: 'negative_data_property_assertions', 192 | kind: 'NegativeDataPropertyAssertion[]', 193 | }, 194 | ], 195 | }); 196 | otherKinds.push({ 197 | name: 'AnnotationProperty', 198 | fields: [ 199 | { 200 | name: 'annotations', 201 | kind: 'Annotation[]', 202 | }, 203 | ], 204 | }); 205 | otherKinds.push({ 206 | name: 'ClassAssertion', 207 | fields: [ 208 | { 209 | name: 'annotations', 210 | kind: 'Annotation[]', 211 | }, 212 | { 213 | name: 'subject', 214 | kind: 'IRI', 215 | }, 216 | { 217 | name: 'class', 218 | kind: 'IRI', 219 | required: true, 220 | }, 221 | ], 222 | }); 223 | otherKinds.push({ 224 | name: 'NegativeClassAssertion', 225 | fields: [ 226 | { 227 | name: 'annotations', 228 | kind: 'Annotation[]', 229 | }, 230 | { 231 | name: 'subject', 232 | kind: 'IRI', 233 | }, 234 | { 235 | name: 'class', 236 | kind: 'IRI', 237 | required: true, 238 | }, 239 | ], 240 | }); 241 | otherKinds.push({ 242 | name: 'ObjectPropertyAssertion', 243 | fields: [ 244 | { 245 | name: 'annotations', 246 | kind: 'Annotation[]', 247 | }, 248 | { 249 | name: 'subject', 250 | kind: 'IRI', 251 | }, 252 | { 253 | name: 'property', 254 | kind: 'IRI', 255 | }, 256 | { 257 | name: 'target', 258 | kind: 'IRI', 259 | }, 260 | ], 261 | }); 262 | otherKinds.push({ 263 | name: 
'NegativeObjectPropertyAssertion', 264 | fields: [ 265 | { 266 | name: 'annotations', 267 | kind: 'Annotation[]', 268 | }, 269 | { 270 | name: 'subject', 271 | kind: 'IRI', 272 | }, 273 | { 274 | name: 'property', 275 | kind: 'IRI', 276 | }, 277 | { 278 | name: 'target', 279 | kind: 'IRI', 280 | }, 281 | ], 282 | }); 283 | otherKinds.push({ 284 | name: 'DataPropertyAssertion', 285 | fields: [ 286 | { 287 | name: 'annotations', 288 | kind: 'Annotation[]', 289 | }, 290 | { 291 | name: 'subject', 292 | kind: 'IRI', 293 | }, 294 | { 295 | name: 'property', 296 | kind: 'IRI', 297 | }, 298 | { 299 | name: 'target', 300 | kind: 'IRI', 301 | }, 302 | ], 303 | }); 304 | otherKinds.push({ 305 | name: 'NegativeDataPropertyAssertion', 306 | fields: [ 307 | { 308 | name: 'annotations', 309 | kind: 'Annotation[]', 310 | }, 311 | { 312 | name: 'subject', 313 | kind: 'IRI', 314 | }, 315 | { 316 | name: 'property', 317 | kind: 'IRI', 318 | }, 319 | { 320 | name: 'target', 321 | kind: 'IRI', 322 | }, 323 | ], 324 | }); 325 | otherKinds.push({ 326 | name: 'AnnotationAssertion', 327 | fields: [ 328 | { 329 | name: 'annotations', 330 | kind: 'Annotation[]', 331 | }, 332 | { 333 | name: 'subject', 334 | kind: 'IRI', 335 | }, 336 | { 337 | name: 'property', 338 | kind: 'IRI', 339 | }, 340 | { 341 | name: 'value', 342 | kind: 'IRI', 343 | }, 344 | ], 345 | }); 346 | otherKinds.push({ 347 | name: 'NegativeAnnotationAssertion', 348 | fields: [ 349 | { 350 | name: 'annotations', 351 | kind: 'Annotation[]', 352 | }, 353 | { 354 | name: 'subject', 355 | kind: 'IRI', 356 | }, 357 | { 358 | name: 'property', 359 | kind: 'IRI', 360 | }, 361 | { 362 | name: 'value', 363 | kind: 'IRI', 364 | }, 365 | ], 366 | }); 367 | otherKinds.push({ 368 | name: 'DataIntersectionOf', 369 | fields: [ 370 | { 371 | name: 'annotations', 372 | kind: 'Annotation[]', 373 | }, 374 | { 375 | name: 'data_ranges', 376 | kind: 'DataRangeExpression[]', 377 | } 378 | ] 379 | }); 380 | otherKinds.push({ 381 | name: 'DataUnionOf', 382 | fields: [ 383 | { 384 | name: 'annotations', 385 | kind: 'Annotation[]', 386 | }, 387 | { 388 | name: 'data_ranges', 389 | kind: 'DataRangeExpression[]', 390 | } 391 | ] 392 | }); 393 | otherKinds.push({ 394 | name: 'DataComplementOf', 395 | fields: [ 396 | { 397 | name: 'annotations', 398 | kind: 'Annotation[]', 399 | }, 400 | { 401 | name: 'data_range', 402 | kind: 'DataRangeExpression' 403 | } 404 | ] 405 | }); 406 | otherKinds.push({ 407 | name: 'DataOneOf', 408 | fields: [ 409 | { 410 | name: 'annotations', 411 | kind: 'Annotation[]', 412 | }, 413 | { 414 | name: 'values', 415 | kind: 'Multicodec[]', 416 | } 417 | ] 418 | }); 419 | otherKinds.push({ 420 | name: 'DataRestriction', 421 | fields: [ 422 | { 423 | name: 'annotations', 424 | kind: 'Annotation[]', 425 | }, 426 | { 427 | name: 'constraining_facet', 428 | kind: 'ConstrainingFacet', 429 | }, 430 | { 431 | name: 'restriction_value', 432 | kind: 'Multicodec', 433 | }, 434 | ] 435 | }); 436 | otherKinds.push({ 437 | name: 'ConstrainingFacet', 438 | fields: [ 439 | { 440 | name: 'annotations', 441 | kind: 'Annotation[]', 442 | } 443 | ] 444 | }); 445 | otherKinds.push({ 446 | name: 'Datatype', 447 | fields: [ 448 | { 449 | name: 'annotations', 450 | kind: 'Annotation[]', 451 | }, 452 | { 453 | name: 'data_range', 454 | kind: 'DataRangeExpression', 455 | } 456 | ] 457 | }); 458 | 459 | kinds = kinds.concat( 460 | classExpressionKinds, 461 | objectPropertyExpressionKinds, 462 | dataPropertyExpressionKinds, 463 | // annotationAxiomKinds, 464 | otherKinds 465 | ); 
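// Normalization steps: sort each kind's fields by name, resolve grammar identifiers to
// concrete field kinds via transformKindField, then assign every kind a sequential kindId
// and a CID prefix derived from it. Worked example for the prefix math below: kindId 0
// gives 0xc000 + 0 = 49152, which varint-encodes to the bytes 0x80 0x80 0x03, so
// cidPrefixHex is "808003" (kindId 1 yields "818003"); generateSolidityContracts.js wraps
// this as 0x01<varint>1b20, i.e. CID version 1 + codec varint + keccak-256 (0x1b) + 32-byte digest length (0x20).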
466 | kinds.forEach(kind => kind.fields.sort((a, b) => a.name.localeCompare(b.name))); // sort comparator must return a number 467 | 468 | kinds.forEach(kind => { 469 | kind.fields.forEach(field => transformKindField(grammar, field)); 470 | }); 471 | 472 | kinds = kinds.map((kind, i) => Object.assign({}, kind, { 473 | kindId: i, 474 | cidPrefix: calculateCidPrefix(i), 475 | cidPrefixHex: calculateCidPrefixHex(i), 476 | fields: uniqFields(kind.fields), 477 | })); 478 | 479 | return { 480 | kinds, 481 | }; 482 | }; 483 | 484 | const calculateCidPrefix = (i) => { 485 | const base = 0xc000; 486 | const cidPrefixNumber = base + i; 487 | 488 | return cidPrefixNumber; 489 | } 490 | 491 | const calculateCidPrefixHex = (i) => { 492 | const cidPrefixNumber = calculateCidPrefix(i); 493 | const bytes = Buffer.from(varint.encode(cidPrefixNumber)); 494 | return bytes.toString('hex'); 495 | } 496 | 497 | const transformKindField = (grammar, field) => { 498 | const hackyFieldKind = hackyFieldKindTransformation(field); 499 | if (hackyFieldKind) { 500 | console.log('HACKY transform'); 501 | field.kind = hackyFieldKind; 502 | return; 503 | } 504 | 505 | const fieldExpression = grammar.find(n => n.lhs === field.kind); 506 | const mappers = paramsConfig.mappers; 507 | let mapper = mappers[field.kind]; 508 | if (!mapper) { 509 | console.error(`No asFieldKind mapper found for ${field.kind}`); 510 | return; 511 | } 512 | mapper = mapper.asFieldKind; 513 | if (!mapper) { 514 | console.error(`No asFieldKind mapper defined for ${field.kind}`); 515 | return []; 516 | } 517 | 518 | field.kind = mapper(fieldExpression.rhs); 519 | }; 520 | 521 | // Unify field names that have same name and same kind 522 | const uniqFields = (fields) => { 523 | const filteredFields = []; 524 | fields.forEach((field) => { 525 | if (!filteredFields.map(n => n.name).includes(field.name)) { 526 | filteredFields.push(field); 527 | return; 528 | } 529 | if (field.name === 'annotations') { 530 | // duplicate annotations fields are ignored as they are produced by a lot of entities 531 | return; 532 | } 533 | throw new Error(`Unexpected duplicate field ${field.name}`); 534 | }); 535 | 536 | return filteredFields; 537 | } 538 | 539 | // TODO: move to params.js 540 | // Transforms grammar identifiers of kind fields to other grammar identifiers (ending with "[]" if they are arrays) 541 | const hackyFieldKindTransformation = field => { 542 | if (field.kind === 'superClassExpression') { 543 | return 'ClassExpression[]'; 544 | } 545 | if (field.kind === 'superObjectPropertyExpression') { 546 | return 'ObjectPropertyExpression[]'; 547 | } 548 | if (field.kind === 'superDataPropertyExpression') { 549 | return 'DataPropertyExpression[]'; 550 | } 551 | return null; 552 | }; 553 | 554 | const main = () => { 555 | checkDependencies(parsed); 556 | const newGrammar = buildGrammar(parsed); 557 | 558 | console.log('New Grammar:'); 559 | console.log(JSON.stringify(newGrammar.kinds, null, 2)); 560 | fs.writeFile( 561 | 'build/intermediate_v0.json', 562 | JSON.stringify(newGrammar, null, 2), 563 | function(err) { 564 | if (err) throw err; 565 | } 566 | ); 567 | }; 568 | 569 | main(); 570 | -------------------------------------------------------------------------------- /extract-grammar/src/index.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs'); 2 | const varint = require('varint'); 3 | 4 | const parsed = require('../build/grammar.json').parsed; 5 | const paramsConfig = require('./params.js'); 6 | 7 | const checkDependencies = parsedGrammar => { 8 | 
const lhsIdentifiers = parsedGrammar.map(definition => definition.lhs); 9 | 10 | const checkRHSArray = alternatives => { 11 | alternatives.forEach(alternative => { 12 | const exists = lhsIdentifiers.includes(alternative); 13 | if (!exists) { 14 | console.warn('RHS alternative', alternative, 'does not exist'); 15 | } 16 | }); 17 | }; 18 | 19 | parsedGrammar.forEach(definition => { 20 | if (Array.isArray(definition.rhs)) { 21 | checkRHSArray(definition.rhs); 22 | } else if (definition.rhs.type === 'function') { 23 | checkRHSArray(definition.rhs.params); 24 | } else if (definition.rhs.type === 'zeroOrMore') { 25 | checkRHSArray([definition.rhs.data]); 26 | } else { 27 | console.warn(`Checking of RHS for ${definition.lhs} not implemented yet`); 28 | } 29 | }); 30 | }; 31 | 32 | const kindFieldsFromFunction = funcExpression => { 33 | const mappers = paramsConfig.mappers; 34 | let mapper = mappers[funcExpression.name]; 35 | if (!mapper) { 36 | console.error(`No function param mapper found for ${funcExpression.name}`); 37 | return []; 38 | } 39 | mapper = mapper.functionParams; 40 | if (!mapper) { 41 | console.error(`No function param mapper found for ${funcExpression.name}`); 42 | return []; 43 | } 44 | 45 | return mapper(funcExpression.params); 46 | }; 47 | 48 | const kindFieldsFromAxiom = (kind, axiom) => { 49 | let isAxiomApplicable = true; 50 | 51 | const mappers = paramsConfig.mappers; 52 | const mapper = mappers[axiom.rhs.name]; 53 | if (!mapper) { 54 | isAxiomApplicable = true; 55 | } else { 56 | const expressionCheck = mapper.checkExpressionKind; 57 | if (!expressionCheck) { 58 | isAxiomApplicable = true; 59 | } else { 60 | isAxiomApplicable = expressionCheck(kind.expressionKind); 61 | } 62 | } 63 | 64 | if (!isAxiomApplicable) { 65 | return []; 66 | } 67 | return kindFieldsFromFunction(axiom.rhs); 68 | }; 69 | 70 | const buildExpression = expression => { 71 | const kind = { 72 | name: expression.lhs, 73 | fields: [], 74 | }; 75 | 76 | if ( 77 | expression.rhs === 'IRI' || 78 | (Array.isArray(expression.rhs) && expression.rhs[0] === 'IRI') 79 | ) { 80 | // Base declarations don't get a IRI 81 | } else if (expression.rhs.type === 'function') { 82 | kind.fields = kind.fields.concat(kindFieldsFromFunction(expression.rhs)); 83 | } else { 84 | console.log( 85 | `Not implemented yet - buildExpression for ${JSON.stringify(expression)}` 86 | ); 87 | } 88 | 89 | return kind; 90 | }; 91 | 92 | const decorateKindWithAxiom = (kind, axiom) => { 93 | kind.fields = kind.fields.concat(kindFieldsFromAxiom(kind, axiom)); 94 | }; 95 | 96 | const restrictGrammar = parsedGammar => { 97 | const restrictDefinition = definition => { 98 | const mappers = paramsConfig.restrictGrammar; 99 | const mapper = mappers[definition.lhs]; 100 | if (!mapper) { 101 | return definition; 102 | } 103 | return mapper(definition); 104 | }; 105 | 106 | return parsedGammar.map(restrictDefinition); 107 | }; 108 | 109 | // Build all kinds for expressions in one group, e.g. 'ClassExpression' 110 | const buildExpressionsForGroup = (grammar, groupName) => { 111 | const groupExpressionKinds = []; 112 | const expressionsInGroup = grammar.find(n => n.lhs === groupName); 113 | 114 | const rhs = Array.isArray(expressionsInGroup.rhs) ? 
expressionsInGroup.rhs : [expressionsInGroup.rhs]; 115 | rhs.forEach(expressionIdent => { 116 | const expression = grammar.find(n => n.lhs === expressionIdent); 117 | const kind = buildExpression(expression); 118 | kind.expressionKind = groupName; 119 | 120 | groupExpressionKinds.push(kind); 121 | }); 122 | 123 | const axioms = grammar.find(n => n.lhs === 'Axiom'); 124 | groupExpressionKinds.forEach(kind => { 125 | axioms.rhs.forEach(axiomGroupIdent => { 126 | const axiomGroup = grammar.find(n => n.lhs === axiomGroupIdent); 127 | axiomGroup.rhs.forEach(axiomIdent => { 128 | const axiom = grammar.find(n => n.lhs === axiomIdent); 129 | 130 | decorateKindWithAxiom(kind, axiom); 131 | }); 132 | }); 133 | }); 134 | 135 | return groupExpressionKinds; 136 | }; 137 | 138 | const buildGrammar = parsedGrammar => { 139 | let kinds = []; 140 | 141 | const grammar = restrictGrammar(parsedGrammar); 142 | 143 | const classExpressionKinds = buildExpressionsForGroup( 144 | grammar, 145 | 'ClassExpression' 146 | ); 147 | const objectPropertyExpressionKinds = buildExpressionsForGroup( 148 | grammar, 'ObjectPropertyExpression' 149 | ); 150 | const dataPropertyExpressionKinds = buildExpressionsForGroup( 151 | grammar, 'DataPropertyExpression' 152 | ); 153 | // Though they are named AnnotationAxiom, they are more like expressions 154 | const annotationAxiomKinds = buildExpressionsForGroup( 155 | grammar, 156 | 'AnnotationAxiom' 157 | ); 158 | 159 | const otherKinds = []; 160 | const annotationDeclaration = grammar.find(n => n.lhs === 'Annotation'); 161 | otherKinds.push(buildExpression(annotationDeclaration)); 162 | // TODO: proper 163 | otherKinds.push({ 164 | name: 'Individual', 165 | fields: [ 166 | { 167 | name: 'annotations', 168 | kind: 'Annotation[]', 169 | }, 170 | { 171 | name: 'class_assertions', 172 | kind: 'ClassAssertion[]', 173 | }, 174 | { 175 | name: 'negative_class_assertions', 176 | kind: 'NegativeClassAssertion[]', 177 | }, 178 | { 179 | name: 'object_property_assertions', 180 | kind: 'ObjectPropertyAssertion[]', 181 | }, 182 | { 183 | name: 'negative_object_property_assertions', 184 | kind: 'NegativeObjectPropertyAssertion[]', 185 | }, 186 | { 187 | name: 'data_property_assertions', 188 | kind: 'DataPropertyAssertion[]', 189 | }, 190 | { 191 | name: 'negative_data_property_assertions', 192 | kind: 'NegativeDataPropertyAssertion[]', 193 | }, 194 | ], 195 | }); 196 | otherKinds.push({ 197 | name: 'AnnotationProperty', 198 | fields: [ 199 | { 200 | name: 'annotations', 201 | kind: 'Annotation[]', 202 | }, 203 | ], 204 | }); 205 | otherKinds.push({ 206 | name: 'ClassAssertion', 207 | fields: [ 208 | { 209 | name: 'annotations', 210 | kind: 'Annotation[]', 211 | }, 212 | { 213 | name: 'subject', 214 | kind: 'IRI', 215 | }, 216 | { 217 | name: 'class', 218 | kind: 'IRI', 219 | required: true, 220 | }, 221 | ], 222 | }); 223 | otherKinds.push({ 224 | name: 'NegativeClassAssertion', 225 | fields: [ 226 | { 227 | name: 'annotations', 228 | kind: 'Annotation[]', 229 | }, 230 | { 231 | name: 'subject', 232 | kind: 'IRI', 233 | }, 234 | { 235 | name: 'class', 236 | kind: 'IRI', 237 | required: true, 238 | }, 239 | ], 240 | }); 241 | otherKinds.push({ 242 | name: 'ObjectPropertyAssertion', 243 | fields: [ 244 | { 245 | name: 'annotations', 246 | kind: 'Annotation[]', 247 | }, 248 | { 249 | name: 'subject', 250 | kind: 'IRI', 251 | }, 252 | { 253 | name: 'property', 254 | kind: 'IRI', 255 | }, 256 | { 257 | name: 'target', 258 | kind: 'IRI', 259 | }, 260 | ], 261 | }); 262 | otherKinds.push({ 263 | name: 
'NegativeObjectPropertyAssertion', 264 | fields: [ 265 | { 266 | name: 'annotations', 267 | kind: 'Annotation[]', 268 | }, 269 | { 270 | name: 'subject', 271 | kind: 'IRI', 272 | }, 273 | { 274 | name: 'property', 275 | kind: 'IRI', 276 | }, 277 | { 278 | name: 'target', 279 | kind: 'IRI', 280 | }, 281 | ], 282 | }); 283 | otherKinds.push({ 284 | name: 'DataPropertyAssertion', 285 | fields: [ 286 | { 287 | name: 'annotations', 288 | kind: 'Annotation[]', 289 | }, 290 | { 291 | name: 'subject', 292 | kind: 'IRI', 293 | }, 294 | { 295 | name: 'property', 296 | kind: 'IRI', 297 | }, 298 | { 299 | name: 'target', 300 | kind: 'IRI', 301 | }, 302 | ], 303 | }); 304 | otherKinds.push({ 305 | name: 'NegativeDataPropertyAssertion', 306 | fields: [ 307 | { 308 | name: 'annotations', 309 | kind: 'Annotation[]', 310 | }, 311 | { 312 | name: 'subject', 313 | kind: 'IRI', 314 | }, 315 | { 316 | name: 'property', 317 | kind: 'IRI', 318 | }, 319 | { 320 | name: 'target', 321 | kind: 'IRI', 322 | }, 323 | ], 324 | }); 325 | otherKinds.push({ 326 | name: 'AnnotationAssertion', 327 | fields: [ 328 | { 329 | name: 'annotations', 330 | kind: 'Annotation[]', 331 | }, 332 | { 333 | name: 'subject', 334 | kind: 'IRI', 335 | }, 336 | { 337 | name: 'property', 338 | kind: 'IRI', 339 | }, 340 | { 341 | name: 'value', 342 | kind: 'IRI', 343 | }, 344 | ], 345 | }); 346 | otherKinds.push({ 347 | name: 'NegativeAnnotationAssertion', 348 | fields: [ 349 | { 350 | name: 'annotations', 351 | kind: 'Annotation[]', 352 | }, 353 | { 354 | name: 'subject', 355 | kind: 'IRI', 356 | }, 357 | { 358 | name: 'property', 359 | kind: 'IRI', 360 | }, 361 | { 362 | name: 'value', 363 | kind: 'IRI', 364 | }, 365 | ], 366 | }); 367 | otherKinds.push({ 368 | name: 'Literal', 369 | fields: [ 370 | { 371 | name: 'datatype', 372 | kind: 'IRI', 373 | }, 374 | { 375 | name: 'value', 376 | kind: 'IRI', 377 | required: true, 378 | } 379 | ] 380 | }); 381 | otherKinds.push({ 382 | name: 'Datatype', 383 | fields: [ 384 | { 385 | name: 'annotations', 386 | kind: 'Annotation[]', 387 | }, 388 | ] 389 | }); 390 | otherKinds.push({ 391 | name: 'DataIntersectionOf', 392 | fields: [ 393 | { 394 | name: 'annotations', 395 | kind: 'Annotation[]', 396 | }, 397 | { 398 | name: 'datatypes', 399 | kind: 'DatatypeExpression[]', 400 | } 401 | ] 402 | }); 403 | otherKinds.push({ 404 | name: 'DataUnionOf', 405 | fields: [ 406 | { 407 | name: 'annotations', 408 | kind: 'Annotation[]', 409 | }, 410 | { 411 | name: 'datatypes', 412 | kind: 'DatatypeExpression[]', 413 | } 414 | ] 415 | }); 416 | otherKinds.push({ 417 | name: 'DataComplementOf', 418 | fields: [ 419 | { 420 | name: 'annotations', 421 | kind: 'Annotation[]', 422 | }, 423 | { 424 | name: 'datatype', 425 | kind: 'IRI', // DatatypeExpression 426 | } 427 | ] 428 | }); 429 | otherKinds.push({ 430 | name: 'DataOneOf', 431 | fields: [ 432 | { 433 | name: 'annotations', 434 | kind: 'Annotation[]', 435 | }, 436 | { 437 | name: 'values', 438 | kind: 'Literal[]', 439 | } 440 | ] 441 | }); 442 | 443 | kinds = kinds.concat( 444 | classExpressionKinds, 445 | objectPropertyExpressionKinds, 446 | dataPropertyExpressionKinds, 447 | // annotationAxiomKinds, 448 | otherKinds 449 | ); 450 | kinds.forEach(kind => kind.fields.sort((a, b) => a.name.localeCompare(b.name))); // sort comparator must return a number 451 | 452 | kinds.forEach(kind => { 453 | kind.fields.forEach(field => transformKindField(grammar, field)); 454 | }); 455 | 456 | kinds = kinds.map((kind, i) => Object.assign({}, kind, { 457 | kindId: i, 458 | cidPrefix: calculateCidPrefix(i), 459 | cidPrefixHex: 
calculateCidPrefixHex(i), 460 | fields: uniqFields(kind.fields), 461 | })); 462 | 463 | return { 464 | kinds, 465 | }; 466 | }; 467 | 468 | const calculateCidPrefix = (i) => { 469 | const base = 0xc000; 470 | const cidPrefixNumber = base + i; 471 | 472 | return cidPrefixNumber; 473 | } 474 | 475 | const calculateCidPrefixHex = (i) => { 476 | const cidPrefixNumber = calculateCidPrefix(i); 477 | const bytes = Buffer.from(varint.encode(cidPrefixNumber)); 478 | return bytes.toString('hex'); 479 | } 480 | 481 | const transformKindField = (grammar, field) => { 482 | const hackyFieldKind = hackyFieldKindTransformation(field); 483 | if (hackyFieldKind) { 484 | console.log('HACKY transform'); 485 | field.kind = hackyFieldKind; 486 | return; 487 | } 488 | 489 | const fieldExpression = grammar.find(n => n.lhs === field.kind); 490 | const mappers = paramsConfig.mappers; 491 | let mapper = mappers[field.kind]; 492 | if (!mapper) { 493 | console.error(`No asFieldKind mapper found for ${field.kind}`); 494 | return; 495 | } 496 | mapper = mapper.asFieldKind; 497 | if (!mapper) { 498 | console.error(`No asFieldKind mapper defined for ${field.kind}`); 499 | return []; 500 | } 501 | 502 | field.kind = mapper(fieldExpression.rhs); 503 | }; 504 | 505 | // Unify field names that have same name and same kind 506 | const uniqFields = (fields) => { 507 | const filteredFields = []; 508 | fields.forEach((field) => { 509 | if (!filteredFields.map(n => n.name).includes(field.name)) { 510 | filteredFields.push(field); 511 | return; 512 | } 513 | if (field.name === 'annotations') { 514 | // duplicate annotations fields are ignored as they are produced by a lot of entities 515 | return; 516 | } 517 | throw new Error(`Unexpected duplicate field ${field.name}`); 518 | }); 519 | 520 | return filteredFields; 521 | } 522 | 523 | // TODO: move to params.js 524 | // Transforms grammar identifiers of kind fields to other grammar identifiers (ending with "[]" if they are arrays) 525 | const hackyFieldKindTransformation = field => { 526 | if (field.kind === 'superClassExpression') { 527 | return 'ClassExpression[]'; 528 | } 529 | if (field.kind === 'superObjectPropertyExpression') { 530 | return 'ObjectPropertyExpression[]'; 531 | } 532 | if (field.kind === 'superDataPropertyExpression') { 533 | return 'DataPropertyExpression[]'; 534 | } 535 | return null; 536 | }; 537 | 538 | const main = () => { 539 | checkDependencies(parsed); 540 | const newGrammar = buildGrammar(parsed); 541 | 542 | console.log('New Grammar:'); 543 | console.log(JSON.stringify(newGrammar.kinds, null, 2)); 544 | fs.writeFile( 545 | 'build/intermediate.json', 546 | JSON.stringify(newGrammar, null, 2), 547 | function(err) { 548 | if (err) throw err; 549 | } 550 | ); 551 | }; 552 | 553 | main(); 554 | -------------------------------------------------------------------------------- /extract-grammar/src/manual.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | parsed: [ 3 | { 4 | lhs: 'Axiom', 5 | rhs: [ 6 | // 'Declaration', 7 | 'ClassAxiom', 8 | // "ObjectPropertyAxiom", 9 | // "DataPropertyAxiom", 10 | // "DatatypeDefinition", 11 | // "HasKey", 12 | // "Assertion", 13 | // "AnnotationAxiom" 14 | ], 15 | }, 16 | { 17 | lhs: 'ClassAxiom', 18 | rhs: [ 19 | 'SubClassOf', 20 | // "EquivalentClasses", 21 | // "DisjointClasses", 22 | // "DisjointUnion" 23 | ], 24 | }, 25 | { 26 | lhs: 'SubClassOf', 27 | rhs: { 28 | type: 'function', 29 | name: 'SubClassOf', 30 | params: [ 31 | 'axiomAnnotations', 32 | 
'subClassExpression', 33 | 'superClassExpression', 34 | ], 35 | }, 36 | }, 37 | { 38 | lhs: 'Declaration', 39 | rhs: { 40 | type: 'function', 41 | name: 'Declaration', 42 | params: ['axiomAnnotations', 'Entity'], 43 | }, 44 | }, 45 | { 46 | lhs: 'subClassExpression', 47 | rhs: ['ClassExpression'], 48 | }, 49 | { 50 | lhs: 'superClassExpression', 51 | rhs: ['ClassExpression'], 52 | }, 53 | { 54 | lhs: 'ClassExpression', 55 | rhs: [ 56 | 'Class', 57 | // "ObjectIntersectionOf", 58 | // "ObjectUnionOf", 59 | 'ObjectComplementOf', 60 | // "ObjectOneOf", 61 | // "ObjectSomeValuesFrom", 62 | // "ObjectAllValuesFrom", 63 | // "ObjectHasValue", 64 | // "ObjectHasSelf", 65 | // "ObjectMinCardinality", 66 | // "ObjectMaxCardinality", 67 | // "ObjectExactCardinality", 68 | // "DataSomeValuesFrom", 69 | // "DataAllValuesFrom", 70 | // "DataHasValue", 71 | // "DataMinCardinality", 72 | // "DataMaxCardinality", 73 | // "DataExactCardinality" 74 | ], 75 | }, 76 | { 77 | lhs: 'axiomAnnotations', 78 | rhs: { 79 | type: 'zeroOrMore', 80 | data: 'Annotation', 81 | }, 82 | }, 83 | { 84 | lhs: 'Entity', 85 | rhs: [ 86 | 'Class', 87 | // "Datatype", 88 | // "ObjectProperty", 89 | // "DataProperty", 90 | 'AnnotationProperty', 91 | // "NamedIndividual" 92 | ], 93 | }, 94 | { lhs: 'Class', rhs: ['IRI'] }, 95 | // { lhs: 'Datatype', rhs: ['IRI'] }, 96 | { lhs: 'AnnotationProperty', rhs: ['IRI'] }, 97 | { 98 | lhs: 'Annotation', 99 | rhs: { 100 | type: 'function', 101 | name: 'Annotation', 102 | params: [ 103 | 'annotationAnnotations', 104 | 'AnnotationProperty', 105 | 'AnnotationValue', 106 | ], 107 | }, 108 | }, 109 | { 110 | lhs: 'annotationAnnotations', 111 | rhs: { 112 | type: 'zeroOrMore', 113 | data: 'Annotation', 114 | }, 115 | }, 116 | { 117 | lhs: 'AnnotationValue', 118 | rhs: [ 119 | // "AnonymousIndividual", 120 | 'IRI', 121 | // "Literal" 122 | ], 123 | }, 124 | { 125 | lhs: 'ObjectComplementOf', 126 | rhs: { 127 | type: 'function', 128 | name: 'ObjectComplementOf', 129 | params: ['ClassExpression'], 130 | }, 131 | }, 132 | ], 133 | }; 134 | -------------------------------------------------------------------------------- /extract-grammar/src/mapperHelpers.js: -------------------------------------------------------------------------------- 1 | const assert = require('assert'); 2 | 3 | const annotationField = param => { 4 | assert(param === 'annotationAnnotations'); 5 | return { 6 | name: 'annotations', 7 | kind: param, 8 | }; 9 | }; 10 | 11 | module.exports = { 12 | annotationField, 13 | }; 14 | -------------------------------------------------------------------------------- /extract-grammar/src/params.js: -------------------------------------------------------------------------------- 1 | const assert = require('assert'); 2 | const mapperHelpers = require('./mapperHelpers'); 3 | 4 | module.exports = { 5 | mappers: { 6 | Annotation: { 7 | functionParams: params => { 8 | assert(params.length === 3); 9 | return [ 10 | mapperHelpers.annotationField(params[0]), 11 | { 12 | name: 'property', 13 | kind: params[1], 14 | required: true, 15 | }, 16 | { 17 | name: 'value', 18 | kind: params[2], 19 | required: true, 20 | }, 21 | ]; 22 | }, 23 | }, 24 | AnnotationProperty: { 25 | asFieldKind: rhs => rhs, 26 | }, 27 | AnnotationValue: { 28 | asFieldKind: rhs => { 29 | assert(rhs.length === 3); 30 | assert(rhs[1] === 'IRI'); 31 | // TODO: find a better way for "IRI or Literal" 32 | return rhs[1]; 33 | }, 34 | }, 35 | annotationAnnotations: { 36 | asFieldKind: rhs => { 37 | assert(rhs.type === 'zeroOrMore'); 38 | 
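// e.g. the parsed rule `annotationAnnotations := { Annotation }` reaches this mapper as
// { type: 'zeroOrMore', name: 'Annotation' } (see the ZeroOrMore rule in bnf_grammar.pegjs)
// and is mapped to the field kind 'Annotation[]'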
return `${rhs.name}[]`; 39 | }, 40 | }, 41 | axiomAnnotations: { 42 | asFieldKind: rhs => { 43 | assert(rhs.type === 'zeroOrMore'); 44 | return `${rhs.name}[]`; 45 | }, 46 | }, 47 | ObjectComplementOf: { 48 | functionParams: params => { 49 | assert(params.length === 1); 50 | return [ 51 | { 52 | name: 'complementOf', 53 | kind: params[0], 54 | required: true, 55 | }, 56 | ]; 57 | }, 58 | }, 59 | SubClassOf: { 60 | checkExpressionKind: expressionKind => { 61 | return expressionKind === 'ClassExpression'; 62 | }, 63 | functionParams: params => { 64 | assert(params.length === 3); 65 | return [ 66 | { 67 | name: 'annotations', 68 | kind: params[0], 69 | }, 70 | // skip subClassExpression 71 | { 72 | name: 'superClassExpression', 73 | kind: params[2], 74 | }, 75 | ]; 76 | }, 77 | }, 78 | // DisjointClasses: { 79 | // checkExpressionKind: expressionKind => { 80 | // return expressionKind === 'ClassExpression'; 81 | // }, 82 | // functionParams: params => { 83 | // assert(params.length === 4); 84 | // return [ 85 | // { 86 | // name: 'annotations', 87 | // kind: params[0], 88 | // }, 89 | // { 90 | // name: 'disjointClasses', 91 | // kind: `${params[1]}[]`, 92 | // }, 93 | // ]; 94 | // }, 95 | // }, 96 | SubObjectPropertyOf: { 97 | checkExpressionKind: expressionKind => { 98 | return expressionKind === 'ObjectPropertyExpression'; 99 | }, 100 | functionParams: params => { 101 | assert(params.length === 3); 102 | return [ 103 | { 104 | name: 'annotations', 105 | kind: params[0], 106 | }, 107 | // skip subObjectPropertyExpression 108 | { 109 | name: 'superObjectPropertyExpression', 110 | kind: params[2], 111 | }, 112 | ]; 113 | }, 114 | }, 115 | SubDataPropertyOf: { 116 | checkExpressionKind: expressionKind => { 117 | return expressionKind === 'DataPropertyExpression'; 118 | }, 119 | functionParams: params => { 120 | assert(params.length === 3); 121 | return [ 122 | { 123 | name: 'annotations', 124 | kind: params[0], 125 | }, 126 | // skip subDataPropertyExpression 127 | { 128 | name: 'superDataPropertyExpression', 129 | kind: params[2], 130 | }, 131 | ]; 132 | }, 133 | }, 134 | DataPropertyRange: { 135 | checkExpressionKind: expressionKind => { 136 | return expressionKind === 'DataPropertyExpression'; 137 | }, 138 | functionParams: params => { 139 | assert(params.length === 3); 140 | return [ 141 | { 142 | name: 'annotations', 143 | kind: params[0], 144 | }, 145 | // skip DataPropertyExpression 146 | { 147 | name: 'range', 148 | kind: params[2], 149 | }, 150 | ]; 151 | }, 152 | }, 153 | DataPropertyDomain: { 154 | checkExpressionKind: expressionKind => { 155 | return expressionKind === 'DataPropertyExpression'; 156 | }, 157 | functionParams: params => { 158 | assert(params.length === 3); 159 | return [ 160 | { 161 | name: 'annotations', 162 | kind: params[0], 163 | }, 164 | // skip DataPropertyExpression 165 | { 166 | name: 'domain', 167 | kind: params[2], 168 | }, 169 | ]; 170 | }, 171 | }, 172 | }, 173 | restrictGrammar: { 174 | removedDeclarations: [ 175 | 'EquivalentClasses', 176 | ], 177 | Axiom: definition => { 178 | definition.rhs = definition.rhs.filter(n => n !== 'Declaration'); 179 | // TODO: not parsable from grammar yet or can't be processed 180 | definition.rhs = definition.rhs.filter(n => n !== 'DatatypeDefinition'); 181 | definition.rhs = definition.rhs.filter(n => n !== 'HasKey'); 182 | return definition; 183 | }, 184 | // TODO 185 | ClassExpression: definition => { 186 | // definition.rhs = ['Class', 'ObjectComplementOf']; 187 | return definition; 188 | }, 189 | }, 190 | }; 191 | 
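A minimal sketch (not part of the repository) of how the mapper configuration above is consumed: the generators look up a mapper under the function name from the parsed grammar and call its functionParams on the parameter list, mirroring kindFieldsFromFunction in index.js. It assumes it is run from the same directory as params.js.

const { mappers } = require('./params.js');

// A 'function'-shaped RHS as the BNF parser produces it for the Annotation rule.
const funcExpression = {
  type: 'function',
  name: 'Annotation',
  params: ['annotationAnnotations', 'AnnotationProperty', 'AnnotationValue'],
};

// Derive the kind's fields from the function parameters.
const fields = mappers[funcExpression.name].functionParams(funcExpression.params);
console.log(fields);
// => [ { name: 'annotations', kind: 'annotationAnnotations' },
//      { name: 'property', kind: 'AnnotationProperty', required: true },
//      { name: 'value', kind: 'AnnotationValue', required: true } ]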
-------------------------------------------------------------------------------- /extract-grammar/src/parseBnf.js: -------------------------------------------------------------------------------- 1 | const peg = require('pegjs'); 2 | const fs = require('fs'); 3 | const path = require('path'); 4 | 5 | const parseGrammar = grammar => { 6 | const parserGrammar = fs.readFileSync( 7 | path.join(__dirname, './bnf_grammar.pegjs'), 8 | 'utf8' 9 | ); 10 | const parser = peg.generate(parserGrammar); 11 | 12 | return parser.parse(grammar); 13 | }; 14 | 15 | module.exports = { 16 | parseGrammar, 17 | }; 18 | -------------------------------------------------------------------------------- /extract-grammar/src/parseBnfCli.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs'); 2 | const path = require('path'); 3 | 4 | const parser = require('./parseBnf'); 5 | 6 | const main = () => { 7 | const grammar = fs.readFileSync( 8 | path.join(__dirname, './__tests__/implemented.grammar'), 9 | 'utf8' 10 | ); 11 | 12 | const parsed = parser.parseGrammar(grammar); 13 | const grammarContents = { 14 | parsed, 15 | }; 16 | fs.writeFile( 17 | 'build/grammar.json', 18 | JSON.stringify(grammarContents, null, 2), 19 | function(err) { 20 | if (err) throw err; 21 | } 22 | ); 23 | }; 24 | 25 | main(); 26 | -------------------------------------------------------------------------------- /rlay_ontology/.gitignore: -------------------------------------------------------------------------------- 1 | target 2 | -------------------------------------------------------------------------------- /rlay_ontology/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "rlay_ontology" 3 | description = "Rlay ontology model" 4 | version = "0.2.6" 5 | authors = ["Maximilian Goisser "] 6 | license = "MIT OR Apache-2.0" 7 | edition = "2018" 8 | 9 | [lib] 10 | name = "rlay_ontology" 11 | path = "src/lib.rs" 12 | 13 | [[example]] 14 | name = "example_hashes" 15 | path = "src/bin.rs" 16 | required-features = ["examples"] 17 | 18 | [dependencies] 19 | ambassador = "0.2.1" 20 | 21 | integer-encoding = { version = "1.0", optional = true } 22 | multihash = { version = "0.8.0", optional = true } 23 | cid_fork_rlay = { version = "0.3.1", optional = true } 24 | multibase = { version = "0.6.0", optional = true } 25 | serde_derive = { version = "^1.0.79", optional = true } 26 | serde_cbor = { version = "0.10.1", default-features = false, optional = true } 27 | serde_bytes = { version = "0.11.1", default-features = false, optional = true } 28 | serde = { version = "^1.0.79", default-features = false, optional = true } 29 | 30 | # feature std 31 | prost = { version = "0.6.0", optional = true } 32 | bytes = { version = "0.5.0", optional = true } 33 | rustc-hex = { version = "2.0.1", optional = true } 34 | 35 | # feature web3_compat 36 | ethereum-types = { version = "0.4.0", default-features = false, optional = true } 37 | 38 | # feature pwasm 39 | pwasm-std = { version = "0.10", optional = true } 40 | 41 | # feature wasm-bindgen 42 | wasm-bindgen = { version = "0.2.0", optional = true } 43 | 44 | # feature examples 45 | itertools = { version = "0.7.6", optional = true } 46 | serde_json = { version = "1", optional = true } 47 | strum_macros = "0.16.0" 48 | 49 | [build-dependencies] 50 | rlay_ontology_build = { version = "0.2.5", path = "../rlay_ontology_build" } 51 | 52 | [dev-dependencies] 53 | serde_json = { version = "1" } 54 | 55 | 
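# Illustration (hypothetical downstream crate, not part of this repository) of selecting the
# feature sets defined below: a no_std consumer targeting pwasm would depend on this crate as
#   rlay_ontology = { version = "0.2.6", default-features = false, features = ["pwasm"] }
# while the default "std" feature enables prost-based protobuf support and full serde serialization.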
[features] 56 | default = ["std"] 57 | std = ["prost", "bytes", "rustc-hex", "serde/std", "serde_bytes/std", "serde_cbor/std", "ethereum-types/std", "serialize"] 58 | serialize = ["multihash", "cid_fork_rlay", "multibase", "serde_derive", "serde_cbor", "serde_bytes", "serde", "integer-encoding"] 59 | 60 | web3_compat = ["ethereum-types"] 61 | pwasm = ["pwasm-std"] 62 | wasm_bindgen = ["wasm-bindgen", "web3_compat"] 63 | 64 | examples = ["serde_json", "itertools"] 65 | 66 | [package.metadata.release] 67 | no-dev-version = true 68 | tag-prefix = "" 69 | tag-name = "{{version}}" 70 | -------------------------------------------------------------------------------- /rlay_ontology/build.rs: -------------------------------------------------------------------------------- 1 | extern crate rlay_ontology_build; 2 | 3 | fn main() { 4 | rlay_ontology_build::build_files(); 5 | } 6 | -------------------------------------------------------------------------------- /rlay_ontology/build_features.sh: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env bash 2 | set -euxo pipefail 3 | cargo build 4 | cargo build --features web3_compat 5 | cargo +nightly-2018-10-15 build --target wasm32-unknown-unknown --no-default-features --features pwasm 6 | cargo +nightly-2018-10-15 build --target wasm32-unknown-unknown --no-default-features --features pwasm,serialize2 7 | -------------------------------------------------------------------------------- /rlay_ontology/rust-toolchain: -------------------------------------------------------------------------------- 1 | 1.40.0 2 | -------------------------------------------------------------------------------- /rlay_ontology/src/bin.rs: -------------------------------------------------------------------------------- 1 | extern crate cid; 2 | extern crate integer_encoding; 3 | extern crate itertools; 4 | extern crate multibase; 5 | extern crate multihash; 6 | extern crate prost; 7 | extern crate rlay_ontology; 8 | extern crate rustc_hex; 9 | extern crate serde_cbor; 10 | extern crate serde_json; 11 | 12 | use cid_fork_rlay::ToCid; 13 | use integer_encoding::VarInt; 14 | use itertools::Itertools; 15 | use multibase::{encode as base_encode, Base}; 16 | use prost::Message; 17 | use rlay_ontology::prelude::*; 18 | use rustc_hex::FromHex; 19 | use rustc_hex::ToHex; 20 | use std::collections::BTreeMap; 21 | 22 | pub struct AnnotationMap(BTreeMap, Annotation>); 23 | pub struct ClassMap(BTreeMap, Class>); 24 | 25 | struct SolidityBytes<'a>(&'a [u8]); 26 | 27 | impl<'a> std::fmt::Display for SolidityBytes<'a> { 28 | fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { 29 | write!(f, "0x{:02x}", self.0.iter().format("")) 30 | } 31 | } 32 | 33 | struct SolidityBytesChunked<'a>(&'a [u8]); 34 | 35 | impl<'a> std::fmt::Display for SolidityBytesChunked<'a> { 36 | fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { 37 | write!( 38 | f, 39 | "{}", 40 | self.0 41 | .chunks(1) 42 | .map(|n| n 43 | .iter() 44 | .map(|m| format!("{:02x}", m)) 45 | .collect::>() 46 | .join("")) 47 | .collect::>() 48 | .join("") 49 | ) 50 | .unwrap(); 51 | Ok(()) 52 | } 53 | } 54 | 55 | pub fn main() { 56 | // let mut annotation = Annotation::default(); 57 | // annotation.value = 58 | // "019580031b2088868a58d3aac6d2558a29b3b8cacf3c9788364f57a3470158283121a15dcae0" 59 | // .from_hex() 60 | // .unwrap(); 61 | // let serialized = serde_cbor::ser::to_vec_packed(&annotation).unwrap(); 62 | 63 | // let mut serialized_pb = Vec::new(); 64 | // 
annotation.encode(&mut serialized_pb).unwrap(); 65 | 66 | // println!("Annotation CBOR: {}", SolidityBytesChunked(&serialized)); 67 | // println!("Annotation PB : {}", SolidityBytesChunked(&serialized_pb)); 68 | 69 | let raw_encoded: Vec<u8> = "0480".from_hex().unwrap(); 70 | println!("{:?}", raw_encoded); 71 | let decoded: u32 = VarInt::decode_var(&raw_encoded).0; 72 | println!("Decoded: {}", decoded); 73 | println!("Decoded Hex: {:x?}", decoded); 74 | 75 | let raw_value = vec![0xa2, 0x01, 0x41, 0xab, 0x02, 0x41, 0x45]; 76 | let value: AnnotationFormatCompact = serde_cbor::from_slice(&raw_value).unwrap(); 77 | let value = Annotation::from_compact_format(value).to_web3_format(); 78 | println!("{}", serde_json::to_string_pretty(&value).unwrap()); 79 | 80 | let value: AnnotationFormatCompact = serde_cbor::from_slice(&raw_value).unwrap(); 81 | let value = Annotation::from_compact_format(value); 82 | let entity_v0 = EntityV0::Annotation(value); 83 | let mut entity_serialized: Vec<u8> = Vec::new(); 84 | entity_v0.serialize(&mut entity_serialized); 85 | println!("{}", SolidityBytes(&entity_serialized)); 86 | 87 | let entity_v0 = EntityV0::deserialize(&mut std::io::Cursor::new(entity_serialized)).unwrap(); 88 | println!("{:?}", &entity_v0); 89 | 90 | // println!( 91 | // "{}", 92 | // SolidityBytesChunked( 93 | // &rlay_ontology::create_label_annotation(String::new()) 94 | // .unwrap() 95 | // .property 96 | // ) 97 | // ); 98 | // println!( 99 | // "\"label\" annotation property: {}", 100 | // SolidityBytes( 101 | // &rlay_ontology::create_label_annotation(String::new()) 102 | // .unwrap() 103 | // .property 104 | // ), 105 | // ); 106 | // let label_annotation = rlay_ontology::create_label_annotation("Organization".to_owned()) 107 | // .unwrap() 108 | // .to_cid() 109 | // .unwrap(); 110 | // let label_hash = label_annotation.to_bytes(); 111 | // println!("Byte part cid: {}", SolidityBytes(&label_annotation.hash)); 112 | // println!("Full cid: {}", SolidityBytes(&label_hash)); 113 | // println!("Full cid: {}", SolidityBytesChunked(&label_hash)); 114 | // let base58_label_hash = base_encode(Base::Base58btc, &label_hash); 115 | // println!("Full cid (base58btc): {}", base58_label_hash); 116 | 117 | // let mut organization = Class::default(); 118 | // organization.annotations.push(label_hash); 119 | // let organization_cid = organization.to_cid().unwrap(); 120 | // let organization_hash = organization_cid.to_bytes(); 121 | // println!("======= Class Organization ======"); 122 | // println!("Byte part cid: {}", SolidityBytes(&organization_cid.hash)); 123 | // println!("Full cid: {}", SolidityBytes(&organization_hash)); 124 | 125 | // let mut company = Class::default(); 126 | // company.sub_class_of_class.push(organization_hash); 127 | // let company_cid = company.to_cid().unwrap(); 128 | // let company_hash = company_cid.to_bytes(); 129 | // println!("======= Class Company ======"); 130 | // println!("Byte part cid: {}", SolidityBytes(&company_cid.hash)); 131 | // println!("Full cid: {}", SolidityBytes(&company_hash)); 132 | } 133 | -------------------------------------------------------------------------------- /rlay_ontology/src/intermediate.json: -------------------------------------------------------------------------------- 1 | { 2 | "kinds": [ 3 | { 4 | "name": "Class", 5 | "fields": [ 6 | { 7 | "name": "annotations", 8 | "kind": "Annotation[]" 9 | }, 10 | { 11 | "name": "superClassExpression", 12 | "kind": "ClassExpression[]" 13 | } 14 | ], 15 | "expressionKind": "ClassExpression", 16 | "kindId": 0, 
17 | "cidPrefix": 49152, 18 | "cidPrefixHex": "808003" 19 | }, 20 | { 21 | "name": "ObjectIntersectionOf", 22 | "fields": [ 23 | { 24 | "name": "annotations", 25 | "kind": "Annotation[]" 26 | }, 27 | { 28 | "name": "superClassExpression", 29 | "kind": "ClassExpression[]" 30 | } 31 | ], 32 | "expressionKind": "ClassExpression", 33 | "kindId": 1, 34 | "cidPrefix": 49153, 35 | "cidPrefixHex": "818003" 36 | }, 37 | { 38 | "name": "ObjectUnionOf", 39 | "fields": [ 40 | { 41 | "name": "annotations", 42 | "kind": "Annotation[]" 43 | }, 44 | { 45 | "name": "superClassExpression", 46 | "kind": "ClassExpression[]" 47 | } 48 | ], 49 | "expressionKind": "ClassExpression", 50 | "kindId": 2, 51 | "cidPrefix": 49154, 52 | "cidPrefixHex": "828003" 53 | }, 54 | { 55 | "name": "ObjectComplementOf", 56 | "fields": [ 57 | { 58 | "name": "complementOf", 59 | "kind": "ClassExpression", 60 | "required": true 61 | }, 62 | { 63 | "name": "annotations", 64 | "kind": "Annotation[]" 65 | }, 66 | { 67 | "name": "superClassExpression", 68 | "kind": "ClassExpression[]" 69 | } 70 | ], 71 | "expressionKind": "ClassExpression", 72 | "kindId": 3, 73 | "cidPrefix": 49155, 74 | "cidPrefixHex": "838003" 75 | }, 76 | { 77 | "name": "ObjectOneOf", 78 | "fields": [ 79 | { 80 | "name": "annotations", 81 | "kind": "Annotation[]" 82 | }, 83 | { 84 | "name": "superClassExpression", 85 | "kind": "ClassExpression[]" 86 | } 87 | ], 88 | "expressionKind": "ClassExpression", 89 | "kindId": 4, 90 | "cidPrefix": 49156, 91 | "cidPrefixHex": "848003" 92 | }, 93 | { 94 | "name": "ObjectSomeValuesFrom", 95 | "fields": [ 96 | { 97 | "name": "annotations", 98 | "kind": "Annotation[]" 99 | }, 100 | { 101 | "name": "superClassExpression", 102 | "kind": "ClassExpression[]" 103 | } 104 | ], 105 | "expressionKind": "ClassExpression", 106 | "kindId": 5, 107 | "cidPrefix": 49157, 108 | "cidPrefixHex": "858003" 109 | }, 110 | { 111 | "name": "ObjectAllValuesFrom", 112 | "fields": [ 113 | { 114 | "name": "annotations", 115 | "kind": "Annotation[]" 116 | }, 117 | { 118 | "name": "superClassExpression", 119 | "kind": "ClassExpression[]" 120 | } 121 | ], 122 | "expressionKind": "ClassExpression", 123 | "kindId": 6, 124 | "cidPrefix": 49158, 125 | "cidPrefixHex": "868003" 126 | }, 127 | { 128 | "name": "ObjectHasValue", 129 | "fields": [ 130 | { 131 | "name": "annotations", 132 | "kind": "Annotation[]" 133 | }, 134 | { 135 | "name": "superClassExpression", 136 | "kind": "ClassExpression[]" 137 | } 138 | ], 139 | "expressionKind": "ClassExpression", 140 | "kindId": 7, 141 | "cidPrefix": 49159, 142 | "cidPrefixHex": "878003" 143 | }, 144 | { 145 | "name": "ObjectHasSelf", 146 | "fields": [ 147 | { 148 | "name": "annotations", 149 | "kind": "Annotation[]" 150 | }, 151 | { 152 | "name": "superClassExpression", 153 | "kind": "ClassExpression[]" 154 | } 155 | ], 156 | "expressionKind": "ClassExpression", 157 | "kindId": 8, 158 | "cidPrefix": 49160, 159 | "cidPrefixHex": "888003" 160 | }, 161 | { 162 | "name": "ObjectMinCardinality", 163 | "fields": [ 164 | { 165 | "name": "annotations", 166 | "kind": "Annotation[]" 167 | }, 168 | { 169 | "name": "superClassExpression", 170 | "kind": "ClassExpression[]" 171 | } 172 | ], 173 | "expressionKind": "ClassExpression", 174 | "kindId": 9, 175 | "cidPrefix": 49161, 176 | "cidPrefixHex": "898003" 177 | }, 178 | { 179 | "name": "ObjectMaxCardinality", 180 | "fields": [ 181 | { 182 | "name": "annotations", 183 | "kind": "Annotation[]" 184 | }, 185 | { 186 | "name": "superClassExpression", 187 | "kind": "ClassExpression[]" 188 | 
} 189 | ], 190 | "expressionKind": "ClassExpression", 191 | "kindId": 10, 192 | "cidPrefix": 49162, 193 | "cidPrefixHex": "8a8003" 194 | }, 195 | { 196 | "name": "ObjectExactCardinality", 197 | "fields": [ 198 | { 199 | "name": "annotations", 200 | "kind": "Annotation[]" 201 | }, 202 | { 203 | "name": "superClassExpression", 204 | "kind": "ClassExpression[]" 205 | } 206 | ], 207 | "expressionKind": "ClassExpression", 208 | "kindId": 11, 209 | "cidPrefix": 49163, 210 | "cidPrefixHex": "8b8003" 211 | }, 212 | { 213 | "name": "DataSomeValuesFrom", 214 | "fields": [ 215 | { 216 | "name": "annotations", 217 | "kind": "Annotation[]" 218 | }, 219 | { 220 | "name": "superClassExpression", 221 | "kind": "ClassExpression[]" 222 | } 223 | ], 224 | "expressionKind": "ClassExpression", 225 | "kindId": 12, 226 | "cidPrefix": 49164, 227 | "cidPrefixHex": "8c8003" 228 | }, 229 | { 230 | "name": "DataAllValuesFrom", 231 | "fields": [ 232 | { 233 | "name": "annotations", 234 | "kind": "Annotation[]" 235 | }, 236 | { 237 | "name": "superClassExpression", 238 | "kind": "ClassExpression[]" 239 | } 240 | ], 241 | "expressionKind": "ClassExpression", 242 | "kindId": 13, 243 | "cidPrefix": 49165, 244 | "cidPrefixHex": "8d8003" 245 | }, 246 | { 247 | "name": "DataHasValue", 248 | "fields": [ 249 | { 250 | "name": "annotations", 251 | "kind": "Annotation[]" 252 | }, 253 | { 254 | "name": "superClassExpression", 255 | "kind": "ClassExpression[]" 256 | } 257 | ], 258 | "expressionKind": "ClassExpression", 259 | "kindId": 14, 260 | "cidPrefix": 49166, 261 | "cidPrefixHex": "8e8003" 262 | }, 263 | { 264 | "name": "DataMinCardinality", 265 | "fields": [ 266 | { 267 | "name": "annotations", 268 | "kind": "Annotation[]" 269 | }, 270 | { 271 | "name": "superClassExpression", 272 | "kind": "ClassExpression[]" 273 | } 274 | ], 275 | "expressionKind": "ClassExpression", 276 | "kindId": 15, 277 | "cidPrefix": 49167, 278 | "cidPrefixHex": "8f8003" 279 | }, 280 | { 281 | "name": "DataMaxCardinality", 282 | "fields": [ 283 | { 284 | "name": "annotations", 285 | "kind": "Annotation[]" 286 | }, 287 | { 288 | "name": "superClassExpression", 289 | "kind": "ClassExpression[]" 290 | } 291 | ], 292 | "expressionKind": "ClassExpression", 293 | "kindId": 16, 294 | "cidPrefix": 49168, 295 | "cidPrefixHex": "908003" 296 | }, 297 | { 298 | "name": "DataExactCardinality", 299 | "fields": [ 300 | { 301 | "name": "annotations", 302 | "kind": "Annotation[]" 303 | }, 304 | { 305 | "name": "superClassExpression", 306 | "kind": "ClassExpression[]" 307 | } 308 | ], 309 | "expressionKind": "ClassExpression", 310 | "kindId": 17, 311 | "cidPrefix": 49169, 312 | "cidPrefixHex": "918003" 313 | }, 314 | { 315 | "name": "ObjectProperty", 316 | "fields": [ 317 | { 318 | "name": "annotations", 319 | "kind": "Annotation[]" 320 | }, 321 | { 322 | "name": "superObjectPropertyExpression", 323 | "kind": "ObjectPropertyExpression[]" 324 | } 325 | ], 326 | "expressionKind": "ObjectPropertyExpression", 327 | "kindId": 18, 328 | "cidPrefix": 49170, 329 | "cidPrefixHex": "928003" 330 | }, 331 | { 332 | "name": "InverseObjectProperty", 333 | "fields": [ 334 | { 335 | "name": "annotations", 336 | "kind": "Annotation[]" 337 | }, 338 | { 339 | "name": "superObjectPropertyExpression", 340 | "kind": "ObjectPropertyExpression[]" 341 | } 342 | ], 343 | "expressionKind": "ObjectPropertyExpression", 344 | "kindId": 19, 345 | "cidPrefix": 49171, 346 | "cidPrefixHex": "938003" 347 | }, 348 | { 349 | "name": "DataProperty", 350 | "fields": [ 351 | { 352 | "name": "annotations", 353
| "kind": "Annotation[]" 354 | }, 355 | { 356 | "name": "superDataPropertyExpression", 357 | "kind": "DataPropertyExpression[]" 358 | }, 359 | { 360 | "name": "domain", 361 | "kind": "ClassExpression" 362 | }, 363 | { 364 | "name": "range", 365 | "kind": "DataRange" 366 | } 367 | ], 368 | "expressionKind": "DataPropertyExpression", 369 | "kindId": 20, 370 | "cidPrefix": 49172, 371 | "cidPrefixHex": "948003" 372 | }, 373 | { 374 | "name": "Annotation", 375 | "fields": [ 376 | { 377 | "name": "annotations", 378 | "kind": "Annotation[]" 379 | }, 380 | { 381 | "name": "property", 382 | "kind": "IRI", 383 | "required": true 384 | }, 385 | { 386 | "name": "value", 387 | "kind": "IRI", 388 | "required": true 389 | } 390 | ], 391 | "kindId": 21, 392 | "cidPrefix": 49173, 393 | "cidPrefixHex": "958003" 394 | }, 395 | { 396 | "name": "Individual", 397 | "fields": [ 398 | { 399 | "name": "annotations", 400 | "kind": "Annotation[]" 401 | }, 402 | { 403 | "name": "class_assertions", 404 | "kind": "ClassAssertion[]" 405 | }, 406 | { 407 | "name": "negative_class_assertions", 408 | "kind": "NegativeClassAssertion[]" 409 | }, 410 | { 411 | "name": "object_property_assertions", 412 | "kind": "ObjectPropertyAssertion[]" 413 | }, 414 | { 415 | "name": "negative_object_property_assertions", 416 | "kind": "NegativeObjectPropertyAssertion[]" 417 | }, 418 | { 419 | "name": "data_property_assertions", 420 | "kind": "DataPropertyAssertion[]" 421 | }, 422 | { 423 | "name": "negative_data_property_assertions", 424 | "kind": "NegativeDataPropertyAssertion[]" 425 | } 426 | ], 427 | "kindId": 22, 428 | "cidPrefix": 49174, 429 | "cidPrefixHex": "968003" 430 | }, 431 | { 432 | "name": "AnnotationProperty", 433 | "fields": [ 434 | { 435 | "name": "annotations", 436 | "kind": "Annotation[]" 437 | } 438 | ], 439 | "kindId": 23, 440 | "cidPrefix": 49175, 441 | "cidPrefixHex": "978003" 442 | }, 443 | { 444 | "name": "ClassAssertion", 445 | "fields": [ 446 | { 447 | "name": "annotations", 448 | "kind": "Annotation[]" 449 | }, 450 | { 451 | "name": "subject", 452 | "kind": "IRI" 453 | }, 454 | { 455 | "name": "class", 456 | "kind": "IRI", 457 | "required": true 458 | } 459 | ], 460 | "kindId": 24, 461 | "cidPrefix": 49176, 462 | "cidPrefixHex": "988003" 463 | }, 464 | { 465 | "name": "NegativeClassAssertion", 466 | "fields": [ 467 | { 468 | "name": "annotations", 469 | "kind": "Annotation[]" 470 | }, 471 | { 472 | "name": "subject", 473 | "kind": "IRI" 474 | }, 475 | { 476 | "name": "class", 477 | "kind": "IRI", 478 | "required": true 479 | } 480 | ], 481 | "kindId": 25, 482 | "cidPrefix": 49177, 483 | "cidPrefixHex": "998003" 484 | }, 485 | { 486 | "name": "ObjectPropertyAssertion", 487 | "fields": [ 488 | { 489 | "name": "annotations", 490 | "kind": "Annotation[]" 491 | }, 492 | { 493 | "name": "subject", 494 | "kind": "IRI" 495 | }, 496 | { 497 | "name": "property", 498 | "kind": "IRI" 499 | }, 500 | { 501 | "name": "target", 502 | "kind": "IRI" 503 | } 504 | ], 505 | "kindId": 26, 506 | "cidPrefix": 49178, 507 | "cidPrefixHex": "9a8003" 508 | }, 509 | { 510 | "name": "NegativeObjectPropertyAssertion", 511 | "fields": [ 512 | { 513 | "name": "annotations", 514 | "kind": "Annotation[]" 515 | }, 516 | { 517 | "name": "subject", 518 | "kind": "IRI" 519 | }, 520 | { 521 | "name": "property", 522 | "kind": "IRI" 523 | }, 524 | { 525 | "name": "target", 526 | "kind": "IRI" 527 | } 528 | ], 529 | "kindId": 27, 530 | "cidPrefix": 49179, 531 | "cidPrefixHex": "9b8003" 532 | }, 533 | { 534 | "name": "DataPropertyAssertion", 535 | 
"fields": [ 536 | { 537 | "name": "annotations", 538 | "kind": "Annotation[]" 539 | }, 540 | { 541 | "name": "subject", 542 | "kind": "IRI" 543 | }, 544 | { 545 | "name": "property", 546 | "kind": "IRI" 547 | }, 548 | { 549 | "name": "target", 550 | "kind": "IRI" 551 | } 552 | ], 553 | "kindId": 28, 554 | "cidPrefix": 49180, 555 | "cidPrefixHex": "9c8003" 556 | }, 557 | { 558 | "name": "NegativeDataPropertyAssertion", 559 | "fields": [ 560 | { 561 | "name": "annotations", 562 | "kind": "Annotation[]" 563 | }, 564 | { 565 | "name": "subject", 566 | "kind": "IRI" 567 | }, 568 | { 569 | "name": "property", 570 | "kind": "IRI" 571 | }, 572 | { 573 | "name": "target", 574 | "kind": "IRI" 575 | } 576 | ], 577 | "kindId": 29, 578 | "cidPrefix": 49181, 579 | "cidPrefixHex": "9d8003" 580 | }, 581 | { 582 | "name": "AnnotationAssertion", 583 | "fields": [ 584 | { 585 | "name": "annotations", 586 | "kind": "Annotation[]" 587 | }, 588 | { 589 | "name": "subject", 590 | "kind": "IRI" 591 | }, 592 | { 593 | "name": "property", 594 | "kind": "IRI" 595 | }, 596 | { 597 | "name": "value", 598 | "kind": "IRI" 599 | } 600 | ], 601 | "kindId": 30, 602 | "cidPrefix": 49182, 603 | "cidPrefixHex": "9e8003" 604 | }, 605 | { 606 | "name": "NegativeAnnotationAssertion", 607 | "fields": [ 608 | { 609 | "name": "annotations", 610 | "kind": "Annotation[]" 611 | }, 612 | { 613 | "name": "subject", 614 | "kind": "IRI" 615 | }, 616 | { 617 | "name": "property", 618 | "kind": "IRI" 619 | }, 620 | { 621 | "name": "value", 622 | "kind": "IRI" 623 | } 624 | ], 625 | "kindId": 31, 626 | "cidPrefix": 49183, 627 | "cidPrefixHex": "9f8003" 628 | }, 629 | { 630 | "name": "Literal", 631 | "fields": [ 632 | { 633 | "name": "datatype", 634 | "kind": "IRI" 635 | }, 636 | { 637 | "name": "value", 638 | "kind": "IRI", 639 | "required": true 640 | } 641 | ], 642 | "kindId": 32, 643 | "cidPrefix": 49184, 644 | "cidPrefixHex": "a08003" 645 | }, 646 | { 647 | "name": "Datatype", 648 | "fields": [ 649 | { 650 | "name": "annotations", 651 | "kind": "Annotation[]" 652 | } 653 | ], 654 | "kindId": 33, 655 | "cidPrefix": 49185, 656 | "cidPrefixHex": "a18003" 657 | }, 658 | { 659 | "name": "DataIntersectionOf", 660 | "fields": [ 661 | { 662 | "name": "annotations", 663 | "kind": "Annotation[]" 664 | }, 665 | { 666 | "name": "datatypes", 667 | "kind": "DatatypeExpression[]" 668 | } 669 | ], 670 | "kindId": 34, 671 | "cidPrefix": 49186, 672 | "cidPrefixHex": "a28003" 673 | }, 674 | { 675 | "name": "DataUnionOf", 676 | "fields": [ 677 | { 678 | "name": "annotations", 679 | "kind": "Annotation[]" 680 | }, 681 | { 682 | "name": "datatypes", 683 | "kind": "DatatypeExpression[]" 684 | } 685 | ], 686 | "kindId": 35, 687 | "cidPrefix": 49187, 688 | "cidPrefixHex": "a38003" 689 | }, 690 | { 691 | "name": "DataComplementOf", 692 | "fields": [ 693 | { 694 | "name": "annotations", 695 | "kind": "Annotation[]" 696 | }, 697 | { 698 | "name": "datatype", 699 | "kind": "IRI" 700 | } 701 | ], 702 | "kindId": 36, 703 | "cidPrefix": 49188, 704 | "cidPrefixHex": "a48003" 705 | }, 706 | { 707 | "name": "DataOneOf", 708 | "fields": [ 709 | { 710 | "name": "annotations", 711 | "kind": "Annotation[]" 712 | }, 713 | { 714 | "name": "values", 715 | "kind": "Literal[]" 716 | } 717 | ], 718 | "kindId": 37, 719 | "cidPrefix": 49189, 720 | "cidPrefixHex": "a58003" 721 | } 722 | ] 723 | } -------------------------------------------------------------------------------- /rlay_ontology/src/lib.rs: -------------------------------------------------------------------------------- 1 
| #![cfg_attr(not(feature = "std"), no_std)] 2 | #![cfg_attr(feature = "pwasm", feature(alloc))] 3 | // #![cfg_attr(all(feature = "wasm_bindgen", nightly), feature(custom_attribute))] 4 | 5 | #[cfg(feature = "serde")] 6 | extern crate serde; 7 | #[cfg(feature = "serde_derive")] 8 | #[macro_use] 9 | extern crate serde_derive; 10 | 11 | #[cfg(feature = "std")] 12 | use cid_fork_rlay::{Cid, Codec, Error as CidError, Version}; 13 | #[cfg(feature = "std")] 14 | use integer_encoding::VarIntReader; 15 | 16 | pub mod ontology; 17 | pub mod prelude { 18 | #[cfg(feature = "serde")] 19 | pub use crate::ontology::compact::*; 20 | #[cfg(feature = "std")] 21 | pub use crate::ontology::v0::*; 22 | #[cfg(feature = "web3_compat")] 23 | pub use crate::ontology::web3::*; 24 | pub use crate::ontology::*; 25 | } 26 | 27 | #[cfg(feature = "std")] 28 | pub trait ToCidUnknown { 29 | fn to_cid_unknown(&self, permitted: Option<u64>) -> Result<Cid, CidError>; 30 | } 31 | 32 | #[cfg(feature = "std")] 33 | impl ToCidUnknown for String { 34 | fn to_cid_unknown(&self, permitted: Option<u64>) -> Result<Cid, CidError> { 35 | let bytes = multibase::decode(self).unwrap().1; 36 | bytes.to_cid_unknown(permitted) 37 | } 38 | } 39 | 40 | #[cfg(feature = "std")] 41 | use std::io::Cursor; 42 | #[cfg(feature = "std")] 43 | impl ToCidUnknown for [u8] { 44 | fn to_cid_unknown(&self, permitted: Option<u64>) -> Result<Cid, CidError> { 45 | let mut cur = Cursor::new(self); 46 | let raw_version = cur.read_varint()?; 47 | let raw_codec = cur.read_varint()?; 48 | 49 | let version = Version::from(raw_version)?; 50 | match permitted { 51 | Some(permitted) => { 52 | if raw_codec != permitted { 53 | return Err(CidError::UnknownCodec); 54 | } 55 | } 56 | None => {} 57 | } 58 | let codec = Codec::Unknown(raw_codec); 59 | let hash = &self[cur.position() as usize..]; 60 | 61 | multihash::decode(hash)?; 62 | 63 | Ok(Cid::new(codec, version, hash)) 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /rlay_ontology/src/ontology/mod.rs: -------------------------------------------------------------------------------- 1 | #[cfg(feature = "web3_compat")] 2 | pub mod web3; 3 | 4 | #[cfg(feature = "web3_compat")] 5 | use self::web3::{FromABIV2Response, FromABIV2ResponseHinted}; 6 | #[cfg(feature = "std")] 7 | use ambassador::delegatable_trait_remote; 8 | use ambassador::{delegatable_trait, Delegate}; 9 | #[cfg(feature = "std")] 10 | use cid_fork_rlay::{Cid, Codec, Error as CidError, ToCid, Version}; 11 | #[cfg(feature = "std")] 12 | use multihash::encode; 13 | #[cfg(feature = "std")] 14 | use multihash::Hash; 15 | #[cfg(feature = "std")] 16 | use prost::Message; 17 | #[cfg(feature = "pwasm")] 18 | use pwasm_std::*; 19 | #[cfg(feature = "std")] 20 | use serde::de::{Deserialize, Deserializer}; 21 | #[cfg(feature = "wasm_bindgen")] 22 | use wasm_bindgen::prelude::*; 23 | 24 | #[cfg_attr(feature = "std", delegatable_trait_remote)] 25 | #[cfg(feature = "std")] 26 | trait ToCid { 27 | fn to_cid(&self) -> Result<Cid, CidError>; 28 | } 29 | 30 | #[delegatable_trait] 31 | pub trait Canonicalize { 32 | fn canonicalize(&mut self); 33 | } 34 | 35 | pub trait AssociatedCodec { 36 | const CODEC_CODE: u64; 37 | } 38 | 39 | pub trait CidFields<'a> { 40 | type Iter: Iterator<Item = &'a Vec<u8>>; 41 | 42 | fn iter_cid_fields(&'a self) -> Self::Iter; 43 | } 44 | 45 | pub trait DataFields<'a> { 46 | type Iter: Iterator<Item = &'a Vec<u8>>; 47 | 48 | fn iter_data_fields(&'a self) -> Self::Iter; 49 | } 50 | 51 | pub trait CidFieldNames { 52 | fn cid_field_names() -> &'static [&'static str]; 53 | } 54 | 55 | pub trait DataFieldNames { 56
| fn data_field_names() -> &'static [&'static str]; 57 | } 58 | 59 | // include!(concat!(env!("OUT_DIR"), "/rlay.ontology.rs")); 60 | include!(concat!(env!("OUT_DIR"), "/rlay.ontology.entities.rs")); 61 | 62 | include!("./rlay.ontology.macros.rs"); 63 | include!(concat!(env!("OUT_DIR"), "/rlay.ontology.macros_applied.rs")); 64 | 65 | impl EntityKind { 66 | pub fn from_event_name(event_name: &str) -> Result<Self, ()> { 67 | let name = event_name.replace("Stored", ""); 68 | 69 | Self::from_name(&name) 70 | } 71 | 72 | pub fn retrieve_fn_name(&self) -> String { 73 | format!("retrieve{}", Into::<&str>::into(self)) 74 | } 75 | } 76 | 77 | impl Entity { 78 | #[cfg(feature = "std")] 79 | pub fn to_bytes(&self) -> Vec<u8> { 80 | self.to_cid().unwrap().to_bytes() 81 | } 82 | 83 | pub fn get_subject(&self) -> Option<&Vec<u8>> { 84 | match &self { 85 | Entity::ClassAssertion(ent) => ent.get_subject(), 86 | Entity::NegativeClassAssertion(ent) => ent.get_subject(), 87 | _ => None, 88 | } 89 | } 90 | 91 | pub fn as_class_assertion(&self) -> Option<&ClassAssertion> { 92 | match *self { 93 | Entity::ClassAssertion(ref val) => Some(&*val), 94 | _ => None, 95 | } 96 | } 97 | 98 | pub fn as_negative_class_assertion(&self) -> Option<&NegativeClassAssertion> { 99 | match *self { 100 | Entity::NegativeClassAssertion(ref val) => Some(&*val), 101 | _ => None, 102 | } 103 | } 104 | } 105 | 106 | pub use self::custom::*; 107 | 108 | /// Compact serialization format that allows for omitting empty fields. 109 | #[cfg(feature = "serde")] 110 | pub mod compact { 111 | use super::*; 112 | 113 | pub trait FormatCompact<'a> { 114 | type Formatted: serde::Deserialize<'a> + serde::Serialize; 115 | 116 | fn to_compact_format(self) -> Self::Formatted; 117 | 118 | fn from_compact_format(formatted: Self::Formatted) -> Self; 119 | } 120 | 121 | include!(concat!(env!("OUT_DIR"), "/rlay.ontology.compact.rs")); 122 | } 123 | 124 | /// Hand-written extension traits that expose values common over some of the entity kinds. 125 | mod custom { 126 | use super::*; 127 | 128 | pub trait GetAssertionComplement { 129 | type Complement; 130 | 131 | fn get_assertion_complement(&self) -> Self::Complement; 132 | } 133 | 134 | impl GetAssertionComplement for ClassAssertion { 135 | type Complement = NegativeClassAssertion; 136 | 137 | fn get_assertion_complement(&self) -> Self::Complement { 138 | NegativeClassAssertion { 139 | annotations: vec![], 140 | subject: self.subject.clone(), 141 | class: self.class.clone(), 142 | } 143 | } 144 | } 145 | 146 | impl GetAssertionComplement for NegativeClassAssertion { 147 | type Complement = ClassAssertion; 148 | 149 | fn get_assertion_complement(&self) -> Self::Complement { 150 | ClassAssertion { 151 | annotations: vec![], 152 | subject: self.subject.clone(), 153 | class: self.class.clone(), 154 | } 155 | } 156 | } 157 | 158 | impl ClassAssertion { 159 | pub fn get_subject(&self) -> Option<&Vec<u8>> { 160 | self.subject.as_ref() 161 | } 162 | } 163 | 164 | impl NegativeClassAssertion { 165 | pub fn get_subject(&self) -> Option<&Vec<u8>> { 166 | self.subject.as_ref() 167 | } 168 | } 169 | } 170 | 171 | /// Serialization format for the canonical v0 cbor-based format.
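///
/// A rough usage sketch (hypothetical values, mirroring the calls made in `src/bin.rs`; the
/// `EntityV0` wrapper and its `serialize`/`deserialize` come from the generated code included
/// below):
///
/// ```ignore
/// let entity = EntityV0::Annotation(Annotation::default());
/// let mut buf: Vec<u8> = Vec::new();
/// entity.serialize(&mut buf);
/// let decoded = EntityV0::deserialize(&mut std::io::Cursor::new(buf)).unwrap();
/// ```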
172 | #[cfg(feature = "std")] 173 | pub mod v0 { 174 | use super::*; 175 | use crate::ontology::compact::FormatCompact; 176 | use integer_encoding::VarIntReader; 177 | use integer_encoding::VarIntWriter; 178 | 179 | include!(concat!(env!("OUT_DIR"), "/rlay.ontology.v0.rs")); 180 | } 181 | -------------------------------------------------------------------------------- /rlay_ontology/src/ontology/rlay.ontology.macros.rs: -------------------------------------------------------------------------------- 1 | macro_rules! impl_to_cid { 2 | ($v:path) => ( 3 | impl ToCid for $v { 4 | fn to_cid(&self) -> Result<Cid, CidError> { 5 | let mut encoded = Vec::<u8>::new(); 6 | let mut cloned = self.clone(); 7 | cloned.canonicalize(); 8 | cloned.encode(&mut encoded).map_err(|_| CidError::ParsingError)?; 9 | let hashed = encode(Hash::Keccak256, &encoded).map_err(|_| CidError::ParsingError)?; 10 | 11 | let cid = Cid::new(Codec::Unknown(<$v as AssociatedCodec>::CODEC_CODE), Version::V1, &hashed); 12 | Ok(cid) 13 | } 14 | }) 15 | ; 16 | } 17 | 18 | macro_rules! codec_code { 19 | ($v:path, $c:expr) => ( 20 | impl AssociatedCodec for $v { 21 | const CODEC_CODE: u64 = $c; 22 | } 23 | ); 24 | } 25 | 26 | macro_rules! impl_canonicalize { 27 | ($v:path; $($field_name:ident),*) => ( 28 | impl Canonicalize for $v { 29 | fn canonicalize(&mut self) { 30 | $(self.$field_name.sort());* 31 | } 32 | } 33 | ); 34 | } 35 | 36 | macro_rules! impl_into_entity_kind { 37 | ($v:path, $wrapper:path) => ( 38 | impl Into<Entity> for $v { 39 | fn into(self) -> Entity { 40 | $wrapper(self) 41 | } 42 | } 43 | ); 44 | } 45 | -------------------------------------------------------------------------------- /rlay_ontology/src/ontology/web3.rs: -------------------------------------------------------------------------------- 1 | //! Serialization format compatible with the Web3 ecosystem, specifically the Web3 JSONRPC.
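//!
//! Byte fields are rendered as `0x`-prefixed hex strings and entities carry a `"type"` tag.
//! A sketch of the resulting JSON shape (hypothetical values; the field layout mirrors the
//! fixture in `tests/web3.rs`):
//!
//! ```json
//! { "type": "Annotation", "property": "0x...", "value": "0x..." }
//! ```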
2 | use super::*; 3 | use ethereum_types::U256; 4 | use rustc_hex::{FromHex, ToHex}; 5 | use serde::de::{self, Deserialize, Deserializer, MapAccess, Visitor}; 6 | use serde::ser::{Serialize, SerializeSeq, SerializeStruct}; 7 | 8 | #[derive(Clone)] 9 | pub struct FormatWeb3<T>(pub T); 10 | 11 | pub trait SerializeFormatWeb3 { 12 | fn serialize_format_web3<S>(&self, serializer: S) -> Result<S::Ok, S::Error> 13 | where 14 | S: serde::Serializer; 15 | } 16 | 17 | impl<T: SerializeFormatWeb3> serde::Serialize for FormatWeb3<T> { 18 | fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> 19 | where 20 | S: serde::Serializer, 21 | { 22 | SerializeFormatWeb3::serialize_format_web3(&self.0, serializer) 23 | } 24 | } 25 | 26 | impl<T> From<T> for FormatWeb3<T> { 27 | fn from(original: T) -> Self { 28 | FormatWeb3(original) 29 | } 30 | } 31 | 32 | impl<T: SerializeFormatWeb3> SerializeFormatWeb3 for &T { 33 | fn serialize_format_web3<S>(&self, serializer: S) -> Result<S::Ok, S::Error> 34 | where 35 | S: serde::Serializer, 36 | { 37 | SerializeFormatWeb3::serialize_format_web3(*self, serializer) 38 | } 39 | } 40 | 41 | impl SerializeFormatWeb3 for Vec<Vec<u8>> { 42 | fn serialize_format_web3<S>(&self, serializer: S) -> Result<S::Ok, S::Error> 43 | where 44 | S: serde::Serializer, 45 | { 46 | let mut seq = serializer.serialize_seq(Some(self.len()))?; 47 | for entry in self.iter() { 48 | seq.serialize_element(&FormatWeb3(entry))?; 49 | } 50 | seq.end() 51 | } 52 | } 53 | 54 | impl<T: SerializeFormatWeb3> SerializeFormatWeb3 for Option<T> { 55 | fn serialize_format_web3<S>(&self, serializer: S) -> Result<S::Ok, S::Error> 56 | where 57 | S: serde::Serializer, 58 | { 59 | match self { 60 | Some(inner) => SerializeFormatWeb3::serialize_format_web3(inner, serializer), 61 | None => serializer.serialize_none(), 62 | } 63 | } 64 | } 65 | 66 | impl SerializeFormatWeb3 for Vec<u8> { 67 | fn serialize_format_web3<S>(&self, serializer: S) -> Result<S::Ok, S::Error> 68 | where 69 | S: serde::Serializer, 70 | { 71 | serializer.serialize_str(&format!("0x{}", self.to_hex::<String>())) 72 | } 73 | } 74 | 75 | pub trait DeserializeFormatWeb3<'de>: Sized { 76 | fn deserialize_format_web3<D>(deserializer: D) -> Result<Self, D::Error> 77 | where 78 | D: Deserializer<'de>; 79 | } 80 | 81 | impl<'de, T: DeserializeFormatWeb3<'de> + Clone> Deserialize<'de> for FormatWeb3<T> { 82 | fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> 83 | where 84 | D: Deserializer<'de>, 85 | { 86 | DeserializeFormatWeb3::deserialize_format_web3(deserializer) 87 | } 88 | } 89 | 90 | impl<'de, T: DeserializeFormatWeb3<'de> + Clone> DeserializeFormatWeb3<'de> for FormatWeb3<T> { 91 | fn deserialize_format_web3<D>(deserializer: D) -> Result<Self, D::Error> 92 | where 93 | D: Deserializer<'de>, 94 | { 95 | Ok(FormatWeb3(DeserializeFormatWeb3::deserialize_format_web3( 96 | deserializer, 97 | )?)) 98 | } 99 | } 100 | 101 | impl<'de> DeserializeFormatWeb3<'de> for Vec<u8> { 102 | fn deserialize_format_web3<D>(deserializer: D) -> Result<Self, D::Error> 103 | where 104 | D: Deserializer<'de>, 105 | { 106 | struct StringVisitor; 107 | 108 | impl<'de> Visitor<'de> for StringVisitor { 109 | type Value = Vec<u8>; 110 | 111 | fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { 112 | write!(formatter, "a hex encoded string prefixed by 0x") 113 | } 114 | 115 | fn visit_str<E>(self, s: &str) -> Result<Self::Value, E> 116 | where 117 | E: de::Error, 118 | { 119 | if !s.starts_with("0x") { 120 | return Err(de::Error::invalid_value(de::Unexpected::Str(s), &self)); 121 | } 122 | Ok(s[2..].from_hex().map_err(de::Error::custom)?) 
123 | } 124 | } 125 | 126 | deserializer.deserialize_str(StringVisitor) 127 | } 128 | } 129 | 130 | /// Decode a single ethabi param of type bytes 131 | fn decode_bytes(bytes: &[u8]) -> Vec<u8> { 132 | let length = U256::from_big_endian(&bytes[0..32]); 133 | bytes[((32) as usize)..((length).as_u64() as usize + 32)].to_owned() 134 | } 135 | 136 | /// Decode a single ethabi param of type bytes[] 137 | fn decode_bytes_array(bytes: &[u8]) -> Vec<Vec<u8>> { 138 | let num_elements = U256::from_big_endian(&bytes[0..32]); 139 | 140 | let element_offsets: Vec<U256> = (0..num_elements.as_u64()) 141 | .map(|element_i| { 142 | let element_data_offset = U256::from_big_endian( 143 | // additional offset of 1 to account for leading word that holds the number of elements 144 | &bytes[(32 * (element_i + 1) as usize)..(32 * (element_i + 2) as usize)], 145 | ); 146 | // + 32 because of leading word 147 | element_data_offset + Into::<U256>::into(32) 148 | }) 149 | .collect(); 150 | 151 | element_offsets 152 | .into_iter() 153 | .map(|element_start_offset| { 154 | decode_bytes(&bytes[(element_start_offset.as_u64() as usize)..bytes.len()]) 155 | }) 156 | .collect() 157 | } 158 | 159 | fn to_option_bytes(bytes: Vec<u8>) -> Option<Vec<u8>> { 160 | match bytes.len() { 161 | 0 => None, 162 | _ => Some(bytes), 163 | } 164 | } 165 | 166 | pub trait FromABIV2Response { 167 | fn from_abiv2(bytes: &[u8]) -> Self; 168 | } 169 | 170 | pub trait FromABIV2ResponseHinted { 171 | fn from_abiv2(bytes: &[u8], kind: &EntityKind) -> Self; 172 | } 173 | 174 | macro_rules! decode_offset { 175 | ($bytes_var:ident, $offset_var:ident, $start:expr, $end:expr) => { 176 | let $offset_var = U256::from_big_endian(&$bytes_var[$start..$end]); 177 | }; 178 | } 179 | 180 | macro_rules! decode_param { 181 | (bytes_array; $bytes_var:ident, $param_var:ident, $start:expr, $end:expr) => { 182 | let $param_var = 183 | decode_bytes_array(&$bytes_var[($start.as_u64() as usize)..($end.as_u64() as usize)]); 184 | }; 185 | (bytes_array; $bytes_var:ident, $param_var:ident, $start:expr) => { 186 | let $param_var = 187 | decode_bytes_array(&$bytes_var[($start.as_u64() as usize)..$bytes_var.len()]); 188 | }; 189 | (bytes; $bytes_var:ident, $param_var:ident, $start:expr, $end:expr) => { 190 | let $param_var = 191 | decode_bytes(&$bytes_var[($start.as_u64() as usize)..($end.as_u64() as usize)]); 192 | }; 193 | (bytes; $bytes_var:ident, $param_var:ident, $start:expr) => { 194 | let $param_var = decode_bytes(&$bytes_var[($start.as_u64() as usize)..$bytes_var.len()]); 195 | }; 196 | } 197 | 198 | include!(concat!(env!("OUT_DIR"), "/rlay.ontology.web3_applied.rs")); 199 | -------------------------------------------------------------------------------- /rlay_ontology/tests/core.rs: -------------------------------------------------------------------------------- 1 | use rlay_ontology::prelude::*; 2 | 3 | #[test] 4 | fn entity_variants() { 5 | let entity_variants = EntityKind::variants(); 6 | 7 | assert!(entity_variants.contains(&"Annotation")); 8 | } 9 | 10 | #[test] 11 | fn annotation_data_field_names() { 12 | assert!(Annotation::data_field_names().contains(&"value")); 13 | } 14 | 15 | #[test] 16 | fn annotation_cid_field_names() { 17 | assert!(Annotation::cid_field_names().contains(&"property")); 18 | } 19 | 20 | #[test] 21 | fn call_with_entity_kinds() { 22 | let mut _abc = vec![]; 23 | 24 | macro_rules! 
test_field_names { 25 | ($kind:path) => { 26 | _abc = <$kind>::data_field_names().into_iter().collect(); 27 | }; 28 | } 29 | 30 | rlay_ontology::call_with_entity_kinds!(ALL; test_field_names!); 31 | } 32 | -------------------------------------------------------------------------------- /rlay_ontology/tests/protobuf_format.rs: -------------------------------------------------------------------------------- 1 | use cid_fork_rlay::ToCid; 2 | use prost::Message; 3 | use rlay_ontology::ontology::*; 4 | use rustc_hex::FromHex; 5 | 6 | #[test] 7 | fn protobuf_format_encoding() { 8 | let klass = Class { 9 | annotations: vec![b"\x01\x02\x03".to_vec()], 10 | ..Class::default() 11 | }; 12 | 13 | let mut encoded_klass = Vec::::new(); 14 | klass.encode(&mut encoded_klass).unwrap(); 15 | 16 | let expected_bytes = b"\x0a\x03\x01\x02\x03".to_vec(); 17 | assert_eq!(expected_bytes, encoded_klass); 18 | } 19 | 20 | #[test] 21 | fn protobuf_format_decoding() { 22 | let bytes = b"\x0a\x05\x01\x02\x03\x02\x03".to_vec(); 23 | 24 | let expected_klass = Class { 25 | annotations: vec![b"\x01\x02\x03\x02\x03".to_vec()], 26 | ..Class::default() 27 | }; 28 | 29 | let decoded_klass = Class::decode(bytes.as_slice()).unwrap(); 30 | 31 | assert_eq!(expected_klass, decoded_klass); 32 | } 33 | 34 | #[test] 35 | /// This highlights one of the shortcomings of the protobuf format: different entities are encoded 36 | /// to the same bytes 37 | fn protobuf_format_encoding_equal() { 38 | let ann = DataPropertyAssertion { 39 | annotations: vec![b"\x01\x02\x03".to_vec()], 40 | ..DataPropertyAssertion::default() 41 | }; 42 | 43 | let klass = Class { 44 | annotations: vec![b"\x01\x02\x03".to_vec()], 45 | ..Class::default() 46 | }; 47 | 48 | let mut encoded_ann = Vec::::new(); 49 | ann.encode(&mut encoded_ann).unwrap(); 50 | 51 | let mut encoded_klass = Vec::::new(); 52 | klass.encode(&mut encoded_klass).unwrap(); 53 | 54 | assert_eq!(encoded_ann, encoded_klass); 55 | } 56 | 57 | #[test] 58 | fn protobuf_cid() { 59 | let klass = Class { 60 | annotations: vec![b"\x01\x02\x03".to_vec()], 61 | ..Class::default() 62 | }; 63 | 64 | let cid = klass.to_cid().unwrap().to_bytes(); 65 | 66 | let expected_bytes: Vec = 67 | "018080031b20e74c92dfbce4b3219c3106b978aa99b8c0b1a34f90e60c947615752d37c210f9" 68 | .from_hex() 69 | .unwrap(); 70 | assert_eq!(expected_bytes, cid); 71 | } 72 | -------------------------------------------------------------------------------- /rlay_ontology/tests/web3.rs: -------------------------------------------------------------------------------- 1 | #[macro_use] 2 | extern crate serde_json; 3 | 4 | use rlay_ontology::prelude::*; 5 | 6 | #[test] 7 | fn ignores_cid_field() { 8 | let content = json!({ 9 | "cid": "0x1234", 10 | "type": "Annotation", 11 | "property": "0x", 12 | "value": "0x" 13 | }); 14 | 15 | let parsed_annotation: FormatWeb3 = serde_json::from_value(content).unwrap(); 16 | let expected_annotation: Entity = Annotation::default().into(); 17 | 18 | assert_eq!(expected_annotation, parsed_annotation.0); 19 | } 20 | -------------------------------------------------------------------------------- /rlay_ontology_build/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "rlay_ontology_build" 3 | description = "Rlay ontology model" 4 | version = "0.2.5" 5 | authors = ["Maximilian Goisser "] 6 | license = "MIT OR Apache-2.0" 7 | edition = "2018" 8 | 9 | [lib] 10 | path = "src/lib.rs" 11 | 12 | [dependencies] 13 | serde = { version = "1.0.66", 
default-features = false } 14 | serde_derive = "1.0.66" 15 | heck = "0.3.0" 16 | syn = { version = "1.0", features = ["full", "extra-traits"] } 17 | quote = "1.0" 18 | proc-macro2 = "1.0" 19 | serde_json = { version = "1.0.27", optional = true } 20 | 21 | [features] 22 | default = ["std"] 23 | std = ["serde_json"] 24 | -------------------------------------------------------------------------------- /rlay_ontology_build/src/compact.rs: -------------------------------------------------------------------------------- 1 | use super::*; 2 | 3 | pub fn build_file(src_path: &str, out_path: &str) { 4 | let out_dir = env::var("OUT_DIR").unwrap(); 5 | let dest_path = Path::new(&out_dir).join(out_path); 6 | 7 | let mut intermediate_file = File::open(src_path).expect("file not found"); 8 | 9 | let mut intermediate_contents = String::new(); 10 | intermediate_file 11 | .read_to_string(&mut intermediate_contents) 12 | .unwrap(); 13 | let intermediate = parse_intermediate_contents(&intermediate_contents); 14 | 15 | let mut out_file = File::create(&dest_path).unwrap(); 16 | 17 | let kinds = intermediate.kinds; 18 | for raw_kind in kinds { 19 | let kind_name = &raw_kind.name; 20 | let fields = raw_kind.fields.clone(); 21 | 22 | write_variant_format_compact(&mut out_file, kind_name, &fields); 23 | } 24 | } 25 | 26 | fn write_variant_format_compact(writer: &mut W, kind_name: &str, fields: &[Field]) { 27 | write_format_variant_wrapper(writer, "Compact", kind_name, fields, true); 28 | write_format_compact_impl_serialize(writer, kind_name, fields); 29 | write_format_compact_impl_deserialize(writer, kind_name, fields); 30 | } 31 | 32 | fn write_format_compact_impl_serialize( 33 | writer: &mut W, 34 | kind_name: &str, 35 | fields: &[Field], 36 | ) { 37 | let helper_fields: TokenStream = fields 38 | .iter() 39 | .map(|field| { 40 | let field_ident = field.field_ident(); 41 | let tokens: TokenStream = match (field.is_array_kind(), field.required) { 42 | (true, _) => parse_quote! { 43 | #[serde(skip_serializing_if = "Vec::is_empty")] 44 | // TODO: bytes serialize 45 | pub #field_ident: &'a Vec>, 46 | }, 47 | (false, true) => parse_quote! { 48 | #[serde(with = "serde_bytes")] 49 | pub #field_ident: &'a Vec, 50 | }, 51 | (false, false) => parse_quote! { 52 | #[serde(skip_serializing_if = "Option::is_none")] 53 | // TODO: bytes serialize 54 | pub #field_ident: &'a Option>, 55 | }, 56 | }; 57 | tokens 58 | }) 59 | .collect(); 60 | 61 | let wrap_helper_fields: TokenStream = fields 62 | .iter() 63 | .map(|field| { 64 | let field_ident = field.field_ident(); 65 | let tokens: TokenStream = parse_quote!(#field_ident: &self.inner.#field_ident,); 66 | tokens 67 | }) 68 | .collect(); 69 | 70 | let wrapper_ty: syn::Type = syn::parse_str(&format!("{}FormatCompact", kind_name)).unwrap(); 71 | let trait_impl: TokenStream = parse_quote! { 72 | #[cfg(feature = "std")] 73 | impl ::serde::Serialize for #wrapper_ty { 74 | fn serialize(&self, serializer: S) -> Result 75 | where 76 | S: ::serde::Serializer, 77 | { 78 | #[derive(Serialize)] 79 | #[allow(non_snake_case)] 80 | struct SerializeHelper<'a> { 81 | #helper_fields 82 | } 83 | 84 | let ext = SerializeHelper { 85 | #wrap_helper_fields 86 | }; 87 | 88 | Ok(ext.serialize(serializer)?) 
89 | } 90 | } 91 | }; 92 | write!(writer, "{}", trait_impl).unwrap(); 93 | } 94 | 95 | fn write_format_compact_impl_deserialize( 96 | writer: &mut W, 97 | kind_name: &str, 98 | fields: &[Field], 99 | ) { 100 | let helper_fields: TokenStream = fields 101 | .iter() 102 | .map(|field| { 103 | let field_ident = field.field_ident(); 104 | let stmt: TokenStream = match (field.is_array_kind(), field.required) { 105 | (true, _) => parse_quote! { 106 | #[serde(default, deserialize_with = "nullable_vec")] 107 | #field_ident: Vec>, 108 | }, 109 | (false, true) => parse_quote! { 110 | #[serde(with = "serde_bytes")] 111 | #field_ident: Vec, 112 | }, 113 | (false, false) => parse_quote! { 114 | #[serde(default)] 115 | // TODO: bytes serialize 116 | #field_ident: Option>, 117 | }, 118 | }; 119 | stmt 120 | }) 121 | .collect(); 122 | 123 | let kind_ty: syn::Type = syn::parse_str(kind_name).unwrap(); 124 | let wrapper_ty: syn::Type = syn::parse_str(&format!("{}FormatCompact", kind_name)).unwrap(); 125 | let field_idents: Vec<_> = fields.iter().map(|n| n.field_ident()).collect(); 126 | let constructor_call: TokenStream = parse_quote! { 127 | Ok(#wrapper_ty { 128 | inner: #kind_ty { 129 | #(#field_idents: helper_instance.#field_idents), 130 | * 131 | } 132 | }) 133 | }; 134 | 135 | let trait_impl: TokenStream = parse_quote! { 136 | #[cfg(feature = "std")] 137 | impl<'de> Deserialize<'de> for #wrapper_ty { 138 | fn deserialize(deserializer: D) -> Result 139 | where D: Deserializer<'de>, 140 | { 141 | #[derive(Deserialize)] 142 | struct DeserializeHelper { 143 | #helper_fields 144 | } 145 | 146 | #[allow(dead_code)] 147 | fn nullable_vec<'de, D>(deserializer: D) -> Result>, D::Error> 148 | where D: Deserializer<'de> 149 | { 150 | let opt: Option> = Option::deserialize(deserializer)?; 151 | let val = opt 152 | .unwrap_or_else(Vec::new) 153 | .into_iter() 154 | .map(|n| (*n).to_vec()) 155 | .collect(); 156 | Ok(val) 157 | } 158 | 159 | let helper_instance = DeserializeHelper::deserialize(deserializer)?; 160 | #constructor_call 161 | } 162 | } 163 | }; 164 | 165 | write!(writer, "{}", trait_impl).unwrap(); 166 | } 167 | -------------------------------------------------------------------------------- /rlay_ontology_build/src/core.rs: -------------------------------------------------------------------------------- 1 | use super::*; 2 | 3 | pub fn build_macros_applied_file(src_path: &str, out_path: &str) { 4 | let out_dir = env::var("OUT_DIR").unwrap(); 5 | let dest_path = Path::new(&out_dir).join(out_path); 6 | 7 | let mut intermediate_file = File::open(src_path).expect("file not found"); 8 | 9 | let mut intermediate_contents = String::new(); 10 | intermediate_file 11 | .read_to_string(&mut intermediate_contents) 12 | .unwrap(); 13 | let intermediate = parse_intermediate_contents(&intermediate_contents); 14 | 15 | let mut out_file = File::create(&dest_path).unwrap(); 16 | 17 | let kinds = intermediate.kinds; 18 | for raw_kind in kinds.iter() { 19 | let kind_name = &raw_kind.name; 20 | let kind_cid_prefix = raw_kind.cidPrefix; 21 | 22 | // Header line 23 | write!(out_file, "\n// {}\n", kind_name).unwrap(); 24 | // impl AssociatedCodec 25 | write!( 26 | out_file, 27 | "codec_code!({}, {});\n", 28 | kind_name, kind_cid_prefix 29 | ) 30 | .unwrap(); 31 | // impl ToCid 32 | let kind_ty: syn::Type = syn::parse_str(kind_name).unwrap(); 33 | let impl_to_cid: TokenStream = parse_quote! 
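// The quoted ToCid impl below is gated behind "std", since CID construction relies on prost encoding and multihash hashing.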
{ 34 | #[cfg(feature = "std")] 35 | impl_to_cid!(#kind_ty); 36 | }; 37 | write!(out_file, "{}", impl_to_cid).unwrap(); 38 | // impl Canonicalize 39 | { 40 | let kind_ty: syn::Type = syn::parse_str(kind_name).unwrap(); 41 | let fields: Vec = raw_kind 42 | .fields 43 | .clone() 44 | .into_iter() 45 | .filter(|n| n.is_array_kind()) 46 | .map(|n| n.field_ident()) 47 | .collect(); 48 | let impl_canonicalize: TokenStream = parse_quote! { 49 | impl_canonicalize!(#kind_ty; #(#fields),*); 50 | }; 51 | write!(out_file, "{}", impl_canonicalize).unwrap(); 52 | } 53 | // impl CidFields 54 | write_impl_cid_fields(&mut out_file, kind_name, &raw_kind.fields); 55 | // impl DataFields 56 | write_impl_data_fields(&mut out_file, kind_name, &raw_kind.fields); 57 | // impl CidFieldNames 58 | write_impl_cid_field_names(&mut out_file, kind_name, &raw_kind.fields); 59 | // impl DataFieldNames 60 | write_impl_data_field_names(&mut out_file, kind_name, &raw_kind.fields); 61 | 62 | write!( 63 | out_file, 64 | "impl_into_entity_kind!({0}, Entity::{0});\n", 65 | kind_name 66 | ) 67 | .unwrap(); 68 | } 69 | 70 | let kind_names: Vec = kinds 71 | .iter() 72 | .map(|raw_kind| raw_kind.name.to_owned()) 73 | .collect(); 74 | let kind_ids: Vec = kinds.iter().map(|raw_kind| raw_kind.kindId).collect(); 75 | let kind_types: Vec = kind_names 76 | .iter() 77 | .map(|kind_name| syn::parse_str(kind_name).unwrap()) 78 | .collect(); 79 | 80 | let macro_call_with_entity_kinds = quote! { 81 | #[macro_export] 82 | macro_rules! call_with_entity_kinds { 83 | (ALL; $cb:ident!) => { 84 | #($cb!(#kind_types);)* 85 | }; 86 | } 87 | }; 88 | write!(out_file, "{}", macro_call_with_entity_kinds,).unwrap(); 89 | write_entity_kind(&mut out_file, kind_names.clone(), kind_ids.clone()); 90 | write_entity(&mut out_file, kind_names.clone()); 91 | } 92 | 93 | fn get_cid_fields(kind_name: &str, fields: &[Field]) -> Vec { 94 | fields 95 | .to_owned() 96 | .into_iter() 97 | .filter(|field| { 98 | if kind_name == "Annotation" && field.name == "value" { 99 | return false; 100 | } 101 | if kind_name == "DataPropertyAssertion" && field.name == "target" { 102 | return false; 103 | } 104 | if kind_name == "NegativeDataPropertyAssertion" && field.name == "target" { 105 | return false; 106 | } 107 | true 108 | }) 109 | .collect() 110 | } 111 | 112 | fn write_impl_cid_field_names(writer: &mut W, kind_name: &str, fields: &[Field]) { 113 | let fields = get_cid_fields(kind_name, fields); 114 | let kind_ty: syn::Type = syn::parse_str(kind_name).unwrap(); 115 | 116 | let field_names: Vec<_> = fields.into_iter().map(|n| n.name).collect(); 117 | 118 | let impl_for_struct: TokenStream = parse_quote! { 119 | impl CidFieldNames for #kind_ty { 120 | fn cid_field_names() -> &'static [&'static str] { 121 | &[#(#field_names),*] 122 | } 123 | } 124 | }; 125 | write!(writer, "{}", impl_for_struct).unwrap(); 126 | } 127 | 128 | fn write_impl_cid_fields(writer: &mut W, kind_name: &str, fields: &[Field]) { 129 | let fields = get_cid_fields(kind_name, fields); 130 | let kind_ty: syn::Type = syn::parse_str(kind_name).unwrap(); 131 | let iter_struct_name: syn::Type = syn::parse_str(&format!("{}CidFields", kind_name)).unwrap(); 132 | 133 | if fields.is_empty() { 134 | let impl_cid_fields: TokenStream = parse_quote! 
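// Kinds without any CID fields get a trivial implementation backed by core::iter::empty().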
{ 135 | impl<'a> CidFields<'a> for #kind_ty { 136 | type Iter = core::iter::Empty>; 137 | 138 | fn iter_cid_fields(&'a self) -> Self::Iter { 139 | core::iter::empty() 140 | } 141 | } 142 | }; 143 | write!(writer, "{}", impl_cid_fields).unwrap(); 144 | return; 145 | } 146 | 147 | let iter_struct: TokenStream = parse_quote! { 148 | pub struct #iter_struct_name<'a> { 149 | #[allow(dead_code)] 150 | inner: &'a #kind_ty, 151 | #[allow(dead_code)] 152 | field_index: usize, 153 | #[allow(dead_code)] 154 | field_vec_index: usize, 155 | } 156 | }; 157 | write!(writer, "{}", iter_struct).unwrap(); 158 | 159 | let kind_ty: syn::Type = syn::parse_str(kind_name).unwrap(); 160 | let iter_struct_name: syn::Type = syn::parse_str(&format!("{}CidFields", kind_name)).unwrap(); 161 | let iter_struct_impl: TokenStream = parse_quote! { 162 | impl<'a> #iter_struct_name<'a> { 163 | fn new(inner: &'a #kind_ty) -> Self { 164 | Self { 165 | inner, 166 | field_index: 0, 167 | field_vec_index: 0, 168 | } 169 | } 170 | } 171 | }; 172 | write!(writer, "{}", iter_struct_impl).unwrap(); 173 | 174 | let iter_blocks: Vec = fields 175 | .iter() 176 | .map(|field| { 177 | let field_ident = field.field_ident(); 178 | let stmt: TokenStream = match (field.is_array_kind(), field.required) { 179 | (true, _) => parse_quote! { 180 | item = self.inner.#field_ident.get(self.field_vec_index); 181 | self.field_vec_index += 1; 182 | if self.inner.#field_ident.len() <= self.field_vec_index { 183 | self.field_vec_index = 0; 184 | self.field_index += 1; 185 | } 186 | }, 187 | (false, true) => parse_quote! { 188 | item = Some(&self.inner.#field_ident); 189 | self.field_index += 1; 190 | }, 191 | (false, false) => parse_quote! { 192 | item = self.inner.#field_ident.as_ref(); 193 | self.field_index += 1; 194 | }, 195 | }; 196 | stmt 197 | }) 198 | .collect(); 199 | 200 | let iter_struct_name: syn::Type = syn::parse_str(&format!("{}CidFields", kind_name)).unwrap(); 201 | let field_indices: Vec<_> = (0..fields.len()).collect(); 202 | let iter_struct_impl_iterator: TokenStream = parse_quote! { 203 | impl<'a> Iterator for #iter_struct_name<'a> { 204 | type Item = &'a Vec; 205 | 206 | fn next(&mut self) -> Option { 207 | let mut item = None; 208 | 209 | #( 210 | if item == None && self.field_index == #field_indices { 211 | #iter_blocks 212 | } 213 | )* 214 | 215 | item 216 | } 217 | } 218 | }; 219 | write!(writer, "{}", iter_struct_impl_iterator).unwrap(); 220 | 221 | let kind_ty: syn::Type = syn::parse_str(kind_name).unwrap(); 222 | let iter_struct_name: syn::Type = syn::parse_str(&format!("{}CidFields", kind_name)).unwrap(); 223 | let impl_cid_fields: TokenStream = parse_quote! 
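// Wire the generated per-kind iterator struct up as that kind's CidFields implementation.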
{ 224 | impl<'a> CidFields<'a> for #kind_ty { 225 | type Iter = #iter_struct_name<'a>; 226 | 227 | fn iter_cid_fields(&'a self) -> #iter_struct_name { 228 | #iter_struct_name::new(self) 229 | } 230 | } 231 | }; 232 | write!(writer, "{}", impl_cid_fields).unwrap(); 233 | } 234 | 235 | fn get_data_fields(kind_name: &str, fields: &[Field]) -> Vec { 236 | fields 237 | .to_owned() 238 | .into_iter() 239 | .filter(|field| { 240 | if kind_name == "Annotation" && field.name == "value" { 241 | return true; 242 | } 243 | if kind_name == "AnnotationAssertion" && field.name == "value" { 244 | return true; 245 | } 246 | if kind_name == "NegativeAnnotationAssertion" && field.name == "value" { 247 | return true; 248 | } 249 | if kind_name == "DataPropertyAssertion" && field.name == "target" { 250 | return true; 251 | } 252 | if kind_name == "NegativeDataPropertyAssertion" && field.name == "target" { 253 | return true; 254 | } 255 | false 256 | }) 257 | .collect() 258 | } 259 | 260 | fn write_impl_data_field_names(writer: &mut W, kind_name: &str, fields: &[Field]) { 261 | let fields = get_data_fields(kind_name, fields); 262 | let kind_ty: syn::Type = syn::parse_str(kind_name).unwrap(); 263 | 264 | let field_names: Vec<_> = fields.into_iter().map(|n| n.name).collect(); 265 | 266 | let impl_for_struct: TokenStream = parse_quote! { 267 | impl DataFieldNames for #kind_ty { 268 | fn data_field_names() -> &'static [&'static str] { 269 | &[#(#field_names),*] 270 | } 271 | } 272 | }; 273 | write!(writer, "{}", impl_for_struct).unwrap(); 274 | } 275 | 276 | fn write_impl_data_fields(writer: &mut W, kind_name: &str, fields: &[Field]) { 277 | let fields = get_data_fields(kind_name, fields); 278 | 279 | let kind_ty: syn::Type = syn::parse_str(kind_name).unwrap(); 280 | let iter_struct_name: syn::Type = syn::parse_str(&format!("{}DataFields", kind_name)).unwrap(); 281 | let iter_struct: TokenStream = parse_quote! { 282 | pub struct #iter_struct_name<'a> { 283 | #[allow(dead_code)] 284 | inner: &'a #kind_ty, 285 | #[allow(dead_code)] 286 | field_index: usize, 287 | #[allow(dead_code)] 288 | field_vec_index: usize, 289 | } 290 | }; 291 | write!(writer, "{}", iter_struct).unwrap(); 292 | 293 | let kind_ty: syn::Type = syn::parse_str(kind_name).unwrap(); 294 | let iter_struct_name: syn::Type = syn::parse_str(&format!("{}DataFields", kind_name)).unwrap(); 295 | let iter_struct_impl: TokenStream = parse_quote! { 296 | impl<'a> #iter_struct_name<'a> { 297 | fn new(inner: &'a #kind_ty) -> Self { 298 | Self { 299 | inner, 300 | field_index: 0, 301 | field_vec_index: 0, 302 | } 303 | } 304 | } 305 | }; 306 | write!(writer, "{}", iter_struct_impl).unwrap(); 307 | 308 | let iter_blocks: Vec = fields 309 | .iter() 310 | .map(|field| { 311 | let field_ident = field.field_ident(); 312 | let stmt: TokenStream = match (field.is_array_kind(), field.required) { 313 | (true, _) => parse_quote! { 314 | item = self.inner.#field_ident.get(self.field_vec_index); 315 | self.field_vec_index += 1; 316 | if self.inner.#field_ident.len() <= self.field_vec_index { 317 | self.field_vec_index = 0; 318 | self.field_index += 1; 319 | } 320 | }, 321 | (false, true) => parse_quote! { 322 | item = Some(&self.inner.#field_ident); 323 | self.field_index += 1; 324 | }, 325 | (false, false) => parse_quote! 
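// Optional field: yield its value if set, then advance to the next field index.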
{ 326 | item = self.inner.#field_ident.as_ref(); 327 | self.field_index += 1; 328 | }, 329 | }; 330 | stmt 331 | }) 332 | .collect(); 333 | 334 | let iter_struct_name: syn::Type = syn::parse_str(&format!("{}DataFields", kind_name)).unwrap(); 335 | let field_indices: Vec<_> = (0..fields.len()).collect(); 336 | let iter_struct_impl_iterator: TokenStream = match fields.is_empty() { 337 | true => { 338 | parse_quote! { 339 | impl<'a> Iterator for #iter_struct_name<'a> { 340 | type Item = &'a Vec; 341 | 342 | fn next(&mut self) -> Option { 343 | None 344 | } 345 | } 346 | } 347 | } 348 | false => { 349 | parse_quote! { 350 | impl<'a> Iterator for #iter_struct_name<'a> { 351 | type Item = &'a Vec; 352 | 353 | fn next(&mut self) -> Option { 354 | let mut item = None; 355 | 356 | #( 357 | if item == None && self.field_index == #field_indices { 358 | #iter_blocks 359 | } 360 | )* 361 | 362 | item 363 | } 364 | } 365 | } 366 | } 367 | }; 368 | write!(writer, "{}", iter_struct_impl_iterator).unwrap(); 369 | 370 | let kind_ty: syn::Type = syn::parse_str(kind_name).unwrap(); 371 | let iter_struct_name: syn::Type = syn::parse_str(&format!("{}DataFields", kind_name)).unwrap(); 372 | let impl_cid_fields: TokenStream = parse_quote! { 373 | impl<'a> DataFields<'a> for #kind_ty { 374 | type Iter = #iter_struct_name<'a>; 375 | 376 | fn iter_data_fields(&'a self) -> #iter_struct_name { 377 | #iter_struct_name::new(self) 378 | } 379 | } 380 | }; 381 | write!(writer, "{}", impl_cid_fields).unwrap(); 382 | } 383 | 384 | fn write_entity_kind(writer: &mut W, kind_names: Vec, kind_ids: Vec) { 385 | let variants = kind_names_types(&kind_names); 386 | // EntityKind 387 | { 388 | let type_impl: TokenStream = parse_quote! { 389 | #[derive(Debug, Clone, PartialEq, strum_macros::EnumVariantNames)] 390 | pub enum EntityKind { 391 | #(#variants), 392 | * 393 | } 394 | }; 395 | write!(writer, "{}", type_impl).unwrap(); 396 | } 397 | // Into<&'a str> 398 | { 399 | let trait_impl: TokenStream = parse_quote! { 400 | impl<'a> Into<&'a str> for EntityKind { 401 | fn into(self) -> &'a str { 402 | match &self { 403 | #(EntityKind::#variants => #kind_names), 404 | * 405 | } 406 | } 407 | } 408 | 409 | impl<'a> Into<&'a str> for &'a EntityKind { 410 | fn into(self) -> &'a str { 411 | match &self { 412 | #(EntityKind::#variants => #kind_names), 413 | * 414 | } 415 | } 416 | } 417 | }; 418 | write!(writer, "{}", trait_impl).unwrap(); 419 | } 420 | // impl EntityKind 421 | { 422 | let trait_impl: TokenStream = parse_quote! { 423 | impl EntityKind { 424 | pub fn from_name(name: &str) -> Result { 425 | match name { 426 | #(#kind_names => Ok(EntityKind::#variants)),*, 427 | _ => Err(()), 428 | } 429 | } 430 | 431 | pub fn empty_entity(&self) -> Entity { 432 | match self { 433 | #(EntityKind::#variants => #variants::default().into()),* 434 | } 435 | } 436 | 437 | pub fn id(&self) -> u64 { 438 | match self { 439 | #(EntityKind::#variants => #kind_ids),* 440 | } 441 | } 442 | } 443 | }; 444 | write!(writer, "{}", trait_impl).unwrap(); 445 | } 446 | } 447 | 448 | fn write_entity(writer: &mut W, kind_names: Vec) { 449 | let variants = kind_names_types(&kind_names); 450 | 451 | // Entity 452 | { 453 | let type_impl: TokenStream = parse_quote! 
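// Template for the Entity enum itself: one variant per kind, with Canonicalize (and, under "std", ToCid) delegated to the wrapped struct via ambassador.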
{ 454 | #[derive(Debug, Clone, PartialEq, Delegate)] 455 | #[delegate(Canonicalize)] 456 | #[cfg_attr(feature = "std", delegate(ToCid))] 457 | pub enum Entity { 458 | #(#variants(#variants)), 459 | * 460 | } 461 | }; 462 | write!(writer, "{}", type_impl).unwrap(); 463 | } 464 | // impl CidFields 465 | { 466 | let variants_iter_structs: Vec = kind_names 467 | .clone() 468 | .into_iter() 469 | .map(|variant| syn::parse_str(&format!("{}CidFields", variant)).unwrap()) 470 | .collect(); 471 | 472 | let enum_impl: TokenStream = parse_quote! { 473 | pub enum EntityCidFields<'a> { 474 | #(#variants(#variants_iter_structs<'a>)), 475 | * 476 | } 477 | }; 478 | write!(writer, "{}", enum_impl).unwrap(); 479 | 480 | let enum_impl_iterator: TokenStream = parse_quote! { 481 | impl<'a> Iterator for EntityCidFields<'a> { 482 | type Item = &'a Vec; 483 | 484 | fn next(&mut self) -> Option { 485 | match self { 486 | #(EntityCidFields::#variants(inner) => inner.next()), 487 | * 488 | } 489 | } 490 | } 491 | }; 492 | write!(writer, "{}", enum_impl_iterator).unwrap(); 493 | 494 | let trait_impl: TokenStream = parse_quote! { 495 | impl<'a> CidFields<'a> for Entity { 496 | type Iter = EntityCidFields<'a>; 497 | 498 | fn iter_cid_fields(&'a self) -> EntityCidFields { 499 | match self { 500 | #(Entity::#variants(inner) => EntityCidFields::#variants(inner.iter_cid_fields())), 501 | * 502 | } 503 | } 504 | } 505 | }; 506 | write!(writer, "{}", trait_impl).unwrap(); 507 | } 508 | // impl DataFields 509 | { 510 | let variants_iter_structs: Vec = kind_names 511 | .clone() 512 | .into_iter() 513 | .map(|variant| syn::parse_str(&format!("{}DataFields", variant)).unwrap()) 514 | .collect(); 515 | 516 | let enum_impl: TokenStream = parse_quote! { 517 | pub enum EntityDataFields<'a> { 518 | #(#variants(#variants_iter_structs<'a>)), 519 | * 520 | } 521 | }; 522 | write!(writer, "{}", enum_impl).unwrap(); 523 | 524 | let enum_impl_iterator: TokenStream = parse_quote! { 525 | impl<'a> Iterator for EntityDataFields<'a> { 526 | type Item = &'a Vec; 527 | 528 | fn next(&mut self) -> Option { 529 | match self { 530 | #(EntityDataFields::#variants(inner) => inner.next()), 531 | * 532 | } 533 | } 534 | } 535 | }; 536 | write!(writer, "{}", enum_impl_iterator).unwrap(); 537 | 538 | let trait_impl: TokenStream = parse_quote! { 539 | impl<'a> DataFields<'a> for Entity { 540 | type Iter = EntityDataFields<'a>; 541 | 542 | fn iter_data_fields(&'a self) -> EntityDataFields { 543 | match self { 544 | #(Entity::#variants(inner) => EntityDataFields::#variants(inner.iter_data_fields())), 545 | * 546 | } 547 | } 548 | } 549 | }; 550 | write!(writer, "{}", trait_impl).unwrap(); 551 | } 552 | // impl Entity 553 | { 554 | let type_impl: TokenStream = parse_quote! { 555 | impl Entity { 556 | pub fn kind(&self) -> EntityKind { 557 | match &self { 558 | #(Entity::#variants(_) => EntityKind::#variants), 559 | * 560 | } 561 | } 562 | } 563 | }; 564 | write!(writer, "{}", type_impl).unwrap(); 565 | } 566 | // impl FromABIV2ResponseHinted 567 | { 568 | let trait_impl: TokenStream = parse_quote! 
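// ABI-encoded responses are untagged, so decoding into an Entity needs the EntityKind passed in as a hint.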
569 |             #[cfg(feature = "web3_compat")]
570 |             impl FromABIV2ResponseHinted for Entity {
571 |                 fn from_abiv2(bytes: &[u8], kind: &EntityKind) -> Self {
572 |                     match kind {
573 |                         #(EntityKind::#variants => Entity::#variants(FromABIV2Response::from_abiv2(bytes))),
574 |                         *
575 |                     }
576 |                 }
577 |             }
578 |         };
579 |         write!(writer, "{}", trait_impl).unwrap();
580 |     }
581 | }
582 | 
--------------------------------------------------------------------------------
/rlay_ontology_build/src/entities.rs:
--------------------------------------------------------------------------------
1 | use super::*;
2 | 
3 | pub fn build_file(src_path: &str, out_path: &str) {
4 |     let out_dir = env::var("OUT_DIR").unwrap();
5 |     let dest_path = Path::new(&out_dir).join(out_path);
6 | 
7 |     let mut intermediate_file = File::open(src_path).expect("file not found");
8 | 
9 |     let mut intermediate_contents = String::new();
10 |     intermediate_file
11 |         .read_to_string(&mut intermediate_contents)
12 |         .unwrap();
13 |     let intermediate = parse_intermediate_contents(&intermediate_contents);
14 | 
15 |     let mut out_file = File::create(&dest_path).unwrap();
16 | 
17 |     let kinds = intermediate.kinds;
18 |     for raw_kind in kinds.iter() {
19 |         let kind_name = &raw_kind.name;
20 |         let fields: Vec<_> = raw_kind.fields.clone();
21 | 
22 |         write_entity(&mut out_file, kind_name, &fields);
23 |     }
24 | }
25 | 
26 | fn write_entity<W: Write>(writer: &mut W, kind_name: &str, fields: &[Field]) {
27 |     let fields: TokenStream = fields
28 |         .iter()
29 |         .enumerate()
30 |         .map(|(i, field)| {
31 |             let field_ident = field.field_ident();
32 |             let i_str = (i + 1).to_string();
33 |             let prost_attribute: TokenStream = match (field.is_array_kind(), field.required) {
34 |                 (true, _) => parse_quote!(prost(bytes, repeated, tag=#i_str)),
35 |                 (false, true) => parse_quote!(prost(bytes, required, tag=#i_str)),
36 |                 (false, false) => parse_quote!(prost(bytes, optional, tag=#i_str)),
37 |             };
38 |             let field_ty: syn::Type = match (field.is_array_kind(), field.required) {
39 |                 (true, _) => parse_quote!(Vec<Vec<u8>>),
40 |                 (false, true) => parse_quote!(Vec<u8>),
41 |                 (false, false) => parse_quote!(Option<Vec<u8>>),
42 |             };
43 |             let tokens: TokenStream = parse_quote! {
44 |                 #[cfg_attr(feature = "std", #prost_attribute)]
45 |                 pub #field_ident: #field_ty,
46 |             };
47 |             tokens
48 |         })
49 |         .collect();
50 | 
51 |     let entity_ty: syn::Type = syn::parse_str(kind_name).unwrap();
52 |     let entity_struct: TokenStream = parse_quote! {
53 |         #[derive(Clone, PartialEq)]
54 |         #[cfg_attr(not(feature = "std"), derive(Debug, Default))]
55 |         #[cfg_attr(feature = "std", derive(Message))]
56 |         pub struct #entity_ty {
57 |             #fields
58 |         }
59 |     };
60 |     write!(writer, "{}", entity_struct).unwrap();
61 | }
62 | 
--------------------------------------------------------------------------------
/rlay_ontology_build/src/intermediate.rs:
--------------------------------------------------------------------------------
1 | #![allow(non_snake_case)]
2 | use heck::SnakeCase;
3 | use std::collections::BTreeMap;
4 | use syn;
5 | 
6 | pub fn parse_intermediate_contents(contents: &str) -> Intermediate {
7 |     serde_json::from_str(contents).unwrap()
8 | }
9 | 
10 | #[derive(Deserialize, Serialize, Clone)]
11 | pub struct Intermediate {
12 |     pub kinds: Vec<Kind>,
13 | }
14 | 
15 | #[derive(Deserialize, Serialize, Clone)]
16 | pub struct Kind {
17 |     pub name: String,
18 |     pub fields: Vec<Field>,
19 |     pub expressionKind: Option<String>,
20 |     pub kindId: u64,
21 |     pub cidPrefix: u64,
22 |     pub cidPrefixHex: String,
23 | }
24 | 
25 | #[derive(Deserialize, Serialize, Clone)]
26 | pub struct Field {
27 |     pub name: String,
28 |     pub kind: String,
29 |     #[serde(default)]
30 |     pub required: bool,
31 | }
32 | 
33 | impl Field {
34 |     pub fn is_array_kind(&self) -> bool {
35 |         self.kind.ends_with("[]")
36 |     }
37 | 
38 |     pub fn field_ident(&self) -> syn::Ident {
39 |         syn::parse_str(&self.name.to_snake_case()).unwrap()
40 |     }
41 | }
42 | 
--------------------------------------------------------------------------------
/rlay_ontology_build/src/lib.rs:
--------------------------------------------------------------------------------
1 | #![allow(unused_imports)]
2 | extern crate heck;
3 | extern crate proc_macro2;
4 | #[macro_use]
5 | extern crate quote;
6 | extern crate serde;
7 | #[macro_use]
8 | extern crate serde_derive;
9 | #[macro_use]
10 | extern crate syn;
11 | 
12 | #[cfg(feature = "serde_json")]
13 | extern crate serde_json;
14 | 
15 | mod compact;
16 | mod core;
17 | mod entities;
18 | mod intermediate;
19 | mod v0;
20 | mod web3;
21 | 
22 | use heck::SnakeCase;
23 | use proc_macro2::TokenStream;
24 | use std::env;
25 | use std::fs::File;
26 | use std::io::prelude::*;
27 | use std::io::Write;
28 | use std::path::Path;
29 | use std::process::Command;
30 | 
31 | use crate::intermediate::{parse_intermediate_contents, Field, Kind};
32 | 
33 | pub fn build_files() {
34 |     entities::build_file("src/intermediate.json", "rlay.ontology.entities.rs");
35 |     fmt_file("rlay.ontology.entities.rs");
36 |     core::build_macros_applied_file("src/intermediate.json", "rlay.ontology.macros_applied.rs");
37 |     fmt_file("rlay.ontology.macros_applied.rs");
38 |     web3::build_applied_file("src/intermediate.json", "rlay.ontology.web3_applied.rs");
39 |     fmt_file("rlay.ontology.web3_applied.rs");
40 |     compact::build_file("src/intermediate.json", "rlay.ontology.compact.rs");
41 |     fmt_file("rlay.ontology.compact.rs");
42 |     v0::build_file("src/intermediate.json", "rlay.ontology.v0.rs");
43 |     fmt_file("rlay.ontology.v0.rs");
44 | }
45 | 
46 | fn fmt_file(path: &str) {
47 |     let rustfmt_available = Command::new("which")
48 |         .arg("rustfmt")
49 |         .output()
50 |         .unwrap()
51 |         .status
52 |         .success();
53 |     if !rustfmt_available {
54 |         return;
55 |     }
56 | 
57 |     let out_dir = env::var("OUT_DIR").unwrap();
58 |     let dest_path = Path::new(&out_dir).join(path);
59 |     Command::new("rustfmt").arg(dest_path).output().unwrap();
60 | }
61 | 
62 | fn kind_names_types(kind_names: &[String]) -> Vec<syn::Type> {
63 |     kind_names
64 |         .iter()
65 |         .map(|n| syn::parse_str(n).unwrap())
66 |         .collect()
67 | }
68 | 
69 | fn write_format_variant_wrapper<W: Write>(
70 |     writer: &mut W,
71 |     format_suffix: &str,
72 |     kind_name: &str,
73 |     _fields: &[Field],
74 |     write_conversion_trait: bool,
75 | ) {
76 |     // Wrapper
77 |     let wrapper_ty: syn::Type =
78 |         syn::parse_str(&format!("{}Format{}", kind_name, format_suffix)).unwrap();
79 |     let inner_ty: syn::Type = syn::parse_str(kind_name).unwrap();
80 |     let wrapper_struct: TokenStream = parse_quote! {
81 |         #[cfg_attr(feature = "wasm_bindgen", wasm_bindgen)]
82 |         #[derive(Debug, Clone, PartialEq, Default)]
83 |         pub struct #wrapper_ty {
84 |             inner: #inner_ty
85 |         }
86 |     };
87 |     write!(writer, "{}", wrapper_struct).unwrap();
88 |     // From
89 |     {
90 |         let trait_impl: TokenStream = parse_quote! {
91 |             impl From<#inner_ty> for #wrapper_ty {
92 |                 fn from(original: #inner_ty) -> Self {
93 |                     Self {
94 |                         inner: original
95 |                     }
96 |                 }
97 |             }
98 |         };
99 |         write!(writer, "{}", trait_impl).unwrap();
100 |     }
101 |     // Into
102 |     {
103 |         let trait_impl: TokenStream = parse_quote! {
104 |             impl Into<#inner_ty> for #wrapper_ty {
105 |                 fn into(self) -> #inner_ty {
106 |                     self.inner
107 |                 }
108 |             }
109 |         };
110 |         write!(writer, "{}", trait_impl).unwrap();
111 |     }
112 |     if write_conversion_trait {
113 |         let conversion_trait: syn::Type =
114 |             syn::parse_str(&format!("Format{}", format_suffix)).unwrap();
115 |         let format_suffix_lc = format_suffix.to_lowercase();
116 |         let to_fn_ident = format_ident!("to_{}_format", format_suffix_lc);
117 |         let from_fn_ident = format_ident!("from_{}_format", format_suffix_lc);
118 | 
119 |         let trait_impl: TokenStream = parse_quote! {
120 |             #[cfg(feature = "std")]
121 |             impl<'a> #conversion_trait<'a> for #inner_ty {
122 |                 type Formatted = #wrapper_ty;
123 |                 fn #to_fn_ident(self) -> Self::Formatted {
124 |                     #wrapper_ty::from(self)
125 |                 }
126 | 
127 |                 fn #from_fn_ident(formatted: Self::Formatted) -> Self {
128 |                     formatted.into()
129 |                 }
130 |             }
131 |         };
132 |         write!(writer, "{}", trait_impl).unwrap();
133 |     }
134 | }
135 | 
--------------------------------------------------------------------------------
/rlay_ontology_build/src/v0.rs:
--------------------------------------------------------------------------------
1 | use super::*;
2 | 
3 | pub fn build_file(src_path: &str, out_path: &str) {
4 |     let out_dir = env::var("OUT_DIR").unwrap();
5 |     let dest_path = Path::new(&out_dir).join(out_path);
6 | 
7 |     let mut intermediate_file = File::open(src_path).expect("file not found");
8 | 
9 |     let mut intermediate_contents = String::new();
10 |     intermediate_file
11 |         .read_to_string(&mut intermediate_contents)
12 |         .unwrap();
13 |     let intermediate = parse_intermediate_contents(&intermediate_contents);
14 | 
15 |     let mut out_file = File::create(&dest_path).unwrap();
16 | 
17 |     let kinds = intermediate.kinds;
18 |     let kind_names: Vec<String> = kinds
19 |         .iter()
20 |         .map(|raw_kind| raw_kind.name.to_owned())
21 |         .collect();
22 |     let kind_ids: Vec<u64> = kinds.iter().map(|raw_kind| raw_kind.kindId).collect();
23 | 
24 |     write_entity(&mut out_file, kind_names, kind_ids);
25 | }
26 | 
27 | fn write_entity<W: Write>(writer: &mut W, kind_names: Vec<String>, kind_ids: Vec<u64>) {
28 |     let variants = kind_names_types(&kind_names);
29 | 
30 |     // Entity
31 |     {
32 |         let type_impl: TokenStream = parse_quote! {
33 |             #[derive(Debug, Clone, PartialEq)]
34 |             pub enum EntityV0 {
35 |                 #(#variants(#variants)),
36 |                 *
37 |             }
38 |         };
39 |         write!(writer, "{}", type_impl).unwrap();
40 |     }
41 |     // impl ToCid
42 |     // {
43 |     //     let variants = variants.clone();
44 |     //     let trait_impl: TokenStream = parse_quote! {
45 |     //         impl ToCid for Entity {
46 |     //             fn to_cid(&self) -> Result {
47 |     //                 match &self {
48 |     //                     #(Entity::#variants(ent) => ent.to_cid()),
49 |     //                     *
50 |     //                 }
51 |     //             }
52 |     //         }
53 |     //     };
54 |     //     write!(writer, "{}", trait_impl).unwrap();
55 |     // }
56 |     // impl Into<Entity>
57 |     {
58 |         let type_impl: TokenStream = parse_quote! {
59 |             impl Into<Entity> for EntityV0 {
60 |                 fn into(self) -> Entity {
61 |                     match self {
62 |                         #(EntityV0::#variants(ent) => Entity::#variants(ent)),
63 |                         *
64 |                     }
65 |                 }
66 |             }
67 |         };
68 |         write!(writer, "{}", type_impl).unwrap();
69 |     }
70 |     // impl Into<EntityV0>
71 |     {
72 |         let type_impl: TokenStream = parse_quote! {
73 |             impl Into<EntityV0> for Entity {
74 |                 fn into(self) -> EntityV0 {
75 |                     match self {
76 |                         #(Entity::#variants(ent) => EntityV0::#variants(ent)),
77 |                         *
78 |                     }
79 |                 }
80 |             }
81 |         };
82 |         write!(writer, "{}", type_impl).unwrap();
83 |     }
84 |     // impl EntityV0
85 |     {
86 |         let trait_impl: TokenStream = parse_quote! {
87 |             impl EntityV0 {
88 |                 #[cfg(feature = "std")]
89 |                 pub fn serialize<W: std::io::Write>(&self, writer: &mut W) -> Result<(), std::io::Error> {
90 |                     let version_number = 0;
91 |                     writer.write_varint(version_number)?;
92 | 
93 |                     let kind_id = Into::<Entity>::into(self.clone()).kind().id();
94 |                     writer.write_varint(kind_id)?;
95 | 
96 |                     Ok(match &self {
97 |                         #(&EntityV0::#variants(ent) => serde_cbor::ser::to_writer(writer, &ent.clone().to_compact_format()).unwrap()),
98 |                         *
99 |                     })
100 |                 }
101 | 
102 |                 #[cfg(feature = "std")]
103 |                 pub fn deserialize<R: std::io::Read>(reader: &mut R) -> Result<Self, std::io::Error> {
104 |                     let version_number: u64 = reader.read_varint()?;
105 |                     if version_number != 0 {
106 |                         // TODO
107 |                         panic!("Can only parse version 0 entity.");
108 |                     }
109 | 
110 |                     let kind_id: u64 = reader.read_varint()?;
111 |                     Ok(match kind_id {
112 |                         #(#kind_ids => EntityV0::#variants(FormatCompact::from_compact_format(serde_cbor::de::from_reader(reader).unwrap()))),
113 |                         *,
114 |                         // TODO
115 |                         _ => panic!("Unrecognized kind id.")
116 |                     })
117 |                 }
118 |             }
119 |         };
120 |         write!(writer, "{}", trait_impl).unwrap();
121 |     }
122 | }
123 | 
--------------------------------------------------------------------------------
/rlay_ontology_build/src/web3.rs:
--------------------------------------------------------------------------------
1 | use super::*;
2 | 
3 | pub fn build_applied_file(src_path: &str, out_path: &str) {
4 |     let out_dir = env::var("OUT_DIR").unwrap();
5 |     let dest_path = Path::new(&out_dir).join(out_path);
6 | 
7 |     let mut intermediate_file = File::open(src_path).expect("file not found");
8 | 
9 |     let mut intermediate_contents = String::new();
10 |     intermediate_file
11 |         .read_to_string(&mut intermediate_contents)
12 |         .unwrap();
13 |     let intermediate = parse_intermediate_contents(&intermediate_contents);
14 | 
15 |     let mut out_file = File::create(&dest_path).unwrap();
16 | 
17 |     let kinds = intermediate.kinds;
18 |     // impl FromABIV2Response
19 |     for raw_kind in kinds.iter() {
20 |         let kind_name = &raw_kind.name;
21 |         let fields = raw_kind.fields.clone();
22 | 
23 |         write_entity_impl_from_abiv2_response(&mut out_file, raw_kind);
24 |         write_variant_format_web3(&mut out_file, kind_name, &fields);
25 |     }
26 | 
27 |     let kind_names: Vec<String> = kinds
28 |         .iter()
29 |         .map(|raw_kind| raw_kind.name.to_owned())
30 |         .collect();
31 |     write_entity_format_web3(&mut out_file, kind_names);
32 | }
33 | 
34 | fn write_entity_impl_from_abiv2_response<W: Write>(writer: &mut W, raw_kind: &Kind) {
35 |     let kind_name = &raw_kind.name;
36 |     let fields = raw_kind.fields.clone();
37 | 
38 |     let decode_offset_macros: TokenStream = fields
39 |         .iter()
40 |         .enumerate()
41 |         .map(|(i, field)| {
42 |             let offset_ident = format_ident!("{}_offset", field.name.to_snake_case());
43 |             let offset_start = i * 32;
44 |             let offset_end = (i + 1) * 32;
45 |             let tokens: TokenStream = parse_quote! {
46 |                 decode_offset!(bytes, #offset_ident, #offset_start, #offset_end);
47 |             };
48 |             tokens
49 |         })
50 |         .collect();
51 | 
52 |     let decode_param_macros: TokenStream = fields
53 |         .iter()
54 |         .enumerate()
55 |         .map(|(i, field)| {
56 |             let field_kind_marker: syn::Ident = syn::parse_str(match field.is_array_kind() {
57 |                 true => "bytes_array",
58 |                 false => "bytes",
59 |             }).unwrap();
60 | 
61 |             let field_ident = field.field_ident();
62 |             let offset_ident = format_ident!("{}_offset", field.name.to_snake_case());
63 | 
64 |             let next_field = fields.get(i + 1);
65 |             let tokens: TokenStream = match next_field {
66 |                 Some(next_field) => {
67 |                     let next_offset_ident = format_ident!("{}_offset", next_field.name.to_snake_case());
68 |                     parse_quote! {
69 |                         decode_param!(#field_kind_marker; bytes, #field_ident, #offset_ident, #next_offset_ident);
70 |                     }
71 |                 }
72 |                 None => {
73 |                     parse_quote! {
74 |                         decode_param!(#field_kind_marker; bytes, #field_ident, #offset_ident);
75 |                     }
76 |                 }
77 |             };
78 |             tokens
79 |         })
80 |         .collect();
81 | 
82 |     let wrap_option_fields: TokenStream = fields
83 |         .iter()
84 |         .filter_map(|field| {
85 |             if field.required || field.is_array_kind() {
86 |                 return None;
87 |             }
88 |             let field_ident = field.field_ident();
89 |             let tokens: TokenStream = parse_quote! {
90 |                 let #field_ident: Option<Vec<u8>> = to_option_bytes(#field_ident);
91 |             };
92 |             Some(tokens)
93 |         })
94 |         .collect();
95 | 
96 |     let field_idents: Vec<_> = fields.iter().map(|n| n.field_ident()).collect();
97 |     let constructor: TokenStream = parse_quote! {
98 |         Self {
99 |             #(#field_idents),*
100 |         }
101 |     };
102 | 
103 |     let kind_name_ty: syn::Type = syn::parse_str(kind_name).unwrap();
104 |     let trait_impl: TokenStream = parse_quote! {
105 |         impl FromABIV2Response for #kind_name_ty {
106 |             fn from_abiv2(bytes: &[u8]) -> Self {
107 |                 #decode_offset_macros
108 |                 #decode_param_macros
109 |                 #wrap_option_fields
110 | 
111 |                 #constructor
112 |             }
113 |         }
114 |     };
115 |     write!(writer, "{}", trait_impl,).unwrap();
116 | }
117 | 
118 | fn write_entity_format_web3<W: Write>(writer: &mut W, kind_names: Vec<String>) {
119 |     let variants = kind_names_types(&kind_names);
120 |     let wrapper_variants: Vec<syn::Type> = kind_names
121 |         .iter()
122 |         .map(|n| syn::parse_str(&format!("FormatWeb3<{}>", n)).unwrap())
123 |         .collect();
124 | 
125 |     // SerializeFormatWeb3 for Entity
126 |     {
127 |         let type_impl: TokenStream = parse_quote! {
128 |             impl SerializeFormatWeb3 for Entity {
129 |                 fn serialize_format_web3<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
130 |                 where
131 |                     S: serde::Serializer,
132 |                 {
133 |                     #[derive(Serialize)]
134 |                     #[serde(tag = "type")]
135 |                     pub enum EntityFormatWeb3 {
136 |                         #(#variants(#wrapper_variants)),
137 |                         *
138 |                     }
139 | 
140 |                     impl Into<EntityFormatWeb3> for Entity {
141 |                         fn into(self) -> EntityFormatWeb3 {
142 |                             match self {
143 |                                 #(Entity::#variants(ent) => EntityFormatWeb3::#variants(ent.into())),
144 |                                 *
145 |                             }
146 |                         }
147 |                     }
148 | 
149 |                     let proxy: EntityFormatWeb3 = self.to_owned().into();
150 |                     proxy.serialize(serializer)
151 |                 }
152 |             }
153 |         };
154 |         write!(writer, "{}", type_impl).unwrap();
155 |     }
156 |     // DeserializeFormatWeb3 for Entity
157 |     {
158 |         let type_impl: TokenStream = parse_quote! {
159 |             impl<'de> DeserializeFormatWeb3<'de> for Entity {
160 |                 fn deserialize_format_web3<D>(deserializer: D) -> Result<Self, D::Error>
161 |                 where
162 |                     D: Deserializer<'de>,
163 |                 {
164 |                     #[derive(Deserialize)]
165 |                     #[serde(tag = "type")]
166 |                     pub enum EntityFormatWeb3 {
167 |                         #(#variants(#wrapper_variants)),
168 |                         *
169 |                     }
170 | 
171 |                     impl From<EntityFormatWeb3> for Entity {
172 |                         fn from(original: EntityFormatWeb3) -> Entity {
173 |                             match original {
174 |                                 #(EntityFormatWeb3::#variants(ent) => Entity::#variants(ent.0)),
175 |                                 *
176 |                             }
177 |                         }
178 |                     }
179 | 
180 |                     let deserialized = EntityFormatWeb3::deserialize(deserializer)?;
181 |                     Ok(deserialized.into())
182 |                 }
183 |             }
184 |         };
185 |         write!(writer, "{}", type_impl).unwrap();
186 |     }
187 | }
188 | 
189 | fn write_variant_format_web3<W: Write>(writer: &mut W, kind_name: &str, fields: &[Field]) {
190 |     write_format_web3_impl_serialize_format_web3(writer, kind_name, fields);
191 |     write_format_web3_impl_deserialize_format_web3(writer, kind_name, fields);
192 | }
193 | 
194 | fn write_format_web3_impl_serialize_format_web3<W: Write>(
195 |     writer: &mut W,
196 |     kind_name: &str,
197 |     fields: &[Field],
198 | ) {
199 |     let kind_type: syn::Type = syn::parse_str(kind_name).unwrap();
200 |     // +1 for "cid" field
201 |     let field_num = fields.len() + 1;
202 |     let field_idents: Vec<syn::Ident> = fields.iter().map(|field| field.field_ident()).collect();
203 |     let field_names: Vec<String> = fields.iter().map(|n| n.name.to_string()).collect();
204 |     let trait_impl: TokenStream = parse_quote! {
205 |         impl SerializeFormatWeb3 for #kind_type {
206 |             fn serialize_format_web3<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
207 |             where
208 |                 S: serde::Serializer,
209 |             {
210 |                 let mut s = serializer.serialize_struct(#kind_name, #field_num)?;
211 |                 s.serialize_field("cid", &self.to_cid().ok().map(|n| FormatWeb3(n.to_bytes())))?;
212 |                 #(s.serialize_field(#field_names, &FormatWeb3(&self.#field_idents))?;)*
213 | 
214 |                 s.end()
215 |             }
216 |         }
217 |     };
218 | 
219 |     write!(writer, "{}", trait_impl).unwrap();
220 | }
221 | 
222 | fn write_format_web3_impl_deserialize_format_web3<W: Write>(
223 |     writer: &mut W,
224 |     kind_name: &str,
225 |     fields: &[Field],
226 | ) {
227 |     let field_names: Vec<String> = fields.iter().map(|n| n.name.clone()).collect();
228 |     let field_names_const_decl: TokenStream = syn::parse_str(&format!(
229 |         "const FIELDS: &'static [&'static str] = &{:?};",
230 |         field_names
231 |     ))
232 |     .unwrap();
233 | 
234 |     // initializes an empty Option variable for each field
235 |     let initialize_empty_fields: TokenStream = fields
236 |         .iter()
237 |         .map(|field| {
238 |             let field_ident = field.field_ident();
239 |             let stmt: TokenStream = parse_quote!(let mut #field_ident: Option<_> = None;);
240 |             stmt
241 |         })
242 |         .collect();
243 | 
244 |     // tries to extract and set the field variable if the field exists in the map
245 |     let field_names_raw: Vec<String> = fields.iter().map(|n| n.name.clone()).collect();
246 |     let field_names_snake: Vec<syn::Ident> = fields.iter().map(|n| n.field_ident()).collect();
247 |     let extract_key_blocks: Vec<TokenStream> = fields
248 |         .iter()
249 |         .map(|field| {
250 |             let field_ident = field.field_ident();
251 |             let stmt: TokenStream = match (field.is_array_kind(), field.required) {
252 |                 (true, _) => parse_quote! {
253 |                     let inner_val: Vec<FormatWeb3<Vec<u8>>> = map.next_value()?;
254 |                     let inner_val: Vec<Vec<u8>> = inner_val.into_iter().map(|n| n.0).collect();
255 |                     #field_ident = Some(inner_val);
256 |                 },
257 |                 (false, true) => parse_quote! {
258 |                     let inner_val: FormatWeb3<Vec<u8>> = map.next_value()?;
259 |                     #field_ident = Some(inner_val.0);
260 |                 },
261 |                 (false, false) => parse_quote! {
262 |                     let inner_val: Option<FormatWeb3<Vec<u8>>> = map.next_value()?;
263 |                     #field_ident = Some(inner_val.map(|n| n.0));
264 |                 },
265 |             };
266 |             stmt
267 |         })
268 |         .collect();
269 |     let extract_keys_loop: TokenStream = parse_quote! {
270 |         loop {
271 |             let key = map.next_key::<String>()?;
272 |             match key {
273 |                 #(
274 |                     Some(ref key) if key == #field_names_raw => {
275 |                         if #field_names_snake.is_some() {{
276 |                             return Err(de::Error::duplicate_field(#field_names_raw));
277 |                         }}
278 |                         #extract_key_blocks
279 |                     }
280 |                 )*
281 |                 // ignore unknown fields
282 |                 Some(_) => {}
283 |                 None => break,
284 |             }
285 |         }
286 |     };
287 | 
288 |     let enforce_required_fields: Vec<TokenStream> = fields
289 |         .iter()
290 |         .map(|field| {
291 |             let field_name_raw = &field.name;
292 |             let field_ident = field.field_ident();
293 |             let stmt: TokenStream = match field.required {
294 |                 true => parse_quote! {
295 |                     let #field_ident = #field_ident.ok_or(de::Error::missing_field(#field_name_raw))?;
296 |                 },
297 |                 false => parse_quote! {
298 |                     let #field_ident = #field_ident.unwrap_or_default();
299 |                 },
300 |             };
301 |             stmt
302 |         })
303 |         .collect();
304 |     let enforce_required_fields: TokenStream = parse_quote! {
305 |         #(#enforce_required_fields)
306 |         *
307 |     };
308 | 
309 |     let kind_ty: syn::Type = syn::parse_str(kind_name).unwrap();
310 |     let field_idents: Vec<_> = fields.iter().map(|n| n.field_ident()).collect();
311 |     let constructor_call: TokenStream = parse_quote! {
312 |         Ok(#kind_ty {
313 |             #(#field_idents),
314 |             *
315 |         })
316 |     };
317 | 
318 |     let expecting_msg = format!("struct {}", kind_name);
319 |     let trait_impl: TokenStream = parse_quote! {
320 |         impl<'de> DeserializeFormatWeb3<'de> for #kind_ty {
321 |             fn deserialize_format_web3<D>(deserializer: D) -> Result<Self, D::Error>
322 |             where D: Deserializer<'de>,
323 |             {
324 |                 struct ThisEntityVisitor;
325 | 
326 |                 #field_names_const_decl
327 | 
328 |                 impl<'de> Visitor<'de> for ThisEntityVisitor {
329 |                     type Value = #kind_ty;
330 | 
331 |                     fn expecting(&self, formatter: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
332 |                         formatter.write_str(#expecting_msg)
333 |                     }
334 | 
335 |                     fn visit_map<V>(self, mut map: V) -> Result<Self::Value, V::Error>
336 |                     where V: MapAccess<'de>,
337 |                     {
338 |                         #initialize_empty_fields
339 |                         #extract_keys_loop
340 |                         #enforce_required_fields
341 |                         #constructor_call
342 | 
343 |                     }
344 |                 }
345 |                 deserializer.deserialize_struct(#kind_name, FIELDS, ThisEntityVisitor)
346 |             }
347 |         }
348 |     };
349 | 
350 |     write!(writer, "{}", trait_impl).unwrap();
351 | }
352 | 
--------------------------------------------------------------------------------
/rlay_ontology_js/.gitignore:
--------------------------------------------------------------------------------
1 | target
2 | pkg
3 | 
--------------------------------------------------------------------------------
/rlay_ontology_js/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | name = "rlay_ontology_js"
3 | version = "0.1.0-alpha.1"
4 | authors = ["Maximilian Goisser "]
5 | edition = "2018"
6 | 
7 | [lib]
8 | crate-type = ["cdylib"]
9 | path = "src/lib.rs"
10 | 
11 | [dependencies]
12 | rlay_ontology = { path = "../rlay_ontology", features = ["wasm_bindgen"] }
13 | 
14 | wasm-bindgen = { version = "0.2.0", features = ["serde-serialize"] }
15 | cid_fork_rlay = { version = "0.3.1" }
16 | serde = "^1.0.59"
17 | serde_derive = "^1.0.59"
18 | serde_json = "^1.0.37"
19 | bytes = { git = "https://github.com/ThinkAlexandria/bytes.git", rev = "26b32fb605392015d0efdf7243b064b53759885d" }
20 | 
21 | [dev-dependencies]
22 | wasm-bindgen-test = "0.2"
23 | 
--------------------------------------------------------------------------------
/rlay_ontology_js/prepare-pkg.js:
--------------------------------------------------------------------------------
1 | const fs = require('fs');
2 | const path = require('path');
3 | 
4 | const packageJsonPath = path.join(__dirname, './pkg/package.json');
5 | 
6 | const contents = JSON.parse(fs.readFileSync(packageJsonPath));
7 | // set name
8 | contents.name = '@rlay/ontology';
9 | contents.files.push("rlay_ontology_js_nodejs_bg.js");
10 | 
11 | fs.writeFileSync(packageJsonPath, JSON.stringify(contents, null, 4));
12 | 
--------------------------------------------------------------------------------
/rlay_ontology_js/src/lib.rs:
--------------------------------------------------------------------------------
1 | #![allow(non_snake_case)]
2 | 
3 | use cid_fork_rlay::ToCid;
4 | use rlay_ontology::prelude::*;
5 | use wasm_bindgen::prelude::*;
6 | 
7 | #[wasm_bindgen]
8 | pub fn getEntityCid(val: JsValue) -> JsValue {
9 |     let web3_value: FormatWeb3<Entity> = val.into_serde().unwrap();
10 |     let cid_value = web3_value.0.to_cid().ok().map(|n| FormatWeb3(n.to_bytes()));
11 | 
12 |     JsValue::from_serde(&cid_value).unwrap()
13 | }
14 | 
--------------------------------------------------------------------------------
/rlay_ontology_js/test.js:
--------------------------------------------------------------------------------
1 | const assert = require('assert');
2 | const { getEntityCid } = require('./pkg/rlay_ontology_js_nodejs');
3 | 
4 | const result = getEntityCid({
5 |     "type": "Individual",
6 |     "data_property_assertions": [
7 |         "0x019580031b20567c6c54ad4525f1529268a90c0633377596697338a48d36624f180f73b46959"
8 |     ]
9 | });
10 | 
11 | assert.strictEqual('0x019680031b2071906e776f4606dfb007c3f8ac3981b4c7cce9188365e1c649f205d7159d0163', result);
12 | console.log(result);
13 | 
--------------------------------------------------------------------------------
/rust-toolchain:
--------------------------------------------------------------------------------
1 | 1.40.0
2 | 
--------------------------------------------------------------------------------
/rustfmt.toml:
--------------------------------------------------------------------------------
1 | edition = "2018"
2 | 
--------------------------------------------------------------------------------
/update_grammar.sh:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env bash
2 | set -euxo pipefail
3 | 
4 | cd extract-grammar
5 | npm run build
6 | cp build/ontology_pb2.proto ../rlay_ontology/src/ontology.proto
7 | cp build/intermediate.json ../rlay_ontology/src/intermediate.json
8 | 
--------------------------------------------------------------------------------