├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.yml │ └── tiny_tweak.yml └── workflows │ ├── create-version.yml │ ├── prepare-release.yml │ └── release.yml ├── .gitignore ├── .prettierrc.json ├── CHANGELOG.md ├── LICENSE ├── MAINTENANCE.md ├── README.md ├── SchemaSync.png ├── install └── schema-sync │ ├── config.js │ ├── data │ └── README.md │ └── directus_config.js ├── package-lock.json ├── package.json ├── scripts ├── release-channel.js └── version.cjs ├── src ├── collectionExporter.ts ├── condenseAction.ts ├── copyConfig.ts ├── default_config.ts ├── dialects │ └── postgres │ │ ├── utils.test.ts │ │ └── utils.ts ├── exportManager.ts ├── index.ts ├── schemaExporter.ts ├── schemaExporterHooks.ts ├── types.ts ├── updateManager.ts ├── utils.test.ts └── utils.ts ├── tsconfig.json └── tsconfig.test.json /.github/ISSUE_TEMPLATE/bug_report.yml: -------------------------------------------------------------------------------- 1 | name: Report a bug 2 | description: Create a report to help us improve 3 | body: 4 | - type: markdown 5 | attributes: 6 | value: | 7 | Hi, thank you for taking the time to create an issue! Before you get started, please ensure the following are correct: 8 | 9 | - I'm using the [latest version](https://www.npmjs.com/package/directus-extension-schema-sync?activeTab=versions) 10 | - There's [no other issue](https://github.com/bcc-code/directus-schema-sync/issues?q=is%3Aissue) that already describes the problem. 11 | - type: textarea 12 | attributes: 13 | label: Describe the Bug 14 | description: A clear and concise description of what the bug is. 15 | validations: 16 | required: true 17 | - type: textarea 18 | attributes: 19 | label: To Reproduce 20 | description: 21 | Steps to reproduce the behavior. Contributors should be able to follow the steps provided in order to reproduce 22 | the bug. 23 | validations: 24 | required: true 25 | - type: input 26 | attributes: 27 | label: Version 28 | placeholder: 1.x.x 29 | validations: 30 | required: true 31 | - type: dropdown 32 | id: deployment 33 | attributes: 34 | label: Installed Extension Via 35 | options: 36 | - Docker file 37 | - NPM Custom 38 | - Custom 39 | validations: 40 | required: true 41 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/tiny_tweak.yml: -------------------------------------------------------------------------------- 1 | name: Tiny tweak 2 | description: Suggest a small tweak or improvement 3 | body: 4 | - type: markdown 5 | attributes: 6 | value: | 7 | Hi, thank you for taking the time to create a suggestion issue! Before you get started, please ensure the following are correct: 8 | 9 | - There's [no other issue](https://github.com/bcc-code/directus-schema-sync/issues?q=is%3Aissue) that already describes the problem. 10 | 11 | **If your suggestion is a new feature, please open a [Feature Request](https://github.com/bcc-code/directus-schema-sync/discussions/new?category=feature-requests) instead.** 12 | - type: textarea 13 | attributes: 14 | label: Describe the Improvement 15 | description: A clear and concise description of what the enhancement is. Please include as much detail as you can. 
16 | validations: 17 | required: true 18 | -------------------------------------------------------------------------------- /.github/workflows/create-version.yml: -------------------------------------------------------------------------------- 1 | name: Create New Version 2 | run-name: Creating new ${{ github.event.inputs.version }} ${{ github.event.inputs.channel }} version 3 | 4 | on: 5 | workflow_dispatch: 6 | inputs: 7 | version: 8 | description: Version 9 | type: choice 10 | options: 11 | - major 12 | - minor 13 | - patch 14 | default: minor 15 | channel: 16 | description: Channel 17 | type: choice 18 | options: 19 | - release 20 | - beta 21 | default: release 22 | 23 | env: 24 | CI: true 25 | 26 | permissions: 27 | contents: write 28 | 29 | jobs: 30 | version: 31 | runs-on: ubuntu-latest 32 | 33 | steps: 34 | - uses: actions/checkout@v4 35 | with: 36 | token: ${{ secrets.BCC_BOT_ACCESS_TOKEN }} 37 | 38 | - name: Setup Node.js 39 | uses: actions/setup-node@v4 40 | with: 41 | node-version: '20' 42 | registry-url: 'https://registry.npmjs.org' 43 | 44 | - name: Set commit author to bcc-bot 45 | run: | 46 | git config --global user.name "bcc-bot" 47 | git config --global user.email "84040471+bcc-bot@users.noreply.github.com" 48 | 49 | - name: Version new ${{ github.event.inputs.version }} version 50 | run: npm run create-version -- ${{ github.event.inputs.version }} ${{ github.event.inputs.channel }} 51 | 52 | - name: Push 53 | run: git push --follow-tags --force -------------------------------------------------------------------------------- /.github/workflows/prepare-release.yml: -------------------------------------------------------------------------------- 1 | name: "Prepare release" 2 | 3 | on: 4 | workflow_dispatch: 5 | push: 6 | tags: 7 | - 'v*' 8 | 9 | env: 10 | CI: true 11 | 12 | permissions: 13 | contents: read 14 | 15 | jobs: 16 | build: 17 | permissions: 18 | contents: write # for softprops/action-gh-release to create GitHub release 19 | 20 | runs-on: ubuntu-latest 21 | 22 | steps: 23 | - uses: actions/checkout@v4 24 | 25 | - run: git fetch --tags -f 26 | 27 | - name: Resolve version 28 | id: vars 29 | run: | 30 | echo "TAG_NAME=$(git describe --tags --abbrev=0)" >> $GITHUB_ENV 31 | 32 | - name: Create Release 33 | uses: softprops/action-gh-release@v1 34 | with: 35 | draft: true 36 | name: "schema-sync ${{ env.TAG_NAME }}" 37 | tag_name: ${{ env.TAG_NAME }} 38 | generate_release_notes: true -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | on: 4 | release: 5 | types: [published] 6 | 7 | permissions: 8 | contents: read 9 | 10 | env: 11 | CI: true 12 | 13 | jobs: 14 | build: 15 | runs-on: ubuntu-latest 16 | 17 | steps: 18 | - uses: actions/checkout@v4 19 | 20 | - name: Setup Node.js 21 | uses: actions/setup-node@v4 22 | with: 23 | node-version: '20' 24 | registry-url: 'https://registry.npmjs.org' 25 | cache: 'npm' 26 | 27 | - name: Install dependencies 28 | run: npm ci 29 | 30 | - name: Build Extension 31 | run: npm run build 32 | 33 | - name: Set release channel 34 | run: | 35 | echo "RELEASE_CHANNEL=$(npm run release-channel --silent)" >> $GITHUB_ENV 36 | 37 | - name: Publish Extension 38 | run: npm publish --tag ${{ env.RELEASE_CHANNEL }} 39 | env: 40 | NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} -------------------------------------------------------------------------------- /.gitignore: 
-------------------------------------------------------------------------------- 1 | .DS_Store 2 | node_modules 3 | dist 4 | dist-test -------------------------------------------------------------------------------- /.prettierrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "tabWidth": 2, 3 | "bracketSpacing": true, 4 | "bracketSameLine": true, 5 | "printWidth": 120, 6 | "trailingComma": "es5", 7 | "arrowParens": "avoid", 8 | "singleQuote": true, 9 | "useTabs": true 10 | } 11 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Version 3.0.8 2 | 3 | - **Fixed** Slight performance improvement when importing data. 4 | 5 | ## Version 3.0.7 6 | 7 | - **Fixed** SCHEMA_SYNC_DATA_ONLY being the opposite of what it should be. 8 | 9 | ## ~~Version 3.0.6~~ 10 | 11 | - **Added** Way to only import data and not the schema, for cases where you use something else to migrate your schema. 12 | - Set `SCHEMA_SYNC_DATA_ONLY=true` in your environment file. 13 | - Or when using `import` command, use `--data` option. 14 | 15 | ## Version 3.0.5 16 | 17 | - **Added** `SCHEMA_SYNC_MERGE` option to environment variables 18 | - Only insert and update items found in the import set (including duplicates). Does not remove items in the DB that are not in the import set. 19 | - **Fixed** issue where duplicate items were not being deleted when importing when `--merge` was used. 20 | - **Fixed** order of deleting duplicates before inserting new items. 21 | 22 | ## Version 2.1.2 23 | 24 | - Remove old schema files when generating new schema files. 25 | - This is to remove files of deleted collections. 26 | 27 | ## Version 2.1.1 28 | 29 | - Outputs additional helpful information when error occurs. 30 | - Link to an issue when trying to import empty files. 31 | 32 | ## Version 2.1.0 33 | 34 | - **Added** `groupBy` parameter to collection options 35 | - (optional) array of fields to group the exported data into multiple files, eg. ['collection'] (per collection) 36 | - This is useful when you have a large amount of data in a collection (such as `directus_permissions`) and want to split it up into multiple files. 37 | 38 | ## Version 2.0.0 ⚠️ Breaking change, due to new feature 39 | 40 | **By default will split schema file up into multiple files per collection** 41 | 42 | - **Why?** This is to make it easier to manage the schema files in git, as well as to make it easier to work with the schema files in general. 43 | 44 | - **How?** The schema files will be split up into multiple files per collection. The main file will be named `schema.json` and will contain a flag stating if it is partial (split). The collection files will be named after the collection name, and will contain the schema for that collection. All collection files will be placed in `schema-sync/data/schema`. 45 | 46 | - **What do I need to do?** 47 | 48 | - If you have any custom scripts that rely on the schema file, you will need to update them to work with the new structure. 49 | - You will also need to export the schema again to get the new structure. 50 | - Add the new schema files to git. 51 | 52 | - **What if I don't want this?** You can set `SCHEMA_SYNC_SPLIT=false` in your environment file to disable this feature. 53 | 54 | 55 | ## Version 1.6.4 56 | 57 | - Update hash and timestamp after manually triggering import via cli. 
58 | - This will also force reset the lock if any errors occurred during an earlier import. 59 | 60 | ## Version 1.6.3 61 | 62 | - Add `linkedFields` to fix inserting ordered items that might depend on each other. 63 | - Specifically fixes importing of flow operations, update the directus_config and add `linkedFields`. 64 | ```javascript 65 | { 66 | directus_operations: { 67 | watch: ['operations'], 68 | excludeFields: ['user_created'], 69 | linkedFields: ['resolve', 'reject'], 70 | query: { 71 | filter: { 72 | flow: { trigger: { _neq: 'webhook' } }, 73 | }, 74 | }, 75 | }, 76 | } 77 | ``` 78 | - Also fix auto IMPORT issue when mv_ts is null. 79 | 80 | ## Version 1.6.2 81 | 82 | - (postgres only) Fixed import issue due to default_value containing functions for auto increment 83 | 84 | ## Version 1.6.1 85 | 86 | - Fixed error thrown when installing config folder without --force 87 | - Fixed installing of config folder 88 | 89 | ## Version 1.5.0 ⚠️ 90 | 91 | - **Sorts exported object keys** 92 | - Fixes issue where the order of the exported object keys would change between exports, causing unnecessary changes in git. 93 | - **merge option fixed** 94 | - The merge option was introduced in version 1.3.0, but it was not working as intended. This has now been fixed. 95 | 96 | ## Version 1.4.2 97 | 98 | - Add `import-schema` and `export-schema` commands to import/export only the schema. 99 | 100 | ## Version 1.4.1 101 | 102 | - Fixed config for `directus_presets` getKey should be `${o.role ?? 'all'}-${o.collection}-${o.bookmark || 'default'}` instead of `${o.role ?? 'all'}-${o.collection}-${o.name || 'default'}` 103 | 104 | ## Version 1.4.0 105 | 106 | - Replaced cli command for install from `npx schema-sync install` to `npx directus schema-sync install`. 107 | - This way we directly create the required columns in the `directus_settings` table. 108 | - NOTE: If you have installed before, you can now remove the migration file from your migrations folder. 109 | - Added config for directus_presets to sync global presets. 110 | 111 | ## Version 1.3.1 112 | 113 | - Fix bug where process returns early without importing all data, when using onImport. 114 | 115 | ## ~~Version 1.3.0~~ 116 | 117 | - Add `--merge` option to import command in order to upsert data only and not delete other data. 118 | - Add `onImport` and `onExport` hooks to allow for custom logic to be run before items are imported/exported. 119 | - Can be used to encode or decode data before it is imported/exported. 120 | - Return `null` to skip the item. 121 | - Fixed invalid field issue due to memory leak. 122 | 123 | ## Version 1.2.2 124 | 125 | - Sort automatically by sort field first with fallback to primary key. 126 | 127 | ## Version 1.2.1 ⚠️ 128 | 129 | - **NOTE** This update will remove duplicate rows if the same key matches multiple rows. 130 | - This is to fix an issue where the same permission was imported multiple times. 131 | - Change order in which deletions work. 132 | - This fixes an issue where a collection with a relation to another collection would fail to import due to the relation being a constraint. 133 | - Add try/catch to use best-effort when importing data. 134 | 135 | ## Version 1.2.0 136 | 137 | - Excluding alias fields by default. 138 | 139 | - **Benefit** Reduces the amount of fields you have to add to the exclude list. 140 | 141 | - **Why?** Since alias fields don't exist in the DB, they often end up being an array containing the ids of the related items. 
Since you should be exporting the related items anyways with the foreign key fields, this is both redundant and causes issues when importing. 142 | 143 | ## Version 1.1.7 144 | 145 | - Fix order in which tasks are exported and imported. Specifically crucial for fields and relations. 146 | - Fix issue for dynamic import of config on windows systems. 147 | Update logging to reflect amount of items being imported/exported. 148 | 149 | ## ~~Version 1.1.6~~ 150 | 151 | - Switch from using path.join to path.resolve to use absolute paths as required by Windows. 152 | 153 | ## Version 1.1.5 154 | 155 | - Set query limit to -1, unless manually set in config.js inside the query option. 156 | - This fixes an issue with not all permissions being fetched. 157 | 158 | ## Version 1.1.4 159 | 160 | - Add optional prefix option to differentiate between different sets of data. 161 | - Prefix can be set on the collection configuration in config.js eg. `test` or `prod`. 162 | 163 | ## Version 1.1.3 164 | 165 | - Fix issue with syncing across servers with different timezones. 166 | 167 | ## Version 1.1.2 168 | 169 | - Add hash command, to regenerate hash for all data files. 170 | - Add delay when reinstalling Schema Sync 171 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. 
For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /MAINTENANCE.md: -------------------------------------------------------------------------------- 1 | # Maintenance instructions 2 | 3 | ## Releasing a new version 4 | 5 | A new version can be released by running the [Create New Version](https://github.com/bcc-code/directus-schema-sync/actions/workflows/create-version.yml) workflow from GitHub. 
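
Under the hood, the workflow runs the repository's `create-version` npm script and then pushes the resulting commit and tag (the workflow additionally uses `--force` on the push). A rough local equivalent (shown only as a sketch, assuming you have npm set up and push rights to the repository) would be:

```bash
# Bump the version and create the git commit + tag (same script the workflow calls)
npm run create-version -- minor release

# Push the version commit together with its tag
git push --follow-tags
```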
6 | 
7 | The workflow will update the version in the `package.json`, push a Git commit and tag, and create a new [release](https://github.com/bcc-code/directus-schema-sync/releases) in GitHub.
8 | 
9 | Maintainers can publish this release, after which the new version will be pushed to npm with the `latest` tag.
10 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Schema Sync for Directus
2 | 
3 | The better way to sync your Directus schema, **configuration** and selected **data** between environments.
4 | 
5 | Splits the schema file into multiple files, one per collection, for easier management in git and easier work with the schema files in general.
6 | 
7 | ![Schema Sync](https://raw.githubusercontent.com/bcc-code/directus-schema-sync/8b44ddba3a07dc881b44c003b39b7951b80a22f3/SchemaSync.png)
8 | 
9 | Sync **chosen** data such as:
10 | - **Permissions**
11 | - **Flows**
12 | - **Dashboards**
13 | - **Your own data**
14 | - **Test data for CI/CD flows**
15 | 
16 | **Automatically** export and import both the schema and data when you make changes via Directus or in the **json data files**.
17 | 
18 | 
19 | Upon installing, configure what data you wish to export from the DB and commit to your repository. Then on the other environments you can import the schema and data automatically.
20 | 
21 | **Auto Import** the schema and data when Directus starts up. This is useful for production environments. Using a **locking mechanism**, only one instance of Directus will import the schema and data.
22 | 
23 | **IMPORTANT** Always be careful with what you export and commit to your repository.
24 | 
25 | # Install
26 | 
27 | Depending on how you are already using Directus, you can install this extension either via a custom Docker image or via npm.
28 | 
29 | NOTE: Installing via the marketplace is not recommended, as you would still need to execute the install command as well as configure the config files.
30 | 
31 | - **DIRECTUS 11** Use the latest version
32 | - **DIRECTUS < 10** Use version 2.*
33 | 
34 | ---
35 | 
36 | Follow the instructions below to install the extension. Ensure you first install the extension on your local environment and then export the schema and data. This will create the necessary files for the extension to work. Only once you have the files can you update your .env file with the `SCHEMA_SYNC` variable.
37 | 
38 | ## 1 via Docker
39 | 
40 | If you don't already have a Dockerfile, you can use the following [instructions to get started](https://docs.directus.io/extensions/installing-extensions.html).
41 | 
42 | Create a folder called `schema-sync` in the project root. This will soon contain the config and data files for what you want to import and export.
43 | 
44 | Update your Dockerfile to include the following:
45 | 
46 | ```dockerfile
47 | RUN pnpm install directus-extension-schema-sync
48 | # This is for when building for production
49 | COPY ./schema-sync ./schema-sync
50 | ```
51 | 
52 | In your `docker-compose` file, add the `schema-sync` folder as a volume so that you can commit it to git, edit the config files, and keep them in sync with the Docker container:
53 | ```yaml
54 | # docker-compose.yaml
55 | volumes:
56 |   - ./schema-sync:/directus/schema-sync
57 | ```
58 | 
59 | (re)Build and run your container.
60 | 
61 | Once it is running, run the following command (from host) to install the extension's columns in the database and add the config folder.
62 | 
63 | Replace `my-directus` with the name of your service running Directus, if it is different.
64 | 
65 | ```bash
66 | # docker exec -it
67 | docker compose exec -it my-directus npx directus schema-sync install --force
68 | ```
69 | 
70 | We use `--force` here since we do want to replace the `schema-sync` folder already added as a volume.
71 | 
72 | ---
73 | 
74 | ## 1 via NPM (Assuming you are running Directus via NPM)
75 | 
76 | 1. `npm install directus-extension-schema-sync`
77 | 2. Then run `directus schema-sync install` to install the extension's columns in the database and add the config folder
78 | 3. Edit the `config.js` in the schema directory and add the collections you want to sync
79 | 4. Finally run `directus schema-sync export` to export the schema and data from the Directus API
80 | 
81 | ## 2 Configuration
82 | 
83 | View and edit the `schema-sync/config.js` file to include the collections you want to sync.
84 | 
85 | To automatically import and export the schema and data, set the `SCHEMA_SYNC` environment variable to `IMPORT`, `EXPORT` or `BOTH`.
86 | In production it is advised to set `SCHEMA_SYNC` to `IMPORT` and in local development to `BOTH`.
87 | 
88 | Note: This extension will not work if there is no row in the `directus_settings` database table. To avoid this, make sure the `PROJECT_NAME` configuration variable is set when Directus is first installed into the database. Alternatively, if Directus is already installed, manually create a row in `directus_settings` (if one is not already there) with whatever project name you want, keeping everything else at the defaults.
89 | 
90 | ### Tips
91 | 
92 | **Order matters** when importing and exporting. For example, if you have a collection (A) with a relation to another collection (B), then ensure in the config that collection (B) comes first. This is so when we import, we first import B, then A. Deletions happen afterward in the reverse order.
93 | 
94 | You can create **additional config files** alongside the other config files and set the file name in the `SCHEMA_SYNC_CONFIG` env variable, for example to include a test data set used during testing in CI/CD. Additional config files need to export a `syncCustomCollections` object like the one in the `config.js` file.
95 | 
96 | View the comments in the `config.js` file for more information.
97 | 
98 | ### Exporting users with passwords
99 | This does not work out of the box due to Directus masking the exported password. In order to export the hashed value, you can add the following to the `schema-sync/directus_config.js` file within the `directus_users` object.
100 | 
101 | ```js
102 | onExport: async (item, itemsSrv) => {
103 | if (item.password && item.password === '**********') {
104 | const user = await itemsSrv.knex.select('password').from('directus_users').where('id', item.id).first();
105 | if (user) {
106 | item.password = user.password;
107 | }
108 | }
109 | 
110 | return item;
111 | },
112 | // And then to import the password
113 | onImport: async (item, itemsSrv) => {
114 | if (item.password && item.password.startsWith('$argon')) {
115 | await itemsSrv.knex('directus_users').update('password', item.password).where('id', item.id);
116 | item.password = '**********';
117 | }
118 | 
119 | return item;
120 | },
121 | ```
122 | 
123 | ## Environment Variables
124 | 
125 | | Variable | Description | Default |
126 | | -------- | ----------- | ------- |
127 | | `SCHEMA_SYNC` | Set to automatically do **IMPORT**, **EXPORT** or **BOTH** | `null` |
128 | | `SCHEMA_SYNC_CONFIG` | (optional) An additional config file to use, eg. `test_config.js` | `null` |
129 | | `SCHEMA_SYNC_SPLIT` | (optional) Splits the schema file into multiple files, one per collection | `true` |
130 | | `SCHEMA_SYNC_MERGE` | (optional) Only insert and update items found in the import set (including duplicates). Does not remove items in the DB that are not in the import set. | `false` |
131 | | `SCHEMA_SYNC_DATA_ONLY` | (optional) Only sync data. Does not sync the schema (schema.json or the split schema). | `false` |
132 | 
133 | 
134 | ## CI Commands
135 | 
136 | Besides auto importing and exporting, you can also run the commands manually.
137 | 
138 | `npx directus schema-sync [command]`
139 | 
140 | | Command | Description |
141 | | ------- | ----------- |
142 | | `export` | Export the schema and data from the Directus API |
143 | | `import` | Import the schema and data to the Directus API (options: `merge`, `data`) |
144 | | `hash` | Recalculate the hash for all the data files (already happens after export) |
145 | | `install` | Install the extension's columns in the database and add the config folder (options: `force`) |
146 | | `export-schema` | Export only the schema (options: `--split`) |
147 | | `import-schema` | Import only the schema |
148 | 
149 | ## Migrating from V2 to V3
150 | 
151 | Update the `schema-sync/directus_config.js` file with the following:
152 | 
153 | - Replace `directus_roles`
154 | - Add `directus_policies`
155 | - Replace `directus_permissions`
156 | - Add `directus_access`
157 | 
158 | ```
159 | …
160 | directus_roles: {
161 | watch: ['roles'],
162 | linkedFields: ['parent'],
163 | query: {
164 | sort: ['name'],
165 | },
166 | },
167 | directus_policies: {
168 | watch: ['policies'],
169 | query: {
170 | sort: ['name'],
171 | },
172 | },
173 | directus_permissions: {
174 | watch: ['permissions', 'collections', 'fields'],
175 | excludeFields: ['id'],
176 | getKey: o => `${o.policy}-${o.collection}-${o.action}`,
177 | query: {
178 | sort: ['policy', 'collection', 'action'],
179 | },
180 | },
181 | directus_access: {
182 | watch: ['access'],
183 | excludeFields: ['id'],
184 | getKey: o => `${o.role ?? o.user ?? 'public'}-${o.policy}`,
185 | query: {
186 | sort: ['role', 'policy'],
187 | filter: {
188 | user: { _null: true },
189 | },
190 | },
191 | },
192 | …
193 | ```
194 | 
195 | ## Contributing
196 | 
197 | Contributions are welcome. Please open an issue or pull request.
198 | 
199 | View the changelog for more information. 
[CHANGELOG.md](https://github.com/bcc-code/directus-schema-sync/blob/main/CHANGELOG.md)
--------------------------------------------------------------------------------
/SchemaSync.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bcc-code/directus-schema-sync/79d11d1a94bf8a99bd4118b4d7fc9240a72c7135/SchemaSync.png
--------------------------------------------------------------------------------
/install/schema-sync/config.js:
--------------------------------------------------------------------------------
1 | /**
2 | * This file is used to configure which data from your collections you want to export and eventually import across environments.
3 | *
4 | * Schema:
5 | * collectionName:
6 | * watch: array of events to watch for changes, eg. 'posts.items',
7 | * excludeFields: (optional) array of fields to exclude from the export,
8 | * groupBy: (optional) array of fields to group the exported data into multiple files, eg. ['collection'] (per collection)
9 | * linkedFields: (optional) array of fields to treat as many-to-one relationships for hierarchy, eg. ['parent']
10 | * getKey: (optional) function to get the key for the item, defaults to primary key found on schema,
11 | * query: (optional) query to use when exporting the collection, valid options are: (limit=-1 | filter | sort)
12 | * prefix: (optional) prefix the exported json file with this string (useful for separating test data from production data)
13 | * onExport: (optional) (object) => object: Function to parse the data before exporting, useful for encoding/sanitizing secrets or other sensitive data
14 | * onImport: (optional) (object) => object: Function to parse the data before importing, useful for decoding secrets
15 | */
16 | export const syncCustomCollections = {
17 | /*
18 | posts: {
19 | watch: ['posts.items'],
20 | excludeFields: ['user_created', 'user_updated'],
21 | linkedFields: ['parent'],
22 | query: {
23 | filter: {
24 | shared: { _eq: true }
25 | },
26 | sort: ['published_at'],
27 | },
28 | },
29 | */
30 | };
--------------------------------------------------------------------------------
/install/schema-sync/data/README.md:
--------------------------------------------------------------------------------
1 | All exported data will be stored in this directory
--------------------------------------------------------------------------------
/install/schema-sync/directus_config.js:
--------------------------------------------------------------------------------
1 | /**
2 | * This file contains the default configuration for the schema exporter.
3 | *
4 | * Some possibly sensitive collections are commented out; remove the comments and add filters if needed.
5 | *
6 | * Uncomment the collections you want to export.
7 | *
8 | * These are just some sensible settings, but you might not want to export everything.
9 | *
10 | * Add custom collections to the syncCustomCollections object in the config.js file.
11 | */ 12 | export const syncDirectusCollections = { 13 | directus_folders: { 14 | watch: ['folders'], 15 | excludeFields: [], 16 | linkedFields: ['parent'], 17 | query: { 18 | sort: ['parent', 'id'], 19 | }, 20 | }, 21 | /* directus_files: { 22 | watch: ['files'], 23 | excludeFields: [], 24 | query: { 25 | filter: { 26 | storage: { 27 | _eq: 'local', 28 | }, 29 | } 30 | }, 31 | },*/ 32 | directus_roles: { 33 | watch: ['roles'], 34 | linkedFields: ['parent'], 35 | query: { 36 | sort: ['name'], 37 | }, 38 | }, 39 | directus_policies: { 40 | watch: ['policies'], 41 | query: { 42 | sort: ['name'], 43 | }, 44 | }, 45 | directus_permissions: { 46 | watch: ['permissions', 'collections', 'fields'], 47 | excludeFields: ['id'], 48 | getKey: o => `${o.policy}-${o.collection}-${o.action}`, 49 | query: { 50 | sort: ['policy', 'collection', 'action'], 51 | }, 52 | }, 53 | directus_access: { 54 | watch: ['access'], 55 | excludeFields: ['id'], 56 | getKey: o => `${o.role ?? o.user ?? 'public'}-${o.policy}`, 57 | query: { 58 | sort: ['policy'], 59 | }, 60 | }, 61 | /* directus_users: { 62 | watch: ['users'], 63 | excludeFields: ['avatar'], 64 | query: { 65 | filter: { 66 | id: { 67 | _in: [], // insert id of users you want to export 68 | }, 69 | }, 70 | limit: 1, 71 | sort: ['d] 72 | }, 73 | /* // Uncomment this to export the password 74 | onExport: async (item, itemsSrv) => { 75 | if (item.password && item.password === '**********') { 76 | const user = await itemsSrv.knex.select('password').from('directus_users').where('id', item.id).first(); 77 | if (user) { 78 | item.password = user.password; 79 | } 80 | } 81 | 82 | return item; 83 | }, 84 | // And then to import the password 85 | onImport: async (item, itemsSrv) => { 86 | if (item.password && item.password.startsWith('$argon')) { 87 | await itemsSrv.knex('directus_users').update('password', item.password).where('id', item.id); 88 | item.password = '**********'; 89 | } 90 | 91 | return item; 92 | }, 93 | },*/ 94 | directus_settings: { 95 | watch: ['settings'], 96 | excludeFields: [ 97 | 'project_url', 98 | // always keep these 3 excluded 99 | 'mv_hash', 'mv_ts', 'mv_locked', 100 | ], 101 | }, 102 | directus_dashboards: { 103 | watch: ['dashboards'], 104 | excludeFields: ['user_created', 'panels'], 105 | }, 106 | directus_panels: { 107 | watch: ['panels'], 108 | excludeFields: ['user_created'], 109 | }, 110 | directus_presets: { 111 | watch: ['presets'], 112 | excludeFields: ['id'], 113 | getKey: (o) => `${o.role ?? 
'all'}-${o.collection}-${o.bookmark || 'default'}`,
114 | query: {
115 | filter: {
116 | user: { _null: true }
117 | }
118 | }
119 | },
120 | /*directus_flows: {
121 | watch: ['flows'],
122 | excludeFields: ['operations', 'user_created'],
123 | query: {
124 | filter: {
125 | trigger: { _neq: 'webhook' },
126 | },
127 | },
128 | },
129 | directus_operations: {
130 | watch: ['operations'],
131 | excludeFields: ['user_created'],
132 | linkedFields: ['resolve', 'reject'],
133 | query: {
134 | filter: {
135 | flow: { trigger: { _neq: 'webhook' } },
136 | },
137 | },
138 | },*/
139 | directus_translations: {
140 | watch: ['translations'],
141 | excludeFields: ['id'],
142 | getKey: (o) => `${o.key}-${o.language}`,
143 | query: {
144 | sort: ['key', 'language'],
145 | },
146 | }
147 | };
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "directus-extension-schema-sync",
3 | "description": "Sync schema and data between Directus instances",
4 | "icon": "sync_alt",
5 | "version": "3.0.8",
6 | "repository": {
7 | "type": "git",
8 | "url": "https://github.com/bcc-code/directus-schema-sync.git"
9 | },
10 | "keywords": [
11 | "directus",
12 | "directus-extension",
13 | "directus-custom-hook",
14 | "directus-schema",
15 | "directus-data"
16 | ],
17 | "type": "module",
18 | "directus:extension": {
19 | "type": "hook",
20 | "path": "dist/index.js",
21 | "source": "src/index.ts",
22 | "host": "^11.0.0"
23 | },
24 | "files": [
25 | "dist",
26 | "install",
27 | "README.md"
28 | ],
29 | "scripts": {
30 | "build": "directus-extension build",
31 | "dev": "directus-extension build -w --no-minify",
32 | "link": "directus-extension link",
33 | "pre-test": "tsc -p tsconfig.test.json",
34 | "test": "npm run pre-test && node --test dist-test/",
35 | "create-version": "node ./scripts/version.cjs",
36 | "release-channel": "node ./scripts/release-channel.js"
37 | },
38 | "devDependencies": {
39 | "@directus/api": "^22.0.0",
40 | "@directus/extensions": "^3.0.0",
41 | "@directus/extensions-sdk": "^12.1.3",
42 | "@directus/types": "^13.0.0",
43 | "@types/keyv": "^4.2.0",
44 | "@types/node": "^20.14.15",
45 | "typescript": "^5.5.4"
46 | },
47 | "dependencies": {
48 | "glob": "^10.4.5"
49 | }
50 | }
51 | 
--------------------------------------------------------------------------------
/scripts/release-channel.js:
--------------------------------------------------------------------------------
1 | // Given a version, figure out what the release channel is so that we can publish to the correct
2 | // channel on npm.
3 | // Copied from https://github.com/tailwindlabs/heroicons/blob/eee05eb77af6cdedfb97f128d1998773fc984b28/scripts/release-channel.js 4 | // 5 | // E.g.: 6 | // 7 | // 1.2.3 -> latest (default) 8 | // 0.0.0-insiders.ffaa88 -> insiders 9 | // 4.1.0-alpha.4 -> alpha 10 | 11 | let version = 12 | process.argv[2] || process.env.npm_package_version || require("../package.json").version; 13 | 14 | let match = /\d+\.\d+\.\d+-(.*)\.\d+/g.exec(version); 15 | if (match) { 16 | console.log(match[1]); 17 | } else { 18 | console.log("latest"); 19 | } -------------------------------------------------------------------------------- /scripts/version.cjs: -------------------------------------------------------------------------------- 1 | const { exec } = require("node:child_process"); 2 | const { exit } = require("node:process"); 3 | const currentVersion = require("../package.json").version; 4 | 5 | let increment = process.argv[2]; 6 | let channel = process.argv[3] ?? "release"; 7 | 8 | if (!increment) { 9 | console.log("No version increment given! Exiting..."); 10 | exit(); 11 | } 12 | 13 | let update = `pre${increment}`; 14 | if (currentVersion.includes(channel)) { 15 | update = "prerelease"; 16 | } 17 | 18 | const versionParts = /(\d+\.\d+\.\d+)-(.*)\.\d+/g.exec(currentVersion); 19 | 20 | // If there is a prerelease tag in the name but the channel is for public release, 21 | // just strip the prerelease tag from the name 22 | if (versionParts && channel == "release") { 23 | increment = versionParts[1]; 24 | } 25 | 26 | const command = `npm version ${ 27 | channel == "release" ? increment : `${update} --preid ${channel}` 28 | } --no-git-tag-version`; 29 | 30 | // Version package 31 | exec(command, (error, newVersion) => { 32 | if (error) console.error(error); 33 | const tagVersion = newVersion.replace("\n", ""); 34 | exec( 35 | `git add . && git commit -m "schema-sync ${tagVersion}" && git tag -am ${tagVersion} "${tagVersion}"` 36 | ); 37 | 38 | console.log(`Tagged new version ${tagVersion}`) 39 | }); -------------------------------------------------------------------------------- /src/collectionExporter.ts: -------------------------------------------------------------------------------- 1 | import { Item, PrimaryKey, Query } from '@directus/types'; 2 | import type { ApiExtensionContext } from '@directus/extensions'; 3 | import { mkdir, readFile, rm, writeFile } from 'fs/promises'; 4 | import { condenseAction } from './condenseAction.js'; 5 | import type { CollectionExporterOptions, IExporter, IGetItemsService, ItemsService, ToUpdateItemDiff } from './types'; 6 | import { ExportHelper, getDiff, sortObject } from './utils.js'; 7 | import { glob } from 'glob'; 8 | 9 | type PARTIAL_CONFIG = { count: number; groupedBy: string[]; partial: true }; 10 | 11 | const DEFAULT_COLLECTION_EXPORTER_OPTIONS: CollectionExporterOptions = { 12 | excludeFields: [], 13 | groupBy: [], 14 | query: { 15 | limit: -1, 16 | }, 17 | }; 18 | 19 | class CollectionExporter implements IExporter { 20 | protected _getService: () => Promise; 21 | protected collection: string; 22 | 23 | protected options: CollectionExporterOptions; 24 | protected filePath: string; 25 | 26 | constructor( 27 | collectionName: string, 28 | getItemsService: IGetItemsService, 29 | options = DEFAULT_COLLECTION_EXPORTER_OPTIONS, 30 | protected logger: ApiExtensionContext['logger'] 31 | ) { 32 | const { query, ...otherOpts } = options ?? 
{}; 33 | this.options = { 34 | excludeFields: [], 35 | query: { 36 | limit: -1, 37 | ...query, 38 | }, 39 | ...otherOpts, 40 | }; 41 | 42 | let srv: ItemsService; 43 | this._getService = async () => srv || (srv = await getItemsService(collectionName)); 44 | 45 | this.collection = collectionName; 46 | 47 | const fileName = this.options.prefix ? `${this.options.prefix}_${collectionName}` : collectionName; 48 | this.filePath = `${ExportHelper.dataDir}/${fileName}.json`; 49 | } 50 | 51 | protected ensureCollectionGroupDir = async () => { 52 | if (!(await ExportHelper.fileExists(`${ExportHelper.dataDir}/${this.collection}`))) { 53 | await mkdir(`${ExportHelper.dataDir}/${this.collection}`, { recursive: true }); 54 | } else { 55 | // Clean up old files 56 | const files = await glob(this.groupedFilesPath('*')); 57 | for (const file of files) { 58 | await rm(file); 59 | } 60 | } 61 | }; 62 | 63 | protected itemGroupFilename(item: Item) { 64 | if (!this.options.groupBy?.length) throw new Error('groupBy option not set'); 65 | // Use double dash to avoid conflicts with slugified names 66 | return this.options.groupBy 67 | .map(field => item[field]) 68 | .join('--') 69 | .replace(/\s/g, '_'); 70 | } 71 | 72 | protected groupedFilesPath(fileName: string) { 73 | fileName = `${this.options.prefix || '_'}_${fileName}`; 74 | return `${ExportHelper.dataDir}/${this.collection}/${fileName}.json`; 75 | } 76 | 77 | get name() { 78 | return this.collection; 79 | } 80 | 81 | protected _persistQueue = condenseAction(() => this.exportCollectionToFile()); 82 | public export = () => this._persistQueue(); 83 | 84 | public async load(merge = false) { 85 | let json; 86 | try { 87 | json = await readFile(this.filePath, { encoding: 'utf8' }); 88 | } catch (e) { 89 | return null; 90 | } 91 | 92 | if (!json) { 93 | throw new Error(`Collection ${this.name} has invalid content: ${json}`); 94 | } 95 | const parsedJSON = JSON.parse(json) as Array | PARTIAL_CONFIG; 96 | 97 | if (Array.isArray(parsedJSON)) { 98 | return this.loadItems(parsedJSON, merge); 99 | } else if (!parsedJSON.partial) { 100 | throw new Error(`Collection ${this.name} has invalid JSON: ${json}`); 101 | } 102 | 103 | return await this.loadGroupedItems(parsedJSON, merge); 104 | } 105 | 106 | protected exportCollectionToFile = async () => { 107 | const items = await this.getItemsForExport(); 108 | 109 | this.logger.debug(`Exporting ${this.collection}`); 110 | 111 | let json = ''; 112 | if (Array.isArray(items)) { 113 | json = JSON.stringify(sortObject(items), null, 2); 114 | } else { 115 | await this.ensureCollectionGroupDir(); 116 | 117 | const config: PARTIAL_CONFIG = { 118 | count: 0, 119 | groupedBy: this.options.groupBy!, 120 | partial: true, 121 | }; 122 | 123 | for (const [key, group] of Object.entries(items)) { 124 | config.count += group.length; 125 | const filePath = this.groupedFilesPath(key); 126 | const groupJson = JSON.stringify(sortObject(group), null, 2); 127 | await writeFile(filePath, groupJson); 128 | } 129 | 130 | json = JSON.stringify(config, null, 2); 131 | } 132 | 133 | await writeFile(this.filePath, json); 134 | }; 135 | 136 | protected _settings: { 137 | inclFields: Array; 138 | exclFields: Array; 139 | linkedFields: NonNullable; 140 | getKey: (o: Item) => PrimaryKey; 141 | getPrimary: (o: Item) => PrimaryKey; 142 | query: Query; 143 | queryWithPrimary: Query; 144 | } | null = null; 145 | 146 | protected async settings() { 147 | if (this._settings) return this._settings; 148 | 149 | const itemsSvc = await this._getService(); 150 | const 
schema = itemsSvc.schema.collections[this.collection]; 151 | 152 | if (!schema) { 153 | throw new Error(`Schema for ${this.collection} not found`); 154 | } 155 | 156 | const exclFields = this.options.excludeFields || []; 157 | if (exclFields.includes(schema.primary) && !this.options.getKey) { 158 | throw new Error(`Can't exclude primary field ${schema.primary} without providing a getKey function`); 159 | } 160 | 161 | let inclFields = []; 162 | for (const fieldName in schema.fields) { 163 | const field = schema.fields[fieldName]!; 164 | if (!field.alias && !exclFields.includes(fieldName)) { 165 | inclFields.push(fieldName); 166 | } 167 | } 168 | 169 | const getPrimary = (o: Item) => o[schema.primary]; 170 | const getKey = this.options.getKey || getPrimary; 171 | 172 | const query: Query = this.options.query || {}; 173 | query.fields = inclFields; 174 | query.limit = query.limit || -1; 175 | query.sort = query.sort || [schema.sortField || schema.primary]; 176 | 177 | const queryWithPrimary: Query = exclFields.includes(schema.primary) 178 | ? { ...query, fields: [...inclFields, schema.primary] } 179 | : query; 180 | 181 | return (this._settings = { 182 | inclFields, 183 | exclFields, 184 | linkedFields: this.options.linkedFields || [], 185 | getKey, 186 | getPrimary, 187 | query, 188 | queryWithPrimary, 189 | }); 190 | } 191 | 192 | public async getItemsForExport(): Promise | Record>> { 193 | const itemsSvc = await this._getService(); 194 | const { query } = await this.settings(); 195 | 196 | let items = await itemsSvc.readByQuery(query); 197 | if (!items.length) return []; 198 | 199 | if (this.options.onExport) { 200 | const alteredItems = []; 201 | for (const item of items) { 202 | const alteredItem = await this.options.onExport(item, itemsSvc); 203 | if (alteredItem) alteredItems.push(alteredItem); 204 | } 205 | 206 | items = alteredItems; 207 | } 208 | 209 | // If groupBy is set, group the json by the specified fields 210 | if (this.options.groupBy?.length) { 211 | const groupedItems = items.reduce((map, item) => { 212 | const key = this.itemGroupFilename(item); 213 | if (!map[key]) map[key] = []; 214 | map[key].push(item); 215 | return map; 216 | }, {} as Record>); 217 | 218 | return groupedItems; 219 | } 220 | 221 | return items; 222 | } 223 | 224 | /** 225 | * Orders items so that items that are linked are inserted after the items they reference 226 | * Only works with items that have a primary key 227 | * Assumes items not in given items list are already in the database 228 | * @param items 229 | * @returns 230 | */ 231 | protected async sortbyIfLinked(items: Array) { 232 | const { getPrimary, linkedFields } = await this.settings(); 233 | if (!linkedFields.length) return false; 234 | 235 | const itemsMap = items.reduce((map, o) => { 236 | o.__dependents = []; 237 | map[getPrimary(o)] = o; 238 | return map; 239 | }, {} as Record); 240 | 241 | items.forEach(o => { 242 | for (const fieldName of linkedFields) { 243 | const value = o[fieldName]; 244 | if (value && itemsMap[value]) { 245 | itemsMap[value].__dependents.push(o); 246 | } 247 | } 248 | }); 249 | 250 | items.sort((a, b) => this.countDependents(b) - this.countDependents(a)); 251 | items.forEach(o => delete o.__dependents); 252 | 253 | return true; 254 | } 255 | // Recursively count dependents 256 | private countDependents(o: any): number { 257 | if (!o.__dependents.length) return 0; 258 | return (o.__dependents as Array).reduce((acc, o) => acc + this.countDependents(o), o.__dependents.length); 259 | } 260 | 261 | /** 262 | * 
Fetches the items from grouped files and then subsequently loads the items 263 | * 264 | * @param config 265 | * @param merge {see loadItems} 266 | * @returns 267 | */ 268 | public async loadGroupedItems(config: PARTIAL_CONFIG, merge = false) { 269 | const loadedItems = []; 270 | 271 | let found = 0; 272 | const files = await glob(this.groupedFilesPath('*')); 273 | for (const file of files) { 274 | const groupJson = await readFile(file, { encoding: 'utf8' }); 275 | const items = JSON.parse(groupJson) as Array; 276 | if (!Array.isArray(items)) { 277 | this.logger.warn(`Not items found in ${file}`); 278 | continue; 279 | } 280 | 281 | found += items.length; 282 | loadedItems.push(...items); 283 | } 284 | 285 | if (found !== config.count) { 286 | if (found === 0) { 287 | throw new Error('No items found in grouped files for ${this.collection}'); 288 | } 289 | 290 | this.logger.warn(`Expected ${config.count} items, but found ${found}`); 291 | } 292 | 293 | this.logger.info(`Stitched ${found} items for ${this.collection} from ${files.length} files`); 294 | 295 | return this.loadItems(loadedItems, merge); 296 | } 297 | 298 | /** 299 | * Loads the items and updates the database 300 | * 301 | * @param loadedItems An array of loaded items to sync with the database 302 | * @param merge boolean indicating whether to merge the items or replace them, ie. delete all items not in the JSON 303 | * @returns 304 | */ 305 | public async loadItems(loadedItems: Array, merge = false) { 306 | if (merge && !loadedItems.length) return null; 307 | 308 | const itemsSvc = await this._getService(); 309 | const { getKey, getPrimary, queryWithPrimary } = await this.settings(); 310 | 311 | const items = await itemsSvc.readByQuery(queryWithPrimary); 312 | 313 | const itemsMap = new Map(); 314 | const duplicatesToDelete: Array = []; 315 | 316 | // First pass: identify duplicates and build initial map 317 | items.forEach(item => { 318 | const itemKey = getKey(item); 319 | if (itemsMap.has(itemKey)) { 320 | const itemId = getPrimary(itemsMap.get(itemKey)!); 321 | this.logger.warn(`Will delete duplicate ${this.collection} item found #${itemId}`); 322 | duplicatesToDelete.push(itemId); 323 | } 324 | 325 | itemsMap.set(itemKey, item); 326 | }); 327 | 328 | // Delete duplicates first 329 | if (duplicatesToDelete.length > 0) { 330 | this.logger.debug(`Deleting ${duplicatesToDelete.length} duplicate ${this.collection} items`); 331 | await itemsSvc.deleteMany(duplicatesToDelete); 332 | } 333 | 334 | const toUpdate = new Map(); 335 | const toInsert: Record = {}; 336 | const toDeleteItems = new Map(itemsMap); 337 | 338 | // Process imported items 339 | for (let lr of loadedItems) { 340 | if (this.options.onImport) { 341 | lr = (await this.options.onImport(lr, itemsSvc)) as Item; 342 | if (!lr) continue; 343 | } 344 | 345 | const lrKey = getKey(lr); 346 | 347 | const existing = itemsMap.get(lrKey); 348 | 349 | if (existing) { 350 | // We delete the item from the map so that we can later check which items were deleted 351 | toDeleteItems.delete(lrKey); 352 | 353 | const diff = getDiff(lr, existing); 354 | if (diff) { 355 | toUpdate.set(lrKey, { 356 | pkey: getPrimary(existing), 357 | diff, 358 | }); 359 | } 360 | } else { 361 | toInsert[lrKey] = lr; 362 | } 363 | } 364 | 365 | // Insert 366 | let toInsertValues = Object.values(toInsert); 367 | if (toInsertValues.length > 0) { 368 | this.logger.debug(`Inserting ${toInsertValues.length} x ${this.collection} items`); 369 | if (await this.sortbyIfLinked(toInsertValues)) { 370 | for (const 
item of toInsertValues) { 371 | await itemsSvc.createOne(item); 372 | } 373 | } else { 374 | await itemsSvc.createMany(toInsertValues); 375 | } 376 | } 377 | 378 | // Update 379 | if (toUpdate.size > 0) { 380 | this.logger.debug(`Updating ${toUpdate.size} x ${this.collection} items`); 381 | for (const [_key, item] of toUpdate) { 382 | await itemsSvc.updateOne(item.pkey, item.diff); 383 | } 384 | } 385 | 386 | const finishUp = async () => { 387 | if (!merge) { 388 | // When not merging, delete items that weren't in the import set 389 | const toDelete = Array.from(toDeleteItems.values(), getPrimary); 390 | if (toDelete.length > 0) { 391 | this.logger.debug(`Deleting ${toDelete.length} x ${this.collection} items`); 392 | await itemsSvc.deleteMany(toDelete); 393 | } 394 | } 395 | }; 396 | 397 | return finishUp; 398 | } 399 | } 400 | 401 | export { CollectionExporter }; 402 | -------------------------------------------------------------------------------- /src/condenseAction.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Utility function to wrap an asynchronous action with condensed execution behavior. 3 | * If the action is called again while it is still running, the action will run once more after the current run completes. 4 | * Handles function parameters, error logging, and maintains original return values. 5 | */ 6 | export function condenseAction<T extends (...args: any[]) => Promise<any>>(fn: T): T { 7 | let actionInProgress = false; 8 | let actionRequested = false; 9 | 10 | return new Proxy(fn, { 11 | async apply(target, thisArg, args) { 12 | if (actionInProgress) { 13 | actionRequested = true; 14 | return; 15 | } 16 | 17 | do { 18 | actionInProgress = true; 19 | actionRequested = false; 20 | 21 | try { 22 | const result = await Reflect.apply(target, thisArg, args); 23 | return result; 24 | } catch (err) { 25 | console.error('Error in condensed action:', err); 26 | } finally { 27 | actionInProgress = false; 28 | } 29 | } while (actionRequested); 30 | }, 31 | }) as T; 32 | } 33 | -------------------------------------------------------------------------------- /src/copyConfig.ts: -------------------------------------------------------------------------------- 1 | import fs from 'fs/promises'; 2 | import path from 'path'; 3 | import { fileURLToPath } from 'url'; 4 | 5 | export async function copyConfig(force: boolean, { logger }: { logger: any }) { 6 | const __dirname = path.dirname(fileURLToPath(import.meta.url)); 7 | const srcDir = path.resolve(__dirname, '../install'); 8 | const targetDir = process.cwd(); 9 | 10 | // Check if the config folder already exists; if it does, exit 11 | if (!force) { 12 | await fs 13 | .access(path.resolve(targetDir, 'schema-sync')) 14 | .then(() => { 15 | logger.info('Config folder already exists, use --force to override'); 16 | process.exit(0); 17 | }) 18 | .catch(() => { 19 | logger.info('Creating config folder...'); 20 | }); 21 | } 22 | 23 | await fs.cp(srcDir, targetDir, { recursive: true }); 24 | } 25 | -------------------------------------------------------------------------------- /src/default_config.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * This file contains the default configuration for the schema exporter. 3 | * 4 | * ONLY CHANGE THIS FILE IF YOU REALLY HAVE TO AND KNOW WHAT YOU ARE DOING! 
5 | */ 6 | 7 | import { ExportCollectionConfig } from './types'; 8 | 9 | export const syncDirectusCollections: ExportCollectionConfig = { 10 | directus_collections: { 11 | watch: ['relations'], 12 | query: { 13 | sort: ['collection'], 14 | }, 15 | }, 16 | directus_fields: { 17 | watch: ['fields', 'collections'], 18 | excludeFields: ['id'], 19 | getKey: o => `${o.collection}-${o.field}`, 20 | query: { 21 | sort: ['collection', 'field'], 22 | }, 23 | }, 24 | directus_relations: { 25 | watch: ['relations'], 26 | excludeFields: ['id'], 27 | getKey: o => `${o.many_collection}-${o.many_field}`, 28 | query: { 29 | sort: ['many_collection', 'many_field'], 30 | }, 31 | }, 32 | directus_roles: { 33 | watch: ['roles'], 34 | excludeFields: ['users'], 35 | query: { 36 | filter: { name: { _neq: 'Administrator' } }, 37 | }, 38 | }, 39 | directus_folders: { 40 | watch: ['folders'], 41 | excludeFields: [], 42 | query: { 43 | sort: ['parent', 'id'], 44 | }, 45 | }, 46 | directus_permissions: { 47 | watch: ['permissions', 'collections', 'fields'], 48 | excludeFields: ['id'], 49 | getKey: o => `${o.role ?? 'public'}-${o.collection}--${o.action}`, 50 | query: { 51 | sort: ['role', 'collection', 'action'], 52 | }, 53 | }, 54 | directus_settings: { 55 | watch: ['settings'], 56 | excludeFields: ['mv_hash', 'mv_ts', 'mv_locked', 'project_url'], 57 | }, 58 | directus_dashboards: { 59 | watch: ['dashboards'], 60 | excludeFields: ['user_created', 'panels'], 61 | }, 62 | directus_panels: { 63 | watch: ['panels'], 64 | excludeFields: ['user_created'], 65 | }, 66 | directus_flows: { 67 | watch: ['flows'], 68 | excludeFields: ['operations', 'user_created'], 69 | }, 70 | directus_operations: { 71 | watch: ['operations'], 72 | excludeFields: ['user_created'], 73 | linkedFields: ['resolve', 'reject'], 74 | }, 75 | directus_translations: { 76 | watch: ['translations'], 77 | excludeFields: ['id'], 78 | getKey: o => `${o.key}-${o.language}`, 79 | query: { 80 | sort: ['key', 'language'], 81 | }, 82 | }, 83 | directus_webhooks: { 84 | watch: ['webhooks'], 85 | excludeFields: ['url'], 86 | }, 87 | }; 88 | -------------------------------------------------------------------------------- /src/dialects/postgres/utils.test.ts: -------------------------------------------------------------------------------- 1 | import assert from "node:assert"; 2 | import { describe, it } from "node:test"; 3 | import { sequenceToSerialType } from "./utils.js"; 4 | 5 | describe('sequenceToSerialType', () => { 6 | it('should remove nextval default value and set has_auto_increment to true', () => { 7 | const obj1 = { 8 | "fields": [ 9 | { 10 | "collection": "test_collection", 11 | "field": "id", 12 | "type": "integer", 13 | "meta": null, 14 | "schema": { 15 | "name": "id", 16 | "table": "test_collection", 17 | "data_type": "integer", 18 | "default_value": "nextval('test_collection_id_seq'::regclass)", 19 | "max_length": null, 20 | "numeric_precision": 32, 21 | "numeric_scale": 0, 22 | "is_nullable": false, 23 | "is_unique": true, 24 | "is_primary_key": true, 25 | "is_generated": false, 26 | "generation_expression": null, 27 | "has_auto_increment": false, 28 | "foreign_key_table": null, 29 | "foreign_key_column": null 30 | } 31 | } 32 | ] 33 | }; 34 | const obj2 = { 35 | "fields": [ 36 | { 37 | "collection": "test_collection", 38 | "field": "id", 39 | "type": "integer", 40 | "meta": null, 41 | "schema": { 42 | "name": "id", 43 | "table": "test_collection", 44 | "data_type": "integer", 45 | "default_value": null, 46 | "max_length": null, 47 | 
"numeric_precision": 32, 48 | "numeric_scale": 0, 49 | "is_nullable": false, 50 | "is_unique": true, 51 | "is_primary_key": true, 52 | "is_generated": false, 53 | "generation_expression": null, 54 | "has_auto_increment": true, 55 | "foreign_key_table": null, 56 | "foreign_key_column": null 57 | } 58 | } 59 | ] 60 | }; 61 | assert.deepEqual(sequenceToSerialType(obj1), obj2); 62 | }); 63 | 64 | it('should return same snapshot if serial type is already used everywhere', () => { 65 | const obj1 = { 66 | "fields": [ 67 | { 68 | "collection": "test_collection", 69 | "field": "id", 70 | "type": "string", 71 | "meta": null, 72 | "schema": { 73 | "name": "id", 74 | "table": "test_collection", 75 | "data_type": "integer", 76 | "default_value": "test", 77 | "max_length": null, 78 | "numeric_precision": 32, 79 | "numeric_scale": 0, 80 | "is_nullable": false, 81 | "is_unique": true, 82 | "is_primary_key": true, 83 | "is_generated": false, 84 | "generation_expression": null, 85 | "has_auto_increment": false, 86 | "foreign_key_table": null, 87 | "foreign_key_column": null 88 | } 89 | } 90 | ] 91 | }; 92 | const obj2 = { 93 | "fields": [ 94 | { 95 | "collection": "test_collection", 96 | "field": "id", 97 | "type": "string", 98 | "meta": null, 99 | "schema": { 100 | "name": "id", 101 | "table": "test_collection", 102 | "data_type": "integer", 103 | "default_value": "test", 104 | "max_length": null, 105 | "numeric_precision": 32, 106 | "numeric_scale": 0, 107 | "is_nullable": false, 108 | "is_unique": true, 109 | "is_primary_key": true, 110 | "is_generated": false, 111 | "generation_expression": null, 112 | "has_auto_increment": false, 113 | "foreign_key_table": null, 114 | "foreign_key_column": null 115 | } 116 | } 117 | ] 118 | }; 119 | assert.deepEqual(sequenceToSerialType(obj1), obj2); 120 | }); 121 | }); -------------------------------------------------------------------------------- /src/dialects/postgres/utils.ts: -------------------------------------------------------------------------------- 1 | export function sequenceToSerialType>(snapshot: T): T { 2 | snapshot.fields 3 | .map( (field: any) => { 4 | if (field.schema?.default_value=="nextval('"+field.schema?.table+"_"+field.schema?.name+"_seq'::regclass)") { 5 | field.schema.default_value = null; 6 | field.schema.has_auto_increment = true; 7 | } 8 | return field; 9 | }) as T; 10 | return snapshot; 11 | } -------------------------------------------------------------------------------- /src/exportManager.ts: -------------------------------------------------------------------------------- 1 | import { ActionHandler } from '@directus/types'; 2 | import type { ApiExtensionContext } from '@directus/extensions'; 3 | import { CollectionExporter } from './collectionExporter.js'; 4 | import { ExportCollectionConfig, IExporterConfig, IGetItemsService } from './types'; 5 | 6 | export class ExportManager { 7 | protected exporters: IExporterConfig[] = []; 8 | 9 | constructor(protected logger: ApiExtensionContext['logger']) {} 10 | 11 | // FIRST: Add exporters 12 | public addExporter(exporterConfig: IExporterConfig) { 13 | this.exporters.push(exporterConfig); 14 | } 15 | 16 | public addCollectionExporter(config: ExportCollectionConfig, getItemsService: IGetItemsService) { 17 | for (let collectionName in config) { 18 | const opts = config[collectionName]!; 19 | this.exporters.push({ 20 | watch: opts.watch, 21 | exporter: new CollectionExporter(collectionName, getItemsService, opts, this.logger), 22 | }); 23 | } 24 | } 25 | 26 | // SECOND: Import if needed 27 | 
26 | // SECOND: Import if needed 27 | public async loadAll(merge = false) { 28 | await this._loadNextExporter(0, merge); 29 | } 30 | 31 | protected async _loadNextExporter(i = 0, merge = false) { 32 | if (i >= this.exporters.length) return; 33 | 34 | try { 35 | const finishUp = await this.exporters[i]!.exporter.load(merge); 36 | await this._loadNextExporter(i + 1, merge); 37 | if (typeof finishUp === 'function') await finishUp(); 38 | } catch (e) { 39 | this.logger.error(`Failed loading "${this.exporters[i]!.exporter.name}".`); 40 | throw e; 41 | } 42 | } 43 | 44 | // THIRD: Start watching for changes 45 | public attachAllWatchers(action: (event: string, handler: ActionHandler) => void, updateMeta: () => Promise<void>) { 46 | // EXPORT SCHEMAS & COLLECTIONS ON CHANGE // 47 | const actions = ['create', 'update', 'delete']; 48 | this.exporters.forEach(({ watch, exporter }) => { 49 | watch.forEach(col => { 50 | actions.forEach(evt => { 51 | action(`${col}.${evt}`, async () => { 52 | await exporter.export(); 53 | await updateMeta(); 54 | }); 55 | }); 56 | }); 57 | }); 58 | } 59 | 60 | public async exportAll() { 61 | console.log('Exporting ', this.exporters.length, ' exporters'); 62 | await Promise.all(this.exporters.map(e => e.exporter.export())); 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | import type { HookConfig } from '@directus/extensions'; 2 | import type { SchemaOverview } from '@directus/types'; 3 | import { condenseAction } from './condenseAction'; 4 | import { copyConfig } from './copyConfig'; 5 | import { ExportManager } from './exportManager'; 6 | import { SchemaExporter } from './schemaExporter'; 7 | import type { ExportCollectionConfig, IGetItemsService, ItemsService } from './types'; 8 | import { UpdateManager } from './updateManager'; 9 | import { ADMIN_ACCOUNTABILITY, ExportHelper, nodeImport } from './utils'; 10 | 11 | const registerHook: HookConfig = async ({ action, init }, { env, services, database, getSchema, logger }) => { 12 | const { SchemaService, ItemsService } = services; 13 | 
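// Summary of the environment variables this hook reads (all referenced below):
//   SCHEMA_SYNC            'IMPORT' | 'EXPORT' | 'BOTH' — which sync directions to enable
//   SCHEMA_SYNC_SPLIT      boolean — split the schema snapshot into one file per collection (defaults to true)
//   SCHEMA_SYNC_CONFIG     optional additional config file inside the schema-sync folder
//   SCHEMA_SYNC_DATA_ONLY  boolean — skip the schema exporter and only sync collection data
//   SCHEMA_SYNC_MERGE      boolean — load data in merge mode (upsert only, no deletes)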
14 | const schemaOptions = { 15 | split: typeof env.SCHEMA_SYNC_SPLIT === 'boolean' ? env.SCHEMA_SYNC_SPLIT : true, 16 | }; 17 | 18 | let schema: SchemaOverview | null; 19 | const getAdminSchema = async () => 20 | schema || 21 | (schema = await getSchema({ database })); 22 | const clearAdminSchema = () => (schema = null); 23 | const getSchemaService = () => 24 | new SchemaService({ 25 | knex: database, 26 | accountability: ADMIN_ACCOUNTABILITY, 27 | }); 28 | const getItemsService: IGetItemsService = async (collectionName: string) => 29 | new ItemsService(collectionName, { 30 | schema: await getAdminSchema(), 31 | accountability: ADMIN_ACCOUNTABILITY, 32 | knex: database, 33 | }) as ItemsService; 34 | 35 | const updateManager = new UpdateManager(database); 36 | 37 | // We need to do this asynchronously in order to load the config files 38 | let _exportManager: ExportManager; 39 | 40 | const createExportManager = async (dataOnly = false) => { 41 | const exportMng = new ExportManager(logger); 42 | 43 | if (!dataOnly) { 44 | exportMng.addExporter({ 45 | watch: ['collections', 'fields', 'relations'], 46 | exporter: new SchemaExporter(getSchemaService, logger, schemaOptions), 47 | }); 48 | } 49 | 50 | const { syncDirectusCollections } = (await nodeImport(ExportHelper.schemaDir, 'directus_config.js')) as { 51 | syncDirectusCollections: ExportCollectionConfig; 52 | }; 53 | const { syncCustomCollections } = (await nodeImport(ExportHelper.schemaDir, 'config.js')) as { 54 | syncCustomCollections: ExportCollectionConfig; 55 | }; 56 | exportMng.addCollectionExporter(syncDirectusCollections, getItemsService); 57 | exportMng.addCollectionExporter(syncCustomCollections, getItemsService); 58 | 59 | // Additional config 60 | if (env.SCHEMA_SYNC_CONFIG) { 61 | const { syncCustomCollections } = (await nodeImport(ExportHelper.schemaDir, env.SCHEMA_SYNC_CONFIG)) as { 62 | syncCustomCollections: ExportCollectionConfig; 63 | }; 64 | if (syncCustomCollections) { 65 | exportMng.addCollectionExporter(syncCustomCollections, getItemsService); 66 | } else { 67 | logger.warn(`Additional config specified but it does not export "syncCustomCollections"`); 68 | } 69 | } 70 | 71 | return exportMng; 72 | } 73 | 74 | const exportManager = async (dataOnly = false) => { 75 | if (dataOnly && env.SCHEMA_SYNC_DATA_ONLY !== true) { 76 | return await createExportManager(true); 77 | } 78 | 79 | if (!_exportManager) { 80 | _exportManager = await createExportManager(!!env.SCHEMA_SYNC_DATA_ONLY); 81 | } 82 | 83 | return _exportManager; 84 | }; 85 | 86 | const updateMeta = condenseAction(async (saveToDb = true) => { 87 | const meta = await ExportHelper.updateExportMeta(); 88 | if (saveToDb && meta && (await updateManager.lockForUpdates(meta.hash, meta.ts))) { 89 | await updateManager.commitUpdates(); 90 | } 91 | }); 92 | 93 | function attachExporters() { 94 | if (env.SCHEMA_SYNC === 'BOTH' || env.SCHEMA_SYNC === 'EXPORT') { 95 | exportManager().then(expMng => expMng.attachAllWatchers(action, updateMeta)); 96 | } 97 | } 98 | 99 | // LOAD EXPORTED SCHEMAS & COLLECTIONS 100 | if (env.SCHEMA_SYNC === 'BOTH' || env.SCHEMA_SYNC === 'IMPORT') { 101 | init('app.before', async () => { 102 | try { 103 | const meta = await ExportHelper.getExportMeta(); 104 | if (!meta) return logger.info('Nothing exported yet it seems'); 105 | if (!(await updateManager.lockForUpdates(meta.hash, meta.ts))) return; // Schema is locked / no change, nothing to do 106 | 107 | logger.info(`Updating schema and data with hash: ${meta.hash}`); 108 | const expMng = await exportManager(); 109 | await expMng.loadAll(!!env.SCHEMA_SYNC_MERGE); 110 | 111 | await 
updateManager.commitUpdates(); 112 | clearAdminSchema(); 113 | } catch (e) { 114 | logger.error(e); 115 | logger.info('Releasing lock...'); 116 | await updateManager.releaseLock(); 117 | } finally { 118 | await attachExporters(); 119 | } 120 | }); 121 | } else { 122 | attachExporters(); 123 | } 124 | 125 | init('cli.before', async ({ program }) => { 126 | const dbCommand = program.command('schema-sync'); 127 | 128 | dbCommand 129 | .command('export-schema') 130 | .description('Export only the schema file') 131 | .option('-S, --split', `Splits the schema file into multiple files per collection`) 132 | .action(async (args: { split: boolean }) => { 133 | logger.info('Exporting schema...'); 134 | const exportSchema = new SchemaExporter( 135 | getSchemaService, 136 | logger, 137 | args && 'split' in args ? args : schemaOptions 138 | ); 139 | await exportSchema.export(); 140 | 141 | await updateMeta(); 142 | 143 | logger.info('Done!'); 144 | process.exit(0); 145 | }); 146 | 147 | dbCommand 148 | .command('import-schema') 149 | .description('Import only the schema file') 150 | .action(async () => { 151 | logger.info('Importing schema...'); 152 | const meta = await ExportHelper.getExportMeta(); 153 | if (!meta) return logger.info('Nothing exported yet it seems'); 154 | 155 | const exportSchema = new SchemaExporter(getSchemaService, logger, schemaOptions); 156 | await exportSchema.load(); 157 | 158 | await updateManager.forceCommitUpdates(meta.hash, meta.ts); 159 | logger.info('Done!'); 160 | process.exit(0); 161 | }); 162 | 163 | dbCommand 164 | .command('install') 165 | .description('Ensures the DB is ready for schema sync, and creates the schema-sync config folder') 166 | .option('--force', 'Override schema-sync config folder') 167 | .action(async ({ force }: { force: boolean }) => { 168 | logger.info('Installing Schema sync...'); 169 | await updateManager.ensureInstalled(); 170 | await copyConfig(force, { logger }); 171 | (await exportManager()).exportAll(); 172 | await updateMeta(); 173 | 174 | logger.info('Done!'); 175 | process.exit(0); 176 | }); 177 | 178 | dbCommand 179 | .command('hash') 180 | .description('Recalculate the hash for all the data files') 181 | .action(async () => { 182 | await updateMeta(false); 183 | logger.info('Done!'); 184 | process.exit(0); 185 | }); 186 | 187 | dbCommand 188 | .command('import') 189 | .description('Import the schema and all available data from file to DB.') 190 | .option('--merge', 'Only upsert data and not delete') 191 | .option('--data', 'Only import data and not schema') 192 | .action(async ({ merge, data }: { merge: boolean; data: boolean }) => { 193 | try { 194 | logger.info(`Importing everything from: ${ExportHelper.dataDir}`); 195 | const expMng = await exportManager(data); 196 | await expMng.loadAll(merge); 197 | 198 | logger.info('Done!'); 199 | process.exit(0); 200 | } catch (err: any) { 201 | logger.error(err); 202 | process.exit(1); 203 | } 204 | }); 205 | 206 | dbCommand 207 | .command('export') 208 | .description('Export the schema and all data as configured from DB to file') 209 | .action(async () => { 210 | try { 211 | logger.info(`Exporting everything to: ${ExportHelper.dataDir}`); 212 | const expMng = await exportManager(); 213 | await expMng.exportAll(); 214 | 215 | await updateMeta(); 216 | 217 | logger.info('Done!'); 218 | process.exit(0); 219 | } catch (err: any) { 220 | logger.error(err); 221 | process.exit(1); 222 | } 223 | }); 224 | }); 225 | }; 226 | 227 | export default registerHook; 228 | 
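For reference, the config files that createExportManager imports above (schema-sync/directus_config.js, schema-sync/config.js, and the optional file named by SCHEMA_SYNC_CONFIG) are expected to export an object matching ExportCollectionConfig. A minimal, illustrative sketch of a custom config.js — the collection name, watched event prefix and fields below are assumptions for the example, not part of this repository:

// schema-sync/config.js (illustrative sketch)
export const syncCustomCollections = {
	articles: {
		watch: ['articles.items'], // assumed action-event prefix; attachAllWatchers appends .create/.update/.delete
		excludeFields: ['user_updated', 'date_updated'],
		query: {
			filter: { status: { _neq: 'draft' } },
			sort: ['id'],
		},
	},
};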
-------------------------------------------------------------------------------- /src/schemaExporter.ts: -------------------------------------------------------------------------------- 1 | import type { Snapshot, SnapshotField, SnapshotRelation } from '@directus/api/dist/types'; 2 | import type { ApiExtensionContext } from '@directus/extensions'; 3 | import { Collection } from '@directus/types'; 4 | import { readFile, writeFile, mkdir, rm } from 'fs/promises'; 5 | import { glob } from 'glob'; 6 | import { condenseAction } from './condenseAction.js'; 7 | import { exportHook } from './schemaExporterHooks.js'; 8 | import type { IExporter } from './types'; 9 | import { ExportHelper } from './utils.js'; 10 | 11 | export class SchemaExporter implements IExporter { 12 | protected _filePath: string; 13 | protected _getSchemaService: () => any; 14 | protected _exportHandler = condenseAction(() => this.createAndSaveSnapshot()); 15 | 16 | // Directus SchemaService, database and getSchema 17 | constructor( 18 | getSchemaService: () => any, 19 | protected logger: ApiExtensionContext['logger'], 20 | protected options = { split: true } 21 | ) { 22 | this._getSchemaService = () => getSchemaService(); 23 | this._filePath = `${ExportHelper.dataDir}/schema.json`; 24 | } 25 | 26 | protected ensureSchemaFilesDir = async () => { 27 | if (!(await ExportHelper.fileExists(`${ExportHelper.dataDir}/schema`))) { 28 | await mkdir(`${ExportHelper.dataDir}/schema`, { recursive: true }); 29 | } else { 30 | // Clean up old schema files 31 | const files = await glob(this.schemaFilesPath('*')); 32 | await Promise.all(files.map(file => rm(file))); 33 | } 34 | }; 35 | 36 | protected schemaFilesPath(collection: string) { 37 | return `${ExportHelper.dataDir}/schema/${collection}.json`; 38 | } 39 | 40 | get name() { 41 | return 'schema'; 42 | } 43 | 44 | public export = () => this._exportHandler(); 45 | 46 | /** 47 | * Import the schema from file to the database 48 | */ 49 | public load = async () => { 50 | const svc = this._getSchemaService(); 51 | let json; 52 | try { 53 | json = await readFile(this._filePath, { encoding: 'utf8' }); 54 | } catch (e) { 55 | return; 56 | } 57 | if (json) { 58 | const schemaParsed = JSON.parse(json); 59 | // For older versions, the snapshot was stored under the key `snapshot` 60 | const { partial, hash, ...snapshot } = ( 61 | (schemaParsed as any).snapshot 62 | ? 
Object.assign((schemaParsed as any).snapshot, { hash: schemaParsed.hash }) 63 | : schemaParsed 64 | ) as Snapshot & { partial?: boolean; hash: string }; 65 | 66 | if (partial) { 67 | snapshot.collections = []; 68 | snapshot.fields = []; 69 | snapshot.relations = []; 70 | 71 | let found = 0; 72 | const files = await glob(this.schemaFilesPath('*')); 73 | await Promise.all(files.map(async (file) => { 74 | const collectionJson = await readFile(file, { encoding: 'utf8' }); 75 | const { fields, relations, ...collectionInfo } = JSON.parse(collectionJson) as Collection & { 76 | fields: SnapshotField[]; 77 | relations: SnapshotRelation[]; 78 | }; 79 | ++found; 80 | 81 | // Only add collection if it has a meta definition (actual table or group) 82 | if (collectionInfo.meta) { 83 | snapshot.collections.push(collectionInfo); 84 | } 85 | 86 | for (const field of fields) { 87 | snapshot.fields.push(Object.assign({ collection: collectionInfo.collection }, field)); 88 | } 89 | for (const relation of relations) { 90 | snapshot.relations.push(Object.assign({ collection: collectionInfo.collection }, relation)); 91 | } 92 | })); 93 | 94 | if (found === 0) { 95 | this.logger.error('No schema files found in schema directory'); 96 | return; 97 | } 98 | 99 | this.logger.info(`Stitched ${found} partial schema files`); 100 | 101 | snapshot.collections.sort((a, b) => a.collection.localeCompare(b.collection)); 102 | // Sort non-table collections to the start 103 | snapshot.collections.sort((a, b) => String(!!a.schema).localeCompare(String(!!b.schema))); 104 | 105 | // Sort fields and relations by collection 106 | snapshot.fields.sort((a, b) => a.collection.localeCompare(b.collection)); 107 | snapshot.relations.sort((a, b) => a.collection.localeCompare(b.collection)); 108 | } 109 | 110 | const currentSnapshot = await svc.snapshot(); 111 | const currentHash = svc.getHashedSnapshot(currentSnapshot).hash; 112 | if (currentHash === hash) { 113 | this.logger.debug('Schema is already up-to-date'); 114 | return; 115 | } 116 | const diff = await svc.diff(snapshot, { currentSnapshot, force: true }); 117 | if (diff !== null) { 118 | await svc.apply({ diff, hash: currentHash }); 119 | } 120 | } 121 | }; 122 | 123 | /** 124 | * Create and save the schema snapshot to file 125 | */ 126 | protected createAndSaveSnapshot = async () => { 127 | const svc = this._getSchemaService(); 128 | let snapshot = (await svc.snapshot()) as Snapshot; 129 | snapshot = exportHook(snapshot); 130 | let hash = svc.getHashedSnapshot(snapshot).hash; 131 | 132 | if (this.options.split) { 133 | await this.ensureSchemaFilesDir(); 134 | const { collections, fields, relations, ...meta } = snapshot; 135 | 136 | // Sort on field name to ensure consistent order 137 | fields.sort((a, b) => a.field.localeCompare(b.field)); 138 | relations.sort((a, b) => a.field.localeCompare(b.field)); 139 | 140 | // Sort relations also by related_collection 141 | relations.sort((a, b) => 142 | a.related_collection && b.related_collection ? 
a.related_collection.localeCompare(b.related_collection) : 0 143 | ); 144 | 145 | const map: Record<string, any> = {}; 146 | collections.forEach(item => { 147 | map[item.collection] = item; 148 | map[item.collection].fields = [] as SnapshotField[]; 149 | map[item.collection].relations = [] as SnapshotRelation[]; 150 | }); 151 | 152 | for (const field of fields) { 153 | const { collection, ...fieldMeta } = field; 154 | if (!map[collection]) { 155 | map[collection] = { collection, fields: [], relations: [] }; 156 | } 157 | map[collection].fields.push(fieldMeta); 158 | } 159 | 160 | for (const relation of relations) { 161 | const { collection, ...relationMeta } = relation; 162 | if (!map[collection]) { 163 | map[collection] = { collection, fields: [], relations: [] }; 164 | } 165 | 166 | map[collection].relations.push(relationMeta); 167 | } 168 | 169 | // Save initial snapshot file as partial 170 | const schemaJson = JSON.stringify(Object.assign({ hash, partial: true }, meta), null, 2); 171 | await writeFile(this._filePath, schemaJson); 172 | 173 | // Save all collections with fields as individual files 174 | await Promise.all( 175 | Object.entries(map).map(([collection, item]) => 176 | writeFile(this.schemaFilesPath(collection), JSON.stringify(item, null, 2)) 177 | ) 178 | ); 179 | } else { 180 | const schemaJson = JSON.stringify(Object.assign({ hash }, snapshot), null, 2); 181 | await writeFile(this._filePath, schemaJson); 182 | } 183 | }; 184 | } 185 | -------------------------------------------------------------------------------- /src/schemaExporterHooks.ts: -------------------------------------------------------------------------------- 1 | import * as pgUtils from './dialects/postgres/utils.js'; 2 | 3 | const modifiers: modifiersType = { 4 | postgres: [pgUtils.sequenceToSerialType], 5 | }; 6 | 7 | export function exportHook<T extends Record<string, any>>(snapshot: T) { 8 | if (modifiers[snapshot.vendor]?.length) 9 | return modifiers[snapshot.vendor]!.reduce((_snapshot, modifier) => { 10 | return modifier(_snapshot); 11 | }, snapshot); 12 | return snapshot; 13 | } 14 | 15 | type modifiersType = Record<string, snapshotFunctionType[]>; 16 | 17 | type snapshotFunctionType = <T extends Record<string, any>>(snapshotWithHash: T) => T; 18 | -------------------------------------------------------------------------------- /src/types.ts: -------------------------------------------------------------------------------- 1 | import type { Accountability, Item, PrimaryKey, Query, SchemaOverview } from '@directus/types'; 2 | import type Keyv from 'keyv'; 3 | import type { Knex } from 'knex'; 4 | 5 | export type JSONString = string; 6 | 7 | export type IGetItemsService = (collectionName: string) => Promise<ItemsService>; 8 | 9 | export interface IExporter { 10 | name: string; 11 | load: (merge?: boolean) => Promise<(() => Promise<void>) | null | void>; 12 | export: () => Promise<void>; 13 | } 14 | 15 | export interface IExporterConfig { 16 | watch: string[]; 17 | exporter: IExporter; 18 | } 19 | 20 | type CollectionName = string; 21 | export type ExportCollectionConfig = Record< 22 | CollectionName, 23 | CollectionExporterOptions & { 24 | watch: string[]; 25 | } 26 | >; 27 | 28 | export type CollectionExporterOptions = { 29 | // Fields to exclude from checking/exporting 30 | excludeFields?: string[]; 31 | 32 | // Fields on the same collection that are linked to form a hierarchy 33 | linkedFields?: string[]; 34 | 35 | // Fields on which to group the items into multiple exported files 36 | groupBy?: string[]; 37 | 38 | // Function to get a unique generated key for the item 39 | getKey?: (o: Item) => PrimaryKey; 40 | 41 | // Specify 
additional query options to filter, sort and limit the exported items 42 | query?: Pick<Query, 'filter' | 'sort' | 'limit'>; 43 | 44 | // Prefix to add to the exported file name 45 | prefix?: string; 46 | onExport?: (item: Item, srv: ItemsService) => Promise<Item | null>; 47 | onImport?: (item: Item, srv: ItemsService) => Promise<Item | null>; 48 | }; 49 | 50 | export type ToUpdateItemDiff = { 51 | pkey: PrimaryKey; 52 | diff: any; 53 | }; 54 | 55 | // 56 | // Defining used Directus types here in order to get type hinting without installing entire Directus 57 | // 58 | export type MutationOptions = { 59 | emitEvents?: boolean; 60 | }; 61 | export interface ItemsService { 62 | collection: string; 63 | knex: Knex; 64 | accountability: Accountability | null; 65 | eventScope: string; 66 | schema: SchemaOverview; 67 | cache: Keyv | null; 68 | 69 | createOne(data: Partial<Item>, opts?: MutationOptions): Promise<PrimaryKey>; 70 | createMany(data: Partial<Item>[], opts?: MutationOptions): Promise<PrimaryKey[]>; 71 | 72 | readOne(key: PrimaryKey, query?: any, opts?: MutationOptions): Promise<Item>; 73 | readMany(keys: PrimaryKey[], query?: any, opts?: MutationOptions): Promise<Item[]>; 74 | readByQuery(query: any, opts?: MutationOptions): Promise<Item[]>; 75 | 76 | updateOne(key: PrimaryKey, data: Partial<Item>, opts?: MutationOptions): Promise<PrimaryKey>; 77 | updateMany(keys: PrimaryKey[], data: Partial<Item>, opts?: MutationOptions): Promise<PrimaryKey[]>; 78 | 79 | upsertMany(payloads: Partial<Item>[], opts?: MutationOptions): Promise<PrimaryKey[]>; 80 | 81 | deleteOne(key: PrimaryKey, opts?: MutationOptions): Promise<PrimaryKey>; 82 | deleteMany(keys: PrimaryKey[], opts?: MutationOptions): Promise<PrimaryKey[]>; 83 | deleteByQuery(query: any, opts?: MutationOptions): Promise<PrimaryKey[]>; 84 | } 85 | -------------------------------------------------------------------------------- /src/updateManager.ts: -------------------------------------------------------------------------------- 1 | import type { Knex } from 'knex'; 2 | import { ExportHelper } from './utils'; 3 | 4 | export class UpdateManager { 5 | protected db: Knex; 6 | protected tableName = 'directus_settings'; 7 | protected rowId = 1; 8 | 9 | protected _locking = false; 10 | protected _locked: 11 | | { 12 | hash: string; 13 | ts: string; 14 | } 15 | | false = false; 16 | 17 | constructor(database: Knex) { 18 | this.db = database; 19 | } 20 | 21 | /** 22 | * Acquire the lock to make updates 23 | * @param newHash - New hash value of latest changes 24 | * @param isoTS - ISO timestamp 25 | * @returns 26 | */ 27 | public async lockForUpdates(newHash: string, isoTS: string) { 28 | if (this._locked || this._locking) return false; 29 | this._locking = true; 30 | 31 | // Don't lock if schema sync is not installed yet 32 | const isInstalled = await this.db.schema.hasColumn(this.tableName, 'mv_hash'); 33 | if (!isInstalled) { 34 | this._locking = false; 35 | return true; 36 | } 37 | 38 | const succeeded = await this.db.transaction(async trx => { 39 | const rows = await trx(this.tableName) 40 | .select('*') 41 | .where('id', this.rowId) 42 | .where('mv_locked', false) 43 | // Only need to migrate if hash is different 44 | .andWhereNot('mv_hash', newHash) 45 | // And only if the previous hash is older than the current one 46 | .andWhere('mv_ts', '<', isoTS) 47 | .orWhereNull('mv_ts') 48 | .forUpdate(); // This locks the row 49 | 50 | // If row is found, lock it 51 | if (rows.length) { 52 | await trx(this.tableName).where('id', this.rowId).update({ 53 | mv_locked: true, 54 | }); 55 | this._locked = { 56 | hash: newHash, 57 | ts: isoTS, 58 | }; 59 | return true; 60 | } 61 | 62 | return false; 63 | }); 64 | 65 | this._locking = false; 66 | return succeeded; 67 | } 
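// Illustrative lock lifecycle (mirrors how src/index.ts uses this class):
//   const meta = await ExportHelper.getExportMeta();
//   if (meta && (await updateManager.lockForUpdates(meta.hash, meta.ts))) {
//     try {
//       // ... apply the exported schema and data ...
//       await updateManager.commitUpdates(); // persists mv_hash / mv_ts and clears mv_locked
//     } catch {
//       await updateManager.releaseLock();   // clears mv_locked without updating the hash
//     }
//   }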
68 | 69 | public async commitUpdates() { 70 | if (!this._locked) return false; 71 | 72 | await this.db(this.tableName).where('id', this.rowId).update({ 73 | mv_hash: this._locked.hash, 74 | mv_ts: this._locked.ts, 75 | mv_locked: false, 76 | }); 77 | 78 | this._locked = false; 79 | return true; 80 | } 81 | 82 | public async forceCommitUpdates(newHash: string, isoTS: string) { 83 | await this.db(this.tableName).where('id', this.rowId).update({ 84 | mv_hash: newHash, 85 | mv_ts: isoTS, 86 | mv_locked: false, 87 | }); 88 | 89 | this._locked = false; 90 | return true; 91 | } 92 | 93 | public async releaseLock() { 94 | if (!this._locked) return false; 95 | 96 | await this.db(this.tableName).where('id', this.rowId).update({ 97 | mv_locked: false, 98 | }); 99 | 100 | this._locked = false; 101 | return true; 102 | } 103 | 104 | 105 | public async ensureInstalled() { 106 | const tableName = 'directus_settings'; 107 | 108 | const isInstalled = await this.db.schema.hasColumn(tableName, 'mv_hash'); 109 | 110 | if (!isInstalled) { 111 | await this.db.schema.table(tableName, table => { 112 | table.string('mv_hash').defaultTo('').notNullable(); 113 | table.timestamp('mv_ts', { useTz: true }).defaultTo('2020-01-01').notNullable(); 114 | table.boolean('mv_locked').defaultTo(false).notNullable(); 115 | }); 116 | return true; 117 | } 118 | return false; 119 | } 120 | } 121 | -------------------------------------------------------------------------------- /src/utils.test.ts: -------------------------------------------------------------------------------- 1 | import assert from 'node:assert'; 2 | import { describe, it } from 'node:test'; 3 | import { deepEqual, getDiff, sortObject } from './utils.js'; 4 | 5 | describe('sortObject', () => { 6 | it('should sort object keys alphabetically', () => { 7 | const input = { c: 1, a: 2, b: 3 }; 8 | const assertedOutput = { a: 2, b: 3, c: 1 }; 9 | assert.deepStrictEqual(sortObject(input), assertedOutput); 10 | }); 11 | 12 | it('should sort nested object keys alphabetically', () => { 13 | const input = { c: 1, a: { d: 4, b: 3 }, e: 5 }; 14 | const assertedOutput = { a: { b: 3, d: 4 }, c: 1, e: 5 }; 15 | assert.deepStrictEqual(sortObject(input), assertedOutput); 16 | }); 17 | 18 | it('should sort array elements recursively', () => { 19 | const input = [{ c: 1, a: 2 }, { b: 3 }]; 20 | const assertedOutput = [{ a: 2, c: 1 }, { b: 3 }]; 21 | assert.deepStrictEqual(sortObject(input), assertedOutput); 22 | }); 23 | 24 | it('should return input if it is not an object', () => { 25 | assert.deepStrictEqual(sortObject(null as any), null); 26 | assert.deepStrictEqual(sortObject(42 as any), 42); 27 | assert.deepStrictEqual(sortObject('hello' as any), 'hello'); 28 | }); 29 | }); 30 | 31 | describe('getDiff', () => { 32 | it('should return the entire new object if the old object is null', () => { 33 | const newObj = { a: 1, b: 2 }; 34 | const oldObj = null; 35 | const assertedOutput = { a: 1, b: 2 }; 36 | assert.deepStrictEqual(getDiff(newObj, oldObj), assertedOutput); 37 | }); 38 | 39 | it('should return null if the new and old objects are equal', () => { 40 | const newObj = { a: 1, b: 2 }; 41 | const oldObj = { a: 1, b: 2 }; 42 | assert.deepStrictEqual(getDiff(newObj, oldObj), null); 43 | }); 44 | 45 | it('should return only the different properties between the new and old objects', () => { 46 | const newObj = { a: 1, b: 2, c: 3 }; 47 | const oldObj = { a: 1, b: 3, d: 4 }; 48 | const assertedOutput = { b: 2, c: 3 }; 49 | assert.deepStrictEqual(getDiff(newObj, oldObj), 
assertedOutput); 50 | }); 51 | 52 | it('should handle nested objects', () => { 53 | const newObj = { a: 1, b: { c: 2, d: 3 } }; 54 | const oldObj = { a: 1, b: { c: 2, d: 4 } }; 55 | const assertedOutput = { b: { d: 3 } }; 56 | assert.deepStrictEqual(getDiff(newObj, oldObj), assertedOutput); 57 | }); 58 | 59 | it('should handle arrays', () => { 60 | const newObj = { a: 1, b: [1, 2, 3] }; 61 | const oldObj = { a: 1, b: [1, 2, 4] }; 62 | const assertedOutput = { b: [1, 2, 3] }; 63 | assert.deepStrictEqual(getDiff(newObj, oldObj), assertedOutput); 64 | }); 65 | }); 66 | 67 | describe('deepEqual', () => { 68 | it('should return true for equal objects', () => { 69 | const obj1 = { a: 1, b: { c: 2 } }; 70 | const obj2 = { a: 1, b: { c: 2 } }; 71 | assert.strictEqual(deepEqual(obj1, obj2), true); 72 | }); 73 | 74 | it('should return false for different objects', () => { 75 | const obj1 = { a: 1, b: { c: 2 } }; 76 | const obj2 = { a: 1, b: { c: 3 } }; 77 | assert.strictEqual(deepEqual(obj1, obj2), false); 78 | }); 79 | 80 | it('should return true for equal arrays', () => { 81 | const arr1 = [1, 2, { a: 3 }]; 82 | const arr2 = [1, 2, { a: 3 }]; 83 | assert.strictEqual(deepEqual(arr1, arr2), true); 84 | }); 85 | 86 | it('should return false for different arrays', () => { 87 | const arr1 = [1, 2, { a: 3 }]; 88 | const arr2 = [1, 2, { a: 4 }]; 89 | assert.strictEqual(deepEqual(arr1, arr2), false); 90 | }); 91 | 92 | it('should return true for equal primitives', () => { 93 | assert.strictEqual(deepEqual(1, 1), true); 94 | assert.strictEqual(deepEqual('hello', 'hello'), true); 95 | assert.strictEqual(deepEqual(null, null), true); 96 | assert.strictEqual(deepEqual(undefined, undefined), true); 97 | }); 98 | 99 | it('should return false for different primitives', () => { 100 | assert.strictEqual(deepEqual(1, 2), false); 101 | assert.strictEqual(deepEqual('hello', 'world'), false); 102 | assert.strictEqual(deepEqual(null, undefined), false); 103 | }); 104 | }); 105 | -------------------------------------------------------------------------------- /src/utils.ts: -------------------------------------------------------------------------------- 1 | import { createHash } from 'crypto'; 2 | import { access, readFile, readdir, writeFile } from 'fs/promises'; 3 | import { resolve } from 'path'; 4 | import { pathToFileURL } from 'url'; 5 | 6 | export const ADMIN_ACCOUNTABILITY = { role: '', admin: true }; 7 | 8 | export function nodeImport(dir: string, file: string) { 9 | return import(pathToFileURL(resolve(dir, file)).href); 10 | } 11 | 12 | export class ExportHelper { 13 | static get schemaDir() { 14 | return resolve(process.cwd(), 'schema-sync'); 15 | } 16 | 17 | static get dataDir() { 18 | return resolve(ExportHelper.schemaDir, 'data'); 19 | } 20 | 21 | static get hashFile() { 22 | return resolve(ExportHelper.schemaDir, 'hash.txt'); 23 | } 24 | 25 | static utcTS(isoTimestamp: string = new Date().toISOString()) { 26 | return isoTimestamp.replace('T', ' ').replace(/\.\d*Z/, ''); 27 | } 28 | 29 | static async updateExportMeta(currentHash = '') { 30 | const hasher = createHash('sha256'); 31 | const files = await readdir(ExportHelper.dataDir); 32 | for (const file of files) { 33 | if (file.endsWith('.json')) { 34 | const json = await readFile(`${ExportHelper.dataDir}/${file}`, { encoding: 'utf8' }); 35 | hasher.update(json); 36 | } 37 | } 38 | const hash = hasher.digest('hex'); 39 | 40 | // Only update hash file if it has changed 41 | if (hash === currentHash) return false; 42 | 43 | const ts = ExportHelper.utcTS(); 
44 | const txt = hash + '@' + ts; 45 | 46 | await writeFile(this.hashFile, txt); 47 | return { 48 | hash, 49 | ts, 50 | }; 51 | } 52 | 53 | static async fileExists(path: string) { 54 | try { 55 | await access(path); 56 | return true; 57 | } catch { 58 | return false; 59 | } 60 | } 61 | 62 | static async getExportMeta() { 63 | try { 64 | const content = await readFile(this.hashFile, { encoding: 'utf8' }); 65 | const [hash, ts] = content.split('@'); 66 | 67 | if (hash && ts && new Date(ts).toString() !== 'Invalid Date') { 68 | return { 69 | hash, 70 | ts, 71 | }; 72 | } 73 | } catch { 74 | // ignore 75 | } 76 | return null; 77 | } 78 | } 79 | 80 | export function deepEqual(obj1: any, obj2: any): boolean { 81 | if (obj1 === obj2) return true; 82 | 83 | if (typeof obj1 !== 'object' || obj1 === null || typeof obj2 !== 'object' || obj2 === null) { 84 | return false; 85 | } 86 | 87 | const keys1 = Object.keys(obj1); 88 | const keys2 = Object.keys(obj2); 89 | 90 | if (keys1.length !== keys2.length) return false; 91 | 92 | for (let key of keys1) { 93 | if (!keys2.includes(key)) return false; 94 | if (!deepEqual(obj1[key], obj2[key])) return false; 95 | } 96 | 97 | return true; 98 | } 99 | 100 | export function getDiff(newObj: Record<string, any>, oldObj: any) { 101 | if (!oldObj) return newObj; 102 | 103 | const result: Record<string, any> = {}; 104 | let isDifferent = false; 105 | Object.keys(newObj).forEach(key => { 106 | if (!deepEqual(newObj[key], oldObj[key])) { 107 | result[key] = newObj[key]; 108 | isDifferent = true; 109 | } 110 | }); 111 | return isDifferent ? result : null; 112 | } 113 | 114 | export function sortObject<T extends Record<string, any>>(obj: T): T; 115 | export function sortObject<T>(obj: T[]): T[]; 116 | export function sortObject<T extends Record<string, any> | T[]>(obj: T): T { 117 | if (typeof obj !== 'object' || obj === null) { 118 | return obj; 119 | } 120 | 121 | if (Array.isArray(obj)) { 122 | return obj.map(sortObject) as unknown as T; 123 | } 124 | 125 | const sortedObj: Record<string, any> = {}; 126 | Object.keys(obj) 127 | .sort() 128 | .forEach(key => { 129 | sortedObj[key] = sortObject((obj as Record<string, any>)[key]); 130 | }); 131 | return sortedObj as T; 132 | } 133 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ES2022", 4 | "lib": ["ES2022", "DOM"], 5 | "module": "ES2022", 6 | "moduleResolution": "node", 7 | "strict": true, 8 | "noFallthroughCasesInSwitch": true, 9 | "esModuleInterop": true, 10 | "noImplicitAny": true, 11 | "noImplicitThis": true, 12 | "noImplicitReturns": true, 13 | "noUnusedLocals": true, 14 | "noUncheckedIndexedAccess": true, 15 | "noUnusedParameters": true, 16 | "alwaysStrict": true, 17 | "strictNullChecks": true, 18 | "strictFunctionTypes": true, 19 | "strictBindCallApply": true, 20 | "strictPropertyInitialization": true, 21 | "resolveJsonModule": false, 22 | "skipLibCheck": true, 23 | "forceConsistentCasingInFileNames": true, 24 | "allowSyntheticDefaultImports": true, 25 | "isolatedModules": true, 26 | "rootDir": "./src" 27 | }, 28 | "include": ["./src/**/*.ts"] 29 | } -------------------------------------------------------------------------------- /tsconfig.test.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ES2022", 4 | "lib": ["ES2022", "DOM"], 5 | "module": "ES2022", 6 | "moduleResolution": "node", 7 | "strict": true, 8 | "noFallthroughCasesInSwitch": true, 9 | "esModuleInterop": 
true, 10 | "noImplicitAny": true, 11 | "noImplicitThis": true, 12 | "noImplicitReturns": true, 13 | "noUnusedLocals": true, 14 | "noUncheckedIndexedAccess": true, 15 | "noUnusedParameters": true, 16 | "alwaysStrict": true, 17 | "strictNullChecks": true, 18 | "strictFunctionTypes": true, 19 | "strictBindCallApply": true, 20 | "strictPropertyInitialization": true, 21 | "resolveJsonModule": false, 22 | "skipLibCheck": true, 23 | "forceConsistentCasingInFileNames": true, 24 | "allowSyntheticDefaultImports": true, 25 | "isolatedModules": true, 26 | "rootDir": "./src", 27 | "outDir": "./dist-test", 28 | }, 29 | "include": ["src/utils.ts", "src/utils.test.ts", "src/dialects/postgres/utils.ts", "src/dialects/postgres/utils.test.ts"] 30 | } --------------------------------------------------------------------------------