├── .all-contributorsrc ├── .env.example ├── .github └── workflows │ └── main.yml ├── .gitignore ├── .prettierignore ├── .prettierrc ├── .releaserc ├── .travis.yml ├── CHANGELOG.md ├── CONTRIBUTING.md ├── LICENSE ├── MIGRATING.md ├── README.md ├── commitlint.config.js ├── jest.config.js ├── package.json ├── scripts └── test-studio │ ├── init-vars.js │ ├── init.sh │ └── initialSanityData.tar.gz ├── src ├── __tests__ │ ├── __recordings__ │ │ └── Full-migration_1592800280 │ │ │ └── should-print-the-same-output-given-the-same-HTTP-requests_3762760973 │ │ │ └── recording.har │ ├── __snapshots__ │ │ └── migrate.integration.test.ts.snap │ ├── migrate.integration.test.ts │ └── migrate.test.ts ├── config.ts ├── index.ts ├── modules.d.ts ├── transactions.ts ├── types.ts └── utils │ ├── __mocks__ │ └── logging.ts │ ├── __tests__ │ ├── misc.test.ts │ ├── parsing.test.ts │ ├── queue.test.ts │ └── stubs.ts │ ├── index.ts │ ├── logging.ts │ ├── misc.ts │ ├── parsing.ts │ └── queue.ts ├── test-studio └── .gitkeep ├── tsconfig.json └── yarn.lock /.all-contributorsrc: -------------------------------------------------------------------------------- 1 | { 2 | "files": [ 3 | "README.md" 4 | ], 5 | "imageSize": 100, 6 | "commit": false, 7 | "contributors": [ 8 | { 9 | "login": "good-idea", 10 | "name": "Joseph Thomas", 11 | "avatar_url": "https://avatars.githubusercontent.com/u/11514928?v=4", 12 | "profile": "https://www.good-idea.studio", 13 | "contributions": [ 14 | "doc", 15 | "code", 16 | "test" 17 | ] 18 | } 19 | ], 20 | "contributorsPerLine": 7, 21 | "projectName": "sanity-graph-import", 22 | "projectOwner": "sanctuarycomputer", 23 | "repoType": "github", 24 | "repoHost": "https://github.com", 25 | "skipCi": true 26 | } 27 | -------------------------------------------------------------------------------- /.env.example: -------------------------------------------------------------------------------- 1 | 2 | TEST_STUDIO_SOURCE_PROJECTID=XXXXXX 3 | TEST_STUDIO_SOURCE_DATASET=production 4 | TEST_STUDIO_TARGET_PROJECTID=XXXXXX 5 | TEST_STUDIO_TARGET_DATASET=staging 6 | TEST_STUDIO_TARGET_TOKEN=XXXXXX 7 | -------------------------------------------------------------------------------- /.github/workflows/main.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | on: [push] 3 | jobs: 4 | build: 5 | name: Build, lint, and test on Node ${{ matrix.node }} 6 | 7 | runs-on: ubuntu-latest 8 | strategy: 9 | matrix: 10 | node: ['10.x', '12.x', '14.x'] 11 | 12 | steps: 13 | - name: Checkout repo 14 | uses: actions/checkout@v2 15 | 16 | - name: Use Node ${{ matrix.node }} 17 | uses: actions/setup-node@v1 18 | with: 19 | node-version: ${{ matrix.node }} 20 | 21 | - name: Install deps and build 22 | run: yarn install 23 | 24 | - name: Lint 25 | run: yarn lint 26 | 27 | - name: Test 28 | run: yarn test --ci --coverage --maxWorkers=2 29 | env: 30 | TEST_STUDIO_SOURCE_PROJECTID: ${{ secrets.TEST_STUDIO_SOURCE_PROJECTID }} 31 | TEST_STUDIO_SOURCE_DATASET: ${{ secrets.TEST_STUDIO_SOURCE_DATASET }} 32 | TEST_STUDIO_TARGET_PROJECTID: ${{ secrets.TEST_STUDIO_TARGET_PROJECTID }} 33 | TEST_STUDIO_TARGET_DATASET: ${{ secrets.TEST_STUDIO_TARGET_DATASET }} 34 | TEST_STUDIO_TARGET_TOKEN: ${{ secrets.TEST_STUDIO_TARGET_TOKEN }} 35 | 36 | - name: Upload Coverage artifact 37 | uses: actions/upload-artifact@v2 38 | with: 39 | name: coverage 40 | path: ./coverage 41 | 42 | publish: 43 | name: Publish to NPM 44 | needs: build 45 | runs-on: ubuntu-latest 46 | if: github.ref == 'refs/heads/main' 47 
| steps: 48 | - name: Checkout repo 49 | uses: actions/checkout@v2 50 | 51 | - name: Install dependencies and build 52 | run: yarn install 53 | 54 | - name: Semantic Release 55 | run: yarn semantic-release 56 | env: 57 | GITHUB_TOKEN: ${{ secrets.SEMANTIC_RELEASE_GH_TOKEN }} 58 | NPM_TOKEN: ${{ secrets.NPM_TOKEN }} 59 | 60 | - name: Download Coverage artifact 61 | uses: actions/download-artifact@v2 62 | with: 63 | name: coverage 64 | 65 | - name: Code Coverage 66 | run: yarn test:coverage:send 67 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.log 2 | .DS_Store 3 | node_modules 4 | dist 5 | coverage 6 | 7 | .env 8 | !.env.example 9 | 10 | test-studio/* 11 | !test-studio/.gitkeep 12 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | # package.json is formatted by package managers, so we ignore it here 2 | package.json -------------------------------------------------------------------------------- /.prettierrc: -------------------------------------------------------------------------------- 1 | { 2 | "useTabs": false, 3 | "tabWidth": 2, 4 | "semi": false, 5 | "singleQuote": true, 6 | "printWidth": 80, 7 | "arrowParens": "always" 8 | } 9 | -------------------------------------------------------------------------------- /.releaserc: -------------------------------------------------------------------------------- 1 | { 2 | "plugins": [ 3 | "@semantic-release/commit-analyzer", 4 | "@semantic-release/release-notes-generator", 5 | "@semantic-release/changelog", 6 | "@semantic-release/npm", 7 | ["@semantic-release/git", { 8 | "assets": ["package.json", "CHANGELOG.md"], 9 | "message": "chore(release): ${nextRelease.version} [skip ci]\n\n${nextRelease.notes}" 10 | }], 11 | "@semantic-release/github" 12 | ], 13 | "branches": ["main"] 14 | } 15 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | sudo: false 2 | language: node_js 3 | node_js: 4 | - '10' 5 | cache: 6 | directories: 7 | - '$HOME/.npm' 8 | - node_modules 9 | jobs: 10 | include: 11 | - stage: test 12 | script: yarn test 13 | - stage: release 14 | if: branch = main 15 | node_js: lts/* 16 | before_install: skip 17 | script: 18 | # Update Tables of Content in the relevant `.md` files 19 | - markdown-toc -i CONTRIBUTING.md 20 | - markdown-toc -i README.md 21 | deploy: 22 | provider: script 23 | skip_cleanup: true 24 | script: npm run test && npm run build && npm run semantic-release && npm run test:coverage:send 25 | on: 26 | branch: main 27 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | ## [1.1.2](https://github.com/sanctuarycomputer/sanity-graph-import/compare/v1.1.1...v1.1.2) (2021-03-03) 2 | 3 | 4 | ### Bug Fixes 5 | 6 | * **lib:** fix empty array mapping in utils/misc/chunk ([f669fef](https://github.com/sanctuarycomputer/sanity-graph-import/commit/f669fef27899bb2380616a3135c59ba3f867f36d)) 7 | 8 | ## [1.1.1](https://github.com/sanctuarycomputer/sanity-graph-import/compare/v1.1.0...v1.1.1) (2021-02-06) 9 | 10 | 11 | ### Bug Fixes 12 | 13 | * **repo:** make package public, oops 
([477bb1b](https://github.com/sanctuarycomputer/sanity-graph-import/commit/477bb1bb1a7017af9959efd2470094639ce5dc74)) 14 | 15 | # [1.1.0](https://github.com/sanctuarycomputer/sanity-graph-import/compare/v1.0.1...v1.1.0) (2021-01-28) 16 | 17 | 18 | ### Bug Fixes 19 | 20 | * **lib:** fix weird duplicate image issue ([a52abc1](https://github.com/sanctuarycomputer/sanity-graph-import/commit/a52abc1eb48addcfdf4f13394faaaf6c2ae136ef)) 21 | 22 | 23 | ### Features 24 | 25 | * **lib:** add delete boolean option to config ([79abd46](https://github.com/sanctuarycomputer/sanity-graph-import/commit/79abd460baf8f71eed05a740c0b9787b2b3c1104)) 26 | 27 | ## [1.0.1](https://github.com/sanctuarycomputer/sanity-graph-import/compare/v1.0.0...v1.0.1) (2021-01-26) 28 | 29 | 30 | ### Bug Fixes 31 | 32 | * **lib:** await for success message ([fd5a32a](https://github.com/sanctuarycomputer/sanity-graph-import/commit/fd5a32a96a489b92a1c8564718c20ebebf4977b8)) 33 | 34 | # 1.0.0 (2021-01-26) 35 | 36 | 37 | ### Bug Fixes 38 | 39 | * **lib:** fix linting errors ([38dcc0e](https://github.com/sanctuarycomputer/sanity-graph-import/commit/38dcc0ef8b2622fb95a921b2aa9e1a61c580e90e)) 40 | 41 | 42 | ### Features 43 | 44 | * **lib:** initial release ([1a575f6](https://github.com/sanctuarycomputer/sanity-graph-import/commit/1a575f64f9e4f82890e4ecf72a22387995e27ac4)) 45 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | Thanks for your interest in making a contribution! 4 | 5 | 6 | ## Setup 7 | 8 | To get started: 9 | 10 | - Fork this repo 11 | - Clone your fork: `git clone git@github.com:your-github-handle/sanity-graph-import` 12 | - Install packages: `yarn install` 13 | - Set up your test studio (required for running tests): `yarn test-studio:init` 14 | - Set up tests: `yarn test:init` (TODO #7) 15 | 16 | You will need to be logged into Sanity - if you haven't done this on your machine, run `npx @sanity/cli login`. 17 | 18 | You'll now have a sample test studio under your account that you can use to try things out. 19 | 20 | ## Development 21 | 22 | You can now run `yarn start` to compile on file changes and `yarn test:watch` to get the tests running. 23 | 24 | Make changes, then commit them with a commit message that matches [conventional commit](https://www.conventionalcommits.org/en/v1.0.0/) guidelines. For example: 25 | 26 | - `git commit -m "fix(lib): fixes some bug"` 27 | - `git commit -m "chore(repo): update dependencies"` 28 | - `git commit -m "docs(repo): update contributors doc"` 29 | 30 | The format of the message is `[type]([scope]): your message` 31 | 32 | Where `scope` is one of: 33 | 34 | - `lib`: changes to the library. You'll probably be using this one. 35 | - `repo`: general changes to the repo that don't affect the code of the library (i.e. adding scripts, modifying configuration settings) 36 | 37 | And `type` is one of: 38 | 39 | - `fix`: You fixed a bug! This increments the patch version, i.e. `1.0.1` -> `1.0.2` 40 | - `feat`: You added a new feature! This increments the minor version, i.e.
`1.0.2` -> `1.1.0` 41 | - `refactor`: You changed some internals on existing features, but the API has not changed 42 | - `perf`: You enhanced performance 43 | - `style`: You updated styles (not applicable to this project) 44 | - `test`: You updated or added tests 45 | - `build`: Updates to build scripts, etc. 46 | - `chore`: Housekeeping, such as updating dependencies or removing unused files 47 | - `ci`: Updates to CI configuration 48 | 49 | If your updates change the API of the package in a way that requires users to update their usage, include `BREAKING CHANGE` as the last line in your commit message. If you do this, be sure to add a note to [MIGRATING.md](MIGRATING.md)! 50 | 51 | 52 | ## Pull Requests 53 | 54 | Make a pull request from your updated fork and ask for a review! We will add you to the list of contributors, or you can add yourself by commenting in the PR with: `@all-contributors please add @<username> for <contributions>`. See a list of contribution types [here](https://allcontributors.org/docs/en/emoji-key). 55 | 56 | ## Linking 57 | 58 | If you want to test your changes on another project, you can do this with `yarn link`: 59 | 60 | - run `yarn link` from within this repository's directory 61 | - run `yarn link @sanctucompu/sanity-graph-import` within your other project 62 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 Sanctuary Computer 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /MIGRATING.md: -------------------------------------------------------------------------------- 1 | Still on version 1, nothing to migrate to yet! 2 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Sanity Graph Import 2 | 3 |

4 | Badges: Code Coverage · Latest Release · Build Status · Code Type · Contributor Count 13 |

14 | 15 | `sanity-graph-import` is a tool for making partial migrations from one [Sanity](https://www.sanity.io) dataset or project to another. This can be useful if you only want to import particular documents and those they reference. A common use case is "refreshing" a staging dataset from production, when the production dataset is large and a full export & import would take a lot of time and bandwidth. 16 | 17 | This script takes a selection of initial documents (provided by you as simple Sanity queries), traverses all documents they reference, finds all assets used by any of these documents, and then adds them to your destination dataset as a "complete" dataset with fully resolved references. 18 | 19 | Example: Your **production** dataset has thousands of `article` documents, each of which contains references to one or more `author` documents. For your staging dataset, you only want the first 10 articles _and_ their authors -- as well as any image & file assets these documents include. 20 | 21 | Looking to copy an entire dataset? Use [Sanity's CLI](https://www.sanity.io/docs/importing-data) instead. 22 | 23 | ### Coming Soon 24 | 25 | - Run from the command line 26 | - Specify the depth of graph traversal (current default is 1) 27 | 28 | # Installation 29 | 30 | `yarn add @sanctucompu/sanity-graph-import` 31 | 32 | # Usage 33 | 34 | ```js 35 | // my-project/scripts/migrate.js 36 | import { migrate } from '@sanctucompu/sanity-graph-import' 37 | import CreateClient from '@sanity/client' 38 | 39 | const sourceClient = CreateClient({ 40 | projectId: 'abc123xyz', 41 | dataset: 'production', 42 | }) 43 | 44 | const destinationClient = CreateClient({ 45 | projectId: 'abc123xyz', 46 | dataset: 'staging', 47 | token: '789abc123xyz', // Required! 48 | }) 49 | 50 | const initialQueries = [ 51 | /* Fetch the 10 latest articles */ 52 | { 53 | query: ` 54 | *[_type == 'article'] 55 | | order(releaseDate desc) 56 | | order(_createdAt desc) [0...$count] 57 | `, 58 | params: { 59 | count: 10, 60 | }, 61 | }, 62 | /* Fetch the homepage document */ 63 | { 64 | query: `*[_type == 'homepage']`, 65 | }, 66 | ] 67 | 68 | async function run() { 69 | const config = { 70 | source: { 71 | client: sourceClient, 72 | initialQueries, 73 | }, 74 | destination: { 75 | client: destinationClient, 76 | }, 77 | } 78 | 79 | await migrate(config) 80 | } 81 | 82 | run() 83 | ``` 84 | 85 | Then, run `node my-project/scripts/migrate.js` 86 | 87 | This configuration will populate your destination dataset with: 88 | 89 | - 10 article documents 90 | - Every author document referenced in those articles 91 | - The homepage document 92 | - All assets from all of the above 93 | - And any other documents referenced in the articles or the homepage 94 | 95 | # API 96 | 97 | ## `migrate(config)` 98 | 99 | **Returns** 100 | 101 | A promise that resolves once the migration is complete. 102 | 103 | **`config`**: `ImportConfig` 104 | 105 | ```ts 106 | interface ImportConfig { 107 | source: SourceConfig 108 | destination: DestinationConfig 109 | } 110 | 111 | type QueryParams = Record<string, any> 112 | 113 | interface QueryArgs { 114 | query: string 115 | params?: QueryParams 116 | } 117 | 118 | interface SourceConfig { 119 | initialQueries: QueryArgs[] 120 | client: SanityClient 121 | } 122 | 123 | interface DestinationConfig { 124 | /* The destination client must have a write token! */ 125 | client: SanityClient 126 | /** 127 | * The number of documents to include in a batch.
128 | * 129 | * default: 35 130 | * 131 | * If you are getting 'content-length' errors during migration, 132 | * set this to a lower number. 133 | */ 134 | batchSize?: number 135 | /** 136 | * Set this value to true or false to skip the prompt to delete 137 | * all documents in the target dataset. 138 | * 139 | * default: undefined 140 | * 141 | * Leave undefined to include the prompt. 142 | */ 143 | deleteData?: boolean | void 144 | } 145 | ``` 146 | 147 | # Contributing 148 | 149 | See [CONTRIBUTING.md](CONTRIBUTING.md) 150 | 151 | # License 152 | 153 | MIT 154 | 155 | ## Contributors ✨ 156 | 157 | Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/docs/en/emoji-key)): 158 | 159 | 160 | 161 | 162 | 163 | 164 | 165 | 166 |

Joseph Thomas (https://www.good-idea.studio): 📖 💻 ⚠️
167 | 168 | 169 | 170 | 171 | 172 | 173 | This project follows the [all-contributors](https://github.com/all-contributors/all-contributors) specification. Contributions of any kind welcome! 174 | -------------------------------------------------------------------------------- /commitlint.config.js: -------------------------------------------------------------------------------- 1 | // [ 2 | // 'build', 3 | // 'ci', 4 | // 'chore', 5 | // 'docs', 6 | // 'feat', 7 | // 'fix', 8 | // 'perf', 9 | // 'refactor', 10 | // 'revert', 11 | // 'style', 12 | // 'test' 13 | // ] 14 | 15 | module.exports = { 16 | extends: ['@commitlint/config-conventional'], 17 | rules: { 18 | 'scope-empty': [2, 'never'], 19 | 'scope-enum': [2, 'always', ['release', 'repo', 'lib']], 20 | }, 21 | } 22 | -------------------------------------------------------------------------------- /jest.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | // Set timeout to 3 minutes if recording polly HTTP requests 3 | testTimeout: process.env.POLLY_MODE === 'record' ? 3 * 60 * 1000 : 5000, 4 | setupFilesAfterEnv: ['@scaleleap/jest-polly'], 5 | coveragePathIgnorePatterns: ['!*.d.ts'], 6 | } 7 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@sanctucompu/sanity-graph-import", 3 | "author": "Sanctuary Computer", 4 | "version": "1.1.2", 5 | "license": "MIT", 6 | "main": "dist/index.js", 7 | "typings": "dist/index.d.ts", 8 | "private": false, 9 | "files": [ 10 | "dist", 11 | "src" 12 | ], 13 | "engines": { 14 | "node": ">=10" 15 | }, 16 | "scripts": { 17 | "start": "tsdx watch", 18 | "build": "tsdx build", 19 | "test": "tsdx test", 20 | "test:init": "yarn test-studio:init && yarn test:record", 21 | "test:coverage": "tsdx test --collectCoverage", 22 | "test:coverage:send": "codecov", 23 | "test:watch": "tsdx test --watch", 24 | "test:record": "POLLY_MODE=record yarn test", 25 | "lint": "tsdx lint src", 26 | "prepare": "tsdx build", 27 | "size": "size-limit", 28 | "analyze": "size-limit --why", 29 | "test-studio:init": "sh ./scripts/test-studio/init.sh", 30 | "semantic-release": "semantic-release" 31 | }, 32 | "module": "dist/sanity-graph-import.esm.js", 33 | "size-limit": [ 34 | { 35 | "path": "dist/sanity-graph-import.cjs.production.min.js", 36 | "limit": "10 KB" 37 | }, 38 | { 39 | "path": "dist/sanity-graph-import.esm.js", 40 | "limit": "10 KB" 41 | } 42 | ], 43 | "devDependencies": { 44 | "@commitlint/cli": "^11.0.0", 45 | "@commitlint/config-conventional": "^11.0.0", 46 | "@sanity/cli": "^2.2.3", 47 | "@sanity/client": "^2.1.4", 48 | "@scaleleap/jest-polly": "^1.5.19", 49 | "@semantic-release/changelog": "^5.0.1", 50 | "@semantic-release/commit-analyzer": "^8.0.1", 51 | "@semantic-release/exec": "^5.0.0", 52 | "@semantic-release/git": "^9.0.0", 53 | "@semantic-release/github": "^7.2.0", 54 | "@semantic-release/npm": "^7.0.10", 55 | "@semantic-release/release-notes-generator": "^9.0.1", 56 | "@size-limit/preset-small-lib": "^4.9.1", 57 | "@types/cli-progress": "^3.8.0", 58 | "@types/eslint-plugin-prettier": "^3.1.0", 59 | "@types/hash-sum": "^1.0.0", 60 | "@types/lodash": "^4.14.168", 61 | "@types/prettier": "^2.1.6", 62 | "@types/semantic-release": "^17.2.0", 63 | "codecov": "^3.8.1", 64 | "eslint-config-prettier": "^7.2.0", 65 | "eslint-config-react-app": "^6.0.0", 66 | "eslint-plugin-prettier": "^3.3.1", 67 | 
"eslint-plugin-react": "^7.22.0", 68 | "husky": "^4.3.8", 69 | "lodash": "^4.17.20", 70 | "markdown-toc": "^1.2.0", 71 | "prettier": "^2.2.1", 72 | "pretty-quick": "^3.1.0", 73 | "semantic-release": "^17.3.7", 74 | "size-limit": "^4.9.1", 75 | "ts-node": "^9.1.1", 76 | "tsdx": "^0.14.1", 77 | "tslib": "^2.1.0", 78 | "typescript": "^4.1.3", 79 | "typesync": "^0.8.0" 80 | }, 81 | "peerDependencies": {}, 82 | "husky": { 83 | "hooks": { 84 | "pre-commit": "tsdx lint && pretty-quick --staged", 85 | "commit-msg": "commitlint -E HUSKY_GIT_PARAMS" 86 | } 87 | }, 88 | "dependencies": { 89 | "@sanity/import": "^2.1.4", 90 | "@typescript-eslint/eslint-plugin": "^4.14.1", 91 | "@typescript-eslint/parser": "^4.14.1", 92 | "chalk": "^4.1.0", 93 | "cli-progress": "^3.8.2", 94 | "dotenv": "^8.2.0", 95 | "hash-sum": "^2.0.0", 96 | "p-queue": "^6.6.2", 97 | "prompt-confirm": "^2.0.4", 98 | "tiny-invariant": "^1.1.0" 99 | } 100 | } 101 | -------------------------------------------------------------------------------- /scripts/test-studio/init-vars.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs') 2 | const path = require('path') 3 | 4 | const configPath = path.resolve( 5 | __dirname, 6 | '..', 7 | '..', 8 | 'test-studio', 9 | 'sanity.json' 10 | ) 11 | const sanityConfig = require(configPath) 12 | 13 | const dotenvPath = path.resolve(__dirname, '..', '..', '.env') 14 | 15 | const projectId = sanityConfig.api.projectId 16 | const sourceDataset = sanityConfig.api.dataset 17 | 18 | const updatedSanityConfig = { 19 | ...sanityConfig, 20 | env: { 21 | ...sanityConfig.env, 22 | development: { 23 | dataset: 'staging', 24 | ...sanityConfig.env.development, 25 | }, 26 | }, 27 | } 28 | 29 | fs.writeFileSync( 30 | configPath, 31 | JSON.stringify(updatedSanityConfig, null, 2), 32 | 'utf8' 33 | ) 34 | 35 | const envContents = ` 36 | TEST_STUDIO_SOURCE_PROJECTID=${projectId} 37 | TEST_STUDIO_SOURCE_DATASET=${sourceDataset} 38 | TEST_STUDIO_TARGET_PROJECTID=${projectId} 39 | TEST_STUDIO_TARGET_DATASET=staging 40 | TEST_STUDIO_TARGET_TOKEN=your-token-here 41 | ` 42 | 43 | fs.writeFileSync(dotenvPath, envContents, 'utf8') 44 | -------------------------------------------------------------------------------- /scripts/test-studio/init.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Create a test studio 4 | # rm -rf test-studio/* 5 | # yarn sanity init -y --create-project "SGI Test Studio" --dataset production --template moviedb --output-path test-studio 6 | # cd test-studio 7 | # yarn add @sanity/cli 8 | # yarn sanity dataset create staging --visibility public 9 | # yarn sanity dataset import ../scripts/test-studio/initialSanityData.tar.gz production --replace 10 | # 11 | # 12 | # cd .. 13 | # # Populate the envirnment variables 14 | # node ./scripts/test-studio/init-vars.js 15 | # 16 | echo "Almost there! 
To complete setup, create a token in your project settings, then add it to .env" 17 | read -p "Press enter to open the management console for your project" y 18 | 19 | cd test-studio 20 | yarn sanity manage 21 | 22 | 23 | -------------------------------------------------------------------------------- /scripts/test-studio/initialSanityData.tar.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sanctuarycomputer/sanity-graph-import/bc995fb2dbb10435b197e837322688dafdd156e4/scripts/test-studio/initialSanityData.tar.gz -------------------------------------------------------------------------------- /src/__tests__/__snapshots__/migrate.integration.test.ts.snap: -------------------------------------------------------------------------------- 1 | // Jest Snapshot v1, https://goo.gl/fbAQLP 2 | 3 | exports[`Full migration should print the same output given the same HTTP requests 1`] = ` 4 | Array [ 5 | "Migrating from 98nzfixt/production to 98nzfixt/staging ", 6 | ] 7 | `; 8 | 9 | exports[`Full migration should print the same output given the same HTTP requests 2`] = ` 10 | Array [ 11 | "Found 5 initial documents", 12 | ] 13 | `; 14 | 15 | exports[`Full migration should print the same output given the same HTTP requests 3`] = ` 16 | Array [ 17 | " + 78 referenced documents", 18 | ] 19 | `; 20 | 21 | exports[`Full migration should print the same output given the same HTTP requests 4`] = ` 22 | Array [ 23 | "Fetched all referenced documents", 24 | ] 25 | `; 26 | 27 | exports[`Full migration should print the same output given the same HTTP requests 5`] = ` 28 | Array [ 29 | " + 63 source assets", 30 | ] 31 | `; 32 | 33 | exports[`Full migration should print the same output given the same HTTP requests 6`] = ` 34 | Array [ 35 | "Clearing 98nzfixt/staging dataset...", 36 | ] 37 | `; 38 | 39 | exports[`Full migration should print the same output given the same HTTP requests 7`] = ` 40 | Array [ 41 | "Removed 137 documents from 98nzfixt/staging", 42 | ] 43 | `; 44 | 45 | exports[`Full migration should print the same output given the same HTTP requests 8`] = ` 46 | Array [ 47 | "Found 63 new assets to upload", 48 | ] 49 | `; 50 | 51 | exports[`Full migration should print the same output given the same HTTP requests 9`] = ` 52 | Array [ 53 | "Inserting 74 documents into dataset \\"staging\\"", 54 | ] 55 | `; 56 | 57 | exports[`Full migration should print the same output given the same HTTP requests 10`] = ` 58 | Array [ 59 | "Inserted 74 documents", 60 | ] 61 | `; 62 | 63 | exports[`Full migration should print the same output given the same HTTP requests 11`] = ` 64 | Array [ 65 | "Strengthening references..", 66 | ] 67 | `; 68 | 69 | exports[`Full migration should print the same output given the same HTTP requests 12`] = ` 70 | Array [ 71 | "Success! 
🎉", 72 | ] 73 | `; 74 | -------------------------------------------------------------------------------- /src/__tests__/migrate.integration.test.ts: -------------------------------------------------------------------------------- 1 | import CreateClient from '@sanity/client' 2 | import dotenv from 'dotenv' 3 | import { migrate } from '../../src' 4 | 5 | jest.mock('../utils/logging') 6 | dotenv.config() 7 | 8 | global.console.warn = jest.fn() 9 | const sourceClient = CreateClient({ 10 | projectId: process.env.TEST_STUDIO_SOURCE_PROJECTID as string, 11 | dataset: process.env.TEST_STUDIO_SOURCE_DATASET as string, 12 | useProjectHostname: true, 13 | }) 14 | 15 | const destinationClient = CreateClient({ 16 | projectId: process.env.TEST_STUDIO_TARGET_PROJECTID as string, 17 | dataset: process.env.TEST_STUDIO_TARGET_DATASET as string, 18 | token: process.env.TEST_STUDIO_TARGET_TOKEN as string, 19 | useProjectHostname: true, 20 | }) 21 | 22 | const initialQueries = [ 23 | { 24 | query: ` 25 | *[_type == "movie"] 26 | | order(releaseDate desc) 27 | | order(_createdAt desc) [0...$count] 28 | `, 29 | params: { count: 5 }, 30 | }, 31 | ] 32 | 33 | describe('Full migration', () => { 34 | it('should print the same output given the same HTTP requests', async () => { 35 | const config = { 36 | source: { 37 | client: sourceClient, 38 | initialQueries, 39 | }, 40 | destination: { 41 | client: destinationClient, 42 | deleteData: true, 43 | }, 44 | } 45 | await expect(migrate(config)).resolves.toBe(undefined) 46 | // @ts-ignore 47 | global.console.log.mock.calls.forEach((call) => { 48 | expect(call).toMatchSnapshot() 49 | }) 50 | }) 51 | }) 52 | -------------------------------------------------------------------------------- /src/__tests__/migrate.test.ts: -------------------------------------------------------------------------------- 1 | import CreateClient from '@sanity/client' 2 | import { migrate } from '../index' 3 | 4 | const sourceClientOpts = { 5 | projectId: '123', 6 | dataset: 'production', 7 | useProjectHostname: true, 8 | useCdn: true, 9 | } 10 | 11 | const destClientOpts = { 12 | projectId: '123', 13 | dataset: 'staging', 14 | useProjectHostname: true, 15 | token: 'abc123xyz', 16 | useCdn: true, 17 | } 18 | 19 | const initialQueries = [{ query: '*[_type == "someType"]' }] 20 | 21 | describe('migrate/setup', () => { 22 | it('should throw if both clients are the same', async () => { 23 | const config = { 24 | destination: { 25 | client: CreateClient(sourceClientOpts), 26 | }, 27 | source: { 28 | client: CreateClient(sourceClientOpts), 29 | initialQueries, 30 | }, 31 | } 32 | 33 | await expect(migrate(config)).rejects.toThrow( 34 | 'Both clients have the same configuration' 35 | ) 36 | }) 37 | 38 | it('should throw if the source client has a token', async () => { 39 | /* eslint-disable no-console */ 40 | console.warn = () => undefined 41 | const config = { 42 | source: { 43 | client: CreateClient({ 44 | ...sourceClientOpts, 45 | token: '123qwerty', 46 | }), 47 | initialQueries, 48 | }, 49 | 50 | destination: { 51 | client: CreateClient(destClientOpts), 52 | }, 53 | } 54 | await expect(migrate(config)).rejects.toThrow( 55 | 'The source client must not have a token' 56 | ) 57 | }) 58 | 59 | it('should throw if there are no initial queries', async () => { 60 | const config = { 61 | source: { 62 | client: CreateClient(sourceClientOpts), 63 | initialQueries: [], 64 | }, 65 | 66 | destination: { 67 | client: CreateClient(destClientOpts), 68 | }, 69 | } 70 | await expect(migrate(config)).rejects.toThrow( 
71 | 'You must include at least one initial query' 72 | ) 73 | }) 74 | 75 | it('should throw if the destination client does not have a token', async () => { 76 | const config = { 77 | source: { 78 | client: CreateClient(sourceClientOpts), 79 | initialQueries, 80 | }, 81 | 82 | destination: { 83 | client: CreateClient({ ...destClientOpts, token: undefined }), 84 | }, 85 | } 86 | await expect(migrate(config)).rejects.toThrow('must have a write token') 87 | }) 88 | }) 89 | -------------------------------------------------------------------------------- /src/config.ts: -------------------------------------------------------------------------------- 1 | export const DEFAULT_BATCH_SIZE = 35 2 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | import type { SanityClient } from '@sanity/client' 2 | import PromptConfirm from 'prompt-confirm' 3 | import invariant from 'tiny-invariant' 4 | import { 5 | logHeading, 6 | flat, 7 | unique, 8 | findReferencedIds, 9 | logFetch, 10 | queue, 11 | } from './utils' 12 | import { deleteAll, insertDocuments } from './transactions' 13 | import { SanityDocument, SanityAssetDocument } from './types' 14 | import { DEFAULT_BATCH_SIZE } from './config' 15 | 16 | type QueryParams = Record<string, any> 17 | 18 | interface QueryArgs { 19 | query: string 20 | params?: QueryParams 21 | } 22 | 23 | interface SourceConfig { 24 | initialQueries: QueryArgs[] 25 | client: SanityClient 26 | } 27 | 28 | interface DestinationConfig { 29 | /* The destination client must have a write token! */ 30 | client: SanityClient 31 | /** 32 | * The number of documents to include in a batch. 33 | * 34 | * default: 35 35 | * 36 | * If you are getting 'content-length' errors during migration, 37 | * set this to a lower number. 38 | */ 39 | batchSize?: number 40 | 41 | /** 42 | * Set this value to true or false to skip the prompt to delete 43 | * all documents in the target dataset. 44 | * 45 | * default: undefined 46 | * 47 | * Leave undefined to include the prompt.
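   * `deleteData: true` deletes everything without prompting; `deleteData: false` skips deletion and leaves the target dataset in place.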
48 | */ 49 | deleteData?: boolean | void 50 | } 51 | 52 | interface ImportConfig { 53 | source: SourceConfig 54 | destination: DestinationConfig 55 | } 56 | 57 | const getConfigString = (client: SanityClient): string => { 58 | const { projectId, dataset } = client.config() 59 | invariant(projectId, 'Your client must include a projectId') 60 | invariant(dataset, 'Your client must include a dataset') 61 | return [projectId, dataset].join(':') 62 | } 63 | 64 | export const migrate = async ({ 65 | source, 66 | destination, 67 | }: ImportConfig): Promise<void> => { 68 | const sourceClient = source.client 69 | const destinationClient = destination.client 70 | 71 | /** 72 | * Validate config 73 | */ 74 | const sourceConfigString = getConfigString(sourceClient) 75 | const destConfigString = getConfigString(destinationClient) 76 | 77 | invariant( 78 | !(sourceConfigString === destConfigString), 79 | `Both clients have the same configuration: ${sourceConfigString}` 80 | ) 81 | 82 | invariant( 83 | Boolean(destinationClient.config().token), 84 | 'The destination client must have a write token' 85 | ) 86 | 87 | invariant( 88 | !Boolean(sourceClient.config().token), 89 | 'The source client must not have a token' 90 | ) 91 | 92 | invariant( 93 | source.initialQueries.length !== 0, 94 | 'You must include at least one initial query' 95 | ) 96 | 97 | /** 98 | * Fetch initial documents 99 | */ 100 | 101 | logHeading( 102 | `Migrating from ${sourceClient.config().projectId}/${ 103 | sourceClient.config().dataset 104 | } to ${destinationClient.config().projectId}/${ 105 | destinationClient.config().dataset 106 | } ` 107 | ) 108 | const initialDocuments = flat( 109 | await queue( 110 | source.initialQueries.map(({ query, params }) => () => 111 | sourceClient.fetch<SanityDocument[]>(query, params || {}) 112 | ) 113 | ) 114 | ) 115 | 116 | const docIds = initialDocuments.map(({ _id }) => _id) 117 | const referencedIds = flat(initialDocuments.map(findReferencedIds)) 118 | const allIds = unique(docIds.concat(referencedIds)) 119 | 120 | logFetch(`Found ${initialDocuments.length} initial documents`) 121 | logFetch(` + ${referencedIds.length} referenced documents`) 122 | 123 | const sourceDocuments = await sourceClient.fetch<SanityDocument[]>( 124 | `*[_id in $allIds && _type != 'sanity.imageAsset' && _type != 'sanity.fileAsset']`, 125 | { allIds } 126 | ) 127 | logFetch(`Fetched all referenced documents`) 128 | 129 | const assetIds = flat(sourceDocuments.map(findReferencedIds)).filter( 130 | (id) => id.startsWith('image-') || id.startsWith('file-') 131 | ) 132 | const sourceAssets = await sourceClient.fetch<SanityAssetDocument[]>( 133 | `*[_id in $assetIds]`, 134 | { assetIds } 135 | ) 136 | logFetch(` + ${sourceAssets.length} source assets`) 137 | 138 | const { deleteData } = destination 139 | 140 | if (deleteData === undefined) { 141 | const confirmDelete = new PromptConfirm( 142 | 'Do you want to remove all data from the destination dataset?' 143 | ) 144 | const confirmed = await confirmDelete.run() 145 | if (confirmed) { 146 | await deleteAll(destinationClient) 147 | } 148 | } else if (deleteData === true) { 149 | await deleteAll(destinationClient) 150 | } 151 | const batchSize = destination.batchSize ?? DEFAULT_BATCH_SIZE 152 | 153 | await insertDocuments(destinationClient, sourceDocuments, sourceAssets, { 154 | batchSize, 155 | }) 156 | 157 | logHeading('Success!
🎉') 158 | } 159 | -------------------------------------------------------------------------------- /src/modules.d.ts: -------------------------------------------------------------------------------- 1 | declare module 'prompt-confirm' 2 | declare module '@sanity/import/*' 3 | -------------------------------------------------------------------------------- /src/transactions.ts: -------------------------------------------------------------------------------- 1 | import { 2 | SanityClient, 3 | SanityAssetDocument, 4 | Transaction, 5 | MultipleMutationResult, 6 | } from '@sanity/client' 7 | import cliProgress from 'cli-progress' 8 | import getHashedBufferForUri from '@sanity/import/src/util/getHashedBufferForUri' 9 | import { MigratedAsset, UnMigratedAsset, SanityDocument } from './types' 10 | import { 11 | definitely, 12 | logWrite, 13 | logDelete, 14 | partition, 15 | getImageHash, 16 | getAssetType, 17 | isMigratedDocument, 18 | getUploadedFilename, 19 | chunk, 20 | createRemapReferences, 21 | queue, 22 | } from './utils' 23 | 24 | export const deleteAll = async ( 25 | client: SanityClient 26 | ): Promise<MultipleMutationResult> => { 27 | const { dataset, projectId } = client.config() 28 | logDelete(`Clearing ${projectId}/${dataset} dataset...`) 29 | const allDocuments = await client.fetch<SanityDocument[]>('*[]') 30 | const ids = allDocuments 31 | .map((doc) => doc._id) 32 | .filter( 33 | (id) => 34 | /* Reserved and internal documents start with "_." Omit these. */ 35 | !/^_\./.test(id) 36 | ) 37 | const initialTrx = client.transaction() 38 | const finalTrx = ids.reduce<Transaction>( 39 | (previousTrx, id) => previousTrx.delete(id), 40 | initialTrx 41 | ) 42 | const result = await finalTrx.commit() 43 | logDelete(`Removed ${ids.length} documents from ${projectId}/${dataset}`) 44 | return result 45 | } 46 | 47 | export const uploadAsset = async ( 48 | client: SanityClient, 49 | originalAsset: SanityAssetDocument 50 | ): Promise<MigratedAsset> => { 51 | const url = originalAsset.url 52 | const assetType = getAssetType(originalAsset) 53 | const { buffer } = await getHashedBufferForUri(url) 54 | 55 | const options = { 56 | label: getImageHash(originalAsset), 57 | filename: getUploadedFilename(originalAsset), 58 | source: { 59 | id: originalAsset._id, 60 | source: 'migration', 61 | url: originalAsset.url, 62 | }, 63 | } 64 | const uploadResult = await client.assets.upload( 65 | // @ts-ignore not sure how to enforce the string type for the overloads 66 | assetType, 67 | buffer, 68 | options 69 | ) 70 | return { 71 | source: originalAsset, 72 | destination: uploadResult, 73 | } 74 | } 75 | 76 | export const uploadAssets = async ( 77 | client: SanityClient, 78 | sourceAssets: SanityAssetDocument[] 79 | ): Promise<MigratedAsset[]> => { 80 | const originalHashes = definitely(sourceAssets.map(getImageHash)) 81 | 82 | /* Find assets that already exist */ 83 | const existingAssets = await client.fetch<SanityAssetDocument[]>( 84 | `*[label in $originalHashes]`, 85 | { originalHashes } 86 | ) 87 | 88 | const assetPairs = sourceAssets.map( 89 | (source) => ({ 90 | source, 91 | destination: existingAssets.find( 92 | (existingAsset) => existingAsset.label === getImageHash(source) 93 | ), 94 | }) 95 | ) 96 | 97 | const [migrated, unmigrated] = partition<MigratedAsset, UnMigratedAsset>( 98 | assetPairs, 99 | isMigratedDocument 100 | ) 101 | 102 | if (unmigrated.length === 0) { 103 | logWrite('Found no new assets to upload') 104 | return migrated 105 | } 106 | 107 | logWrite(`Found ${unmigrated.length} new assets to upload`) 108 | 109 | const uploadProgressBar = new cliProgress.SingleBar( 110 | {}, 111 | cliProgress.Presets.shades_classic 112 | )
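  // Push each pending upload through the promise queue, bumping the progress bar as uploads resolve.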
113 | 114 | uploadProgressBar.start(unmigrated.length, 0) 115 | const newAssets = await queue( 116 | unmigrated.map(({ source }) => async () => { 117 | const uploadResult = await uploadAsset(client, source) 118 | uploadProgressBar.increment() 119 | return uploadResult 120 | }) 121 | ) 122 | uploadProgressBar.stop() 123 | return [...migrated, ...newAssets] 124 | } 125 | 126 | interface InsertDocumentOptions { 127 | batchSize: number 128 | } 129 | 130 | export const insertDocuments = async ( 131 | client: SanityClient, 132 | sourceDocuments: SanityDocument[], 133 | sourceAssets: SanityAssetDocument[], 134 | { batchSize }: InsertDocumentOptions 135 | ) => { 136 | const dataset = client.config().dataset 137 | const uploadedAssets = await uploadAssets(client, sourceAssets) 138 | 139 | const remapRefs = createRemapReferences(sourceDocuments, uploadedAssets) 140 | const updatedDocuments = sourceDocuments.map((doc) => remapRefs(doc)) 141 | 142 | const insertBatch = async ( 143 | batch: SanityDocument[] 144 | ): Promise<MultipleMutationResult> => { 145 | const transaction = batch.reduce<Transaction>( 146 | (prevTrx, document) => prevTrx.createOrReplace(document), 147 | client.transaction() 148 | ) 149 | 150 | return transaction.commit() 151 | } 152 | 153 | const documentBatches = chunk(updatedDocuments, batchSize) 154 | 155 | logWrite( 156 | `Inserting ${sourceDocuments.length} documents into dataset "${dataset}"` 157 | ) 158 | 159 | const batchProgressBar = new cliProgress.SingleBar( 160 | {}, 161 | cliProgress.Presets.shades_classic 162 | ) 163 | 164 | batchProgressBar.start(updatedDocuments.length, 0) 165 | await queue( 166 | documentBatches.map((batch) => async () => { 167 | const result = await insertBatch(batch) 168 | batchProgressBar.increment(batch.length) 169 | return result 170 | }) 171 | ) 172 | batchProgressBar.stop() 173 | logWrite(`Inserted ${updatedDocuments.length} documents`) 174 | const updatedStrong = sourceDocuments.map((doc) => remapRefs(doc, false)) 175 | const strongBatches = chunk(updatedStrong, batchSize) 176 | 177 | logWrite('Strengthening references..') 178 | 179 | const strongProgressBar = new cliProgress.SingleBar( 180 | {}, 181 | cliProgress.Presets.shades_classic 182 | ) 183 | 184 | strongProgressBar.start(updatedStrong.length, 0) 185 | 186 | const strongResults = await queue( 187 | strongBatches.map((batch) => async () => { 188 | const result = await insertBatch(batch) 189 | strongProgressBar.increment(batch.length) 190 | return result 191 | }) 192 | ) 193 | strongProgressBar.stop() 194 | return strongResults 195 | } 196 | -------------------------------------------------------------------------------- /src/types.ts: -------------------------------------------------------------------------------- 1 | import { 2 | SanityAssetDocument as SanityAssetDocumentType, 3 | SanityDocument as SanityDocumentType, 4 | } from '@sanity/client' 5 | 6 | export type Maybe<T> = T | null | undefined | void 7 | 8 | export type SanityAssetDocument = SanityAssetDocumentType 9 | 10 | export interface SanityObject { 11 | [key: string]: SanityFieldValue 12 | } 13 | 14 | export type SanityFieldValue = 15 | | SanityFieldValue[] 16 | | SanityReference 17 | | SanityObject 18 | | SanityAssetObject 19 | | boolean 20 | | string 21 | | number 22 | | void 23 | 24 | export type SanityDocument = Pick<SanityDocumentType, '_id' | '_type'> & { 25 | [key: string]: SanityFieldValue 26 | } 27 | 28 | export interface MigratedDocument< 29 | DocType extends SanityDocument = SanityDocument 30 | > { 31 | source: DocType 32 | destination: DocType 33 | } 34 | 35 | export interface
UnMigratedDocument< 36 | DocType extends SanityDocument = SanityDocument 37 | > { 38 | source: DocType 39 | destination?: DocType 40 | } 41 | 42 | export type MigratedAsset = MigratedDocument 43 | export type UnMigratedAsset = UnMigratedDocument 44 | 45 | export interface SanityReference extends SanityObject { 46 | _type: 'reference' 47 | _ref: string 48 | } 49 | 50 | export interface SanityAssetObject extends SanityObject { 51 | _type: 'image' | 'file' 52 | asset: SanityReference 53 | } 54 | -------------------------------------------------------------------------------- /src/utils/__mocks__/logging.ts: -------------------------------------------------------------------------------- 1 | global.console.log = jest.fn() 2 | 3 | export const logHeading = console.log 4 | export const logFetch = console.log 5 | export const logDelete = console.log 6 | export const logWrite = console.log 7 | export const logError = console.log 8 | -------------------------------------------------------------------------------- /src/utils/__tests__/misc.test.ts: -------------------------------------------------------------------------------- 1 | import { flat, unique, chunk, partition, definitely } from '../misc' 2 | 3 | describe('flat', () => { 4 | it('should return a flattened array', () => { 5 | const arr = [ 6 | [1, 2], 7 | [3, 4], 8 | ] 9 | expect(flat(arr)).toEqual([1, 2, 3, 4]) 10 | }) 11 | }) 12 | 13 | describe('unique', () => { 14 | it('should return a unique array', () => { 15 | const arr = [1, 2, 3, 1, 2, 3] 16 | expect(unique(arr)).toEqual([1, 2, 3]) 17 | }) 18 | }) 19 | 20 | describe('chunk', () => { 21 | it('should chunk an array into N items', () => { 22 | const arr = [1, 2, 3, 4, 5, 6, 7] 23 | const chunked = chunk(arr, 2) 24 | expect(chunked[0]).toEqual([1, 2]) 25 | expect(chunked[1]).toEqual([3, 4]) 26 | expect(chunked[2]).toEqual([5, 6]) 27 | expect(chunked[3]).toEqual([7]) 28 | }) 29 | }) 30 | 31 | describe('partition', () => { 32 | it('should split an array into two based on a predicate function', () => { 33 | const arr = ['one', 'two', 3, 4, 'five', 6] 34 | const isString = (arg: string | number): arg is string => 35 | typeof arg === 'string' 36 | 37 | const [strings, nums] = partition(arr, isString) 38 | expect(strings).toEqual(['one', 'two', 'five']) 39 | expect(nums).toEqual([3, 4, 6]) 40 | }) 41 | }) 42 | 43 | describe('definitely', () => { 44 | it('should return an empty array by default', () => { 45 | expect(definitely()).toEqual([]) 46 | }) 47 | 48 | it('should return an array without null or void values', () => { 49 | const arr = [1, 2, undefined, 3, null, 4] 50 | expect(definitely(arr)).toEqual([1, 2, 3, 4]) 51 | }) 52 | }) 53 | -------------------------------------------------------------------------------- /src/utils/__tests__/parsing.test.ts: -------------------------------------------------------------------------------- 1 | import { get } from 'lodash' 2 | import { 3 | createRemapReferences, 4 | createRemapDocumentReferences, 5 | createRemapAssetReferences, 6 | findReferencedIds, 7 | getUploadedFilename, 8 | getAssetType, 9 | isAsset, 10 | isSanityAssetObject, 11 | isSanityObject, 12 | isReference, 13 | isMigratedDocument, 14 | } from '../parsing' 15 | import { 16 | sourceImage1, 17 | migratedImage1, 18 | migratedImage2, 19 | migratedAssets, 20 | mockDoc, 21 | mockAuthors, 22 | mockSections, 23 | mockArticles, 24 | } from './stubs' 25 | 26 | describe('findReferencedIds', () => { 27 | it('should extract a list of referenced IDs from a sanity document', () => { 28 | 
expect(findReferencedIds(mockDoc)).toEqual([ 29 | 'section-1', 30 | 'article-1', 31 | 'article-2', 32 | 'image-2', 33 | 'author-1', 34 | 'author-2', 35 | 'image-1', 36 | ]) 37 | }) 38 | }) 39 | 40 | describe('remapAssetReference', () => { 41 | it('should replace asset references with the migrated asset', () => { 42 | const remapped = createRemapAssetReferences(migratedAssets)(mockDoc) 43 | expect(get(remapped, 'featuredImage.asset._ref')).toBe(migratedImage1._id) 44 | // Within arrays 45 | expect(get(remapped, 'gallery[0].asset._ref')).toBe(migratedImage1._id) 46 | expect(get(remapped, 'gallery[1].asset._ref')).toBe(migratedImage1._id) 47 | // Deeply nested 48 | expect(get(remapped, 'nestedGallery.modules[0].images[0].asset._ref')).toBe( 49 | migratedImage1._id 50 | ) 51 | expect( 52 | get(remapped, 'layoutModules[0].widgets.slides[0].image.asset._ref') 53 | ).toBe(migratedImage2._id) 54 | expect( 55 | get(remapped, 'layoutModules[0].widgets.slides[1].image.asset._ref') 56 | ).toBe(migratedImage2._id) 57 | }) 58 | 59 | it('should throw if unable to find an original asset match', () => { 60 | const remap = createRemapAssetReferences(migratedAssets.slice(2)) 61 | const fn = () => { 62 | remap(mockDoc) 63 | } 64 | expect(fn).toThrow('Could not find original asset match for') 65 | }) 66 | 67 | it('should throw if unable to find a migrated asset match', () => { 68 | const remap = createRemapAssetReferences([ 69 | { 70 | source: migratedAssets[0].source, 71 | destination: { 72 | ...migratedAssets[0].destination, 73 | label: 'bad-ref', 74 | }, 75 | }, 76 | ...migratedAssets.slice(1), 77 | ]) 78 | const fn = () => { 79 | remap(mockDoc) 80 | } 81 | expect(fn).toThrow('Could not find migrated asset match for') 82 | }) 83 | }) 84 | 85 | describe('remapDocumentReferences', () => { 86 | it('should omit references to documents that will not be uploaded', () => { 87 | const remapped = createRemapDocumentReferences([mockDoc, mockAuthors[0]])( 88 | mockDoc 89 | ) 90 | expect(remapped.section).toBe(undefined) 91 | expect(get(remapped, 'authors.length')).toBe(1) 92 | }) 93 | 94 | it('should preserve all existing properties', () => { 95 | const remapped = createRemapDocumentReferences([mockDoc])(mockDoc) 96 | expect(Object.keys(remapped)).toEqual(Object.keys(mockDoc)) 97 | }) 98 | 99 | it('should make all existing references weak', () => { 100 | const remapped = createRemapDocumentReferences([ 101 | mockDoc, 102 | ...mockArticles, 103 | ...mockAuthors, 104 | ...mockSections, 105 | ])(mockDoc) 106 | expect(get(remapped, 'section._weak')).toBe(true) 107 | expect(get(remapped, 'authors[0]._weak')).toBe(true) 108 | expect(get(remapped, 'authors[1]._weak')).toBe(true) 109 | expect(get(remapped, 'story.article._weak')).toBe(true) 110 | // an object with no "_type" 111 | expect(get(remapped, 'stories1.article1._weak')).toBe(true) 112 | }) 113 | 114 | it('should make all existing references strong when specified', () => { 115 | const remapped = createRemapDocumentReferences([ 116 | mockDoc, 117 | ...mockArticles, 118 | ...mockAuthors, 119 | ...mockSections, 120 | ])(mockDoc, false) 121 | expect(get(remapped, 'section._weak')).toBe(false) 122 | expect(get(remapped, 'authors[0]._weak')).toBe(false) 123 | expect(get(remapped, 'authors[1]._weak')).toBe(false) 124 | expect(get(remapped, 'story.article._weak')).toBe(false) 125 | // an object with no "_type" 126 | expect(get(remapped, 'stories1.article1._weak')).toBe(false) 127 | }) 128 | }) 129 | 130 | describe('remapReferences', () => { 131 | it('should not omit asset 
references', () => { 132 | const remap = createRemapReferences([mockDoc], migratedAssets) 133 | const remapped = remap(mockDoc) 134 | expect(get(remapped, 'featuredImage.asset._ref')).toBe(migratedImage1._id) 135 | expect(get(remapped, 'featuredImage.asset._weak')).toBe(true) 136 | }) 137 | }) 138 | 139 | describe('getUploadedFilename', () => { 140 | it('should return the filename from the URL', () => { 141 | expect(getUploadedFilename(migratedImage1)).toBe('food-logo.svg') 142 | }) 143 | }) 144 | 145 | describe('typeguards', () => { 146 | it('isAsset', () => { 147 | expect(isAsset(mockArticles[0])).toBe(false) 148 | expect(isAsset(migratedImage1)).toBe(true) 149 | }) 150 | 151 | it('isSanityAssetObject', () => { 152 | expect(isSanityAssetObject(mockDoc.featuredImage)).toBe(true) 153 | expect(isSanityAssetObject(migratedImage1)).toBe(false) 154 | }) 155 | 156 | it('isSanityObject', () => { 157 | expect(isSanityObject(mockDoc.stories1)).toBe(true) 158 | expect(isSanityObject(mockDoc.layoutModules)).toBe(false) 159 | }) 160 | 161 | it('isReference', () => { 162 | expect(isReference(mockDoc.story)).toBe(false) 163 | expect(isReference(mockDoc.section)).toBe(true) 164 | }) 165 | 166 | it('isMigratedDocument', () => { 167 | expect(isMigratedDocument(migratedAssets[0])).toBe(true) 168 | expect( 169 | isMigratedDocument({ 170 | source: migratedAssets[0].source, 171 | }) 172 | ).toBe(false) 173 | }) 174 | }) 175 | 176 | describe('getAssetType', () => { 177 | it('should return "image" for images', () => { 178 | expect(getAssetType(sourceImage1)).toBe('image') 179 | }) 180 | 181 | it('should return "file" for files', () => { 182 | const dummyFile = { 183 | ...sourceImage1, 184 | _type: 'sanity.fileAsset', 185 | } 186 | expect(getAssetType(dummyFile)).toBe('file') 187 | }) 188 | 189 | it('should throw if given a non-asset', () => { 190 | const fn = () => getAssetType(mockSections[0]) 191 | expect(fn).toThrow('"section" is not a valid sanity asset type') 192 | }) 193 | }) 194 | -------------------------------------------------------------------------------- /src/utils/__tests__/queue.test.ts: -------------------------------------------------------------------------------- 1 | import { queue } from '../queue' 2 | 3 | const dogs = ['frank', 'muenster', 'ida', 'moe'] 4 | 5 | const upCase = (str: string): string => 6 | [str.charAt(0).toUpperCase(), str.slice(1)].join('') 7 | 8 | const upCaseAsync = async (str: string): Promise => 9 | [str.charAt(0).toUpperCase(), str.slice(1)].join('') 10 | 11 | const sleep = (ms: number): Promise => 12 | new Promise((resolve) => setTimeout(resolve, ms)) 13 | 14 | describe('queue', () => { 15 | it('should work with sync functions', async () => { 16 | const results = await queue(dogs.map((dog) => () => upCase(dog))) 17 | expect(results[0]).toBe('Frank') 18 | expect(results[1]).toBe('Muenster') 19 | }) 20 | 21 | it('should work with async functions', async () => { 22 | const results = await queue(dogs.map((dog) => () => upCaseAsync(dog))) 23 | expect(results[0]).toBe('Frank') 24 | expect(results[1]).toBe('Muenster') 25 | }) 26 | 27 | it('should apply custom addAll options', async () => { 28 | const result = await queue( 29 | dogs.map((dog) => async () => { 30 | await sleep(50) 31 | return upCaseAsync(dog) 32 | }), 33 | // The promises will time out and return undefined 34 | { timeout: 1 } 35 | ) 36 | expect(result).toEqual([undefined, undefined, undefined, undefined]) 37 | }) 38 | }) 39 | -------------------------------------------------------------------------------- 
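The `queue` helper exercised by these tests lives in `src/utils/queue.ts`, which is not included in this excerpt. From the contract the tests pin down (an array of thunks in, results back in input order, a second argument forwarded to p-queue's `addAll`), a minimal sketch could look like the following; this is a reconstruction under stated assumptions, not the repository's actual code, and assumes p-queue v6 as pinned in `package.json`:

```ts
import PQueue from 'p-queue'

// Sketch of the queue helper, reconstructed from its tests; the actual
// src/utils/queue.ts may differ. Assumes p-queue v6 ("p-queue": "^6.6.2").
type Task<T> = () => T | Promise<T>

export const queue = async <T>(
  tasks: Task<T>[],
  options?: { timeout?: number; throwOnTimeout?: boolean }
): Promise<T[]> => {
  const q = new PQueue({ concurrency: 1 })
  // addAll resolves with one result per task, in input order, which is why
  // the tests can assert results[0] === 'Frank'. With a per-task `timeout`
  // and p-queue's default throwOnTimeout: false, a task that exceeds the
  // timeout resolves to undefined, matching the last test above.
  return (await q.addAll(tasks, options)) as T[]
}
```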
/src/utils/__tests__/stubs.ts: -------------------------------------------------------------------------------- 1 | import { SanityAssetDocument } from '@sanity/client' 2 | import { MigratedAsset, SanityDocument } from '../../types' 3 | import { getImageHash } from '../parsing' 4 | 5 | export const sourceImage1: SanityAssetDocument = { 6 | _createdAt: '2020-03-19T16:12:52Z', 7 | _id: 'image-1', 8 | _rev: 'o4d7Jp97n3QbbasabvN5uU', 9 | _type: 'sanity.imageAsset', 10 | _updatedAt: '2020-03-19T16:12:52Z', 11 | assetId: '16e00765731c34d54d301d7f11d435fc6d2ffd8e', 12 | extension: 'svg', 13 | mimeType: 'image/svg+xml', 14 | originalFilename: 'Sports Logo Copy.svg', 15 | path: 16 | 'images/z2aip6ei/production/16e00765731c34d54d301d7f11d435fc6d2ffd8e-467x100.svg', 17 | sha1hash: '16e00765731c34d54d301d7f11d435fc6d2ffd8e', 18 | size: 4477, 19 | uploadId: '4gjptnOTKAcKOS3N08hxV6a2OR8Ltgcw', 20 | url: 21 | 'https://cdn.sanity.io/images/z2aip6ei/production/16e00765731c34d54d301d7f11d435fc6d2ffd8e-467x100.svg', 22 | metadata: { 23 | lqip: 'a123456789', 24 | }, 25 | } 26 | 27 | export const migratedImage1: SanityAssetDocument = { 28 | _type: 'sanity.imageAsset', 29 | _rev: '123', 30 | _createdAt: 'yesterday', 31 | _updatedAt: 'yesterday', 32 | uploadId: '9JKHaboqln0ypHm1KTr7qSz9QdyOizjg', 33 | _id: 'new-image-1', 34 | label: getImageHash(sourceImage1), 35 | assetId: '0a22f19843395c27c549d2f345035b3e7120e037', 36 | sha1hash: '0a22f19843395c27c549d2f345035b3e7120e037', 37 | path: 'images/nys454mt/staging/food-logo.svg', 38 | url: 'https://cdn.sanity.io/images/nys454mt/staging/food-logo.svg', 39 | originalFilename: '16e00765731c34d54d301d7f11d435fc6d2ffd8e-467x100.svg', 40 | extension: 'svg', 41 | size: 4378, 42 | mimeType: 'image/svg+xml', 43 | metadata: { 44 | lqip: 'a123456789', 45 | }, 46 | } 47 | 48 | export const sourceImage2: SanityAssetDocument = { 49 | _createdAt: '2020-03-19T16:12:52Z', 50 | _id: 'image-2', 51 | _rev: 'o4d7Jp97n3QbbasabvN5uU', 52 | _type: 'sanity.imageAsset', 53 | _updatedAt: '2020-03-19T16:12:52Z', 54 | assetId: 'zzz00765731c34d54d301d7f11d435fc6d2ffd8e', 55 | extension: 'svg', 56 | mimeType: 'image/svg+xml', 57 | originalFilename: 'frank.png', 58 | path: 59 | 'images/z2aip6ei/production/16e00765731c34d54d301d7f11d435fc6d2ffd8e-467x100.svg', 60 | sha1hash: '16e00765731c34d54d301d7f11d435fc6d2ffd8e', 61 | size: 4477, 62 | uploadId: '4gjptnOTKAcKOS3N08hxV6a2OR8Ltgcw', 63 | url: 64 | 'https://cdn.sanity.io/images/z2aip6ei/production/16e00765731c34d54d301d7f11d435fc6d2ffd8e-467x100.svg', 65 | metadata: { 66 | lqip: 'b123456789', 67 | }, 68 | } 69 | 70 | export const migratedImage2: SanityAssetDocument = { 71 | _type: 'sanity.imageAsset', 72 | _rev: '123', 73 | _createdAt: 'yesterday', 74 | _updatedAt: 'yesterday', 75 | uploadId: '9JKHaboqln0ypHm1KTr7qSz9QdyOizjg', 76 | _id: 'new-image-2', 77 | label: getImageHash(sourceImage2), 78 | assetId: 'f3e2f19843395c27c549d2f345035b3e7120e037', 79 | sha1hash: 'f3e2f19843395c27c549d2f345035b3e7120e037', 80 | path: 81 | 'images/nys454mt/staging/f3e2f19843395c27c549d2f345035b3e7120e037-467x100.svg', 82 | url: 83 | 'https://cdn.sanity.io/images/nys454mt/staging/f3e2f19843395c27c549d2f345035b3e7120e037-467x100.svg', 84 | originalFilename: '16e00765731c34d54d301d7f11d435fc6d2ffd8e-467x100.svg', 85 | extension: 'svg', 86 | size: 4378, 87 | mimeType: 'image/svg+xml', 88 | metadata: { 89 | lqip: 'b123456789', 90 | }, 91 | } 92 | 93 | export const mockSections: SanityDocument[] = [ 94 | { 95 | _type: 'section', 96 | _id: 'section-1', 97 | title: 
98 |   },
99 | ]
100 | 
101 | export const mockAuthors: SanityDocument[] = [
102 |   {
103 |     _type: 'author',
104 |     _id: 'author-1',
105 |     name: 'Frank',
106 |   },
107 |   {
108 |     _type: 'author',
109 |     _id: 'author-2',
110 |     name: 'Muenster',
111 |   },
112 | ]
113 | 
114 | export const mockArticles: SanityDocument[] = [
115 |   {
116 |     _id: 'article-1',
117 |     _type: 'article',
118 |     title: 'How to give your dog a haircut at home',
119 |   },
120 |   {
121 |     _id: 'article-2',
122 |     _type: 'article',
123 |     title: 'How to give your dog a haircut at home part 2',
124 |   },
125 | ]
126 | 
127 | export const mockDoc: SanityDocument = {
128 |   _id: 'abc',
129 |   _type: 'article',
130 |   _rev: '123',
131 |   title: 'Some Article',
132 |   someField: undefined,
133 |   _updatedAt: 'yesterday',
134 |   _createdAt: 'yesterday',
135 |   section: {
136 |     _type: 'reference',
137 |     _ref: 'section-1',
138 |   },
139 |   story: {
140 |     _type: 'storyBlock',
141 |     article: {
142 |       _type: 'reference',
143 |       _ref: 'article-1',
144 |     },
145 |   },
146 |   stories1: {
147 |     article1: {
148 |       _type: 'reference',
149 |       _ref: 'article-2',
150 |     },
151 |   },
152 |   layoutModules: [
153 |     {
154 |       _type: 'layoutModule',
155 |       widgets: {
156 |         _key: '7230a163c7df',
157 |         _type: 'photoSlideshowWidget',
158 |         slides: [
159 |           {
160 |             _type: 'slide',
161 |             _key: '18e6005a89d5',
162 |             image: {
163 |               _type: 'image',
164 |               asset: {
165 |                 _ref: sourceImage2._id,
166 |                 _type: 'reference',
167 |               },
168 |             },
169 |           },
170 |           {
171 |             _key: '3b6796158e6a',
172 |             _type: 'slide',
173 |             image: {
174 |               _type: 'image',
175 |               asset: {
176 |                 _ref: sourceImage2._id,
177 |                 _type: 'reference',
178 |               },
179 |             },
180 |           },
181 |         ],
182 |         title: 'Journal of a Plague',
183 |       },
184 |     },
185 |   ],
186 | 
187 |   authors: [
188 |     {
189 |       _ref: 'author-1',
190 |       _type: 'reference',
191 |     },
192 |     {
193 |       _ref: 'author-2',
194 |       _type: 'reference',
195 |     },
196 |   ],
197 |   featuredImage: {
198 |     _type: 'image',
199 |     asset: {
200 |       _type: 'reference',
201 |       _ref: sourceImage1._id,
202 |     },
203 |   },
204 |   gallery: [
205 |     {
206 |       _type: 'image',
207 |       asset: {
208 |         _type: 'reference',
209 |         _ref: sourceImage1._id,
210 |       },
211 |     },
212 |     {
213 |       _type: 'image',
214 |       asset: {
215 |         _type: 'reference',
216 |         _ref: sourceImage1._id,
217 |       },
218 |     },
219 |   ],
220 |   nestedGallery: {
221 |     _type: 'gallery',
222 |     modules: [
223 |       {
224 |         _id: 'module-1',
225 |         _type: 'module',
226 |         images: [
227 |           {
228 |             _type: 'image',
229 |             asset: {
230 |               _type: 'reference',
231 |               _ref: sourceImage1._id,
232 |             },
233 |           },
234 |         ],
235 |       },
236 |     ],
237 |   },
238 | }
239 | 
240 | export const migratedAssets: MigratedAsset[] = [
241 |   {
242 |     source: sourceImage1,
243 |     destination: migratedImage1,
244 |   },
245 |   {
246 |     source: sourceImage2,
247 |     destination: migratedImage2,
248 |   },
249 | ]
250 | 
--------------------------------------------------------------------------------
/src/utils/index.ts:
--------------------------------------------------------------------------------
1 | export * from './misc'
2 | export * from './parsing'
3 | export * from './logging'
4 | export * from './queue'
5 | 
--------------------------------------------------------------------------------
/src/utils/logging.ts:
--------------------------------------------------------------------------------
1 | /* eslint-disable no-console */
2 | import chalk from 'chalk'
3 | 
4 | const mainColor = chalk.black.bgYellow
5 | const errorColor = chalk.bold.red
6 | const writeColor = chalk.green
7 | const fetchColor = chalk.green
8 | const deleteColor = chalk.yellow
9 | 
10 | /* a nice big block */
11 | export const logHeading = (str: string) => {
12 |   console.log('')
13 |   const padding = Array.from({ length: str.length + 4 }, () => ' ').join('')
14 |   console.log(mainColor(padding))
15 |   console.log(mainColor(`  ${str}  `))
16 |   console.log(mainColor(padding))
17 |   console.log('')
18 | }
19 | export const logFetch = (str: string) => console.log(fetchColor(str))
20 | export const logDelete = (str: string) => console.log(deleteColor(str))
21 | export const logWrite = (str: string) => console.log(writeColor(str))
22 | export const logError = (str: string) => console.log(errorColor(str))
23 | 
--------------------------------------------------------------------------------
/src/utils/misc.ts:
--------------------------------------------------------------------------------
1 | /* weird eslint bug */
2 | /* eslint-disable-next-line */
3 | import { Maybe } from '../types'
4 | 
5 | export const flat = <T>(arrays: T[][]): T[] =>
6 |   arrays.reduce<T[]>((flattened, array) => [...flattened, ...array], [])
7 | 
8 | /**
9 |  * unique.
10 |  *
11 |  * @param {T[]} arr
12 |  * @returns {T[]}
13 |  */
14 | export const unique = <T>(arr: T[]): T[] =>
15 |   arr.reduce<T[]>((allItems, currentItem) => {
16 |     if (allItems.includes(currentItem)) return allItems
17 |     return [...allItems, currentItem]
18 |   }, [])
19 | 
20 | /* yanked from:
21 |  * https://stackoverflow.com/questions/8495687/split-array-into-chunks */
22 | export const chunk = <T>(arr: T[], size: number): T[][] =>
23 |   [...Array(Math.ceil(arr.length / size))]
24 |     .fill(undefined)
25 |     .map((_, i) => arr.slice(size * i, size + size * i))
26 | 
27 | /* yanked and adapted from:
28 |  * https://stackoverflow.com/questions/11731072/dividing-an-array-by-filter-function */
29 | 
30 | type PredicateFn<T1, T2> = (item: T1 | T2) => item is T1
31 | type Partitioned<T1, T2> = [T1[], T2[]]
32 | 
33 | /**
34 |  * partition.
35 |  *
36 |  * @param array - An array of items to partition
37 |  * @param predicate - A function that will return true or false for each item
38 |  */
39 | export function partition<T1, T2>(
40 |   array: Array<T1 | T2>,
41 |   predicate: PredicateFn<T1, T2>
42 | ): Partitioned<T1, T2> {
43 |   return array.reduce<Partitioned<T1, T2>>(
44 |     (acc, item) =>
45 |       predicate(item) ? (acc[0].push(item), acc) : (acc[1].push(item), acc),
46 |     [[], []]
47 |   )
48 | }
49 | 
50 | export function definitely<T>(items?: Maybe<T>[] | null): T[] {
51 |   if (!items) return []
52 |   return items.filter((i): i is T => Boolean(i))
53 | }
54 | 
--------------------------------------------------------------------------------
/src/utils/parsing.ts:
--------------------------------------------------------------------------------
1 | import { SanityAssetDocument } from '@sanity/client'
2 | import hash from 'hash-sum'
3 | import { isPlainObject } from 'lodash'
4 | import {
5 |   MigratedAsset,
6 |   MigratedDocument,
7 |   SanityDocument,
8 |   SanityFieldValue,
9 |   SanityObject,
10 |   SanityReference,
11 |   SanityAssetObject,
12 | } from '../types'
13 | import { flat, unique, definitely } from './misc'
14 | 
15 | /**
16 |  * Getters
17 |  */
18 | 
19 | export const findReferencedIds = (
20 |   doc: SanityDocument | SanityObject
21 | ): string[] => {
22 |   const getIds = (value: SanityFieldValue): string[] => {
23 |     if (!value) return []
24 |     if (isReference(value)) return [value._ref]
25 |     if (Array.isArray(value)) {
26 |       return flat(definitely(value.map(getIds)))
27 |     }
28 |     if (isSanityObject(value)) {
29 |       return findReferencedIds(value)
30 |     }
31 |     return []
32 |   }
33 |   const result = Object.values(doc).reduce<string[]>((referenceIds, value) => {
34 |     const ids = getIds(value)
35 |     return [...referenceIds, ...ids]
36 |   }, [])
37 |   return unique(result)
38 | }
39 | 
40 | export const getUploadedFilename = (asset: SanityAssetDocument): string => {
41 |   return asset.path.replace(/(.*\/)*/, '')
42 | }
43 | 
44 | export const getImageHash = (asset: SanityAssetDocument): string =>
45 |   hash(asset.metadata.lqip)
46 | 
47 | export const getAssetType = (document: SanityDocument) => {
48 |   const { _type } = document
49 |   const assetType = _type.replace(/^sanity./, '').replace(/Asset$/, '')
50 |   if (
51 |     !/^sanity\./.test(_type) ||
52 |     (assetType !== 'file' && assetType !== 'image')
53 |   ) {
54 |     throw new Error(`"${_type}" is not a valid sanity asset type`)
55 |   }
56 |   return assetType as 'image' | 'file'
57 | }
58 | 
59 | /**
60 |  * Typeguards
61 |  */
62 | 
63 | export const isAsset = (doc: SanityDocument): doc is SanityAssetDocument =>
64 |   Boolean(doc) &&
65 |   (doc._type === 'sanity.imageAsset' || doc._type === 'sanity.fileAsset')
66 | 
67 | export const isSanityAssetObject = (obj: any): obj is SanityAssetObject =>
68 |   Boolean(obj) &&
69 |   (obj._type === 'image' || obj._type === 'file') &&
70 |   obj.asset !== undefined &&
71 |   isReference(obj.asset)
72 | 
73 | export const isSanityObject = (obj: any): obj is SanityObject => {
74 |   return isPlainObject(obj)
75 | }
76 | 
77 | export const isReference = (obj: any): obj is SanityReference =>
78 |   Boolean(obj) &&
79 |   obj._type === 'reference' &&
80 |   typeof obj._ref === 'string' &&
81 |   obj._ref.length > 0
82 | 
83 | export function isMigratedDocument(
84 |   pair: any
85 | ): pair is MigratedDocument {
86 |   return Boolean(pair) && Boolean(pair.source) && Boolean(pair.destination)
87 | }
88 | 
89 | /**
90 |  * Transformers
91 |  */
92 | 
93 | export const createRemapAssetReferences = (uploadedAssets: MigratedAsset[]) => (
94 |   document: SanityDocument
95 | ): SanityDocument => {
96 |   const originalAssets = uploadedAssets.map(({ source }) => source)
97 |   const newAssets = uploadedAssets.map(({ destination }) => destination)
98 | 
99 |   const findMigratedReference = (
100 |     refObject: SanityAssetObject
101 |   ): SanityAssetObject => {
102 |     const originalAsset = originalAssets.find(
103 |       (originalAsset) => originalAsset._id === refObject.asset._ref
104 |     )
105 |     if (!originalAsset) {
106 |       throw new Error(
107 |         `Could not find original asset match for ${refObject.asset._ref}`
108 |       )
109 |     }
110 |     const newAsset = newAssets.find(
111 |       (newAsset) => getImageHash(originalAsset) === newAsset.label
112 |     )
113 |     if (!newAsset) {
114 |       throw new Error(
115 |         `Could not find migrated asset match for ${refObject.asset._ref}`
116 |       )
117 |     }
118 | 
119 |     return {
120 |       _type: refObject._type,
121 |       migrationId: refObject.asset._ref,
122 |       asset: {
123 |         _type: 'reference',
124 |         _ref: newAsset._id,
125 |       },
126 |     }
127 |   }
128 | 
129 |   /* Recursively update all asset references */
130 |   const remap = <T extends SanityDocument | SanityObject>(obj: T): T => {
131 |     const getNewValue = <T>(
132 |       value: T
133 |     ): SanityAssetObject | SanityAssetObject[] | T => {
134 |       if (isReference(value)) return value
135 |       if (isSanityAssetObject(value)) return findMigratedReference(value)
136 |       if (Array.isArray(value)) {
137 |         return value.map(getNewValue)
138 |       }
139 |       if (isSanityObject(value)) {
140 |         return remap(value)
141 |       }
142 |       return value
143 |     }
144 |     return Object.entries(obj).reduce((previousValues, [key, oldValue]) => {
145 |       const newValue = getNewValue(oldValue)
146 |       return {
147 |         ...previousValues,
148 |         [key]: newValue,
149 |       }
150 |     }, {} as T)
151 |   }
152 | 
153 |   const parsed = remap(document)
154 | 
155 |   return parsed
156 | }
157 | 
158 | export const createRemapDocumentReferences = (allDocs: SanityDocument[]) => (
159 |   doc: SanityDocument,
160 |   _weak = true
161 | ): SanityDocument => {
162 |   const remap = <T extends SanityDocument | SanityObject>(obj: T): T => {
163 |     const getNewValue = (
164 |       oldValue: SanityFieldValue
165 |     ): SanityFieldValue | void => {
166 |       if (isReference(oldValue)) {
167 |         const referencedDocExists = allDocs.some(
168 |           (doc) => doc._id === oldValue._ref
169 |         )
170 |         // Strip out references to documents that do not exist
171 |         if (!referencedDocExists) return undefined
172 |         return {
173 |           ...oldValue,
174 |           _weak,
175 |         }
176 |       }
177 |       if (isSanityObject(oldValue)) {
178 |         return remap(oldValue)
179 |       }
180 |       if (Array.isArray(oldValue)) {
181 |         return definitely(oldValue.map(getNewValue))
182 |       }
183 |       return oldValue
184 |     }
185 |     return Object.entries(obj).reduce((previousValues, [key, oldValue]) => {
186 |       const newValue = getNewValue(oldValue)
187 |       return {
188 |         ...previousValues,
189 |         [key]: newValue,
190 |       }
191 |     }, {} as T)
192 |   }
193 |   return remap(doc)
194 | }
195 | 
196 | export const createRemapReferences = (
197 |   allDocs: SanityDocument[],
198 |   uploadedAssets: MigratedAsset[]
199 | ) => (document: SanityDocument, _weak = true): SanityDocument => {
200 |   const remapDocumentReferences = createRemapDocumentReferences(
201 |     allDocs.concat(uploadedAssets.map(({ destination }) => destination))
202 |   )
203 |   const remapAssetReferences = createRemapAssetReferences(uploadedAssets)
204 |   return remapDocumentReferences(remapAssetReferences(document), _weak)
205 | }
206 | 
--------------------------------------------------------------------------------
/src/utils/queue.ts:
--------------------------------------------------------------------------------
1 | import PQueue from 'p-queue'
2 | 
3 | type QueueOpts = ConstructorParameters<typeof PQueue>[0]
4 | type Task<TaskResultType> =
5 |   | (() => PromiseLike<TaskResultType>)
6 |   | (() => TaskResultType)
7 | 
8 | const defaultOptions = {
9 |   concurrency: 1,
10 |   interval: 1000 / 25,
11 | }
12 | 
13 | export const queue = async <TaskResultType>(
14 |   tasks: ReadonlyArray<Task<TaskResultType>>,
15 |   options: QueueOpts = {}
16 | ): Promise<TaskResultType[]> => {
17 |   const queue = new PQueue({
18 |     ...defaultOptions,
19 |     ...options,
20 |   })
21 |   return queue.addAll(tasks)
22 | }
23 | 
--------------------------------------------------------------------------------
/test-studio/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sanctuarycomputer/sanity-graph-import/bc995fb2dbb10435b197e837322688dafdd156e4/test-studio/.gitkeep
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 |   // see https://www.typescriptlang.org/tsconfig to better understand tsconfigs
3 |   "include": ["src", "types"],
4 |   "files": ["src/modules.d.ts"],
5 |   "compilerOptions": {
6 |     "module": "esnext",
7 |     "lib": ["dom", "esnext"],
8 |     "importHelpers": true,
9 |     // output .d.ts declaration files for consumers
10 |     "declaration": true,
11 |     // output .js.map sourcemap files for consumers
12 |     "sourceMap": true,
13 |     // match output dir to input dir. e.g. dist/index instead of dist/src/index
14 |     "rootDir": "./src",
15 |     // stricter type-checking for stronger correctness. Recommended by TS
16 |     "strict": true,
17 |     // linter checks for common issues
18 |     "noImplicitReturns": true,
19 |     "noFallthroughCasesInSwitch": true,
20 |     // noUnused* overlap with @typescript-eslint/no-unused-vars, can disable if duplicative
21 |     "noUnusedLocals": true,
22 |     "noUnusedParameters": true,
23 |     // use Node's module resolution algorithm, instead of the legacy TS one
24 |     "moduleResolution": "node",
25 |     // transpile JSX to React.createElement
26 |     "jsx": "react",
27 |     // interop between ESM and CJS modules. Recommended by TS
28 |     "esModuleInterop": true,
29 |     // significant perf increase by skipping checking .d.ts files, particularly those in node_modules. Recommended by TS
30 |     "skipLibCheck": true,
31 |     // error out if import and file system have a casing mismatch. Recommended by TS
32 |     "forceConsistentCasingInFileNames": true,
33 |     // `tsdx build` ignores this option, but it is commonly used when type-checking separately with `tsc`
34 |     "noEmit": true
35 |   }
36 | }
37 | 
--------------------------------------------------------------------------------
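
Taken together, the utilities above sketch the shape of a migration pass. A minimal, hypothetical composition (the declared inputs stand in for documents fetched from the source dataset, assets already uploaded to the target, and a write function such as a Sanity client call):

import { createRemapReferences } from './src/utils/parsing'
import { queue } from './src/utils/queue'
import { MigratedAsset, SanityDocument } from './src/types'

// Hypothetical inputs: source documents plus the source/destination asset
// pairs produced by an earlier upload step.
declare const sourceDocs: SanityDocument[]
declare const uploadedAssets: MigratedAsset[]
declare const writeDocument: (doc: SanityDocument) => Promise<SanityDocument>

const remap = createRemapReferences(sourceDocs, uploadedAssets)

// Point asset references at the uploaded copies and mark document references
// _weak, so documents can be inserted in any order without dangling refs...
const readyToImport = sourceDocs.map((doc) => remap(doc))

// ...then write them through the rate-limited queue.
queue(readyToImport.map((doc) => () => writeDocument(doc)))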