├── .github ├── dependabot.yml └── workflows │ ├── main.yml │ └── npm-publish.yml ├── .gitignore ├── .npmignore ├── .prettierrc.json ├── LICENSE ├── README.md ├── data ├── V2 │ ├── 1d.chunked.compressed.i2.zarr │ │ ├── 0 │ │ ├── 1 │ │ └── .zarray │ ├── 1d.chunked.filled.compressed.i2.zarr │ │ ├── 0 │ │ ├── 1 │ │ └── .zarray │ ├── 1d.chunked.ragged.compressed.i2.zarr │ │ ├── 0 │ │ ├── 1 │ │ ├── 2 │ │ └── .zarray │ ├── 1d.contiguous.compressed.S7.zarr │ │ ├── 0 │ │ └── .zarray │ ├── 1d.contiguous.compressed.U13.zarr │ │ ├── 0 │ │ └── .zarray │ ├── 1d.contiguous.compressed.U7.zarr │ │ ├── 0 │ │ └── .zarray │ ├── 1d.contiguous.compressed.b1.zarr │ │ ├── 0 │ │ └── .zarray │ ├── 1d.contiguous.compressed.f4.zarr │ │ ├── 0 │ │ └── .zarray │ ├── 1d.contiguous.compressed.f8.zarr │ │ ├── 0 │ │ └── .zarray │ ├── 1d.contiguous.compressed.i2.group.zarr │ │ ├── .zgroup │ │ ├── .zmetadata │ │ ├── a │ │ │ ├── 0 │ │ │ └── .zarray │ │ └── b │ │ │ ├── 0 │ │ │ └── .zarray │ ├── 1d.contiguous.compressed.i2.zarr │ │ ├── 0 │ │ └── .zarray │ ├── 1d.contiguous.compressed.i4.zarr │ │ ├── 0 │ │ └── .zarray │ ├── 1d.contiguous.compressed.u1.zarr │ │ ├── 0 │ │ └── .zarray │ ├── 1d.contiguous.uncompressed.S7.zarr │ │ ├── 0 │ │ └── .zarray │ ├── 1d.contiguous.uncompressed.U13.zarr │ │ ├── 0 │ │ └── .zarray │ ├── 1d.contiguous.uncompressed.U7.zarr │ │ ├── 0 │ │ └── .zarray │ ├── 1d.contiguous.uncompressed.b1.zarr │ │ ├── 0 │ │ └── .zarray │ ├── 1d.contiguous.uncompressed.f8.zarr │ │ ├── 0 │ │ └── .zarray │ ├── 1d.contiguous.uncompressed.i2.zarr │ │ ├── 0 │ │ └── .zarray │ ├── 1d.contiguous.uncompressed.i4.zarr │ │ ├── 0 │ │ └── .zarray │ ├── 2d.chunked.compressed.U7.zarr │ │ ├── .zarray │ │ ├── 0.0 │ │ ├── 0.1 │ │ ├── 1.0 │ │ └── 1.1 │ ├── 2d.chunked.compressed.i2.zarr │ │ ├── .zarray │ │ ├── 0.0 │ │ ├── 0.1 │ │ ├── 1.0 │ │ └── 1.1 │ ├── 2d.chunked.filled.compressed.U7.zarr │ │ ├── .zarray │ │ ├── 0.0 │ │ ├── 0.1 │ │ ├── 1.0 │ │ └── 1.1 │ ├── 2d.chunked.ragged.compressed.i2.zarr │ │ ├── .zarray │ 
│ ├── 0.0 │ │ ├── 0.1 │ │ ├── 1.0 │ │ └── 1.1 │ ├── 2d.contiguous.compressed.i2.zarr │ │ ├── .zarray │ │ └── 0.0 │ ├── 3d.chunked.compressed.i2.zarr │ │ ├── .zarray │ │ ├── 0.0.0 │ │ ├── 0.0.1 │ │ ├── 0.0.2 │ │ ├── 0.1.0 │ │ ├── 0.1.1 │ │ ├── 0.1.2 │ │ ├── 0.2.0 │ │ ├── 0.2.1 │ │ ├── 0.2.2 │ │ ├── 1.0.0 │ │ ├── 1.0.1 │ │ ├── 1.0.2 │ │ ├── 1.1.0 │ │ ├── 1.1.1 │ │ ├── 1.1.2 │ │ ├── 1.2.0 │ │ ├── 1.2.1 │ │ ├── 1.2.2 │ │ ├── 2.0.0 │ │ ├── 2.0.1 │ │ ├── 2.0.2 │ │ ├── 2.1.0 │ │ ├── 2.1.1 │ │ ├── 2.1.2 │ │ ├── 2.2.0 │ │ ├── 2.2.1 │ │ └── 2.2.2 │ ├── 3d.chunked.mixed.compressed.i2.zarr │ │ ├── .zarray │ │ ├── 0.0.0 │ │ ├── 0.0.1 │ │ └── 0.0.2 │ └── 3d.contiguous.compressed.i2.zarr │ │ ├── .zarray │ │ └── 0.0.0 └── v3 │ ├── 1d.chunked.compressed.i2.zarr │ ├── c │ │ ├── 0 │ │ └── 1 │ └── zarr.json │ ├── 1d.chunked.compressed.sharded.i2.zarr │ ├── c │ │ ├── 0 │ │ └── 1 │ └── zarr.json │ ├── 1d.chunked.filled.compressed.i2.zarr │ ├── c │ │ └── 0 │ └── zarr.json │ ├── 1d.chunked.filled.compressed.sharded.i2.zarr │ ├── c │ │ └── 0 │ └── zarr.json │ ├── 1d.chunked.ragged.compressed.i2.zarr │ ├── c │ │ ├── 0 │ │ ├── 1 │ │ └── 2 │ └── zarr.json │ ├── 1d.contiguous.compressed.b1.zarr │ ├── c │ │ └── 0 │ └── zarr.json │ ├── 1d.contiguous.compressed.f4.zarr │ ├── c │ │ └── 0 │ └── zarr.json │ ├── 1d.contiguous.compressed.f8.zarr │ ├── c │ │ └── 0 │ └── zarr.json │ ├── 1d.contiguous.compressed.i2.group.zarr │ ├── a │ │ ├── c │ │ │ └── 0 │ │ └── zarr.json │ ├── b │ │ ├── c │ │ │ └── 0 │ │ └── zarr.json │ └── zarr.json │ ├── 1d.contiguous.compressed.i2.zarr │ ├── c │ │ └── 0 │ └── zarr.json │ ├── 1d.contiguous.compressed.i4.zarr │ ├── c │ │ └── 0 │ └── zarr.json │ ├── 1d.contiguous.compressed.sharded.b1.zarr │ ├── c │ │ └── 0 │ └── zarr.json │ ├── 1d.contiguous.compressed.sharded.f4.zarr │ ├── c │ │ └── 0 │ └── zarr.json │ ├── 1d.contiguous.compressed.sharded.f8.zarr │ ├── c │ │ └── 0 │ └── zarr.json │ ├── 1d.contiguous.compressed.sharded.i2.group.zarr │ ├── a │ │ ├── c │ │ │ └── 0 │ │ 
└── zarr.json │ ├── b │ │ ├── c │ │ │ └── 0 │ │ └── zarr.json │ └── zarr.json │ ├── 1d.contiguous.compressed.sharded.i2.zarr │ ├── c │ │ └── 0 │ └── zarr.json │ ├── 1d.contiguous.compressed.sharded.i4.zarr │ ├── c │ │ └── 0 │ └── zarr.json │ ├── 1d.contiguous.compressed.sharded.u1.zarr │ ├── c │ │ └── 0 │ └── zarr.json │ ├── 1d.contiguous.compressed.u1.zarr │ ├── c │ │ └── 0 │ └── zarr.json │ ├── 1d.contiguous.uncompressed.b1.zarr │ ├── c │ │ └── 0 │ └── zarr.json │ ├── 1d.contiguous.uncompressed.f8.zarr │ ├── c │ │ └── 0 │ └── zarr.json │ ├── 1d.contiguous.uncompressed.i2.zarr │ ├── c │ │ └── 0 │ └── zarr.json │ ├── 1d.contiguous.uncompressed.i4.zarr │ ├── c │ │ └── 0 │ └── zarr.json │ ├── 2d.chunked.compressed.i2.zarr │ ├── c │ │ ├── 0 │ │ │ ├── 0 │ │ │ └── 1 │ │ └── 1 │ │ │ ├── 0 │ │ │ └── 1 │ └── zarr.json │ ├── 2d.chunked.compressed.sharded.filled.i2.zarr │ ├── c │ │ ├── 0 │ │ │ ├── 0 │ │ │ └── 1 │ │ └── 1 │ │ │ ├── 0 │ │ │ └── 1 │ └── zarr.json │ ├── 2d.chunked.compressed.sharded.i2.zarr │ ├── c │ │ ├── 0 │ │ │ ├── 0 │ │ │ └── 1 │ │ └── 1 │ │ │ ├── 0 │ │ │ └── 1 │ └── zarr.json │ ├── 2d.chunked.ragged.compressed.i2.zarr │ ├── c │ │ ├── 0 │ │ │ ├── 0 │ │ │ └── 1 │ │ └── 1 │ │ │ ├── 0 │ │ │ └── 1 │ └── zarr.json │ ├── 2d.chunked.ragged.compressed.sharded.i2.zarr │ ├── c │ │ ├── 0 │ │ │ ├── 0 │ │ │ └── 1 │ │ └── 1 │ │ │ ├── 0 │ │ │ └── 1 │ └── zarr.json │ ├── 2d.contiguous.compressed.i2.zarr │ ├── c │ │ └── 0 │ │ │ └── 0 │ └── zarr.json │ ├── 2d.contiguous.compressed.sharded.i2.zarr │ ├── c │ │ └── 0 │ │ │ └── 0 │ └── zarr.json │ ├── 3d.chunked.compressed.i2.zarr │ ├── c │ │ ├── 0 │ │ │ ├── 0 │ │ │ │ ├── 0 │ │ │ │ └── 1 │ │ │ └── 1 │ │ │ │ ├── 0 │ │ │ │ └── 1 │ │ └── 1 │ │ │ ├── 0 │ │ │ ├── 0 │ │ │ └── 1 │ │ │ └── 1 │ │ │ ├── 0 │ │ │ └── 1 │ └── zarr.json │ ├── 3d.chunked.compressed.sharded.i2.zarr │ ├── c │ │ ├── 0 │ │ │ ├── 0 │ │ │ │ ├── 0 │ │ │ │ └── 1 │ │ │ └── 1 │ │ │ │ ├── 0 │ │ │ │ └── 1 │ │ └── 1 │ │ │ ├── 0 │ │ │ ├── 0 │ │ │ └── 1 │ │ │ └── 1 │ │ │ ├── 
0 │ │ │ └── 1 │ └── zarr.json │ ├── 3d.chunked.mixed.compressed.i2.zarr │ ├── c │ │ └── 0 │ │ │ └── 0 │ │ │ ├── 0 │ │ │ ├── 1 │ │ │ └── 2 │ └── zarr.json │ ├── 3d.chunked.mixed.compressed.sharded.i2.zarr │ ├── c │ │ └── 0 │ │ │ └── 0 │ │ │ └── 0 │ └── zarr.json │ ├── 3d.contiguous.compressed.i2.zarr │ ├── c │ │ └── 0 │ │ │ └── 0 │ │ │ └── 0 │ └── zarr.json │ └── 3d.contiguous.compressed.sharded.i2.zarr │ ├── c │ └── 0 │ │ └── 0 │ │ └── 0 │ └── zarr.json ├── example.js ├── example.zarr ├── 0 └── .zarray ├── example_group.zarr ├── .zgroup ├── .zmetadata ├── a │ ├── 0 │ └── .zarray └── b │ ├── 0 │ └── .zarray ├── generate ├── v2.py └── v3.py ├── index.js ├── package-lock.json ├── package.json ├── src ├── v2.js └── v3.js └── test ├── v2.js └── v3.js /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: npm 4 | directory: '/' 5 | schedule: 6 | interval: daily 7 | - package-ecosystem: 'github-actions' 8 | directory: '/' 9 | schedule: 10 | interval: 'daily' 11 | -------------------------------------------------------------------------------- /.github/workflows/main.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: [push, pull_request] 4 | 5 | jobs: 6 | build: 7 | runs-on: ubuntu-latest 8 | 9 | strategy: 10 | matrix: 11 | node-version: [14.x, 16.x] 12 | 13 | steps: 14 | - uses: actions/checkout@v2 15 | - name: Use Node.js ${{ matrix.node-version }} 16 | uses: actions/setup-node@v2.4.0 17 | with: 18 | node-version: ${{ matrix.node-version }} 19 | - name: install 20 | run: npm install --production=false 21 | - name: test 22 | run: npm run test 23 | - name: lint 24 | run: npm run prettier 25 | -------------------------------------------------------------------------------- /.github/workflows/npm-publish.yml: -------------------------------------------------------------------------------- 1 | name: npm-publish 2 | 
on: 3 | push: 4 | branches: 5 | - master 6 | jobs: 7 | npm-publish: 8 | name: npm-publish 9 | runs-on: ubuntu-latest 10 | steps: 11 | - name: Checkout repository 12 | uses: actions/checkout@master 13 | - name: Set up Node.js 14 | uses: actions/setup-node@v2.1.2 15 | with: 16 | node-version: '14' 17 | - name: Publish if version has been updated 18 | uses: pascalgn/npm-publish-action@1.3.4 19 | env: 20 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 21 | NPM_AUTH_TOKEN: ${{ secrets.NPM_AUTH_TOKEN }} 22 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | 3 | # Logs 4 | logs 5 | *.log 6 | npm-debug.log* 7 | yarn-debug.log* 8 | yarn-error.log* 9 | lerna-debug.log* 10 | 11 | # Diagnostic reports (https://nodejs.org/api/report.html) 12 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 13 | 14 | # Runtime data 15 | pids 16 | *.pid 17 | *.seed 18 | *.pid.lock 19 | 20 | # Directory for instrumented libs generated by jscoverage/JSCover 21 | lib-cov 22 | 23 | # Coverage directory used by tools like istanbul 24 | coverage 25 | *.lcov 26 | 27 | # nyc test coverage 28 | .nyc_output 29 | 30 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 31 | .grunt 32 | 33 | # Bower dependency directory (https://bower.io/) 34 | bower_components 35 | 36 | # node-waf configuration 37 | .lock-wscript 38 | 39 | # Compiled binary addons (https://nodejs.org/api/addons.html) 40 | build/Release 41 | 42 | # Dependency directories 43 | node_modules/ 44 | jspm_packages/ 45 | 46 | # TypeScript v1 declaration files 47 | typings/ 48 | 49 | # TypeScript cache 50 | *.tsbuildinfo 51 | 52 | # Optional npm cache directory 53 | .npm 54 | 55 | # Optional eslint cache 56 | .eslintcache 57 | 58 | # Microbundle cache 59 | .rpt2_cache/ 60 | .rts2_cache_cjs/ 61 | .rts2_cache_es/ 62 | .rts2_cache_umd/ 63 | 64 | # Optional REPL history 65 | 
.node_repl_history 66 | 67 | # Output of 'npm pack' 68 | *.tgz 69 | 70 | # Yarn Integrity file 71 | .yarn-integrity 72 | 73 | # dotenv environment variables file 74 | .env 75 | .env.test 76 | 77 | # parcel-bundler cache (https://parceljs.org/) 78 | .cache 79 | 80 | # Next.js build output 81 | .next 82 | 83 | # Nuxt.js build / generate output 84 | .nuxt 85 | dist 86 | 87 | # Gatsby files 88 | .cache/ 89 | # Comment in the public line in if your project uses Gatsby and *not* Next.js 90 | # https://nextjs.org/blog/next-9-1#public-directory-support 91 | # public 92 | 93 | # vuepress build output 94 | .vuepress/dist 95 | 96 | # Serverless directories 97 | .serverless/ 98 | 99 | # FuseBox cache 100 | .fusebox/ 101 | 102 | # DynamoDB Local files 103 | .dynamodb/ 104 | 105 | # TernJS port file 106 | .tern-port 107 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | test 3 | generate 4 | example.zarr 5 | example_group.zarr 6 | data -------------------------------------------------------------------------------- /.prettierrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "tabWidth": 2, 3 | "semi": false, 4 | "singleQuote": true, 5 | "printWidth": 80, 6 | "quoteProps": "as-needed", 7 | "jsxSingleQuote": true 8 | } 9 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Jeremy Freeman 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the 
Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # zarr-js 2 | 3 | > load chunked binary zarr files in javascript 4 | 5 | [Zarr](https://zarr.readthedocs.io/en/stable/) is a chunked binary format for storing n-dimensional arrays with great support for parallel access in cloud environments. This is a minimal library purely for reading Zarr files in Javascript. Other libraries exist with more features, and might suit you better! 6 | 7 | This library was originally developed for Zarr v2, but we've recently added experimental support for [Zarr v3](https://zarr-specs.readthedocs.io/en/latest/v3/core/v3.0.html), including support for [sharding](https://zarr-specs.readthedocs.io/en/latest/v3/codecs/sharding-indexed/v1.0.html), which is useful for visualization use cases. See "V3 support" below for more details. 8 | 9 | ## install 10 | 11 | Add to your project with 12 | 13 | ``` 14 | npm install zarr-js 15 | ``` 16 | 17 | ## example 18 | 19 | You need to wrap a module for making async requests. For most use cases in the browser you can use `fetch`, which is the default if nothing is passed. 
You can also use `fsPromises.readFile` for local files in `node`. We'll use `fsPromises.readFile` in these examples. 20 | 21 | The `load` method loads the entire array. 22 | 23 | ```js 24 | const fs = require('fs/promises') 25 | const zarr = require('zarr-js')(fs.readFile) 26 | 27 | zarr.load('example.zarr', (err, array) => { 28 | console.log(array.data) 29 | }) 30 | 31 | >> Int16Array [ 1, 2, 3, 4 ] 32 | ``` 33 | 34 | The `open` method can instead be used to read only the metadata and then load individual chunks on demand based on their key. This is useful in applications where you want to load chunks lazily, e.g. tiles in a map viewer. 35 | 36 | ```js 37 | const fs = require('fs/promises') 38 | const zarr = require('zarr-js')(fs.readFile) 39 | 40 | zarr.open('example.zarr', (err, get) => { 41 | get([0], (err, array) => { 42 | console.log(array.data) 43 | }) 44 | }) 45 | 46 | >> Int16Array [ 1, 2, 3, 4 ] 47 | ``` 48 | 49 | The `loadGroup` and `openGroup` are similar but work on `zarr` groups with consolidated metadata. These are hierarchical data structures typically used to store multiple related arrays. 50 | 51 | ```js 52 | const fs = require('fs/promises') 53 | const zarr = require('zarr-js')(fs.readFile) 54 | 55 | zarr.loadGroup('example_group.zarr', (err, group) => { 56 | console.log(group.a.data) 57 | console.log(group.b.data) 58 | }) 59 | 60 | >> Int16Array [ 1, 2, 3, 4 ] 61 | >> Int16Array [ 5, 6, 7, 8 ] 62 | ``` 63 | 64 | ## v3 support 65 | 66 | We recently added experimental support for [Zarr v3](https://zarr.readthedocs.io/en/stable/spec/v3.html), including support for sharding, which makes it possible to define chunks that are smaller than their containing storage objects via [sharding](https://zarr-specs.readthedocs.io/en/latest/v3/codecs/sharding-indexed/v1.0.html). This can be especially useful in visualization applications where you want to load small portions of data on demand without creating an excessive number of files. 
67 | 68 | To use v3 specify the optional version tag `'v3'` (the default is `'v2'`) 69 | 70 | ```js 71 | const zarr = require('zarr-js')(window.fetch, 'v3') 72 | ``` 73 | 74 | Currently, the only supported method is `zarr.open`, which reads the metadata and then can load individual chunks on demand via the `get` method. For non-sharded data, this should behave similarly to Zarr v2. For sharded data, the `key` argument to the `get` method uses byte range requests to load chunks from within shards. 75 | 76 | Here's a simple worked example. For a non-sharded 4x4 array with 2x2 chunks, calling `get([0,0])` will return a 2x2 array with the entries `[0:2,0:2]` from the original array. For a sharded 4x4 array with 2x2 shards and 1x1 chunks, calling `get([0,0])` will return a 1x1 array with the entries `[0:1,0:1]` from the original array. 77 | 78 | You can additionally pass a configuration object for some v3 specific configuration. Current options include `useSuffixRequest` which is `true` by default and will use a suffix request to get the shard index instead of using a sequence of a HEAD request to get file size and then a byte range request. Most large object stores (e.g. S3, GCS, Azure) support suffix requests, but you may need to turn it off for other http servers. 79 | 80 | ## api 81 | 82 | This documentation is for the v2 version only (for the v3 version only the `open` method is supported). 83 | 84 | #### `zarr.load(uri, [callback], [metadata])` 85 | 86 | Loads a zarr file and passes the result to the `callback`. If the file contains multiple chunks, they are merged. This is the simplest way to load an array. If metadata has already been loaded, it can be passed as an optional third argument to avoid making the request. 87 | 88 | #### `zarr.open(uri, [callback], [metadata])` 89 | 90 | Opens a zarr file and passes a function to the `callback` that can then be used to load individual chunks based on their key. This is useful for lazily loading chunks (e.g. 
tiles in a map viewer). The result is a function that can be used to load chunks of the array. If metadata has already been loaded, it can be passed as an optional third argument to avoid making the request. 91 | 92 | #### `zarr.loadGroup(uri, [callback], [list], [metadata])` 93 | 94 | Loads all arrays with consolidated metadata from a zarr group, which is typically a collection of related arrays. The result passed to the `callback` is an object with keys as array names and values as arrays. An optional list of array names can be passed if you know you only want to load a subset. If metadata has already been loaded, it can be passed as an optional fourth argument to avoid making the request. 95 | 96 | #### `zarr.openGroup(uri, [callback], [list], [metadata])` 97 | 98 | Opens consolidated metadata for a zarr group, which is typically a collection of related arrays. Only the metadata is loaded, so this is useful when lazily loading chunks from multiple sources. The result passed to the `callback` is an object with keys as array names and values as functions that can be used to load chunks. An optional list of array names can be passed if you only want to return a subset of keys. If metadata has already been loaded, it can be passed as an optional fourth argument to avoid making the request. 99 | 100 | 101 | ## tests 102 | 103 | To run the tests, generate the example data from Python by running 104 | 105 | ``` 106 | rm -rf data 107 | python generate.py 108 | ``` 109 | 110 | and then run the tests using 111 | 112 | ``` 113 | node tests.js 114 | ``` 115 | 116 | The python script assumes you have `zarr` and `numpy` installed in your Python environment. 
-------------------------------------------------------------------------------- /data/V2/1d.chunked.compressed.i2.zarr/.zarray: -------------------------------------------------------------------------------- 1 | { 2 | "chunks": [ 3 | 2 4 | ], 5 | "compressor": { 6 | "id": "zlib", 7 | "level": 1 8 | }, 9 | "dtype": " { 7 | get([0], function (err, array) { 8 | console.log(err) 9 | console.log(array.data) 10 | }) 11 | }) -------------------------------------------------------------------------------- /example.zarr/.zarray: -------------------------------------------------------------------------------- 1 | { 2 | "chunks": [ 3 | 4 4 | ], 5 | "compressor": { 6 | "id": "zlib", 7 | "level": 1 8 | }, 9 | "dtype": " { 5 | if (!version || version == 'v2') { 6 | return v2(request) 7 | } else if (version == 'v3') { 8 | return v3(request, config) 9 | } else { 10 | throw Error(`version ${version} not recognized`) 11 | } 12 | } 13 | 14 | module.exports = zarr 15 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "zarr-js", 3 | "version": "3.3.0", 4 | "description": "load chunked binary zarr files in javascript", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "npm run test-remote-v2 && npm run test-remote-v3", 8 | "test-local-v3": "tape test/v3.js local | tap-spec", 9 | "test-remote-v3": "tape test/v3.js remote | tap-spec", 10 | "test-local-v2": "tape test/v2.js local | tap-spec", 11 | "test-remote-v2": "tape test/v2.js remote | tap-spec", 12 | "prettier": "prettier --write index.js src/v2.js src/v3.js test/v2.js test/v3.js" 13 | }, 14 | "author": "freeman-lab", 15 | "license": "MIT", 16 | "dependencies": { 17 | "async": "^2.6.2", 18 | "cartesian-product": "^2.1.2", 19 | "fflate": "^0.7.3", 20 | "ndarray": "^1.0.18", 21 | "ndarray-ops": "^1.2.2", 22 | "ndarray-scratch": "^1.2.0" 23 | }, 24 | "devDependencies": { 25 | 
"node-fetch": "^2.6.7", 26 | "prettier": "^2.4.1", 27 | "tap-spec": "^5.0.0", 28 | "tape": "^4.13.0" 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /src/v2.js: -------------------------------------------------------------------------------- 1 | const { gunzipSync, unzlibSync } = require('fflate') 2 | const ndarray = require('ndarray') 3 | const ops = require('ndarray-ops') 4 | const { parallel } = require('async') 5 | const pool = require('ndarray-scratch') 6 | const product = require('cartesian-product') 7 | 8 | const zarr = (request) => { 9 | if (!request) { 10 | request = window.fetch 11 | } 12 | if (!request) throw new Error('no request function defined') 13 | 14 | const loader = async (src, type, cb) => { 15 | let response 16 | try { 17 | response = await request(src) 18 | } catch (err) { 19 | if (type === 'arraybuffer' && err.code === 'ENOENT') { 20 | return cb(null, null) 21 | } else { 22 | return cb(new Error('error evaluating fetching function')) 23 | } 24 | } 25 | if (response && Buffer.isBuffer(response)) { 26 | return cb(null, response) 27 | } else { 28 | if (response && response.status && response.status === 200) { 29 | let body 30 | if (type === 'text') { 31 | body = await response.text() 32 | } else if (type === 'arraybuffer') { 33 | body = await response.arrayBuffer() 34 | } else { 35 | return cb(new Error('unsupported file format')) 36 | } 37 | if (!body) { 38 | return cb(new Error('failed to parse data')) 39 | } else { 40 | return cb(null, body) 41 | } 42 | } else if ( 43 | type === 'arraybuffer' && 44 | response && 45 | response.status && 46 | [403, 404].includes(response.status) 47 | ) { 48 | return cb(null, null) 49 | } else { 50 | return cb(new Error('resource not found')) 51 | } 52 | } 53 | } 54 | 55 | const load = (path, cb, metadata) => { 56 | const onload = (metadata) => { 57 | const keys = listKeys(metadata) 58 | const tasks = keys.map((k) => { 59 | const fetchChunk = (cb) => { 60 | 
loader(path + '/' + k, 'arraybuffer', (err, res) => { 61 | if (err) return cb(err) 62 | const result = {} 63 | result[k] = parseChunk(res, metadata) 64 | cb(null, result) 65 | }) 66 | } 67 | return fetchChunk 68 | }) 69 | parallel(tasks, (err, chunks) => { 70 | if (err) return cb(err) 71 | cb(null, mergeChunks(chunks, metadata)) 72 | }) 73 | } 74 | if (metadata) { 75 | onload(metadata) 76 | } else { 77 | loader(path + '/.zarray', 'text', (err, res) => { 78 | if (err) return cb(err) 79 | const metadata = parseMetadata(res) 80 | onload(metadata) 81 | }) 82 | } 83 | } 84 | 85 | const open = (path, cb, metadata) => { 86 | const onload = (metadata) => { 87 | const keys = listKeys(metadata) 88 | metadata.keys = keys 89 | const getChunk = function (k, cb) { 90 | const key = k.join('.') 91 | if (!keys.includes(key)) 92 | return cb(new Error('chunk ' + key + ' not found', null)) 93 | loader(path + '/' + key, 'arraybuffer', (err, res) => { 94 | if (err) return cb(err) 95 | const chunk = parseChunk(res, metadata) 96 | cb(null, chunk) 97 | }) 98 | } 99 | cb(null, getChunk) 100 | } 101 | if (metadata) { 102 | onload(metadata) 103 | } else { 104 | loader(path + '/.zarray', 'text', (err, res) => { 105 | if (err) return cb(err) 106 | const metadata = parseMetadata(res) 107 | onload(metadata) 108 | }) 109 | } 110 | } 111 | 112 | const openGroup = (path, cb, list, metadata) => { 113 | const onload = (metadata) => { 114 | if (!Object.keys(metadata).includes('zarr_consolidated_format')) { 115 | return cb(new Error('metadata is not consolidated', null)) 116 | } 117 | const arrays = listArrays(metadata.metadata) 118 | let keys = Object.keys(arrays) 119 | if (list && list.length > 0) keys = keys.filter((k) => list.includes(k)) 120 | const tasks = keys.map((k) => { 121 | return function (cb) { 122 | open(path + '/' + k, cb, arrays[k]) 123 | } 124 | }) 125 | parallel(tasks, (err, result) => { 126 | if (err) return cb(err) 127 | const out = {} 128 | keys.forEach((k, i) => { 129 | out[k] = 
result[i] 130 | }) 131 | cb(null, out, metadata) 132 | }) 133 | } 134 | if (metadata) { 135 | onload(metadata) 136 | } else { 137 | loader(path + '/.zmetadata', 'text', (err, res) => { 138 | if (err) return cb(err) 139 | const metadata = parseMetadata(res) 140 | onload(metadata) 141 | }) 142 | } 143 | } 144 | 145 | const loadGroup = (path, cb, list, metadata) => { 146 | const onload = (metadata) => { 147 | if (!Object.keys(metadata).includes('zarr_consolidated_format')) { 148 | return cb(new Error('metadata is not consolidated', null)) 149 | } 150 | const arrays = listArrays(metadata.metadata) 151 | let keys = Object.keys(arrays) 152 | if (list && list.length > 0) keys = keys.filter((k) => list.includes(k)) 153 | const tasks = keys.map((k) => { 154 | return function (cb) { 155 | load(path + '/' + k, cb, arrays[k]) 156 | } 157 | }) 158 | parallel(tasks, (err, result) => { 159 | if (err) return cb(err) 160 | const out = {} 161 | keys.forEach((k, i) => { 162 | out[k] = result[i] 163 | }) 164 | cb(null, out, metadata) 165 | }) 166 | } 167 | if (metadata) { 168 | onload(metadata) 169 | } else { 170 | loader(path + '/.zmetadata', 'text', (err, res) => { 171 | if (err) return cb(err) 172 | const metadata = parseMetadata(res) 173 | onload(metadata) 174 | }) 175 | } 176 | } 177 | 178 | // parse json metadata 179 | const parseMetadata = (json) => { 180 | return JSON.parse(json) 181 | } 182 | 183 | // parse a single zarr chunk 184 | const parseChunk = (chunk, metadata) => { 185 | if (chunk) { 186 | chunk = Buffer.isBuffer(chunk) ? 
chunk : Buffer.from(chunk) 187 | if (metadata.compressor) { 188 | if (metadata.compressor.id === 'zlib') { 189 | chunk = unzlibSync(chunk) 190 | } else if (metadata.compressor.id === 'gzip') { 191 | chunk = gunzipSync(chunk) 192 | } else { 193 | throw new Error( 194 | 'compressor ' + metadata.compressor.id + ' is not supported' 195 | ) 196 | } 197 | } 198 | const dtype = metadata.dtype 199 | if (dtype.startsWith('|S')) { 200 | const length = parseInt(dtype.split('|S')[1]) 201 | chunk = new constructors['|S'](length, 1)(chunk.buffer) 202 | } else if (metadata.dtype.startsWith(' a * b, 1) 210 | chunk = Array(length).fill(metadata.fill_value) 211 | } 212 | chunk = ndarray(chunk, metadata.chunks) 213 | return chunk 214 | } 215 | 216 | // merge chunks into an array 217 | const mergeChunks = (chunks, metadata) => { 218 | // use first chunk to get dtype (spec ensures all same) 219 | const dtype = Object.values(chunks[0])[0].dtype 220 | // get shape as exact multiple of chunks by rounding 221 | const shape = metadata.shape.map((d, i) => { 222 | const c = metadata.chunks[i] 223 | return Math.floor(d / c) * c + (d % c > 0) * c 224 | }) 225 | // create new array to store merged chunks 226 | let merged 227 | if (dtype === 'array') { 228 | merged = ndarray(new Array(shape.reduce((a, b) => a * b, 1)), shape) 229 | } else { 230 | merged = pool.zeros(shape, dtype) 231 | } 232 | // loop over chunks inserting into array based on key 233 | chunks.forEach((chunk) => { 234 | const key = Object.keys(chunk)[0] 235 | .split('.') 236 | .map((k) => parseInt(k)) 237 | const value = Object.values(chunk)[0] 238 | const lo = key.map((k, i) => k * metadata.chunks[i]) 239 | const hi = metadata.chunks 240 | let view = merged.lo.apply(merged, lo) 241 | view = view.hi.apply(view, hi) 242 | ops.assign(view, value) 243 | }) 244 | // truncate final array if needed 245 | if (metadata.shape.every((d, i) => d === merged.shape[i])) { 246 | return merged 247 | } else { 248 | const result = 
pool.zeros(metadata.shape, dtype) 249 | ops.assign(result, merged.hi.apply(merged, metadata.shape)) 250 | pool.free(merged) 251 | return result 252 | } 253 | } 254 | 255 | // list arrays 256 | const listArrays = (metadata) => { 257 | const keys = Object.keys(metadata).filter((k) => k.includes('.zarray')) 258 | const out = {} 259 | keys.forEach((k) => { 260 | out[k.replace('/.zarray', '')] = metadata[k] 261 | }) 262 | return out 263 | } 264 | 265 | // list keys of all zarr chunks based on metadata 266 | const listKeys = (metadata) => { 267 | const zipped = [] 268 | // loop over dimensions 269 | for (let i = 0; i < metadata.shape.length; i++) { 270 | const counts = [] 271 | let iter = 0 272 | let total = 0 273 | // add chunks until we exceed shape 274 | while (total < metadata.shape[i]) { 275 | counts.push(iter) 276 | total += metadata.chunks[i] 277 | iter += 1 278 | } 279 | zipped.push(counts) 280 | } 281 | const keys = product(zipped).map((name) => name.join('.')) 282 | return keys 283 | } 284 | 285 | function StringArray(size, bytes) { 286 | return (buffer) => { 287 | const count = buffer.byteLength / (size * bytes) 288 | const array = [] 289 | for (let s = 0; s < count; s++) { 290 | const subuffer = buffer.slice(s * bytes * size, (s + 1) * bytes * size) 291 | const substring = [] 292 | for (let c = 0; c < size; c++) { 293 | const parsed = Buffer.from( 294 | subuffer.slice(c * bytes, (c + 1) * bytes) 295 | ).toString('utf8') 296 | substring.push(parsed.replace(/\x00/g, '')) 297 | } 298 | array.push(substring.join('')) 299 | } 300 | return array 301 | } 302 | } 303 | 304 | function BoolArray(buffer) { 305 | const result = new Uint8Array(buffer) 306 | return Array.from(result).map((d) => d === 1) 307 | } 308 | 309 | const constructors = { 310 | ' { 9 | 10 | let useSuffixRequest = true 11 | if (config.hasOwnProperty('useSuffixRequest')) { 12 | useSuffixRequest = config.useSuffixRequest 13 | } 14 | 15 | if (!request) { 16 | request = window.fetch 17 | } 18 | if 
(!request) throw new Error('no request function defined') 19 | 20 | const loader = async (src, options, type, cb) => { 21 | let response 22 | try { 23 | response = await request(src, options) 24 | } catch (err) { 25 | if (type === 'arraybuffer' && err.code === 'ENOENT') { 26 | return cb(null, null) 27 | } else { 28 | return cb(new Error('error evaluating fetching function')) 29 | } 30 | } 31 | if (response && Buffer.isBuffer(response)) { 32 | return cb(null, response) 33 | } else { 34 | if ( 35 | response && 36 | response.status && 37 | (response.status === 200 || response.status === 206) 38 | ) { 39 | let body 40 | if (type === 'text') { 41 | body = await response.text() 42 | } else if (type === 'arraybuffer') { 43 | body = await response.arrayBuffer() 44 | } else { 45 | return cb(new Error('unsupported file format')) 46 | } 47 | if (!body) { 48 | return cb(new Error('failed to parse data')) 49 | } else { 50 | return cb(null, body) 51 | } 52 | } else if ( 53 | type === 'arraybuffer' && 54 | response && 55 | response.status && 56 | [403, 404].includes(response.status) 57 | ) { 58 | return cb(null, null) 59 | } else { 60 | return cb(new Error('resource not found')) 61 | } 62 | } 63 | } 64 | 65 | const open = (path, cb, metadata) => { 66 | const indexCache = {} 67 | 68 | const onload = (metadata) => { 69 | const isSharded = metadata.codecs[0].name == 'sharding_indexed' 70 | const arrayShape = metadata.shape 71 | const chunkShape = isSharded 72 | ? metadata.codecs[0].configuration.chunk_shape 73 | : metadata.chunk_grid.configuration.chunk_shape 74 | const dataType = metadata.data_type 75 | const separator = metadata.chunk_key_encoding.configuration.separator 76 | const fillValue = metadata.fill_value 77 | const codec = isSharded 78 | ? 
metadata.codecs[0].configuration.codecs[0] 79 | : metadata.codecs[0] 80 | const keys = listKeys(arrayShape, chunkShape, separator) 81 | 82 | const getChunk = function (k, cb) { 83 | if (k.length != arrayShape.length || k.length != chunkShape.length) { 84 | return cb( 85 | new Error( 86 | 'key dimensionality must match array shape and chunk shape' 87 | ) 88 | ) 89 | } 90 | const key = k.join(separator) 91 | if (!keys.includes(key)) 92 | return cb(new Error('storage key ' + key + ' not found', null)) 93 | 94 | // fetch the chunk 95 | loader(path + '/c/' + key, {}, 'arraybuffer', (err, res) => { 96 | if (err) return cb(err) 97 | const chunk = parseChunk(res, dataType, chunkShape, fillValue, codec) 98 | cb(null, chunk) 99 | }) 100 | } 101 | 102 | const getShardedChunk = async function (k, cb) { 103 | if (k.length != arrayShape.length || k.length != chunkShape.length) { 104 | return cb( 105 | new Error( 106 | 'key dimensionality must match array shape and chunk shape' 107 | ) 108 | ) 109 | } 110 | const lookup = [] 111 | const chunksPerShard = 112 | metadata.chunk_grid.configuration.chunk_shape.map((d, i) => { 113 | return d / chunkShape[i] 114 | }) 115 | for (let i = 0; i < k.length; i++) { 116 | lookup.push(Math.floor(k[i] / chunksPerShard[i])) 117 | } 118 | const key = lookup.join(separator) 119 | const src = path + '/c/' + key 120 | const checksumSize = 4 121 | const indexSize = 122 | 16 * chunksPerShard.reduce((a, b) => a * b, 1) 123 | if (!keys.includes(key)) 124 | return cb(new Error('storage key ' + key + ' not found', null)) 125 | 126 | // load a shard using the index 127 | const getUsingIndex = (index) => { 128 | // modulo index to get index for a single shard 129 | const reducedKey = k.map((d, i) => d % chunksPerShard[i]) 130 | // linearize index 131 | const start = ndToLinearIndex(chunksPerShard, reducedKey) 132 | // write null chunk when 2^64-1 indicates fill value 133 | if ( 134 | index[start * 2] === 18446744073709551615n && 135 | index[start * 2 + 1] === 
18446744073709551615n 136 | ) { 137 | const chunk = parseChunk( 138 | null, 139 | dataType, 140 | chunkShape, 141 | fillValue, 142 | codec 143 | ) 144 | cb(null, chunk) 145 | } else { 146 | const range = `bytes=${index[start * 2]}-${ 147 | parseInt(index[start * 2] + index[start * 2 + 1]) - 1 148 | }` 149 | // finally load the chunk 150 | loader( 151 | src, 152 | { headers: { Range: range } }, 153 | 'arraybuffer', 154 | (err, res) => { 155 | if (err) return cb(err) 156 | const chunk = parseChunk( 157 | res, 158 | dataType, 159 | chunkShape, 160 | fillValue, 161 | codec 162 | ) 163 | cb(null, chunk) 164 | } 165 | ) 166 | } 167 | } 168 | 169 | // load index from cache or fetch using file size 170 | if (indexCache[key]) { 171 | getUsingIndex(indexCache[key]) 172 | } else { 173 | if (useSuffixRequest) { 174 | loader( 175 | src, 176 | { 177 | headers: { 178 | Range: `bytes=-${indexSize + checksumSize}`, 179 | }, 180 | }, 181 | 'arraybuffer', 182 | (err, res) => { 183 | if (err) return cb(err) 184 | const index = new BigUint64Array(new Buffer.from(res).buffer.slice(0, indexSize)) 185 | indexCache[key] = index 186 | getUsingIndex(indexCache[key]) 187 | } 188 | ) 189 | } else { 190 | request(src, { method: 'HEAD' }).then((res) => { 191 | const contentLength = res.headers.get('Content-Length') 192 | if (contentLength) { 193 | const fileSize = Number(contentLength) 194 | // get index byte range according to sharding spec 195 | const startRange = fileSize - (indexSize + checksumSize) 196 | loader( 197 | src, 198 | { 199 | headers: { 200 | Range: `bytes=${startRange}-${fileSize - checksumSize - 1}`, 201 | }, 202 | }, 203 | 'arraybuffer', 204 | (err, res) => { 205 | if (err) return cb(err) 206 | const index = new BigUint64Array(Buffer.from(res).buffer) 207 | indexCache[key] = index 208 | getUsingIndex(indexCache[key]) 209 | } 210 | ) 211 | } 212 | }) 213 | } 214 | } 215 | } 216 | 217 | isSharded ? 
cb(null, getShardedChunk) : cb(null, getChunk) 218 | } 219 | if (metadata) { 220 | onload(metadata) 221 | } else { 222 | loader(path + '/zarr.json', {}, 'text', (err, res) => { 223 | if (err) return cb(err) 224 | const metadata = parseMetadata(res) 225 | onload(metadata) 226 | }) 227 | } 228 | } 229 | 230 | // parse json metadata 231 | const parseMetadata = (json) => { 232 | return JSON.parse(json) 233 | } 234 | 235 | // parse a single zarr chunk 236 | const parseChunk = (chunk, dtype, chunkShape, fillValue, codec) => { 237 | if (chunk) { 238 | chunk = Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk) 239 | if (codec.name == 'gzip') { 240 | chunk = gunzipSync(chunk) 241 | } else if (codec.name == 'blosc' && codec.configuration.cname == 'zlib') { 242 | chunk = unzlibSync(chunk) 243 | } else { 244 | throw new Error('compressor ' + compressor + ' is not supported') 245 | } 246 | chunk = new constructors[dtype](chunk.buffer) 247 | } else { 248 | const length = chunkShape.reduce((a, b) => a * b, 1) 249 | chunk = new constructors[dtype](length).fill(fillValue) 250 | } 251 | chunk = ndarray(chunk, chunkShape) 252 | return chunk 253 | } 254 | 255 | // list all storage keys based on shape properties 256 | const listKeys = (arrayShape, chunkShape, separator) => { 257 | const zipped = [] 258 | for (let i = 0; i < arrayShape.length; i++) { 259 | const counts = [] 260 | let iter = 0 261 | let total = 0 262 | // add chunks until we exceed shape 263 | while (total < arrayShape[i]) { 264 | counts.push(iter) 265 | total += chunkShape[i] 266 | iter += 1 267 | } 268 | zipped.push(counts) 269 | } 270 | return product(zipped).map((name) => name.join(separator)) 271 | } 272 | 273 | const ndToLinearIndex = (shape, index) => { 274 | let stride = 1 275 | let linearIndex = 0 276 | for (let i = shape.length - 1; i >= 0; i--) { 277 | linearIndex += index[i] * stride 278 | stride *= shape[i] 279 | } 280 | return linearIndex 281 | } 282 | 283 | const constructors = { 284 | uint8: Uint8Array, 
285 | int16: Int16Array, 286 | int32: Int32Array, 287 | float32: Float32Array, 288 | float64: Float64Array, 289 | } 290 | 291 | return { 292 | open: open, 293 | } 294 | } 295 | 296 | module.exports = zarr 297 | -------------------------------------------------------------------------------- /test/v2.js: -------------------------------------------------------------------------------- 1 | const test = require('tape') 2 | const fs = require('fs/promises') 3 | const fetch = require('node-fetch') 4 | const zarrLocal = require('../index')(fs.readFile, 'v2') 5 | const zarrRemote = require('../index')(fetch, 'v2') 6 | 7 | const args = process.argv 8 | 9 | const urlLocal = 'data/v2/' 10 | const urlRemote = 11 | 'https://storage.googleapis.com/carbonplan-share/testing/zarr-js/v2/' 12 | 13 | if (args.includes('local')) { 14 | run(zarrLocal, urlLocal, 'local') 15 | } 16 | if (args.includes('remote')) { 17 | run(zarrRemote, urlRemote, 'remote') 18 | } 19 | 20 | function run(zarr, prefix, mode) { 21 | test('1d.chunked.compressed.i2' + `.${mode}`, function (t) { 22 | zarr.load(prefix + '1d.chunked.compressed.i2.zarr', (err, array) => { 23 | t.deepEqual(array.data, new Int16Array([1, 2, 3, 4])) 24 | t.end() 25 | }) 26 | }) 27 | 28 | test('1d.chunked.filled.compressed.i2' + `.${mode}`, function (t) { 29 | zarr.load(prefix + '1d.chunked.filled.compressed.i2.zarr', (err, array) => { 30 | t.deepEqual(array.data, new Int16Array([1, 2, 0, 0])) 31 | t.end() 32 | }) 33 | }) 34 | 35 | test('1d.contiguous.uncompressed.i2' + `.${mode}`, function (t) { 36 | zarr.load(prefix + '1d.contiguous.uncompressed.i2.zarr', (err, array) => { 37 | t.deepEqual(array.data, new Int16Array([1, 2, 3, 4])) 38 | t.end() 39 | }) 40 | }) 41 | 42 | test('1d.contiguous.compressed.i4' + `.${mode}`, function (t) { 43 | zarr.load(prefix + '1d.contiguous.compressed.i4.zarr', (err, array) => { 44 | t.deepEqual(array.data, new Int32Array([1, 2, 3, 4])) 45 | t.end() 46 | }) 47 | }) 48 | 49 | 
test('1d.contiguous.compressed.u1' + `.${mode}`, function (t) { 50 | zarr.load(prefix + '1d.contiguous.compressed.u1.zarr', (err, array) => { 51 | t.deepEqual(array.data, new Uint8Array([255, 0, 255, 0])) 52 | t.end() 53 | }) 54 | }) 55 | 56 | test('1d.contiguous.compressed.f4' + `.${mode}`, function (t) { 57 | zarr.load(prefix + '1d.contiguous.compressed.f4.zarr', (err, array) => { 58 | t.deepEqual(array.data, new Float32Array([-1000.5, 0, 1000.5, 0])) 59 | t.end() 60 | }) 61 | }) 62 | 63 | test('1d.contiguous.uncompressed.i4' + `.${mode}`, function (t) { 64 | zarr.load(prefix + '1d.contiguous.uncompressed.i4.zarr', (err, array) => { 65 | t.deepEqual(array.data, new Int32Array([1, 2, 3, 4])) 66 | t.end() 67 | }) 68 | }) 69 | 70 | test('1d.contiguous.compressed.f8' + `.${mode}`, function (t) { 71 | zarr.load(prefix + '1d.contiguous.compressed.f8.zarr', (err, array) => { 72 | t.deepEqual(array.data, new Float64Array([1.5, 2.5, 3.5, 4.5])) 73 | t.end() 74 | }) 75 | }) 76 | 77 | test('1d.contiguous.compressed.U13' + `.${mode}`, function (t) { 78 | zarr.load(prefix + '1d.contiguous.compressed.U13.zarr', (err, array) => { 79 | t.deepEqual(array.data, new Array('a', 'b', 'cc', 'd')) 80 | t.end() 81 | }) 82 | }) 83 | 84 | test('1d.contiguous.uncompressed.U13' + `.${mode}`, function (t) { 85 | zarr.load(prefix + '1d.contiguous.compressed.U13.zarr', (err, array) => { 86 | t.deepEqual(array.data, new Array('a', 'b', 'cc', 'd')) 87 | t.end() 88 | }) 89 | }) 90 | 91 | test('1d.contiguous.compressed.U7' + `.${mode}`, function (t) { 92 | zarr.load(prefix + '1d.contiguous.compressed.U13.zarr', (err, array) => { 93 | t.deepEqual(array.data, new Array('a', 'b', 'cc', 'd')) 94 | t.end() 95 | }) 96 | }) 97 | 98 | test('1d.contiguous.uncompressed.U7' + `.${mode}`, function (t) { 99 | zarr.load(prefix + '1d.contiguous.compressed.U13.zarr', (err, array) => { 100 | t.deepEqual(array.data, new Array('a', 'b', 'cc', 'd')) 101 | t.end() 102 | }) 103 | }) 104 | 105 | 
test('1d.contiguous.compressed.S7' + `.${mode}`, function (t) { 106 | zarr.load(prefix + '1d.contiguous.compressed.U13.zarr', (err, array) => { 107 | t.deepEqual(array.data, new Array('a', 'b', 'cc', 'd')) 108 | t.end() 109 | }) 110 | }) 111 | 112 | test('1d.contiguous.uncompressed.S7' + `.${mode}`, function (t) { 113 | zarr.load(prefix + '1d.contiguous.compressed.U13.zarr', (err, array) => { 114 | t.deepEqual(array.data, new Array('a', 'b', 'cc', 'd')) 115 | t.end() 116 | }) 117 | }) 118 | 119 | test('1d.contiguous.compressed.b1' + `.${mode}`, function (t) { 120 | zarr.load(prefix + '1d.contiguous.compressed.b1.zarr', (err, array) => { 121 | t.deepEqual(array.data, new Array(true, false, true, false)) 122 | t.end() 123 | }) 124 | }) 125 | 126 | test('1d.contiguous.uncompressed.b1' + `.${mode}`, function (t) { 127 | zarr.load(prefix + '1d.contiguous.compressed.b1.zarr', (err, array) => { 128 | t.deepEqual(array.data, new Array(true, false, true, false)) 129 | t.end() 130 | }) 131 | }) 132 | 133 | test('1d.contiguous.uncompressed.f8' + `.${mode}`, function (t) { 134 | zarr.load(prefix + '1d.contiguous.uncompressed.f8.zarr', (err, array) => { 135 | t.deepEqual(array.data, new Float64Array([1.5, 2.5, 3.5, 4.5])) 136 | t.end() 137 | }) 138 | }) 139 | 140 | test('1d.chunked.compressed.i2' + `.${mode}`, function (t) { 141 | zarr.load(prefix + '1d.chunked.compressed.i2.zarr', (err, array) => { 142 | t.deepEqual(array.data, new Int16Array([1, 2, 3, 4])) 143 | t.end() 144 | }) 145 | }) 146 | 147 | test('1d.chunked.ragged.compressed.i2' + `.${mode}`, function (t) { 148 | zarr.load(prefix + '1d.chunked.ragged.compressed.i2.zarr', (err, array) => { 149 | t.deepEqual(array.data, new Int16Array([1, 2, 3, 4, 5])) 150 | t.end() 151 | }) 152 | }) 153 | 154 | test('2d.contiguous.compressed.i2' + `.${mode}`, function (t) { 155 | zarr.load(prefix + '2d.contiguous.compressed.i2.zarr', (err, array) => { 156 | t.deepEqual(array.data, new Int16Array([1, 2, 3, 4])) 157 | 
t.deepEqual(array.shape, [2, 2]) 158 | t.end() 159 | }) 160 | }) 161 | 162 | test('2d.chunked.compressed.i2' + `.${mode}`, function (t) { 163 | zarr.load(prefix + '2d.chunked.compressed.i2.zarr', (err, array) => { 164 | t.deepEqual(array.data, new Int16Array([1, 2, 3, 4])) 165 | t.deepEqual(array.shape, [2, 2]) 166 | t.end() 167 | }) 168 | }) 169 | 170 | test('2d.chunked.compressed.U7' + `.${mode}`, function (t) { 171 | zarr.load(prefix + '2d.chunked.compressed.U7.zarr', (err, array) => { 172 | t.deepEqual(array.data, new Array('a', 'b', 'cc', 'd')) 173 | t.deepEqual(array.shape, [2, 2]) 174 | t.end() 175 | }) 176 | }) 177 | 178 | test('2d.chunked.filled.compressed.U7' + `.${mode}`, function (t) { 179 | zarr.load(prefix + '2d.chunked.filled.compressed.U7.zarr', (err, array) => { 180 | t.deepEqual(array.data, new Array('a', 'b', 'cc', '')) 181 | t.deepEqual(array.shape, [2, 2]) 182 | t.end() 183 | }) 184 | }) 185 | 186 | test('2d.chunked.ragged.compressed.i2' + `.${mode}`, function (t) { 187 | zarr.load(prefix + '2d.chunked.ragged.compressed.i2.zarr', (err, array) => { 188 | t.deepEqual(array.data, new Int16Array([1, 2, 3, 4, 5, 6, 7, 8, 9])) 189 | t.deepEqual(array.shape, [3, 3]) 190 | t.end() 191 | }) 192 | }) 193 | 194 | test('3d.chunked.compressed.i2' + `.${mode}`, function (t) { 195 | zarr.load(prefix + '3d.chunked.compressed.i2.zarr', (err, array) => { 196 | t.deepEqual( 197 | array.data, 198 | new Int16Array([ 199 | 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 200 | 20, 21, 22, 23, 24, 25, 26, 201 | ]) 202 | ) 203 | t.deepEqual(array.shape, [3, 3, 3]) 204 | t.end() 205 | }) 206 | }) 207 | 208 | test('3d.contiguous.compressed.i2' + `.${mode}`, function (t) { 209 | zarr.load(prefix + '3d.contiguous.compressed.i2.zarr', (err, array) => { 210 | t.deepEqual( 211 | array.data, 212 | new Int16Array([ 213 | 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 214 | 20, 21, 22, 23, 24, 25, 26, 215 | ]) 216 | ) 217 | 
t.deepEqual(array.shape, [3, 3, 3]) 218 | t.end() 219 | }) 220 | }) 221 | 222 | test('3d.chunked.mixed.compressed.i2' + `.${mode}`, function (t) { 223 | zarr.load(prefix + '3d.chunked.mixed.compressed.i2.zarr', (err, array) => { 224 | t.deepEqual( 225 | array.data, 226 | new Int16Array([ 227 | 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 228 | 20, 21, 22, 23, 24, 25, 26, 229 | ]) 230 | ) 231 | t.deepEqual(array.shape, [3, 3, 3]) 232 | t.end() 233 | }) 234 | }) 235 | 236 | test('1d.contiguous.compressed.i2.group' + `.${mode}`, function (t) { 237 | zarr.loadGroup( 238 | prefix + '1d.contiguous.compressed.i2.group.zarr', 239 | (err, group) => { 240 | t.deepEqual(group.a.data, new Int16Array([1, 2, 3, 4])) 241 | t.deepEqual(group.b.data, new Int16Array([5, 6, 7, 8])) 242 | t.end() 243 | } 244 | ) 245 | }) 246 | 247 | test('1d.contiguous.compressed.i2.group.list' + `.${mode}`, function (t) { 248 | zarr.loadGroup( 249 | prefix + '1d.contiguous.compressed.i2.group.zarr', 250 | (err, group) => { 251 | t.deepEqual(group.a.data, new Int16Array([1, 2, 3, 4])) 252 | t.equal(group.b, undefined) 253 | t.end() 254 | }, 255 | ['a'] 256 | ) 257 | }) 258 | } 259 | -------------------------------------------------------------------------------- /test/v3.js: -------------------------------------------------------------------------------- 1 | const test = require('tape') 2 | const fs = require('fs/promises') 3 | const fetch = require('node-fetch') 4 | const zarrLocal = require('../index')(fetch, 'v3', {useSuffixRequest: false}) 5 | const zarrRemote = require('../index')(fetch, 'v3') 6 | 7 | const args = process.argv 8 | 9 | const urlLocal = 'http://localhost:8080/v3/' 10 | const urlRemote = 11 | 'https://storage.googleapis.com/carbonplan-share/testing/zarr-js/v3/' 12 | 13 | if (args.includes('local')) { 14 | run(zarrLocal, urlLocal, 'local') 15 | } 16 | if (args.includes('remote')) { 17 | run(zarrRemote, urlRemote, 'remote') 18 | } 19 | 20 | function run(zarr, 
prefix, mode) { 21 | test('1d.chunked.compressed.i2' + `.${mode}`, function (t) { 22 | zarr.open(prefix + '1d.chunked.compressed.i2.zarr', (err, get) => { 23 | t.plan(2) 24 | get([0], (err, array) => { 25 | t.deepEqual(array.data, new Int16Array([1, 2])) 26 | }) 27 | get([1], (err, array) => { 28 | t.deepEqual(array.data, new Int16Array([3, 4])) 29 | }) 30 | }) 31 | }) 32 | 33 | test('1d.chunked.compressed.sharded.i2' + `.${mode}`, function (t) { 34 | zarr.open(prefix + '1d.chunked.compressed.sharded.i2.zarr', (err, get) => { 35 | t.plan(4) 36 | get([0], (err, array) => { 37 | t.deepEqual(array.data, new Int16Array([1])) 38 | }) 39 | get([1], (err, array) => { 40 | t.deepEqual(array.data, new Int16Array([2])) 41 | }) 42 | get([2], (err, array) => { 43 | t.deepEqual(array.data, new Int16Array([3])) 44 | }) 45 | get([3], (err, array) => { 46 | t.deepEqual(array.data, new Int16Array([4])) 47 | }) 48 | }) 49 | }) 50 | 51 | test('1d.contiguous.compressed.i2' + `.${mode}`, function (t) { 52 | zarr.open(prefix + '1d.contiguous.compressed.i2.zarr', (err, get) => { 53 | get([0], (err, array) => { 54 | t.deepEqual(array.data, new Int16Array([1, 2, 3, 4])) 55 | t.end() 56 | }) 57 | }) 58 | }) 59 | 60 | test('1d.contiguous.compressed.sharded.i2' + `.${mode}`, function (t) { 61 | zarr.open( 62 | prefix + '1d.contiguous.compressed.sharded.i2.zarr', 63 | (err, get) => { 64 | get([0], (err, array) => { 65 | t.deepEqual(array.data, new Int16Array([1, 2, 3, 4])) 66 | t.end() 67 | }) 68 | } 69 | ) 70 | }) 71 | 72 | test('1d.contiguous.compressed.i4' + `.${mode}`, function (t) { 73 | zarr.open(prefix + '1d.contiguous.compressed.i4.zarr', (err, get) => { 74 | get([0], (err, array) => { 75 | t.deepEqual(array.data, new Int32Array([1, 2, 3, 4])) 76 | t.end() 77 | }) 78 | }) 79 | }) 80 | 81 | test('1d.contiguous.compressed.sharded.i4' + `.${mode}`, function (t) { 82 | zarr.open( 83 | prefix + '1d.contiguous.compressed.sharded.i4.zarr', 84 | (err, get) => { 85 | get([0], (err, array) => { 
86 | t.deepEqual(array.data, new Int32Array([1, 2, 3, 4])) 87 | t.end() 88 | }) 89 | } 90 | ) 91 | }) 92 | 93 | test('1d.contiguous.compressed.u1' + `.${mode}`, function (t) { 94 | zarr.open(prefix + '1d.contiguous.compressed.u1.zarr', (err, get) => { 95 | get([0], (err, array) => { 96 | t.deepEqual(array.data, new Uint8Array([255, 0, 255, 0])) 97 | t.end() 98 | }) 99 | }) 100 | }) 101 | 102 | test('1d.contiguous.compressed.sharded.u1' + `.${mode}`, function (t) { 103 | zarr.open( 104 | prefix + '1d.contiguous.compressed.sharded.u1.zarr', 105 | (err, get) => { 106 | get([0], (err, array) => { 107 | t.deepEqual(array.data, new Uint8Array([255, 0, 255, 0])) 108 | t.end() 109 | }) 110 | } 111 | ) 112 | }) 113 | 114 | test('1d.contiguous.compressed.f4' + `.${mode}`, function (t) { 115 | zarr.open(prefix + '1d.contiguous.compressed.f4.zarr', (err, get) => { 116 | get([0], (err, array) => { 117 | t.deepEqual(array.data, new Float32Array([-1000.5, 0, 1000.5, 0])) 118 | t.end() 119 | }) 120 | }) 121 | }) 122 | 123 | test('1d.contiguous.compressed.sharded.f4' + `.${mode}`, function (t) { 124 | zarr.open( 125 | prefix + '1d.contiguous.compressed.sharded.f4.zarr', 126 | (err, get) => { 127 | get([0], (err, array) => { 128 | t.deepEqual(array.data, new Float32Array([-1000.5, 0, 1000.5, 0])) 129 | t.end() 130 | }) 131 | } 132 | ) 133 | }) 134 | 135 | test('1d.contiguous.compressed.f8' + `.${mode}`, function (t) { 136 | zarr.open(prefix + '1d.contiguous.compressed.f8.zarr', (err, get) => { 137 | get([0], (err, array) => { 138 | t.deepEqual(array.data, new Float32Array([1.5, 2.5, 3.5, 4.5])) 139 | t.end() 140 | }) 141 | }) 142 | }) 143 | 144 | test('1d.contiguous.compressed.sharded.f8' + `.${mode}`, function (t) { 145 | zarr.open( 146 | prefix + '1d.contiguous.compressed.sharded.f8.zarr', 147 | (err, get) => { 148 | get([0], (err, array) => { 149 | t.deepEqual(array.data, new Float32Array([1.5, 2.5, 3.5, 4.5])) 150 | t.end() 151 | }) 152 | } 153 | ) 154 | }) 155 | 156 | 
test('2d.chunked.compressed.i2' + `.${mode}`, function (t) { 157 | zarr.open(prefix + '2d.chunked.compressed.i2.zarr', (err, get) => { 158 | t.plan(4) 159 | get([0, 0], (err, array) => { 160 | t.deepEqual(array.data, new Int16Array([1])) 161 | }) 162 | get([0, 1], (err, array) => { 163 | t.deepEqual(array.data, new Int16Array([2])) 164 | }) 165 | get([1, 0], (err, array) => { 166 | t.deepEqual(array.data, new Int16Array([3])) 167 | }) 168 | get([1, 1], (err, array) => { 169 | t.deepEqual(array.data, new Int16Array([4])) 170 | }) 171 | }) 172 | }) 173 | 174 | test('2d.chunked.compressed.sharded.i2' + `.${mode}`, function (t) { 175 | zarr.open(prefix + '2d.chunked.compressed.sharded.i2.zarr', (err, get) => { 176 | t.plan(16) 177 | get([0, 0], (err, array) => { 178 | t.deepEqual(array.data, new Int16Array([1])) 179 | }) 180 | get([0, 1], (err, array) => { 181 | t.deepEqual(array.data, new Int16Array([2])) 182 | }) 183 | get([0, 2], (err, array) => { 184 | t.deepEqual(array.data, new Int16Array([3])) 185 | }) 186 | get([0, 3], (err, array) => { 187 | t.deepEqual(array.data, new Int16Array([4])) 188 | }) 189 | get([1, 0], (err, array) => { 190 | t.deepEqual(array.data, new Int16Array([5])) 191 | }) 192 | get([1, 1], (err, array) => { 193 | t.deepEqual(array.data, new Int16Array([6])) 194 | }) 195 | get([1, 2], (err, array) => { 196 | t.deepEqual(array.data, new Int16Array([7])) 197 | }) 198 | get([1, 3], (err, array) => { 199 | t.deepEqual(array.data, new Int16Array([8])) 200 | }) 201 | get([2, 0], (err, array) => { 202 | t.deepEqual(array.data, new Int16Array([9])) 203 | }) 204 | get([2, 1], (err, array) => { 205 | t.deepEqual(array.data, new Int16Array([10])) 206 | }) 207 | get([2, 2], (err, array) => { 208 | t.deepEqual(array.data, new Int16Array([11])) 209 | }) 210 | get([2, 3], (err, array) => { 211 | t.deepEqual(array.data, new Int16Array([12])) 212 | }) 213 | get([3, 0], (err, array) => { 214 | t.deepEqual(array.data, new Int16Array([13])) 215 | }) 216 | get([3, 
1], (err, array) => { 217 | t.deepEqual(array.data, new Int16Array([14])) 218 | }) 219 | get([3, 2], (err, array) => { 220 | t.deepEqual(array.data, new Int16Array([15])) 221 | }) 222 | get([3, 3], (err, array) => { 223 | t.deepEqual(array.data, new Int16Array([16])) 224 | }) 225 | }) 226 | }) 227 | 228 | test('2d.chunked.compressed.sharded.filled.i2' + `.${mode}`, function (t) { 229 | zarr.open( 230 | prefix + '2d.chunked.compressed.sharded.filled.i2.zarr', 231 | (err, get) => { 232 | t.plan(16) 233 | get([0, 0], (err, array) => { 234 | t.deepEqual(array.data, new Int16Array([0])) 235 | }) 236 | get([0, 1], (err, array) => { 237 | t.deepEqual(array.data, new Int16Array([1])) 238 | }) 239 | get([0, 2], (err, array) => { 240 | t.deepEqual(array.data, new Int16Array([2])) 241 | }) 242 | get([0, 3], (err, array) => { 243 | t.deepEqual(array.data, new Int16Array([3])) 244 | }) 245 | get([1, 0], (err, array) => { 246 | t.deepEqual(array.data, new Int16Array([4])) 247 | }) 248 | get([1, 1], (err, array) => { 249 | t.deepEqual(array.data, new Int16Array([5])) 250 | }) 251 | get([1, 2], (err, array) => { 252 | t.deepEqual(array.data, new Int16Array([6])) 253 | }) 254 | get([1, 3], (err, array) => { 255 | t.deepEqual(array.data, new Int16Array([7])) 256 | }) 257 | get([2, 0], (err, array) => { 258 | t.deepEqual(array.data, new Int16Array([8])) 259 | }) 260 | get([2, 1], (err, array) => { 261 | t.deepEqual(array.data, new Int16Array([9])) 262 | }) 263 | get([2, 2], (err, array) => { 264 | t.deepEqual(array.data, new Int16Array([10])) 265 | }) 266 | get([2, 3], (err, array) => { 267 | t.deepEqual(array.data, new Int16Array([11])) 268 | }) 269 | get([3, 0], (err, array) => { 270 | t.deepEqual(array.data, new Int16Array([12])) 271 | }) 272 | get([3, 1], (err, array) => { 273 | t.deepEqual(array.data, new Int16Array([13])) 274 | }) 275 | get([3, 2], (err, array) => { 276 | t.deepEqual(array.data, new Int16Array([14])) 277 | }) 278 | get([3, 3], (err, array) => { 279 | 
t.deepEqual(array.data, new Int16Array([15])) 280 | }) 281 | } 282 | ) 283 | }) 284 | 285 | test('3d.chunked.compressed.i2' + `.${mode}`, function (t) { 286 | zarr.open(prefix + '3d.chunked.compressed.i2.zarr', (err, get) => { 287 | t.plan(2) 288 | get([0, 0, 0], (err, array) => { 289 | t.deepEqual(array.data, new Int16Array([0, 1, 4, 5, 16, 17, 20, 21])) 290 | }) 291 | get([1, 0, 1], (err, array) => { 292 | t.deepEqual( 293 | array.data, 294 | new Int16Array([34, 35, 38, 39, 50, 51, 54, 55]) 295 | ) 296 | }) 297 | }) 298 | }) 299 | 300 | test('3d.chunked.compressed.sharded.i2' + `.${mode}`, function (t) { 301 | zarr.open(prefix + '3d.chunked.compressed.sharded.i2.zarr', (err, get) => { 302 | t.plan(6) 303 | get([0, 0, 0], (err, array) => { 304 | t.deepEqual(array.data, new Int16Array([0])) 305 | }) 306 | get([1, 0, 1], (err, array) => { 307 | t.deepEqual(array.data, new Int16Array([17])) 308 | }) 309 | get([2, 0, 0], (err, array) => { 310 | t.deepEqual(array.data, new Int16Array([32])) 311 | }) 312 | get([1, 1, 1], (err, array) => { 313 | t.deepEqual(array.data, new Int16Array([21])) 314 | }) 315 | get([1, 3, 2], (err, array) => { 316 | t.deepEqual(array.data, new Int16Array([30])) 317 | }) 318 | get([3, 3, 3], (err, array) => { 319 | t.deepEqual(array.data, new Int16Array([63])) 320 | }) 321 | }) 322 | }) 323 | } 324 | --------------------------------------------------------------------------------