├── .eslintrc.yml
├── .github
│   └── workflows
│       ├── lactame.yml
│       ├── nodejs.yml
│       ├── release.yml
│       └── typedoc.yml
├── .gitignore
├── .npmrc
├── .prettierrc.json
├── CHANGELOG.md
├── LICENSE
├── README.md
├── demo
│   └── agilent.ts
├── jest.config.js
├── package.json
├── src
│   ├── .npmignore
│   ├── __tests__
│   │   ├── __snapshots__
│   │   │   └── toString.test.ts.snap
│   │   ├── attributeExists.test.ts
│   │   ├── dataVariableExists.test.ts
│   │   ├── files
│   │   │   ├── P071.CDF
│   │   │   ├── agilent_hplc.cdf
│   │   │   ├── ichthyop.nc
│   │   │   ├── madis-sao.nc
│   │   │   ├── model1_md2.nc
│   │   │   └── not_nc.txt
│   │   ├── getAttribute.test.ts
│   │   ├── getDataVariableAsString.test.ts
│   │   ├── index.test.ts
│   │   ├── toString.test.ts
│   │   └── types.test.ts
│   ├── data.ts
│   ├── header.ts
│   ├── index.ts
│   ├── parser.ts
│   ├── toString.ts
│   ├── types.ts
│   └── utils.ts
├── tsconfig.cjs.json
├── tsconfig.esm.json
└── tsconfig.json
/.eslintrc.yml:
--------------------------------------------------------------------------------
1 | extends: cheminfo-typescript
2 |
--------------------------------------------------------------------------------
/.github/workflows/lactame.yml:
--------------------------------------------------------------------------------
1 | name: Deploy build on lactame.com
2 |
3 | on:
4 | release:
5 | types: [published]
6 |
7 | env:
8 | NODE_VERSION: 16.x
9 |
10 | jobs:
11 | deploy:
12 | runs-on: ubuntu-latest
13 | steps:
14 | - uses: actions/checkout@v3
15 | - name: Get package name
16 | run: echo "PACKAGE_NAME=$(jq .name package.json | tr -d '"')" >> $GITHUB_ENV
17 | - uses: actions/setup-node@v3
18 | with:
19 | node-version: ${{ env.NODE_VERSION }}
20 | - name: Install dependencies
21 | run: npm install
22 | - name: Build project
23 | run: npm run build
24 | - name: Deploy to lactame.com
25 | uses: zakodium/lactame-action@v1
26 | with:
27 | token: ${{ secrets.LACTAME_TOKEN }}
28 | name: ${{ env.PACKAGE_NAME }}
29 | folder: dist
30 |
--------------------------------------------------------------------------------
/.github/workflows/nodejs.yml:
--------------------------------------------------------------------------------
1 | name: Node.js CI
2 |
3 | on:
4 | push:
5 | branches:
6 | - main
7 | pull_request:
8 |
9 | jobs:
10 | nodejs:
11 | # Documentation: https://github.com/zakodium/workflows#nodejs-ci
12 | uses: zakodium/workflows/.github/workflows/nodejs.yml@nodejs-v1
13 | with:
14 | lint-check-types: true
15 |
--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
1 | name: Release
2 |
3 | on:
4 | push:
5 | branches:
6 | - main
7 |
8 | jobs:
9 | release:
10 | # Documentation: https://github.com/zakodium/workflows#release
11 | uses: zakodium/workflows/.github/workflows/release.yml@release-v1
12 | with:
13 | npm: true
14 | secrets:
15 | github-token: ${{ secrets.BOT_TOKEN }}
16 | npm-token: ${{ secrets.NPM_BOT_TOKEN }}
17 |
--------------------------------------------------------------------------------
/.github/workflows/typedoc.yml:
--------------------------------------------------------------------------------
1 | name: Deploy TypeDoc on GitHub pages
2 |
3 | on:
4 | workflow_dispatch:
5 | release:
6 | types: [published]
7 |
8 | env:
9 | NODE_VERSION: 16.x
10 | ENTRY_FILE: 'src/index.ts'
11 |
12 | jobs:
13 | deploy:
14 | runs-on: ubuntu-latest
15 | steps:
16 | - uses: actions/checkout@v3
17 | - uses: actions/setup-node@v3
18 | with:
19 | node-version: ${{ env.NODE_VERSION }}
20 | - name: Install dependencies
21 | run: npm install
22 | - name: Build documentation
23 | uses: zakodium/typedoc-action@v2
24 | with:
25 | entry: ${{ env.ENTRY_FILE }}
26 | - name: Deploy to GitHub pages
27 | uses: JamesIves/github-pages-deploy-action@releases/v4
28 | with:
29 | token: ${{ secrets.BOT_TOKEN }}
30 | branch: gh-pages
31 | folder: docs
32 | clean: true
33 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .idea
2 | node_modules
3 | .DS_Store
4 | coverage
5 | *.log
6 | dist/
7 | .vscode
8 | docs
9 | lib
10 | lib-esm
11 | .eslintcache
12 |
--------------------------------------------------------------------------------
/.npmrc:
--------------------------------------------------------------------------------
1 | package-lock=false
2 |
--------------------------------------------------------------------------------
/.prettierrc.json:
--------------------------------------------------------------------------------
1 | {
2 | "arrowParens": "always",
3 | "semi": true,
4 | "singleQuote": true,
5 | "tabWidth": 2,
6 | "trailingComma": "all"
7 | }
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Changelog
2 |
3 | ## [3.0.0](https://github.com/cheminfo/netcdfjs/compare/v2.0.2...v3.0.0) (2023-08-08)
4 |
5 |
6 | ### ⚠ BREAKING CHANGES
7 |
8 | * migrate to TypeScript ([#26](https://github.com/cheminfo/netcdfjs/issues/26))
9 |
10 | ### Code Refactoring
11 |
12 | * migrate to TypeScript ([#26](https://github.com/cheminfo/netcdfjs/issues/26)) ([9bf14de](https://github.com/cheminfo/netcdfjs/commit/9bf14dea6e2523304aae29d5f0520648dfb5eef3))
13 |
14 | ## [2.0.2](https://github.com/cheminfo/netcdfjs/compare/v0.7.0...v2.0.2) (2022-10-21)
15 |
16 |
17 | ### ⚠ BREAKING CHANGES
18 |
19 | * No more default export. You need to import the class using `const { NetCDFReader } = require("netcdfjs")`
20 |
21 | ### Bug Fixes
22 |
23 | * do not trime attribute values ([d8dd69c](https://github.com/cheminfo/netcdfjs/commit/d8dd69c6582a7372630fb991e537e2dbff1da68b))
24 | * use npm's "files" array instead of npmignore ([fd69b25](https://github.com/cheminfo/netcdfjs/commit/fd69b2575103c4cc16a91472c702a8716115066c))
25 |
26 |
27 | ### Miscellaneous Chores
28 |
29 | * Finalise es6 module migration ([3667a0b](https://github.com/cheminfo/netcdfjs/commit/3667a0b6be1c1ab444e46b620f38234dcac5c87c))
30 | * release correct version ([5f47151](https://github.com/cheminfo/netcdfjs/commit/5f471511c77d6176126a4198cde863e900a6e4bf))
31 |
32 | ### [2.0.1](https://github.com/cheminfo/netcdfjs/compare/v2.0.0...v2.0.1) (2021-09-07)
33 |
34 | ## [2.0.0](https://github.com/cheminfo/netcdfjs/compare/v0.7.0...v2.0.0) (2021-09-07)
35 |
36 |
37 | ### ⚠ BREAKING CHANGES
38 |
39 | * No more default export
40 | You need to import the class using `const { NetCDFReader } = require("netcdfjs")`
41 |
42 | ### Bug Fixes
43 |
44 | * do not trime attribute values ([d8dd69c](https://github.com/cheminfo/netcdfjs/commit/d8dd69c6582a7372630fb991e537e2dbff1da68b))
45 | * use npm's "files" array instead of npmignore ([fd69b25](https://github.com/cheminfo/netcdfjs/commit/fd69b2575103c4cc16a91472c702a8716115066c))
46 |
47 |
48 | ### Miscellaneous Chores
49 |
50 | * Finalise es6 module migration ([3667a0b](https://github.com/cheminfo/netcdfjs/commit/3667a0b6be1c1ab444e46b620f38234dcac5c87c))
51 |
52 |
53 | ## [0.3.3](https://github.com/cheminfo-js/netcdfjs/compare/v0.3.2...v0.3.3) (2018-11-02)
54 |
55 |
56 |
57 |
58 | # 0.1.0 (2016-10-31)
59 |
60 |
61 | ### Bug Fixes
62 |
63 | * export step size for record dimension and use it ([9c95ff6](https://github.com/cheminfo-js/netcdfjs/commit/9c95ff6))
64 |
65 |
66 | ### Features
67 |
68 | * read dimensions metadata ([aa3ae30](https://github.com/cheminfo-js/netcdfjs/commit/aa3ae30))
69 | * read global attributes metadata ([8dc9d71](https://github.com/cheminfo-js/netcdfjs/commit/8dc9d71))
70 | * read non-record variables ([c9a818a](https://github.com/cheminfo-js/netcdfjs/commit/c9a818a))
71 | * read record variables ([4455705](https://github.com/cheminfo-js/netcdfjs/commit/4455705))
72 | * read variables metadata ([11e4a68](https://github.com/cheminfo-js/netcdfjs/commit/11e4a68))
73 | * validates that it's a NetCDF file ([1439756](https://github.com/cheminfo-js/netcdfjs/commit/1439756))
74 |
75 |
76 |
77 | 0.0.1 / HEAD
78 | ============
79 |
80 | * first release
81 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2016 cheminfo
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
23 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # netcdfjs
2 |
3 | [![NPM version][npm-image]][npm-url]
4 | [![build status][ci-image]][ci-url]
5 | [![Test coverage][codecov-image]][codecov-url]
6 | [![npm download][download-image]][download-url]
7 |
8 | Read and explore NetCDF v3 files.
9 |
10 | ## Installation
11 |
12 | `$ npm install netcdfjs`
13 |
14 | ## [API Documentation](https://cheminfo.github.io/netcdfjs/)
15 |
16 | For more information about the NetCDF v3 file format grammar, see the [format specification](https://www.unidata.ucar.edu/software/netcdf/docs/file_format_specifications.html).
17 |
18 | ### Example
19 |
20 | ```js
21 | const { readFileSync } = require("fs");
22 | const { NetCDFReader } = require("netcdfjs");
23 |
24 | // http://www.unidata.ucar.edu/software/netcdf/examples/files.html
25 | const data = readFileSync("madis-sao.nc");
26 |
27 | const reader = new NetCDFReader(data); // read the header
28 | reader.getDataVariable("wmoId"); // go to offset and read it
29 | ```
30 |
31 | ## License
32 |
33 | [MIT](./LICENSE)
34 |
35 | [npm-image]: https://img.shields.io/npm/v/netcdfjs.svg
36 | [npm-url]: https://www.npmjs.com/package/netcdfjs
37 | [ci-image]: https://github.com/cheminfo/netcdfjs/workflows/Node.js%20CI/badge.svg?branch=main
38 | [ci-url]: https://github.com/cheminfo/netcdfjs/actions?query=workflow%3A%22Node.js+CI%22
39 | [codecov-image]: https://img.shields.io/codecov/c/github/cheminfo/netcdfjs.svg
40 | [codecov-url]: https://codecov.io/gh/cheminfo/netcdfjs
41 | [download-image]: https://img.shields.io/npm/dm/netcdfjs.svg
42 | [download-url]: https://www.npmjs.com/package/netcdfjs
--------------------------------------------------------------------------------
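A slightly longer exploration sketch complementing the README example above (the file name is illustrative; every call used here is part of the public `NetCDFReader` API in `src/parser.ts`):

```ts
import { readFileSync } from 'node:fs';

import { NetCDFReader } from 'netcdfjs';

// Any NetCDF v3.x file works here; madis-sao.nc is the file used in the README.
const reader = new NetCDFReader(readFileSync('madis-sao.nc'));

console.log(reader.version); // 'classic format' or '64-bit offset format'
console.log(reader.dimensions); // [{ name: 'maxAutoStaLen', size: 6 }, ...]
console.log(reader.getAttribute('cdlDate')); // attribute value, or null if absent

// The header is parsed eagerly; variable data is only read on demand.
console.log(reader.getDataVariable('wmoId').length);
console.log(reader.toString()); // human-readable summary of the whole file
```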
/demo/agilent.ts:
--------------------------------------------------------------------------------
1 | import { readFileSync as rfs } from 'node:fs';
2 | import { join } from 'node:path';
3 |
4 | import { NetCDFReader } from '../src/index';
5 |
6 | const data = rfs(join(__dirname, '../src/__tests__/files/agilent_hplc.cdf'));
7 |
8 | const reader = new NetCDFReader(data);
9 |
10 | const selectedVariable = reader.variables[4];
11 |
12 | reader.getDataVariable(selectedVariable);
13 |
14 | for (const variable of reader.variables) {
15 | console.log(variable.name, reader.getDataVariable(variable));
16 | }
17 |
18 | const ordinates = reader.getDataVariable(reader.variables[5]);
19 | console.log(Math.max(...(ordinates as number[])));
20 | console.log(Math.min(...(ordinates as number[])));
--------------------------------------------------------------------------------
/jest.config.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | preset: 'ts-jest/presets/js-with-ts',
3 | testEnvironment: 'node',
4 | };
5 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "netcdfjs",
3 | "version": "3.0.0",
4 | "description": "Read and explore NetCDF files",
5 | "main": "./lib/index.js",
6 | "module": "./lib-esm/index.js",
7 | "types": "./lib/index.d.ts",
8 | "files": [
9 | "src",
10 | "lib",
11 | "lib-esm"
12 | ],
13 | "keywords": [
14 | "netcdf",
15 | "nc",
16 | "data",
17 | "format"
18 | ],
19 | "author": "Miguel Asencio (https://github.com/maasencioh)",
20 | "repository": "cheminfo/netcdfjs",
21 | "bugs": {
22 | "url": "https://github.com/cheminfo/netcdfjs/issues"
23 | },
24 | "homepage": "https://github.com/cheminfo/netcdfjs",
25 | "license": "MIT",
26 | "scripts": {
27 | "check-types": "tsc --noEmit",
28 | "clean": "rimraf lib lib-esm",
29 | "eslint": "eslint src --cache",
30 | "eslint-fix": "npm run eslint -- --fix",
31 | "prepack": "npm run tsc",
32 | "prettier": "prettier --check src",
33 | "prettier-write": "prettier --write src",
34 | "test": "npm run test-only && npm run eslint && npm run prettier && npm run check-types",
35 | "test-only": "jest --coverage",
36 | "tsc": "npm run clean && npm run tsc-cjs && npm run tsc-esm",
37 | "tsc-cjs": "tsc --project tsconfig.cjs.json",
38 | "tsc-esm": "tsc --project tsconfig.esm.json"
39 | },
40 | "devDependencies": {
41 | "@types/jest": "^29.5.3",
42 | "cheminfo-types": "^1.7.2",
43 | "eslint": "^8.46.0",
44 | "eslint-config-cheminfo-typescript": "^12.0.4",
45 | "jest": "^29.6.2",
46 | "prettier": "^3.0.1",
47 | "ts-jest": "^29.1.1",
48 | "typescript": "^5.1.6"
49 | },
50 | "dependencies": {
51 | "iobuffer": "^5.3.2"
52 | }
53 | }
54 |
--------------------------------------------------------------------------------
/src/.npmignore:
--------------------------------------------------------------------------------
1 | __tests__
2 | .npmignore
3 |
--------------------------------------------------------------------------------
/src/__tests__/__snapshots__/toString.test.ts.snap:
--------------------------------------------------------------------------------
1 | // Jest Snapshot v1, https://goo.gl/fbAQLP
2 |
3 | exports[`toString 1`] = `
4 | "DIMENSIONS
5 | _2_byte_string = size: 2
6 | _4_byte_string = size: 4
7 | _8_byte_string = size: 8
8 | _16_byte_string = size: 16
9 | _32_byte_string = size: 32
10 | _64_byte_string = size: 64
11 | _128_byte_string = size: 128
12 | _255_byte_string = size: 255
13 | range = size: 2
14 | point_number = size: 0
15 | error_number = size: 1
16 | scan_number = size: 6401
17 | instrument_number = size: 1
18 |
19 | GLOBAL ATTRIBUTES
20 | dataset_completeness = C1+C2
21 | ms_template_revision = 1.0.1
22 | netcdf_revision = 2.3.2
23 | languages = English
24 | administrative_comments = 1% CH2Cl2
25 | dataset_origin = Santa Clara, CA
26 | netcdf_file_date_time_stamp = 20161012052159+0200
27 | experiment_title = P071 Essence super BP
28 | experiment_date_time_stamp = 20070923040800+0200
29 | operator_name = SC
30 | external_file_ref_0 = FIRE_RTL.M
31 | experiment_type = Centroided Mass Spectrum
32 | number_of_times_processed = 1
33 | number_of_times_calibrated = 0
34 | sample_state = Other State
35 | test_separation_type = No Chromatography
36 | test_ms_inlet = Capillary Direct
37 | test_ionization_mode = Electron Impact
38 | test_ionization_polarity = Positive Polarity
39 | test_detector_type = Electron Multiplier
40 | test_resolution_type = Constant Resolution
41 | test_scan_function = Mass Scan
42 | test_scan_direction = Up
43 | test_scan_law = Linear
44 | raw_data_mass_format = Float
45 | raw_data_time_format = Short
46 | raw_data_intensity_format = Float
47 |
48 | VARIABLES:
49 | error_log = [" "," "," "," "," "," "," "," "," "," "," "," "," (length: 64)
50 | a_d_sampling_rate = [-9999,-9999,-9999,-9999,-9999,-9999,-9999,-9999,- (length: 6401)
51 | a_d_coaddition_factor = [-9999,-9999,-9999,-9999,-9999,-9999,-9999,-9999,- (length: 6402)
52 | scan_acquisition_time = [5.25,5.84,6.428999999999999,7.019,7.609,8.199,8.7 (length: 6401)
53 | scan_duration = [-9999,-9999,-9999,-9999,-9999,-9999,-9999,-9999,- (length: 6401)
54 | inter_scan_time = [-9999,-9999,-9999,-9999,-9999,-9999,-9999,-9999,- (length: 6401)
55 | resolution = [-9999,-9999,-9999,-9999,-9999,-9999,-9999,-9999,- (length: 6401)
56 | actual_scan_number = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19 (length: 6401)
57 | total_intensity = [3134,3157,3085,3134,3093,3113,3061,3057,3030,3166 (length: 6401)
58 | mass_range_min = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 (length: 6401)
59 | mass_range_max = [206.89999389648438,206.89999389648438,207,207.100 (length: 6401)
60 | time_range_min = [-9999,-9999,-9999,-9999,-9999,-9999,-9999,-9999,- (length: 6401)
61 | time_range_max = [-9999,-9999,-9999,-9999,-9999,-9999,-9999,-9999,- (length: 6401)
62 | scan_index = [0,11,22,33,44,55,66,76,88,99,111,122,134,145,156, (length: 6401)
63 | point_count = [11,11,11,11,11,11,10,12,11,12,11,12,11,11,11,11,1 (length: 6401)
64 | flag_count = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 (length: 6401)
65 | mass_values = [16,17,18.100000381469727,28,32,35,36,38,40,44.099 (length: 157201)
66 | time_values = [9.969209968386869e+36,9.969209968386869e+36,9.969 (length: 157201)
67 | intensity_values = [37,293,1243,737,420,45,196,72,22,35,34,28,299,123 (length: 157201)
68 | instrument_name = ["G","a","s"," ","C","h","r","o","m","a","t","o"," (length: 32)
69 | instrument_id = [" "," "," "," "," "," "," "," "," "," "," "," "," (length: 32)
70 | instrument_mfr = [" "," "," "," "," "," "," "," "," "," "," "," "," (length: 32)
71 | instrument_model = [" "," "," "," "," "," "," "," "," "," "," "," "," (length: 32)
72 | instrument_serial_no = [" "," "," "," "," "," "," "," "," "," "," "," "," (length: 32)
73 | instrument_sw_version = [" "," "," "," "," "," "," "," "," "," "," "," "," (length: 32)
74 | instrument_fw_version = [" "," "," "," "," "," "," "," "," "," "," "," "," (length: 32)
75 | instrument_os_version = [" "," "," "," "," "," "," "," "," "," "," "," "," (length: 32)
76 | instrument_app_version = [" "," "," "," "," "," "," "," "," "," "," "," "," (length: 32)
77 | instrument_comments = [" "," "," "," "," "," "," "," "," "," "," "," "," (length: 32)"
78 | `;
79 |
--------------------------------------------------------------------------------
/src/__tests__/attributeExists.test.ts:
--------------------------------------------------------------------------------
1 | import { readFileSync } from 'fs';
2 |
3 | import { NetCDFReader } from '../parser';
4 |
5 | const pathFiles = `${__dirname}/files/`;
6 |
7 | test('attributeExists', () => {
8 | const data = readFileSync(`${pathFiles}P071.CDF`);
9 |
10 | const reader = new NetCDFReader(data);
11 | expect(reader.attributeExists('operator_name')).toBe(true);
12 | expect(reader.attributeExists('operator_nameXX')).toBe(false);
13 | });
14 |
--------------------------------------------------------------------------------
/src/__tests__/dataVariableExists.test.ts:
--------------------------------------------------------------------------------
1 | import { readFileSync } from 'fs';
2 |
3 | import { NetCDFReader } from '../parser';
4 |
5 | const pathFiles = `${__dirname}/files/`;
6 |
7 | test('dataVariableExists', () => {
8 | const data = readFileSync(`${pathFiles}P071.CDF`);
9 |
10 | const reader = new NetCDFReader(data);
11 | expect(reader.dataVariableExists('instrument_name')).toBe(true);
12 | expect(reader.dataVariableExists('instrument_nameXX')).toBe(false);
13 | });
14 |
--------------------------------------------------------------------------------
/src/__tests__/files/P071.CDF:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cheminfo/netcdfjs/c8b73f5d39d8c62ba9b5c1e83c96420d86e2d5bd/src/__tests__/files/P071.CDF
--------------------------------------------------------------------------------
/src/__tests__/files/agilent_hplc.cdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cheminfo/netcdfjs/c8b73f5d39d8c62ba9b5c1e83c96420d86e2d5bd/src/__tests__/files/agilent_hplc.cdf
--------------------------------------------------------------------------------
/src/__tests__/files/ichthyop.nc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cheminfo/netcdfjs/c8b73f5d39d8c62ba9b5c1e83c96420d86e2d5bd/src/__tests__/files/ichthyop.nc
--------------------------------------------------------------------------------
/src/__tests__/files/madis-sao.nc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cheminfo/netcdfjs/c8b73f5d39d8c62ba9b5c1e83c96420d86e2d5bd/src/__tests__/files/madis-sao.nc
--------------------------------------------------------------------------------
/src/__tests__/files/model1_md2.nc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cheminfo/netcdfjs/c8b73f5d39d8c62ba9b5c1e83c96420d86e2d5bd/src/__tests__/files/model1_md2.nc
--------------------------------------------------------------------------------
/src/__tests__/files/not_nc.txt:
--------------------------------------------------------------------------------
1 | This is not a NetCDF file
--------------------------------------------------------------------------------
/src/__tests__/getAttribute.test.ts:
--------------------------------------------------------------------------------
1 | import { readFileSync } from 'fs';
2 |
3 | import { NetCDFReader } from '../parser';
4 |
5 | const pathFiles = `${__dirname}/files/`;
6 |
7 | test('getAttribute', () => {
8 | const data = readFileSync(`${pathFiles}P071.CDF`);
9 |
10 | const reader = new NetCDFReader(data);
11 | expect(reader.getAttribute('operator_name')).toBe('SC');
12 | });
13 |
--------------------------------------------------------------------------------
/src/__tests__/getDataVariableAsString.test.ts:
--------------------------------------------------------------------------------
1 | import { readFileSync } from 'fs';
2 |
3 | import { NetCDFReader } from '../parser';
4 |
5 | const pathFiles = `${__dirname}/files/`;
6 |
7 | test('getDataVariableAsString', () => {
8 | const data = readFileSync(`${pathFiles}P071.CDF`);
9 |
10 | const reader = new NetCDFReader(data);
11 | expect(reader.getDataVariableAsString('instrument_name')).toBe(
12 | 'Gas Chromatograph',
13 | );
14 | });
15 |
--------------------------------------------------------------------------------
/src/__tests__/index.test.ts:
--------------------------------------------------------------------------------
1 | import { readFileSync } from 'fs';
2 |
3 | import { NetCDFReader } from '../parser';
4 |
5 | const pathFiles = `${__dirname}/files/`;
6 |
7 | describe('Read file', () => {
8 | it('Throws on non NetCDF file', () => {
9 | const data = readFileSync(`${pathFiles}not_nc.txt`);
10 | expect(function notValid() {
11 | return new NetCDFReader(data);
12 | }).toThrow('Not a valid NetCDF v3.x file: should start with CDF');
13 | });
14 |
15 | it('read header information', () => {
16 | // http://www.unidata.ucar.edu/software/netcdf/examples/files.html
17 | // http://www.unidata.ucar.edu/software/netcdf/examples/madis-sao.cdl
18 | const data = readFileSync(`${pathFiles}madis-sao.nc`);
19 |
20 | const reader = new NetCDFReader(data);
21 | expect(reader.version).toBe('classic format');
22 | expect(reader.recordDimension).toStrictEqual({
23 | length: 178,
24 | id: 21,
25 | name: 'recNum',
26 | recordStep: 1220,
27 | });
28 | expect(reader.dimensions).toStrictEqual([
29 | { name: 'maxAutoStaLen', size: 6 },
30 | { name: 'maxAutoWeather', size: 5 },
31 | { name: 'maxAutoWeaLen', size: 12 },
32 | { name: 'maxCldTypeLen', size: 5 },
33 | { name: 'maxCloudTypes', size: 5 },
34 | { name: 'maxDataSrcLen', size: 8 },
35 | { name: 'maxRepLen', size: 5 },
36 | { name: 'maxSAOLen', size: 256 },
37 | { name: 'maxSkyCover', size: 5 },
38 | { name: 'maxSkyLen', size: 8 },
39 | { name: 'maxSkyMethLen', size: 3 },
40 | { name: 'maxStaNamLen', size: 5 },
41 | { name: 'maxWeatherNum', size: 5 },
42 | { name: 'maxWeatherLen', size: 40 },
43 | { name: 'QCcheckNum', size: 10 },
44 | { name: 'QCcheckNameLen', size: 60 },
45 | { name: 'ICcheckNum', size: 55 },
46 | { name: 'ICcheckNameLen', size: 72 },
47 | { name: 'maxStaticIds', size: 350 },
48 | { name: 'totalIdLen', size: 6 },
49 | { name: 'nInventoryBins', size: 24 },
50 | { name: 'recNum', size: 0 },
51 | ]);
52 |
53 | expect(reader.globalAttributes[0]).toStrictEqual({
54 | name: 'cdlDate',
55 | type: 'char',
56 | value: '20010327',
57 | });
58 | expect(reader.globalAttributes[3]).toStrictEqual({
59 | name: 'filePeriod',
60 | type: 'int',
61 | value: 3600,
62 | });
63 |
64 | expect(reader.variables[0]).toStrictEqual({
65 | name: 'nStaticIds',
66 | dimensions: [],
67 | attributes: [
68 | {
69 | name: '_FillValue',
70 | type: 'int',
71 | value: 0,
72 | },
73 | ],
74 | type: 'int',
75 | size: 4,
76 | offset: 39208,
77 | record: false,
78 | });
79 | expect(reader.variables[11]).toStrictEqual({
80 | name: 'wmoId',
81 | dimensions: [21],
82 | attributes: [
83 | { name: 'long_name', type: 'char', value: 'WMO numeric station ID' },
84 | { name: '_FillValue', type: 'int', value: -2147483647 },
85 | { name: 'valid_range', type: 'int', value: [1, 89999] },
86 | { name: 'reference', type: 'char', value: 'station table' },
87 | ],
88 | type: 'int',
89 | size: 4,
90 | offset: 48884,
91 | record: true,
92 | });
93 | });
94 |
95 | it('read non-record variable', () => {
96 | const data = readFileSync(`${pathFiles}madis-sao.nc`);
97 | const reader = new NetCDFReader(data);
98 |
99 | expect(reader.getDataVariable('nStaticIds')[0]).toBe(145);
100 | });
101 |
102 | it('read 2 dimensional variable', () => {
103 | const data = readFileSync(`${pathFiles}ichthyop.nc`);
104 | const reader = new NetCDFReader(data);
105 | expect(reader.getDataVariable('time')).toHaveLength(49);
106 | expect(reader.getDataVariable('time')[0]).toBe(1547070300);
107 | expect(reader.getDataVariable('lat')).toHaveLength(49);
108 | expect(reader.getDataVariable('lat')[0]).toHaveLength(1000);
109 | const lat = reader.getDataVariable('lat')[0] as number[];
110 | expect(lat[0]).toBe(53.26256561279297);
111 | });
112 |
113 | it('read record variable with string', () => {
114 | const data = readFileSync(`${pathFiles}madis-sao.nc`);
115 | const reader = new NetCDFReader(data);
116 |
117 | const record = reader.getDataVariable('wmoId');
118 | expect(record[0]).toBe(71419);
119 | expect(record[1]).toBe(71415);
120 | expect(record[2]).toBe(71408);
121 | });
122 |
123 | it('read non-record variable with object', () => {
124 | const data = readFileSync(`${pathFiles}madis-sao.nc`);
125 | const reader = new NetCDFReader(data);
126 | const variables = reader.variables;
127 |
128 | const withString = reader.getDataVariable('staticIds');
129 | const withObject = reader.getDataVariable(variables[1]);
130 | expect(withString[0]).toBe('W');
131 | expect(withString[1]).toBe('A');
132 | expect(withString[2]).toBe('F');
133 | expect(withString[0]).toBe(withObject[0]);
134 | expect(withString[1]).toBe(withObject[1]);
135 | expect(withString[2]).toBe(withObject[2]);
136 | });
137 |
138 | it('read non-existent variable string', () => {
139 | const data = readFileSync(`${pathFiles}madis-sao.nc`);
140 | const reader = new NetCDFReader(data);
141 |
142 | expect(reader.getDataVariable.bind(reader, "n'importe quoi")).toThrow(
143 | 'Not a valid NetCDF v3.x file: variable not found',
144 | );
145 | });
146 |
147 | it('read 64 bit offset file', () => {
148 | const data = readFileSync(`${pathFiles}model1_md2.nc`);
149 | const reader = new NetCDFReader(data);
150 | expect(reader.version).toBe('64-bit offset format');
151 | expect(reader.getDataVariable('cell_angular')[0]).toBe('a');
152 | expect(reader.getDataVariable('cell_spatial')[0]).toBe('a');
153 | });
154 |
155 | it('read agilent hplc file file', () => {
156 | const data = readFileSync(`${pathFiles}agilent_hplc.cdf`);
157 | const reader = new NetCDFReader(data);
158 |
159 | expect(reader.version).toBe('classic format');
160 |
161 | const variables = [];
162 |
163 | for (const variable of reader.variables) {
164 | const value = reader.getDataVariable(variable);
165 | variables.push({ value, ...variable });
166 | }
167 | expect(variables[3].value).toStrictEqual([0.012000000104308128]);
168 | expect(variables).toHaveLength(24);
169 | expect(reader.getDataVariable('ordinate_values')).toHaveLength(4651);
170 | });
171 | });
172 |
--------------------------------------------------------------------------------
/src/__tests__/toString.test.ts:
--------------------------------------------------------------------------------
1 | import { readFileSync } from 'fs';
2 |
3 | import { NetCDFReader } from '../parser';
4 |
5 | const pathFiles = `${__dirname}/files/`;
6 |
7 | test('toString', () => {
8 | const data = readFileSync(`${pathFiles}P071.CDF`);
9 |
10 | const reader = new NetCDFReader(data);
11 | expect(reader.toString()).toMatchSnapshot();
12 | });
13 |
--------------------------------------------------------------------------------
/src/__tests__/types.test.ts:
--------------------------------------------------------------------------------
1 | import { num2bytes, num2str, str2num } from '../types';
2 |
3 | describe('test type mappings', () => {
4 | test('number to string', () => {
5 | expect(num2str(1)).toBe('byte');
6 | expect(num2str(2)).toBe('char');
7 | expect(num2str(3)).toBe('short');
8 | expect(num2str(4)).toBe('int');
9 | expect(num2str(5)).toBe('float');
10 | expect(num2str(6)).toBe('double');
11 | expect(num2str(7)).toBe('undefined');
12 | });
13 | test('num to bytes', () => {
14 | expect(num2bytes(1)).toBe(1);
15 | expect(num2bytes(2)).toBe(1);
16 | expect(num2bytes(3)).toBe(2);
17 | expect(num2bytes(4)).toBe(4);
18 | expect(num2bytes(5)).toBe(4);
19 | expect(num2bytes(6)).toBe(8);
20 | expect(num2bytes(7)).toBe(-1);
21 | });
22 | test('string to number', () => {
23 | expect(str2num('byte')).toBe(1);
24 | expect(str2num('char')).toBe(2);
25 | expect(str2num('short')).toBe(3);
26 | expect(str2num('int')).toBe(4);
27 | expect(str2num('float')).toBe(5);
28 | expect(str2num('double')).toBe(6);
29 | expect(str2num('undefined')).toBe(-1);
30 | });
31 | });
32 |
--------------------------------------------------------------------------------
/src/data.ts:
--------------------------------------------------------------------------------
1 | import { IOBuffer } from 'iobuffer';
2 |
3 | import { Header } from './header';
4 | import { num2bytes, str2num, readType } from './types';
5 | // const STREAMING = 4294967295;
6 |
7 | /**
8 | * Read data for the given non-record variable
9 | * @param buffer - Buffer for the file data
10 | * @param variable - Variable metadata
11 | * @return - Data of the element
12 | */
13 | export function nonRecord(
14 | buffer: IOBuffer,
15 | variable: Header['variables'][number],
16 | ): Array<ReturnType<typeof readType>> {
17 | // variable type
18 | const type = str2num(variable.type);
19 |
20 | // size of the data
21 | const size = variable.size / num2bytes(type);
22 |
23 | // iterates over the data
24 | const data = new Array(size);
25 | for (let i = 0; i < size; i++) {
26 | data[i] = readType(buffer, type, 1);
27 | }
28 |
29 | return data;
30 | }
31 |
32 | /**
33 | * Read data for the given record variable
34 | * @param buffer - Buffer for the file data
35 | * @param variable - Variable metadata
36 | * @param recordDimension - Record dimension metadata
37 | * @return - Data of the element
38 | */
39 | export function record(
40 | buffer: IOBuffer,
41 | variable: Header['variables'][number],
42 | recordDimension: Header['recordDimension'],
43 | ): Array<ReturnType<typeof readType>> {
44 | // variable type
45 | const type = str2num(variable.type);
46 | const width = variable.size ? variable.size / num2bytes(type) : 1;
47 |
48 | // size of the data
49 | // TODO streaming data
50 | const size = recordDimension.length;
51 |
52 | // iterates over the data
53 | const data = new Array(size);
54 | const step = recordDimension.recordStep;
55 | if (step) {
56 | for (let i = 0; i < size; i++) {
57 | const currentOffset = buffer.offset;
58 | data[i] = readType(buffer, type, width);
59 | buffer.seek(currentOffset + step);
60 | }
61 | } else {
62 | throw new Error('recordDimension.recordStep is undefined');
63 | }
64 |
65 | return data;
66 | }
67 |
--------------------------------------------------------------------------------
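For context, a minimal sketch of how the two readers above are selected; this mirrors what `NetCDFReader.getDataVariable` in `src/parser.ts` does, and the file path is illustrative:

```ts
import { readFileSync } from 'node:fs';

import { NetCDFReader } from 'netcdfjs';

const reader = new NetCDFReader(readFileSync('madis-sao.nc'));

for (const variable of reader.variables) {
  // `record` is true when the variable's first dimension is the unlimited
  // (record) dimension; getDataVariable then delegates to record(),
  // otherwise to nonRecord().
  const values = reader.getDataVariable(variable);
  console.log(variable.name, variable.record, values.length);
}
```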
/src/header.ts:
--------------------------------------------------------------------------------
1 | import { IOBuffer } from 'iobuffer';
2 |
3 | import { num2str, readType } from './types';
4 | import { padding, notNetcdf, readName } from './utils';
5 |
6 | // Grammar constants
7 | const ZERO = 0;
8 | const NC_DIMENSION = 10;
9 | const NC_VARIABLE = 11;
10 | const NC_ATTRIBUTE = 12;
11 | const NC_UNLIMITED = 0;
12 |
13 | export interface Header {
14 | recordDimension: {
15 | /**
16 |     Length of the record dimension, i.e. the number of records.
17 |     The sum of the varSizes of all record variables gives `recordStep`.
18 | */
19 | length: number;
20 | id?: number;
21 | name?: string;
22 | recordStep?: number;
23 | };
24 | // Version
25 | version: number;
26 | /* List of dimensions*/
27 | dimensions: Dimensions['dimensions'];
28 | /* List of global attributes */
29 | globalAttributes: Attribute[];
30 | /* List of variables*/
31 | variables: Variables['variables'];
32 | }
33 | /**
34 |  * Reads the file header, see {@link Header}
35 |  * @param buffer - Buffer for the file data
36 |  * @param version - Version of the file
37 |  * @returns The parsed header
38 | */
39 | export function header(buffer: IOBuffer, version: number): Header {
40 |   const header: Partial<Header> = { version };
41 |
42 | const recordDimension: Header['recordDimension'] = {
43 | length: buffer.readUint32(),
44 | };
45 |
46 | const dimList = dimensionsList(buffer);
47 |
48 | if (!Array.isArray(dimList)) {
49 | recordDimension.id = dimList.recordId;
50 | recordDimension.name = dimList.recordName;
51 | header.dimensions = dimList.dimensions;
52 | }
53 |
54 | header.globalAttributes = attributesList(buffer);
55 |
56 | const variables = variablesList(buffer, recordDimension?.id, version);
57 | if (!Array.isArray(variables)) {
58 | header.variables = variables.variables;
59 | recordDimension.recordStep = variables.recordStep;
60 | }
61 |
62 | header.recordDimension = recordDimension;
63 |
64 | return header as Header;
65 | }
66 |
67 | export interface Dimensions {
68 | /* that is an array of dimension object:*/
69 | dimensions: Array<{
70 | /* name of the dimension*/
71 | name: string;
72 | /* size of the dimension */
73 | size: number;
74 | }>;
75 | /* id of the dimension that has unlimited size or undefined,*/
76 | recordId?: number;
77 | /* name of the dimension that has unlimited size */
78 | recordName?: string;
79 | }
80 |
81 | /**
82 | * List of dimensions
83 | * @param buffer - Buffer for the file data
84 | * @return List of dimensions
85 | */
86 | function dimensionsList(buffer: IOBuffer): Dimensions | [] {
87 |   const result: Partial<Dimensions> = {};
88 | let recordId: number | undefined, recordName: string | undefined;
89 |
90 | const dimList = buffer.readUint32();
91 |
92 | let dimensions: Dimensions['dimensions'];
93 |
94 | if (dimList === ZERO) {
95 | notNetcdf(
96 | buffer.readUint32() !== ZERO,
97 | 'wrong empty tag for list of dimensions',
98 | );
99 | return [];
100 | } else {
101 | notNetcdf(dimList !== NC_DIMENSION, 'wrong tag for list of dimensions');
102 |
103 | // Length of dimensions
104 | const dimensionSize = buffer.readUint32();
105 | dimensions = new Array(dimensionSize);
106 |
107 | //populate `name` and `size` for each dimension
108 | for (let dim = 0; dim < dimensionSize; dim++) {
109 | // Read name
110 | const name = readName(buffer);
111 |
112 | // Read dimension size
113 | const size = buffer.readUint32();
114 | if (size === NC_UNLIMITED) {
115 | // in netcdf 3 one field can be of size unlimited
116 | recordId = dim;
117 | recordName = name;
118 | }
119 |
120 | dimensions[dim] = {
121 | name,
122 | size,
123 | };
124 | }
125 | }
126 | if (recordId !== undefined) {
127 | result.recordId = recordId;
128 | }
129 | if (recordName !== undefined) {
130 | result.recordName = recordName;
131 | }
132 | result.dimensions = dimensions;
133 | return result as Dimensions;
134 | }
135 |
136 | export interface Attribute {
137 | /* name of the attribute */
138 | name: string;
139 | /* type of the attribute */
140 | type: string;
141 | /* value of the attribute */
142 | value: number | string;
143 | }
144 | /**
145 | * List of attributes
146 | * @param buffer - Buffer for the file data
147 | * @return - List of attributes with:
148 | */
149 | function attributesList(buffer: IOBuffer): Attribute[] {
150 | const gAttList = buffer.readUint32();
151 | let attributes;
152 | if (gAttList === ZERO) {
153 | notNetcdf(
154 | buffer.readUint32() !== ZERO,
155 | 'wrong empty tag for list of attributes',
156 | );
157 | return [];
158 | } else {
159 | notNetcdf(gAttList !== NC_ATTRIBUTE, 'wrong tag for list of attributes');
160 |
161 | // Length of attributes
162 | const attributeSize = buffer.readUint32();
163 | attributes = new Array(attributeSize);
164 | // Populate `name`, `type` and `value` for each attribute
165 | for (let gAtt = 0; gAtt < attributeSize; gAtt++) {
166 | // Read name
167 | const name = readName(buffer);
168 |
169 | // Read type
170 | const type = buffer.readUint32();
171 | notNetcdf(type < 1 || type > 6, `non valid type ${type}`);
172 |
173 | // Read attribute
174 | const size = buffer.readUint32();
175 | const value = readType(buffer, type, size);
176 |
177 | // Apply padding
178 | padding(buffer);
179 |
180 | attributes[gAtt] = {
181 | name,
182 | type: num2str(type),
183 | value,
184 | };
185 | }
186 | }
187 | return attributes;
188 | }
189 |
190 | export interface Variable {
191 | /* name of the variable */
192 | name: string;
193 | /* Array with the dimension IDs of the variable*/
194 | dimensions: number[];
195 | /* Array with the attributes of the variable*/
196 |   attributes: Attribute[];
197 | /* type of the variable*/
198 | type: string;
199 | /* size of the variable */
200 | size: number;
202 |   /* offset where the variable begins */
202 | offset: number;
204 |   /* True if it is a record variable (i.e. along the unlimited dimension), false otherwise */
204 | record: boolean;
205 | }
206 | interface Variables {
207 | variables: Variable[];
208 | recordStep: number;
209 | }
210 | /**
211 | * @param buffer - Buffer for the file data
212 | * @param recordId - Id of the unlimited dimension (also called record dimension)
213 | * This value may be undefined if there is no unlimited dimension
214 | * @param version - Version of the file
215 | * @return - Number of recordStep and list of variables @see {@link Variables}
216 | */
217 | function variablesList(
218 | buffer: IOBuffer,
219 | recordId: number | undefined,
220 | version: number,
221 | ): Variables | [] {
222 | const varList = buffer.readUint32();
223 | let recordStep = 0;
224 | let variables;
225 | if (varList === ZERO) {
226 | notNetcdf(
227 | buffer.readUint32() !== ZERO,
228 | 'wrong empty tag for list of variables',
229 | );
230 | return [];
231 | } else {
232 | notNetcdf(varList !== NC_VARIABLE, 'wrong tag for list of variables');
233 |
234 | // Length of variables
235 | const variableSize = buffer.readUint32();
236 | variables = new Array(variableSize);
237 | for (let v = 0; v < variableSize; v++) {
238 | // Read name
239 | const name = readName(buffer);
240 |
241 | // Read dimensionality of the variable
242 | const dimensionality = buffer.readUint32();
243 |
244 | // Index into the list of dimensions
245 | const dimensionsIds = new Array(dimensionality);
246 | for (let dim = 0; dim < dimensionality; dim++) {
247 | dimensionsIds[dim] = buffer.readUint32();
248 | }
249 |
250 |       // Read the variable's attributes
251 | const attributes = attributesList(buffer);
252 |
253 | // Read type
254 | const type = buffer.readUint32();
255 |       notNetcdf(type < 1 || type > 6, `non valid type ${type}`);
256 |
257 | // Read variable size
258 | // The 32-bit varSize field is not large enough to contain the size of variables that require
259 | // more than 2^32 - 4 bytes, so 2^32 - 1 is used in the varSize field for such variables.
260 | const varSize = buffer.readUint32();
261 |
262 | // Read offset
263 | let offset = buffer.readUint32();
264 | if (version === 2) {
265 | notNetcdf(offset > 0, 'offsets larger than 4GB not supported');
266 | offset = buffer.readUint32();
267 | }
268 |
269 | let record = false;
270 | // Count amount of record variables
271 | if (typeof recordId !== 'undefined' && dimensionsIds[0] === recordId) {
272 | recordStep += varSize;
273 | record = true;
274 | }
275 | variables[v] = {
276 | name,
277 | dimensions: dimensionsIds,
278 | attributes,
279 | type: num2str(type),
280 | size: varSize,
281 | offset,
282 | record,
283 | };
284 | }
285 | }
286 | return {
287 | variables,
288 | recordStep,
289 | };
290 | }
291 |
--------------------------------------------------------------------------------
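As a concrete reference for the structures above, this is roughly what the parsed header looks like for the bundled `madis-sao.nc` test file (the values shown in the comments come from `src/__tests__/index.test.ts`):

```ts
import { readFileSync } from 'node:fs';

import { NetCDFReader } from 'netcdfjs';

const reader = new NetCDFReader(
  readFileSync('src/__tests__/files/madis-sao.nc'),
);

// Record dimension metadata:
// { length: 178, id: 21, name: 'recNum', recordStep: 1220 }
console.log(reader.recordDimension);

// Each dimension is { name, size }, e.g. { name: 'maxAutoStaLen', size: 6 }.
console.log(reader.dimensions[0]);

// Each attribute is { name, type, value }, e.g.
// { name: 'cdlDate', type: 'char', value: '20010327' }.
console.log(reader.globalAttributes[0]);
```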
/src/index.ts:
--------------------------------------------------------------------------------
1 | export * from './parser';
2 | export type { Header, Attribute, Variable } from './header';
3 |
--------------------------------------------------------------------------------
/src/parser.ts:
--------------------------------------------------------------------------------
1 | import { IOBuffer } from 'iobuffer';
2 |
3 | import { record, nonRecord } from './data';
4 | import { Header, header } from './header';
5 | import { toString } from './toString';
6 | import { notNetcdf } from './utils';
7 |
8 | /**
9 | * Reads a NetCDF v3.x file
10 | * [See specification](https://www.unidata.ucar.edu/software/netcdf/docs/file_format_specifications.html)
11 | * @param data - ArrayBuffer or any Typed Array (including Node.js' Buffer from v4) with the data
12 | * @constructor
13 | */
14 | export class NetCDFReader {
15 | public header: Header;
16 | public buffer: IOBuffer;
17 |
18 | constructor(data: BinaryData) {
19 | const buffer = new IOBuffer(data);
20 | buffer.setBigEndian();
21 |
22 | // Validate that it's a NetCDF file
23 | notNetcdf(buffer.readChars(3) !== 'CDF', 'should start with CDF');
24 |
25 | // Check the NetCDF format
26 | const version = buffer.readByte();
27 | notNetcdf(version > 2, 'unknown version');
28 |
29 | // Read the header
30 | this.header = header(buffer, version);
31 | this.buffer = buffer;
32 | }
33 |
34 | /**
35 | * @return - Version for the NetCDF format
36 | */
37 | get version() {
38 | if (this.header.version === 1) {
39 | return 'classic format';
40 | } else {
41 | return '64-bit offset format';
42 | }
43 | }
44 |
45 | /**
46 | * @return {object} - Metadata for the record dimension
47 | * * `length`: Number of elements in the record dimension
48 | * * `id`: Id number in the list of dimensions for the record dimension
49 | * * `name`: String with the name of the record dimension
50 | * * `recordStep`: Number with the record variables step size
51 | */
52 | get recordDimension() {
53 | return this.header.recordDimension;
54 | }
55 |
56 | /**
57 | * @return - Array - List of dimensions with:
58 | * * `name`: String with the name of the dimension
59 | * * `size`: Number with the size of the dimension
60 | */
61 | get dimensions() {
62 | return this.header.dimensions;
63 | }
64 |
65 | /**
66 | * @return - Array - List of global attributes with:
67 | * * `name`: String with the name of the attribute
68 | * * `type`: String with the type of the attribute
69 | * * `value`: A number or string with the value of the attribute
70 | */
71 | get globalAttributes(): Header['globalAttributes'] {
72 | return this.header.globalAttributes;
73 | }
74 |
75 | /**
76 | * Returns the value of an attribute
77 |    * @param attributeName - Name of the attribute
78 |    * @return - Value of the attribute or null if it does not exist
79 | */
80 | getAttribute(attributeName: string) {
81 | const attribute = this.globalAttributes.find(
82 | (val) => val.name === attributeName,
83 | );
84 | if (attribute) return attribute.value;
85 | return null;
86 | }
87 |
88 | /**
89 | * Returns the value of a variable as a string
90 |    * @param variableName - Name of the variable
91 | * @return - Value of the variable as a string or null
92 | */
93 | getDataVariableAsString(variableName: string) {
94 | const variable = this.getDataVariable(variableName);
95 | if (variable) return variable.join('');
96 | return null;
97 | }
98 |
99 | get variables() {
100 | return this.header.variables;
101 | }
102 |
103 | toString = toString;
104 |
105 | /**
106 | * Retrieves the data for a given variable
107 | * @param variableName - Name of the variable to search or variable object
108 | * @return The variable values
109 | */
110 | getDataVariable(variableName: string | Header['variables'][number]) {
111 | let variable;
112 | if (typeof variableName === 'string') {
113 | // search the variable
114 | variable = this.header.variables.find((val) => {
115 | return val.name === variableName;
116 | });
117 | } else {
118 | variable = variableName;
119 | }
120 |
121 | // throws if variable not found
122 | if (variable === undefined) {
123 | throw new Error('Not a valid NetCDF v3.x file: variable not found');
124 | }
125 |
126 | // go to the offset position
127 | this.buffer.seek(variable.offset);
128 |
129 | if (variable.record) {
130 | // record variable case
131 | return record(this.buffer, variable, this.header.recordDimension);
132 | } else {
133 | // non-record variable case
134 | return nonRecord(this.buffer, variable);
135 | }
136 | }
137 |
138 | /**
139 | * Check if a dataVariable exists
140 | * @param variableName - Name of the variable to find
141 | * @return boolean
142 | */
143 | dataVariableExists(variableName: string) {
144 | const variable = this.header.variables.find((val) => {
145 | return val.name === variableName;
146 | });
147 | return variable !== undefined;
148 | }
149 |
150 | /**
151 | * Check if an attribute exists
152 | * @param attributeName - Name of the attribute to find
153 | * @return boolean
154 | */
155 | attributeExists(attributeName: string) {
156 | const attribute = this.globalAttributes.find(
157 | (val) => val.name === attributeName,
158 | );
159 | return attribute !== undefined;
160 | }
161 | }
162 |
--------------------------------------------------------------------------------
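A small defensive-usage sketch of the reader API above: `getDataVariable` throws for unknown names, while the `*Exists` helpers and `getAttribute`/`getDataVariableAsString` can be used to probe first (file path illustrative, expected values taken from the tests for `P071.CDF`):

```ts
import { readFileSync } from 'node:fs';

import { NetCDFReader } from 'netcdfjs';

const reader = new NetCDFReader(readFileSync('src/__tests__/files/P071.CDF'));

if (reader.attributeExists('operator_name')) {
  console.log(reader.getAttribute('operator_name')); // 'SC'
}

if (reader.dataVariableExists('instrument_name')) {
  // Char variables can be joined into a single string.
  console.log(reader.getDataVariableAsString('instrument_name')); // 'Gas Chromatograph'
}
```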
/src/toString.ts:
--------------------------------------------------------------------------------
1 | import { NetCDFReader } from './parser';
2 |
3 | export function toString(this: NetCDFReader) {
4 | const result = [];
5 | result.push('DIMENSIONS');
6 | for (const dimension of this.dimensions) {
7 | result.push(` ${dimension.name.padEnd(30)} = size: ${dimension.size}`);
8 | }
9 |
10 | result.push('');
11 | result.push('GLOBAL ATTRIBUTES');
12 | for (const attribute of this.globalAttributes) {
13 | result.push(` ${attribute.name.padEnd(30)} = ${attribute.value}`);
14 | }
15 |
16 | const variables = JSON.parse(JSON.stringify(this.variables));
17 | result.push('');
18 | result.push('VARIABLES:');
19 | for (const variable of variables) {
20 | variable.value = this.getDataVariable(variable);
21 | let stringify = JSON.stringify(variable.value);
22 | if (stringify.length > 50) stringify = stringify.substring(0, 50);
23 | if (!isNaN(variable.value.length)) {
24 | stringify += ` (length: ${variable.value.length})`;
25 | }
26 | result.push(` ${variable.name.padEnd(30)} = ${stringify}`);
27 | }
28 | return result.join('\n');
29 | }
30 |
--------------------------------------------------------------------------------
/src/types.ts:
--------------------------------------------------------------------------------
1 | import { IOBuffer } from 'iobuffer';
2 |
3 | const types = {
4 | BYTE: 1,
5 | CHAR: 2,
6 | SHORT: 3,
7 | INT: 4,
8 | FLOAT: 5,
9 | DOUBLE: 6,
10 | };
11 |
12 | /**
13 |  * Maps a type identifier number to its string name
14 |  * @param type - integer that represents the type
15 |  * @return - string name of the type
16 | */
17 | export function num2str(type: number): string {
18 | switch (Number(type)) {
19 | case types.BYTE:
20 | return 'byte';
21 | case types.CHAR:
22 | return 'char';
23 | case types.SHORT:
24 | return 'short';
25 | case types.INT:
26 | return 'int';
27 | case types.FLOAT:
28 | return 'float';
29 | case types.DOUBLE:
30 | return 'double';
31 | default:
32 | return 'undefined';
33 | }
34 | }
35 |
36 | /**
37 |  * Maps a type identifier number to its size in bytes
38 | * @param type - integer that represents the type
39 | * @return size of the type
40 | */
41 | export function num2bytes(type: number): number {
42 | switch (Number(type)) {
43 | case types.BYTE:
44 | return 1;
45 | case types.CHAR:
46 | return 1;
47 | case types.SHORT:
48 | return 2;
49 | case types.INT:
50 | return 4;
51 | case types.FLOAT:
52 | return 4;
53 | case types.DOUBLE:
54 | return 8;
55 | default:
56 | return -1;
57 | }
58 | }
59 |
60 | /**
61 |  * Inverse of num2str: maps a type name to its identifier number
62 |  * @param type - string that represents the type
63 |  * @return - numeric identifier of the type
64 | */
65 | export function str2num(type: string) {
66 | switch (String(type)) {
67 | case 'byte':
68 | return types.BYTE;
69 | case 'char':
70 | return types.CHAR;
71 | case 'short':
72 | return types.SHORT;
73 | case 'int':
74 | return types.INT;
75 | case 'float':
76 | return types.FLOAT;
77 | case 'double':
78 | return types.DOUBLE;
79 | /* istanbul ignore next */
80 | default:
81 | return -1;
82 | }
83 | }
84 |
85 | /**
86 | * Auxiliary function to read numeric data
87 | * @param size - Size of the element to read
88 | * @param bufferReader - Function to read next value
89 | * @return
90 | */
91 | function readNumber(
92 | size: number,
93 | bufferReader: () => number,
94 | ): number | number[] {
95 | if (size !== 1) {
96 | const numbers = new Array(size);
97 | for (let i = 0; i < size; i++) {
98 | numbers[i] = bufferReader();
99 | }
100 | return numbers;
101 | } else {
102 | return bufferReader();
103 | }
104 | }
105 |
106 | /**
107 | * Given a type and a size reads the next element
108 | * @param buffer - Buffer for the file data
109 | * @param type - Type of the data to read
110 | * @param size - Size of the element to read
111 | * @return
112 | */
113 | export function readType(
114 | buffer: IOBuffer,
115 | type: number,
116 | size: number,
117 | ): string | number | number[] {
118 | switch (type) {
119 | case types.BYTE:
120 | return Array.from(buffer.readBytes(size));
121 | case types.CHAR:
122 | return trimNull(buffer.readChars(size));
123 | case types.SHORT:
124 | return readNumber(size, buffer.readInt16.bind(buffer));
125 | case types.INT:
126 | return readNumber(size, buffer.readInt32.bind(buffer));
127 | case types.FLOAT:
128 | return readNumber(size, buffer.readFloat32.bind(buffer));
129 | case types.DOUBLE:
130 | return readNumber(size, buffer.readFloat64.bind(buffer));
131 | default:
132 | throw new Error(`non valid type ${type}`);
133 | }
134 | }
135 |
136 | /**
137 |  * Removes a trailing null character from a string
138 | * @param value - String to trim
139 | * @return - Trimmed string
140 | */
141 | function trimNull(value: string): string {
142 | if (value.charCodeAt(value.length - 1) === 0) {
143 | return value.substring(0, value.length - 1);
144 | }
145 | return value;
146 | }
147 |
--------------------------------------------------------------------------------
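A quick round trip through the three mappings above, matching the expectations in `src/__tests__/types.test.ts` (the import path assumes a script placed next to `src/`):

```ts
import { num2bytes, num2str, str2num } from './src/types';

// 4 is the NetCDF external type NC_INT: named 'int', 4 bytes wide.
console.log(num2str(4)); // 'int'
console.log(num2bytes(4)); // 4
console.log(str2num('int')); // 4

// Unknown identifiers fall through to the defaults.
console.log(num2str(7)); // 'undefined'
console.log(num2bytes(7)); // -1
console.log(str2num('undefined')); // -1
```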
/src/utils.ts:
--------------------------------------------------------------------------------
1 | import { IOBuffer } from 'iobuffer';
2 | /**
3 |  * Throws a "Not a valid NetCDF" exception if the statement is true
4 | * @ignore
5 | * @param statement - Throws if true
6 | * @param reason - Reason to throw
7 | */
8 | export function notNetcdf(statement: boolean, reason: string) {
9 | if (statement) {
10 | throw new TypeError(`Not a valid NetCDF v3.x file: ${reason}`);
11 | }
12 | }
13 |
14 | /**
15 |  * Moves 1, 2, or 3 bytes ahead to the next 4-byte boundary
16 | * @param buffer - Buffer for the file data
17 | */
18 | export function padding(buffer: IOBuffer) {
19 | if (buffer.offset % 4 !== 0) {
20 | buffer.skip(4 - (buffer.offset % 4));
21 | }
22 | }
23 |
24 | /**
25 | * Reads the name
26 | * @param buffer - Buffer for the file data
27 | * @return Name
28 | */
29 | export function readName(buffer: IOBuffer) {
30 | // Read name
31 | const nameLength = buffer.readUint32();
32 | const name = buffer.readChars(nameLength);
33 |
34 | // validate name
35 | // TODO
36 | // Apply padding
37 | padding(buffer);
38 | return name;
39 | }
40 |
--------------------------------------------------------------------------------
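To make the 4-byte alignment concrete, a tiny standalone sketch of what `padding` does to the read offset, using `IOBuffer` directly (import path assumed as in the sketch above):

```ts
import { IOBuffer } from 'iobuffer';

import { padding } from './src/utils';

// A 10-byte buffer; after reading 6 bytes the offset is not 4-byte aligned.
const buffer = new IOBuffer(new Uint8Array(10));
buffer.readBytes(6);
console.log(buffer.offset); // 6

// padding() skips 2 bytes so the next read starts on a 4-byte boundary.
padding(buffer);
console.log(buffer.offset); // 8
```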
/tsconfig.cjs.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": "./tsconfig.json",
3 | "compilerOptions": {
4 | "module": "commonjs",
5 | "declaration": true,
6 | "declarationMap": true
7 | },
8 | "exclude": ["./src/**/__tests__"]
9 | }
10 |
--------------------------------------------------------------------------------
/tsconfig.esm.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": "./tsconfig.cjs.json",
3 | "compilerOptions": {
4 | "module": "es2020",
5 | "outDir": "lib-esm"
6 | }
7 | }
8 |
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "esModuleInterop": true,
4 | "moduleResolution": "node",
5 | "outDir": "lib",
6 | "sourceMap": true,
7 | "strict": true,
8 | "target": "es2020"
9 | },
10 | "include": ["./src/**/*"]
11 | }
12 |
--------------------------------------------------------------------------------