├── .github
├── dependabot.yml
└── workflows
│ ├── release.yml
│ └── test_build.yml
├── .gitignore
├── .npmignore
├── .npmrc
├── LICENSE.txt
├── README.md
├── esm
├── btree.js
├── core.js
├── dataobjects.js
├── datatype-msg.js
├── filters.js
├── high-level.js
└── misc-low-level.js
├── index.js
├── package-lock.json
├── package.json
└── test
├── create_test_files.py
├── test.h5
└── tests.mjs
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | # To get started with Dependabot version updates, you'll need to specify which
2 | # package ecosystems to update and where the package manifests are located.
3 | # Please see the documentation for all configuration options:
4 | # https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
5 |
6 | version: 2
7 | updates:
8 | - package-ecosystem: "github-actions"
9 | # Workflow files stored in the default location of `.github/workflows`
10 | # You don't need to specify `/.github/workflows` for `directory`. You can use `directory: "/"`.
11 | directory: "/"
12 | schedule:
13 | interval: "monthly"
14 |
15 | # Enable version updates for npm
16 | - package-ecosystem: "npm"
17 | # Look for `package.json` and `lock` files in the `root` directory
18 | directory: "/"
19 |     # Check the npm registry for updates every month
20 | schedule:
21 | interval: "monthly"
22 |
--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
1 | name: Publish
2 | # Controls when the workflow will run
3 | on:
4 | # Triggers the workflow on push or pull request events but only for the main branch
5 | release:
6 | types: [published]
7 | # Allows you to run this workflow manually from the Actions tab
8 | workflow_dispatch:
9 |
10 | # A workflow run is made up of one or more jobs that can run sequentially or in parallel
11 | jobs:
12 |
13 | do_release:
14 | runs-on: ubuntu-latest
15 | permissions:
16 | contents: write
17 | steps:
18 | - uses: actions/checkout@v4
19 | - name: setup node
20 | uses: actions/setup-node@v4
21 | with:
22 | node-version: '22'
23 | registry-url: 'https://registry.npmjs.org'
24 | - name: Build library
25 | run: |
26 | npm install
27 | npm run build
28 | - name: Publish to npmjs
29 | if: startsWith(github.ref, 'refs/tags')
30 | run: npm publish --provenance --access public
31 | env:
32 | NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
33 | - name: Pack library
34 | run: |
35 | npm pack
36 | echo "NPM_TGZ=$(ls jsfive*.tgz)" >> $GITHUB_ENV
37 | - name: Upload artifact
38 | uses: actions/upload-artifact@v4
39 | with:
40 | path: ${{ env.NPM_TGZ }}
41 | - name: Update current release
42 | if: startsWith(github.ref, 'refs/tags')
43 | uses: johnwbyrd/update-release@v1.0.0
44 | with:
45 | token: ${{ secrets.GITHUB_TOKEN }}
46 | files: |
47 | ${{ env.NPM_TGZ }}
48 |
--------------------------------------------------------------------------------
/.github/workflows/test_build.yml:
--------------------------------------------------------------------------------
1 | name: Test Build
2 | # Controls when the workflow will run
3 | on:
4 | # Triggers the workflow on push or pull request events but only for the main branch
5 | push:
6 | branches: [ master ]
7 | # Allows you to run this workflow manually from the Actions tab
8 | workflow_dispatch:
9 |
10 | # A workflow run is made up of one or more jobs that can run sequentially or in parallel
11 | jobs:
12 |
13 | build_and_test:
14 | runs-on: ubuntu-latest
15 | steps:
16 | - uses: actions/checkout@v4
17 | - name: setup node
18 | uses: actions/setup-node@v4
19 | with:
20 | node-version: '22'
21 | - name: Build library
22 | run: |
23 | npm install
24 | npm run build
25 | - name: Test library
26 | run: |
27 | node --test
28 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules
2 | jsfive**.tgz
3 |
--------------------------------------------------------------------------------
/.npmignore:
--------------------------------------------------------------------------------
1 | .github
2 | jsfive**.tgz
3 |
--------------------------------------------------------------------------------
/.npmrc:
--------------------------------------------------------------------------------
1 | @usnistgov:registry=https://npm.pkg.github.com
2 |
3 |
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
1 | jsfive is in the public domain.
2 |
3 | It is based in large part on the pyfive library
4 | https://github.com/jjhelmus/pyfive
5 | Copyright (c) 2016 Jonathan J. Helmus
6 | All rights reserved.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # jsfive: A pure javascript HDF5 file reader
2 |
3 | jsfive is a library for reading (not writing) HDF5 files using pure javascript, such as in the browser. It is based on the [pyfive](https://github.com/jjhelmus/pyfive) pure-python implementation of an HDF5 reader.
4 | Not all features of HDF5 are supported, but some key ones that are:
5 |
6 | * data chunking
7 | * data compression, if javascript zlib is provided (like pako)
8 |
9 | It is only for reading HDF5 files as an ArrayBuffer representation of the file.
10 |
11 | If you need to write HDF5 files in javascript consider using h5wasm ([github](https://github.com/usnistgov/h5wasm), [npm](https://www.npmjs.com/package/h5wasm)) instead (also provides efficient slicing of large datasets, and uses direct filesystem access in nodejs).
12 |
13 | ## Dependencies
14 | * ES6 module support (current versions of Firefox and Chrome work)
15 | * zlib from [pako](https://github.com/nodeca/pako)
16 |
17 | ## Limitations
18 | * not all datatypes that are supported by pyfive (through numpy) are supported (yet), though dtypes like u8, f4, S12, i4 are supported.
19 | * datafiles larger than javascript's Number.MAX_SAFE_INTEGER (in bytes) will result in corrupted reads, as the input ArrayBuffer can't be indexed above that (I'm pretty sure ArrayBuffers larger than that are allowed to exist in Javascript) since no 64-bit integers exist in javascript.
20 |   * currently this gives an upper limit of 9007199254740991 bytes, which is a lot. (~9 petabytes)
21 | * currently the getitem syntax is not supported, but it will likely be soon, for browsers that support object Proxy (not IE), so you have to do say f.get('entry/dataset') instead of f['entry/dataset']
22 |
23 | ## Installation
24 | ### CDN:
25 | If you want to use it as an old-style ES5 script, you can use the pre-built library in /dist/hdf5.js e.g.
26 | ```html
27 | <script src="https://cdn.jsdelivr.net/npm/jsfive/dist/hdf5.js"></script>
28 | ```
29 |
30 | ### NPM
31 | To include in a project,
32 | ```bash
33 | npm install jsfive
34 | ```
35 | then in your project
36 | ```js
37 | import * as hdf5 from 'jsfive';
38 | // this works in create-react-app too, in
39 | // jsfive >= 0.3.7
40 | ```
41 | or
42 | ```javascript
43 | const hdf5 = await import("jsfive");
44 | ```
45 |
46 | ## Usage
47 | With fetch, from the browser:
48 | ```javascript
49 | fetch(file_url)
50 | .then(function(response) {
51 | return response.arrayBuffer()
52 | })
53 | .then(function(buffer) {
54 | var f = new hdf5.File(buffer, filename);
55 | // do something with f;
56 | // let g = f.get('group');
57 | // let d = f.get('group/dataset');
58 | // let v = d.value;
59 | // let a = d.attrs;
60 | });
61 | ```
62 |
63 | Or if you want to upload a file to work with, into the browser:
64 | ```javascript
65 | function loadData() {
66 | var file_input = document.getElementById('datafile');
67 | var file = file_input.files[0]; // only one file allowed
68 | let datafilename = file.name;
69 | let reader = new FileReader();
70 | reader.onloadend = function(evt) {
71 | let barr = evt.target.result;
72 | var f = new hdf5.File(barr, datafilename);
73 | // do something with f...
74 | }
75 | reader.readAsArrayBuffer(file);
76 | file_input.value = "";
77 | }
78 | ```
79 |
80 | in node REPL (might require --experimental-repl-await for older nodejs)
81 | ```js
82 | $ node
83 | Welcome to Node.js v16.13.2.
84 | Type ".help" for more information.
85 | > const hdf5 = await import("jsfive");
86 | undefined
87 | > var fs = require("fs");
88 | undefined
89 | > var ab = fs.readFileSync("/home/brian/Downloads/sans59510.nxs.ngv");
90 | undefined
91 | > var f = new hdf5.File(ab.buffer);
92 | undefined
93 | > f.keys
94 | [ 'entry' ]
95 | > f.get("entry").attrs
96 | { NX_class: 'NXentry' }
97 | >
98 | ```
99 |
--------------------------------------------------------------------------------
/esm/btree.js:
--------------------------------------------------------------------------------
1 | import {_unpack_struct_from, _structure_size, struct, dtype_getter, bitSize, DataView64} from './core.js';
2 | import { Filters } from './filters.js';
3 |
class AbstractBTree {
  //B_LINK_NODE = null;
  //NODE_TYPE = null;

  /**
   * Base class for HDF5 B-Tree readers.
   *
   * @param {ArrayBuffer} fh - buffer holding the whole file
   * @param {number} offset - byte offset of the root node in `fh`
   */
  constructor(fh, offset) {
    //""" initalize. """
    this.fh = fh;
    this.offset = offset;
    this.depth = null;
  }

  /** Read the root node, then every descendant level, into `all_nodes`. */
  init() {
    // node_level -> [node, ...]
    this.all_nodes = new Map();
    this._read_root_node();
    this._read_children();
  }

  _read_children() {
    // # Leaf nodes: level 0
    // # Root node: level "depth"
    // Walk top-down: each level's children are read from the addresses
    // collected while reading the level above it.
    let node_level = this.depth;
    while (node_level > 0) {
      for (var parent_node of this.all_nodes.get(node_level)) {
        for (var child_addr of parent_node.get('addresses')) {
          this._add_node(this._read_node(child_addr, node_level - 1));
        }
      }
      node_level--;
    }
  }

  _read_root_node() {
    // The root's level is unknown before its header is read, so pass null.
    let root_node = this._read_node(this.offset, null);
    this._add_node(root_node);
    this.depth = root_node.get('node_level');
  }

  /** Register `node` in `all_nodes` under its level. */
  _add_node(node) {
    let node_level = node.get('node_level');
    if (this.all_nodes.has(node_level)) {
      this.all_nodes.get(node_level).push(node);
    }
    else {
      this.all_nodes.set(node_level, [node]);
    }
  }

  _read_node(offset, node_level) {
    // """ Return a single node in the B-Tree located at a given offset. """
    // BUG FIX: `node` was assigned without declaration, which throws a
    // ReferenceError in strict mode (ES modules are always strict).
    let node = this._read_node_header(offset, node_level);
    node.set('keys', []);
    node.set('addresses', []);
    return node
  }

  _read_node_header(offset) {
    //""" Return a single node header in the b-tree located at a give offset. """
    throw "NotImplementedError: must define _read_node_header in implementation class";
  }
}
64 |
export class BTreeV1 extends AbstractBTree {
  /*
  """
  HDF5 version 1 B-Tree.
  """
  */

  // On-disk layout of a version-1 B-link node header (struct format codes).
  B_LINK_NODE = new Map([
    ['signature', '4s'],

    ['node_type', 'B'],
    ['node_level', 'B'],
    ['entries_used', 'H'],

    ['left_sibling', 'Q'], // 8 byte addressing
    ['right_sibling', 'Q'] // 8 byte addressing
  ]);

  _read_node_header(offset, node_level) {
    // """ Return a single node header in the b-tree located at a give offset. """
    const header = _unpack_struct_from(this.B_LINK_NODE, this.fh, offset);
    //assert header['signature'] == b'TREE'
    //assert header['node_type'] == this.NODE_TYPE
    // A null node_level means "unknown" (root read) and skips the check.
    if (node_level != null && header.get("node_level") != node_level) {
      throw "node level does not match"
    }
    return header;
  }

}
97 |
98 |
99 | export class BTreeV1Groups extends BTreeV1 {
100 | /*
101 | """
102 | HDF5 version 1 B-Tree storing group nodes (type 0).
103 | """
104 | */
105 | NODE_TYPE = 0;
106 |
  /**
   * @param {ArrayBuffer} fh - buffer holding the whole file
   * @param {number} offset - byte offset of the root node
   */
  constructor(fh, offset) {
    super(fh, offset);
    // Eagerly walk the whole tree (see AbstractBTree.init).
    this.init();
  }
111 |
112 | _read_node(offset, node_level) {
113 | // """ Return a single node in the B-Tree located at a given offset. """
114 | let node = this._read_node_header(offset, node_level);
115 | offset += _structure_size(this.B_LINK_NODE);
116 | let keys = [];
117 | let addresses = [];
118 | let entries_used = node.get('entries_used');
119 | for (var i=0; i=0; d--) {
267 | if (cpos[d] >= chunk_shape[d]) {
268 | cpos[d] = 0;
269 | apos[d] = chunk_offset[d];
270 | if (d > 0) {
271 | cpos[d-1] += 1;
272 | apos[d-1] += 1;
273 | }
274 | }
275 | else {
276 | break;
277 | }
278 | }
279 | let inbounds = apos.slice(0,-1).every(function(p, d) { return p < data_shape[d] });
280 | if (inbounds) {
281 | let cb_offset = ci * item_size;
282 | let datum = cview[item_getter](cb_offset, !item_big_endian, item_size);
283 | let ai = apos.slice(0,-1).reduce(function(prev, curr, index) {
284 | return curr * data_strides[index] + prev }, 0);
285 | data[ai] = datum;
286 | }
287 | cpos[dims-1] += 1;
288 | apos[dims-1] += 1;
289 | }
290 | }
291 | }
292 | return data;
293 | }
294 |
295 | _filter_chunk(chunk_buffer, filter_mask, filter_pipeline, itemsize) {
296 | //""" Apply decompression filters to a chunk of data. """
297 | let num_filters = filter_pipeline.length;
298 | let buf = chunk_buffer.slice();
299 | for (var filter_index=num_filters-1; filter_index >=0; filter_index--) {
300 | //for i, pipeline_entry in enumerate(filter_pipeline[::-1]):
301 |
302 | //# A filter is skipped is the bit corresponding to its index in the
303 | //# pipeline is set in filter_mask
304 | if (filter_mask & (1 << filter_index)) {
305 | continue
306 | }
307 | let pipeline_entry = filter_pipeline[filter_index];
308 | let filter_id = pipeline_entry.get('filter_id');
309 | let client_data = pipeline_entry.get('client_data');
310 | if (Filters.has(filter_id)) {
311 | buf = Filters.get(filter_id)(buf, itemsize, client_data);
312 | }
313 | else {
314 | throw 'NotImplementedError("Filter with id:' + filter_id.toFixed() + ' not supported")';
315 | }
316 | }
317 | return buf;
318 | }
319 | }
320 |
321 | export class BTreeV2 extends AbstractBTree {
322 | /*
323 | HDF5 version 2 B-Tree.
324 | */
325 |
326 | // III.A.2. Disk Format: Level 1A2 - Version 2 B-trees
  // III.A.2. Disk Format: Level 1A2 - Version 2 B-trees
  // On-disk layout of the version-2 B-tree header (struct format codes).
  B_TREE_HEADER = new Map([
    ['signature', '4s'],

    ['version', 'B'],
    ['node_type', 'B'],
    ['node_size', 'I'],
    ['record_size', 'H'],
    ['depth', 'H'],
    ['split_percent', 'B'],
    ['merge_percent', 'B'],

    ['root_address', 'Q'], // 8 byte addressing
    ['root_nrecords', 'H'],
    ['total_nrecords', 'Q'], // 8 byte addressing
  ]);

  // Header shared by internal and leaf nodes (signature distinguishes them).
  B_LINK_NODE = new Map([
    ['signature', '4s'],

    ['version', 'B'],
    ['node_type', 'B'],
  ])

  /**
   * @param {ArrayBuffer} fh - buffer holding the whole file
   * @param {number} offset - byte offset of the tree header
   */
  constructor(fh, offset) {
    super(fh, offset);
    // Eagerly reads the header and every node (see AbstractBTree.init).
    this.init();
  }
354 |
  _read_root_node() {
    // Read the tree header first: it supplies the depth, the root address,
    // and the numbers needed to derive per-level address/record formats.
    let h = this._read_tree_header(this.offset);
    this.address_formats = this._calculate_address_formats(h);
    this.header = h;
    this.depth = h.get("depth");

    // V2 node "addresses" are [address, nrecords, total_nrecords] triples.
    let address = [h.get("root_address"), h.get("root_nrecords"), h.get("total_nrecords")];
    let root_node = this._read_node(address, this.depth);
    this._add_node(root_node);
  }
365 |
366 | _read_tree_header(offset) {
367 | let header = _unpack_struct_from(this.B_TREE_HEADER, this.fh, this.offset);
368 | //assert header['signature'] == b'BTHD'
369 | //assert header['node_type'] == this.NODE_TYPE
370 | return header;
371 | }
372 |
373 | _calculate_address_formats(header) {
374 | let node_size = header.get("node_size");
375 | let record_size = header.get("record_size");
376 | let nrecords_max = 0;
377 | let ntotalrecords_max = 0;
378 | let address_formats = new Map();
379 | let max_depth = header.get("depth");
380 | for (var node_level=0; node_level <= max_depth; node_level++) {
381 | let offset_fmt = "";
382 | let num1_fmt = "";
383 | let num2_fmt = "";
384 | let offset_size, num1_size, num2_size;
385 | if (node_level == 0) { // leaf node
386 | offset_size = 0;
387 | num1_size = 0;
388 | num2_size = 0;
389 | }
390 | else if (node_level == 1) { // internal node (twig node)
391 | offset_size = 8;
392 | offset_fmt = " 0) {
412 | ntotalrecords_max *= nrecords_max;
413 | }
414 | else {
415 | ntotalrecords_max = nrecords_max;
416 | }
417 | }
418 | }
419 |
420 | return address_formats
421 | }
422 |
423 | _nrecords_max(node_size, record_size, addr_size) {
424 | // """ Calculate the maximal records a node can contain. """
425 | // node_size = overhead + nrecords_max*record_size + (nrecords_max+1)*addr_size
426 | //
427 | // overhead = size(B_LINK_NODE) + 4 (checksum)
428 | //
429 | // Leaf node (node_level = 0)
430 | // addr_size = 0
431 | // Internal node (node_level = 1)
432 | // addr_size = offset_size + num1_size
433 | // Internal node (node_level > 1)
434 | // addr_size = offset_size + num1_size + num2_size
435 | return Math.floor((node_size - 10 - addr_size)/(record_size + addr_size));
436 | }
437 |
438 | _required_bytes(integer) {
439 | // """ Calculate the minimal required bytes to contain an integer. """
440 | return Math.ceil(bitSize(integer) / 8);
441 | }
442 |
443 | _int_format(bytelength) {
444 | return [" 0) {
471 | num2 = struct.unpack_from(num2_fmt, this.fh, offset)[0];
472 | offset += num2_size;
473 | }
474 | addresses.push([address_offset, num1, num2]);
475 | }
476 | }
477 |
478 | node.set('keys', keys);
479 | node.set('addresses', addresses);
480 | return node
481 | }
482 |
  _read_node_header(offset, node_level) {
    // """ Return a single node header in the b-tree located at a give offset. """
    let node = _unpack_struct_from(this.B_LINK_NODE, this.fh, offset);
    //assert node['node_type'] == this.NODE_TYPE
    if (node_level > 0) {
      // Internal node (has children)
      // assert node['signature'] == b'BTIN'
    }
    else {
      // Leaf node (has no children)
      // assert node['signature'] == b'BTLF'
    }
    // V2 node headers do not store their own level; record the caller's.
    node.set("node_level", node_level);
    return node
  }
498 |
499 | * iter_records() {
500 | // """ Iterate over all records. """
501 | for (let nodelist of this.all_nodes.values()) {
502 | for (let node of nodelist) {
503 | for (let key of node.get('keys')) {
504 | yield key
505 | }
506 | }
507 | }
508 | }
509 |
510 |
  /** Decode one raw record; concrete subclasses define the record layout. */
  _parse_record(record) {
    throw "NotImplementedError"
  }
514 | }
515 |
516 | export class BTreeV2GroupNames extends BTreeV2 {
517 | /*
518 | HDF5 version 2 B-Tree storing group names (type 5).
519 | */
520 | NODE_TYPE = 5
521 |
522 | _parse_record(buf, offset, size) {
523 | let namehash = struct.unpack_from("=!@]?(i|u|f)(\d*)/);
43 | nbytes = parseInt(bytestr || 4, 10);
44 | let nbits = nbytes * 8;
45 | getter = "get" + dtype_to_format[fstr] + nbits.toFixed();
46 | }
47 | return [getter, big_endian, nbytes];
48 | }
49 |
50 | // Pretty sure we can just use a number for this...
export class Reference {
  /*
  """
  HDF5 Reference.
  """
  */
  /** @param {number} address_of_reference - file address the reference points at */
  constructor(address_of_reference) {
    this.address_of_reference = address_of_reference;
  }

  /** Python-style truthiness: a zero address is a null reference. */
  __bool__() {
    return this.address_of_reference != 0;
  }
}
65 |
66 | class Struct {
  constructor() {
    // Pure-JS analogue of Python's `struct` module, limited to the format
    // characters tabled below. Plain (prefix-less) formats use host order.
    this.big_endian = isBigEndian();
    // format character -> DataView64 getter method name
    this.getters = {
      "s": "getUint8",
      "b": "getInt8",
      "B": "getUint8",
      "h": "getInt16",
      "H": "getUint16",
      "i": "getInt32",
      "I": "getUint32",
      "l": "getInt32",
      "L": "getUint32",
      "q": "getInt64",
      "Q": "getUint64",
      "e": "getFloat16",
      "f": "getFloat32",
      "d": "getFloat64"
    }
    // format character -> size of one element in bytes
    this.byte_lengths = {
      "s": 1,
      "b": 1,
      "B": 1,
      "h": 2,
      "H": 2,
      "i": 4,
      "I": 4,
      "l": 4,
      "L": 4,
      "q": 8,
      "Q": 8,
      "e": 2,
      "f": 4,
      "d": 8
    }
    let all_formats = Object.keys(this.byte_lengths).join('');
    // Matches one field spec: optional repeat count + one format character.
    this.fmt_size_regex = '(\\d*)([' + all_formats + '])';
  }
104 | calcsize(fmt) {
105 | var size = 0;
106 | var match;
107 | var regex = new RegExp(this.fmt_size_regex, 'g');
108 | while ((match = regex.exec(fmt)) !== null) {
109 | let n = parseInt(match[1] || 1, 10);
110 | let f = match[2];
111 | let subsize = this.byte_lengths[f];
112 | size += n * subsize;
113 | }
114 | return size;
115 | }
116 | _is_big_endian(fmt) {
117 | var big_endian;
118 | if (/^)/.test(fmt)) {
122 | big_endian = true;
123 | }
124 | else {
125 | big_endian = this.big_endian;
126 | }
127 | return big_endian;
128 | }
  /**
   * Unpack values from `buffer` (ArrayBuffer) starting at `offset`,
   * following the struct-style format string `fmt`.
   * Returns an array of decoded values.
   */
  unpack_from(fmt, buffer, offset) {
    var offset = Number(offset || 0);
    var view = new DataView64(buffer, 0);
    var output = [];
    var big_endian = this._is_big_endian(fmt);
    var match;
    var regex = new RegExp(this.fmt_size_regex, 'g');
    while ((match = regex.exec(fmt)) !== null) {
      let n = parseInt(match[1] || 1, 10);
      let f = match[2];
      let getter = this.getters[f];
      let size = this.byte_lengths[f];
      if (f == 's') {
        // "Ns" is a single N-byte string, not N separate values.
        output.push(new TextDecoder().decode(buffer.slice(offset, offset + n)));
        offset += n;
      }
      else {
        for (var i = 0; i < n; i++) {
          // DataView getters take a littleEndian flag, hence the negation.
          output.push(view[getter](offset, !big_endian));
          offset += size;
        }
      }
    }
    return output
  }
154 | }
155 |
156 | export const struct = new Struct();
157 |
function isBigEndian() {
  // Write a 32-bit 1 and inspect the lowest-addressed byte:
  // it is 0 on big-endian platforms and 1 on little-endian ones.
  const probe = new Uint32Array([1]);
  return new Uint8Array(probe.buffer)[0] === 0;
}
163 |
// When true, the DataView64 64-bit getters warn if a decoded value falls
// outside the ranges below.
var WARN_OVERFLOW = false;
// BUG FIX: `-` binds tighter than `<<`, so `1n << 63n - 1n` parsed as
// `1n << 62n`; and `1n << 64n` is one past the unsigned 64-bit maximum.
var MAX_INT64 = (1n << 63n) - 1n;
var MIN_INT64 = -(1n << 63n);
var MAX_UINT64 = (1n << 64n) - 1n;
var MIN_UINT64 = 0n;
169 |
170 |
function decodeFloat16(low, high) {
  // Decode an IEEE 754 binary16 value from its two raw bytes
  // (low = least-significant byte, high = most-significant byte).
  const sign = (high >> 7) & 1;
  const exponent = (high >> 2) & 0x1f;
  const fraction = ((high & 0x03) << 8) | low;

  let magnitude;
  if (exponent === 0x1f) {
    // All-ones exponent: infinity when the fraction is zero, NaN otherwise.
    magnitude = (fraction === 0) ? Infinity : NaN;
  }
  else if (exponent === 0) {
    // Subnormal: no implicit leading 1.
    magnitude = 2 ** -14 * (fraction / 1024);
  }
  else {
    magnitude = 2 ** (exponent - 15) * (1 + fraction / 1024);
  }

  return sign ? -magnitude : magnitude;
}
190 |
export class DataView64 extends DataView {
  // DataView extended with float16, int64/uint64, string, and HDF5-VLEN
  // getters. 64-bit values are returned as Number, so magnitudes above
  // Number.MAX_SAFE_INTEGER silently lose precision (see WARN_OVERFLOW).

  // NOTE(review): parameter name "littlEndian" is a pre-existing typo,
  // kept here because only comments may change in this pass.
  getFloat16(byteOffset, littlEndian) {
    // little-endian by default
    let bytes = [this.getUint8(byteOffset), this.getUint8(byteOffset + 1)]
    // For big-endian input, swap so [low, high] ordering still holds.
    if (!littlEndian) bytes.reverse();
    let [low, high] = bytes;
    return decodeFloat16(low, high);
  }

  getUint64(byteOffset, littleEndian) {
    // split 64-bit number into two 32-bit (4-byte) parts
    const left = BigInt(this.getUint32(byteOffset, littleEndian));
    const right = BigInt(this.getUint32(byteOffset + 4, littleEndian));

    // combine the two 32-bit values; which half is the high word
    // depends on the requested byte order
    let combined = littleEndian ? left + (right << 32n) : (left << 32n) + right;

    if (WARN_OVERFLOW && (combined < MIN_UINT64 || combined > MAX_UINT64)) {
      console.warn(combined, 'exceeds range of 64-bit unsigned int');
    }

    // Downcast to Number: exact only below 2**53.
    return Number(combined);
  }

  getInt64(byteOffset, littleEndian) {
    // split 64-bit number into two 32-bit (4-byte) parts
    // untested!!
    // The high half carries the sign (getInt32); the low half is unsigned.
    var low, high;
    if (littleEndian) {
      low = this.getUint32(byteOffset, true);
      high = this.getInt32(byteOffset + 4, true);
    }
    else {
      high = this.getInt32(byteOffset, false);
      low = this.getUint32(byteOffset + 4, false);
    }

    let combined = BigInt(low) + (BigInt(high) << 32n);

    if (WARN_OVERFLOW && (combined < MIN_INT64 || combined > MAX_INT64)) {
      console.warn(combined, 'exceeds range of 64-bit signed int');
    }

    // Downcast to Number: exact only within +/- 2**53.
    return Number(combined);
  }

  // Decode `length` bytes at byteOffset as text (TextDecoder default: UTF-8);
  // the littleEndian argument is unused.
  getString(byteOffset, littleEndian, length) {
    const str_buffer = this.buffer.slice(byteOffset, byteOffset + length);
    const decoder = new TextDecoder();
    return decoder.decode(str_buffer);
  }

  getVLENStruct(byteOffset, littleEndian, length) {
    // get the addressing information for VLEN data
    // Layout: uint32 item_size, uint64 collection_address, uint32 object_index.
    let item_size = this.getUint32(byteOffset, littleEndian);
    let collection_address = this.getUint64(byteOffset + 4, littleEndian);
    let object_index = this.getUint32(byteOffset + 12, littleEndian);
    return [item_size, collection_address, object_index];
  }

}
252 |
export function bitSize(integer) {
  // Number of binary digits in `integer`, i.e. the position of its
  // highest set bit (works for Number and BigInt inputs).
  const binary = integer.toString(2);
  return binary.length;
}
256 |
257 | export function _unpack_integer(nbytes, fh, offset = 0, littleEndian = true) {
258 | //let padded_bytelength = 1 << Math.ceil(Math.log2(nbytes));
259 | //let format = _int_format(padded_bytelength);
260 | //let buf = new ArrayBuffer(padded_bytelength); // all zeros to start
261 | let bytes = new Uint8Array(fh.slice(offset, offset+nbytes));
262 | if (!littleEndian) {
263 | bytes.reverse();
264 | }
265 | let integer = bytes.reduce((accumulator, currentValue, index) => accumulator + (currentValue << (index * 8)), 0);
266 | return integer;
267 |
268 | //new Uint8Array(buf).set(new Uint8Array(fh.slice(offset, offset + nbytes)));
269 | //return struct.unpack_from(format, buf, 0)[0];
270 | }
271 |
272 | function _int_format(bytelength) {
273 | assert([1,2,4,8].includes(bytelength));
274 | let index = Math.log2(bytelength);
275 | return [" 255) {
114 | name_length = struct.unpack_from(' 0;
120 | filter_info.set('optional', optional);
121 | let num_client_values = struct.unpack_from(' 0) {
125 | name = struct.unpack_from(`${name_length}s`, buf, offset)[0];
126 | offset += name_length;
127 | }
128 | filter_info.set('name', name);
129 | let client_values = struct.unpack_from(`<${num_client_values}i`, buf, offset);
130 | offset += (4 * num_client_values);
131 | filter_info.set('client_data', client_values);
132 | filter_info.set('client_data_values', num_client_values);
133 | filters.push(filter_info);
134 | }
135 | }
136 | else {
137 | throw `version ${version} is not supported`
138 | }
139 | this._filter_pipeline = filters;
140 | return this._filter_pipeline;
141 | }
142 |
143 | find_msg_type(msg_type) {
144 | //""" Return a list of all messages of a given type. """
145 | return this.msgs.filter(function (m) { return m.get('type') == msg_type });
146 | }
147 |
148 | get_attributes() {
149 | //""" Return a dictionary of all attributes. """
150 | let attrs = {};
151 | let attr_msgs = this.find_msg_type(ATTRIBUTE_MSG_TYPE);
152 | for (let msg of attr_msgs) {
153 | let offset = msg.get('offset_to_message');
154 | let [name, value] = this.unpack_attribute(offset);
155 | attrs[name] = value;
156 | }
157 | //# TODO attributes may also be stored in objects reference in the
158 | //# Attribute Info Message (0x0015, 21).
159 | return attrs
160 | }
161 |
162 | get fillvalue() {
163 | /* Fillvalue of the dataset. */
164 | let msg = this.find_msg_type(FILLVALUE_MSG_TYPE)[0];
165 | var offset = msg.get('offset_to_message');
166 | var is_defined;
167 | let version = struct.unpack_from('= block_size) {
366 | [block_offset, block_size] = object_header_blocks[++current_block];
367 | local_offset = 0;
368 | }
369 | let msg = _unpack_struct_from(HEADER_MSG_INFO_V1, buf, block_offset + local_offset);
370 | let offset_to_message = block_offset + local_offset + HEADER_MSG_INFO_V1_SIZE;
371 | msg.set('offset_to_message', offset_to_message);
372 | if (msg.get('type') == OBJECT_CONTINUATION_MSG_TYPE) {
373 | var [fh_off, size] = struct.unpack_from('= block_size - HEADER_MSG_INFO_V2_SIZE) {
399 | let next_block = object_header_blocks[++current_block];
400 | if (next_block == null) {
401 | break
402 | }
403 | [block_offset, block_size] = next_block;
404 | local_offset = 0;
405 | }
406 | let msg = _unpack_struct_from(HEADER_MSG_INFO_V2, buf, block_offset + local_offset);
407 | let offset_to_message = block_offset + local_offset + HEADER_MSG_INFO_V2_SIZE + creation_order_size;
408 | msg.set('offset_to_message', offset_to_message);
409 | if (msg.get('type') == OBJECT_CONTINUATION_MSG_TYPE) {
410 | var [fh_off, size] = struct.unpack_from(' 0;
503 | let link_name_character_set_field_present = (flags & 2 ** 4) > 0;
504 | let ordered = (flags & 2 ** 2) > 0;
505 | let link_type;
506 |
507 | if (link_type_field_present) {
508 | link_type = struct.unpack_from(' 0) {
595 | // # creation order present
596 | offset += 8;
597 | }
598 |
599 | let fmt = ((flags & 2) > 0) ? LINK_INFO_MSG2 : LINK_INFO_MSG1;
600 |
601 | let link_info = _unpack_struct_from(fmt, data, offset);
602 | let output = new Map();
603 | for (let [k, v] of link_info.entries()) {
604 | output.set(k, v == UNDEFINED_ADDRESS ? null : v);
605 | }
606 | return output
607 | }
608 |
609 | get is_dataset() {
610 | //""" True when DataObjects points to a dataset, False for a group. """
611 | return ((this.find_msg_type(DATASPACE_MSG_TYPE)).length > 0);
612 | }
613 |
614 | /**
615 | * Return the data pointed to in the DataObject
616 | *
617 | * @returns {Array}
618 | * @memberof DataObjects
619 | */
620 | get_data() {
621 | // offset and size from data storage message:
622 | let msg = this.find_msg_type(DATA_STORAGE_MSG_TYPE)[0];
623 | let msg_offset = msg.get('offset_to_message');
624 | var [version, dims, layout_class, property_offset] = (
625 | this._get_data_message_properties(msg_offset));
626 |
627 | if (layout_class == 0) { // compact storage
628 | throw "Compact storage of DataObject not implemented"
629 | }
630 | else if (layout_class == 1) {
631 | return this._get_contiguous_data(property_offset)
632 | }
633 | else if (layout_class == 2) { // # chunked storage
634 | return this._get_chunked_data(msg_offset);
635 | }
636 | }
637 |
638 | _get_data_message_properties(msg_offset) {
639 | // """ Return the message properties of the DataObject. """
640 | let dims, layout_class, property_offset;
641 | let [version, arg1, arg2] = struct.unpack_from(
642 | '= 1) && (version <= 4));
660 | return [version, dims, layout_class, property_offset];
661 | }
662 |
663 | _get_contiguous_data(property_offset) {
664 | let [data_offset] = struct.unpack_from('=!@\|]?(i|u|f|S)(\d*)/.test(dtype)) {
678 | let [item_getter, item_is_big_endian, item_size] = dtype_getter(dtype);
679 | let output = new Array(fullsize);
680 | let view = new DataView64(this.fh);
681 | for (var i = 0; i < fullsize; i++) {
682 | output[i] = view[item_getter](data_offset + i * item_size, !item_is_big_endian, item_size);
683 | }
684 | return output
685 | }
686 | else {
687 | throw "not Implemented - no proper dtype defined";
688 | }
689 | }
690 | else {
691 | let dtype_class = this.dtype[0];
692 | if (dtype_class == 'REFERENCE') {
693 | let size = this.dtype[1];
694 | if (size != 8) {
695 | throw "NotImplementedError('Unsupported Reference type')";
696 | }
697 | let ref_addresses = this.fh.slice(data_offset, data_offset + fullsize);
698 |
699 | //ref_addresses = np.memmap(
700 | // self.fh, dtype=('= 1) && (version <= 3));
790 |
791 | var fmt = '<' + (dims - 1).toFixed() + 'I';
792 | var chunk_shape = struct.unpack_from(fmt, this.fh, data_offset);
793 | this._chunks = chunk_shape;
794 | this._chunk_dims = dims;
795 | this._chunk_address = address;
796 | return
797 | }
798 |
799 | }
800 |
801 | function determine_data_shape(buf, offset) {
802 | //""" Return the shape of the dataset pointed to in a Dataspace message. """
803 | let version = struct.unpack_from(' 0) {
76 | dtype_char = 'i';
77 | }
78 | else {
79 | dtype_char = 'u';
80 | }
81 |
82 | let byte_order = datatype_msg.get('class_bit_field_0') & 0x01;
83 | var byte_order_char;
84 | if (byte_order == 0) {
85 | byte_order_char = '<'; //# little-endian
86 | }
87 | else {
88 | byte_order_char = '>'; //# big-endian
89 | }
90 |
91 | //# 4-byte fixed-point property description
92 | //# not read, assumed to be IEEE standard format
93 | this.offset += 4
94 |
95 | return byte_order_char + dtype_char + length_in_bytes.toFixed();
96 | }
97 |
98 | _determine_dtype_floating_point(datatype_msg) {
99 | //""" Return the NumPy dtype for a floating point class. """
100 | //# Floating point types are assumed to follow IEEE standard formats
101 | let length_in_bytes = datatype_msg.get('size');
102 | if (!([1, 2, 4, 8].includes(length_in_bytes))) {
103 | throw "Unsupported datatype size";
104 | }
105 |
106 | let dtype_char = 'f'
107 |
108 | let byte_order = datatype_msg.get('class_bit_field_0') & 0x01;
109 | var byte_order_char;
110 | if (byte_order == 0) {
111 | byte_order_char = '<'; //# little-endian
112 | }
113 | else {
114 | byte_order_char = '>'; //# big-endian
115 | }
116 |
117 | //# 12-bytes floating-point property description
118 | //# not read, assumed to be IEEE standard format
119 | this.offset += 12
120 |
121 | return byte_order_char + dtype_char + length_in_bytes.toFixed();
122 | }
123 |
124 | _determine_dtype_string(datatype_msg) {
125 | //""" Return the NumPy dtype for a string class. """
126 | return 'S' + datatype_msg.get('size').toFixed();
127 | }
128 |
129 | _determine_dtype_vlen(datatype_msg) {
130 | //""" Return the dtype information for a variable length class. """
131 | let vlen_type = datatype_msg.get('class_bit_field_0') & 0x01;
132 | if (vlen_type != 1) {
133 | return ['VLEN_SEQUENCE', 0, 0];
134 | }
135 | let padding_type = datatype_msg.get('class_bit_field_0') >> 4; //# bits 4-7
136 | let character_set = datatype_msg.get('class_bit_field_1') & 0x01;
137 | return ['VLEN_STRING', padding_type, character_set];
138 | }
_determine_dtype_compound(datatype_msg) {
    // Compound (struct-like) datatypes are not supported by this reader.
    throw "Compound type not yet implemented!";
}
142 | }
143 |
144 |
// Header layout of the HDF5 Datatype message (struct format codes:
// 'B' = uint8, 'I' = uint32, all little-endian).
var DATATYPE_MSG = new Map([
    ['class_and_version', 'B'],   // low nibble: class, high nibble: version
    ['class_bit_field_0', 'B'],
    ['class_bit_field_1', 'B'],
    ['class_bit_field_2', 'B'],
    ['size', 'I'],                // size of the datatype element in bytes
]);
var DATATYPE_MSG_SIZE = _structure_size(DATATYPE_MSG);
153 |
154 |
// Version-1 compound datatype member (property) description layout.
// NOTE(review): the member name precedes this structure in the file and is
// not part of this map — confirm against the caller when compound support
// is implemented.
var COMPOUND_PROP_DESC_V1 = new Map([
    ['offset', 'I'],          // byte offset of the member within the compound
    ['dimensionality', 'B'],
    ['reserved_0', 'B'],
    ['reserved_1', 'B'],
    ['reserved_2', 'B'],
    ['permutation', 'I'],
    ['reserved_3', 'I'],
    ['dim_size_1', 'I'],
    ['dim_size_2', 'I'],
    ['dim_size_3', 'I'],
    ['dim_size_4', 'I'],
]);
var COMPOUND_PROP_DESC_V1_SIZE = _structure_size(COMPOUND_PROP_DESC_V1);
169 |
170 |
//# Datatype message, datatype classes
// Class identifiers stored in the low nibble of 'class_and_version'.
var DATATYPE_FIXED_POINT = 0;
var DATATYPE_FLOATING_POINT = 1;
var DATATYPE_TIME = 2;
var DATATYPE_STRING = 3;
var DATATYPE_BITFIELD = 4;
var DATATYPE_OPAQUE = 5;
var DATATYPE_COMPOUND = 6;
var DATATYPE_REFERENCE = 7;
var DATATYPE_ENUMERATED = 8;
var DATATYPE_VARIABLE_LENGTH = 9;
var DATATYPE_ARRAY = 10;
--------------------------------------------------------------------------------
/esm/filters.js:
--------------------------------------------------------------------------------
1 | import * as pako from '../node_modules/pako/dist/pako.esm.mjs';
2 | import { struct } from './core.js';
3 |
const zlib_decompress = function (buf, itemsize) {
    // Inflate a zlib/DEFLATE-compressed chunk.  `itemsize` is unused here
    // but kept so every filter shares the (ArrayBuffer, itemsize) signature.
    const compressed = new Uint8Array(buf);
    return pako.inflate(compressed).buffer;
}
8 |
const unshuffle = function (buf, itemsize) {
    // Undo the HDF5 shuffle filter: the shuffled buffer stores all first
    // bytes of every element, then all second bytes, and so on.  Interleave
    // the bytes back into element order.
    const nbytes = buf.byteLength;
    const nelements = Math.floor(nbytes / itemsize);
    const shuffled = new Uint8Array(buf);
    const out = new Uint8Array(nbytes);
    let src = 0;
    for (let byte_index = 0; byte_index < itemsize; byte_index++) {
        for (let elem = 0; elem < nelements; elem++) {
            out[elem * itemsize + byte_index] = shuffled[src++];
        }
    }
    return out.buffer;
}
21 |
const fletch32 = function (buf, itemsize) {
    // Validate the trailing fletcher32 checksum (throws on mismatch),
    // then return the payload without it.
    _verify_fletcher32(buf);
    //# strip off 4-byte checksum from end of buffer
    return buf.slice(0, -4);
}
27 |
function _verify_fletcher32(chunk_buffer) {
    //""" Verify a chunk with a fletcher32 checksum. """
    // Computes the checksum over all bytes except the trailing 4-byte
    // stored checksum, then compares against the stored value.  Throws
    // on mismatch, returns true on success.
    //# calculate checksums
    var odd_chunk_buffer = ((chunk_buffer.byteLength % 2) != 0);
    var data_length = chunk_buffer.byteLength - 4;
    var view = new DataView(chunk_buffer);

    // Fletcher-32 accumulators over little-endian 16-bit words,
    // reduced modulo 65535.
    var sum1 = 0;
    var sum2 = 0;
    for (var offset=0; offset<(data_length-1); offset+=2) {
        let datum = view.getUint16(offset, true); // little-endian
        sum1 = (sum1 + datum) % 65535
        sum2 = (sum2 + sum1) % 65535
    }
    if (odd_chunk_buffer) {
        // process the last item:
        // (odd-length data leaves one trailing byte not covered by the
        // 16-bit loop above; fold it in as a single byte)
        let datum = view.getUint8(data_length-1);
        sum1 = (sum1 + datum) % 65535
        sum2 = (sum2 + sum1) % 65535
    }

    //# extract stored checksums
    // Stored as two big-endian uint16 values at the end of the chunk.
    var [ref_sum1, ref_sum2] = struct.unpack_from('>HH', chunk_buffer, data_length); // .fromstring(chunk_buffer[-4:], '>u2')
    ref_sum1 = ref_sum1 % 65535
    ref_sum2 = ref_sum2 % 65535

    //# compare
    if (sum1 != ref_sum1 || sum2 != ref_sum2) {
        throw 'ValueError("fletcher32 checksum invalid")';
    }
    return true
}
60 |
//# IV.A.2.l The Data Storage - Filter Pipeline message
// Filter identifiers from the HDF5 specification.  Declared uniformly with
// `const` (previously a mix of `var` and `const` in the same run).
const RESERVED_FILTER = 0;
const GZIP_DEFLATE_FILTER = 1;
const SHUFFLE_FILTER = 2;
const FLETCH32_FILTER = 3;
const SZIP_FILTER = 4;
const NBIT_FILTER = 5;
const SCALEOFFSET_FILTER = 6;
69 |
// To register a new filter, add a function (ArrayBuffer) => ArrayBuffer
// to the following map, using a key that corresponds to filter_id (int)
export const Filters = new Map([
    [GZIP_DEFLATE_FILTER, zlib_decompress],
    [SHUFFLE_FILTER, unshuffle],
    [FLETCH32_FILTER, fletch32]
]);
--------------------------------------------------------------------------------
/esm/high-level.js:
--------------------------------------------------------------------------------
1 | import {DataObjects} from './dataobjects.js';
2 | import {SuperBlock} from './misc-low-level.js';
3 | export { Filters } from './filters.js';
4 |
export class Group {
    /*
    An HDF5 Group which may hold attributes, datasets, or other groups.
    Attributes
    ----------
    attrs : dict
        Attributes for this group.
    name : str
        Full path to this group.
    file : File
        File instance where this group resides.
    parent : Group
        Group instance containing this group.
    */

    /**
     *
     *
     * @memberof Group
     * @member {Group|File} parent;
     * @member {File} file;
     * @member {string} name;
     * @member {DataObjects} _dataobjects;
     * @member {Object} _attrs;
     * @member {Array} _keys;
     */

    /**
     *
     * @param {string} name
     * @param {DataObjects} dataobjects
     * @param {Group} [parent]
     * @param {boolean} [getterProxy=false]
     * @returns {Group}
     */
    constructor(name, dataobjects, parent, getterProxy=false) {
        if (parent == null) {
            // A root group is its own parent and its own file.
            this.parent = this;
            this.file = this;
        }
        else {
            this.parent = parent;
            this.file = parent.file;
        }
        this.name = name;

        this._links = dataobjects.get_links();
        this._dataobjects = dataobjects;
        this._attrs = null; // cached property
        this._keys = null;
        if (getterProxy) {
            // Allow `group.child_name` access in addition to group.get(name).
            return new Proxy(this, groupGetHandler);
        }
    }

    /** Link (child) names in this group.  Returns a copy; cached internally. */
    get keys() {
        if (this._keys == null) {
            this._keys = Object.keys(this._links);
        }
        return this._keys.slice();
    }

    /** Child objects (Group or Dataset instances) of this group. */
    get values() {
        return this.keys.map(k => this.get(k));
    }

    /** Number of links in this group. */
    length() {
        return this.keys.length;
    }

    /**
     * Dereference a Reference (an object-header address) to its object.
     * Throws if the reference is falsy or not found in the file.
     */
    _dereference(ref) {
        if (!ref) {
            throw 'cannot deference null reference';
        }
        let obj = this.file._get_object_by_address(ref);
        if (obj == null) {
            throw 'reference not found in file';
        }
        return obj
    }

    /**
     * Look up a child by path (like h5py's group[y]).
     * A numeric argument is treated as an object reference.
     */
    get(y) {
        //""" x.__getitem__(y) <==> x[y] """
        if (typeof(y) == 'number') {
            return this._dereference(y);
        }

        var path = normpath(y);
        if (path == '/') {
            return this.file;
        }

        if (path == '.') {
            return this
        }
        // Absolute paths are resolved from the file root.
        if (/^\//.test(path)) {
            return this.file.get(path.slice(1));
        }

        // Split off the first path component; the remainder (if any)
        // is resolved recursively at the end of this method.
        if (posix_dirname(path) != '') {
            var [next_obj, additional_obj] = path.split(/\/(.*)/);
        }
        else {
            var next_obj = path;
            var additional_obj = '.'
        }
        if (!(next_obj in this._links)) {
            throw next_obj + ' not found in group';
        }

        var obj_name = normpath(this.name + '/' + next_obj);
        let link_target = this._links[next_obj];

        // A string link target is a soft link: follow it, returning null
        // if the target cannot be resolved (dangling link).
        if (typeof(link_target) == "string") {
            try {
                return this.get(link_target)
            } catch (error) {
                return null
            }
        }

        var dataobjs = new DataObjects(this.file._fh, link_target);
        if (dataobjs.is_dataset) {
            if (additional_obj != '.') {
                throw obj_name + ' is a dataset, not a group';
            }
            return new Dataset(obj_name, dataobjs, this);
        }
        else {
            var new_group = new Group(obj_name, dataobjs, this);
            return new_group.get(additional_obj);
        }
    }

    visit(func) {
        /*
        Recursively visit all names in the group and subgroups.
        func should be a callable with the signature:
            func(name) -> None or return value
        Returning None continues iteration, return anything else stops and
        return that value from the visit method.
        */
        return this.visititems((name, obj) => func(name));
    }

    visititems(func) {
        /*
        Recursively visit all objects in this group and subgroups.
        func should be a callable with the signature:
            func(name, object) -> None or return value
        Returning None continues iteration, return anything else stops and
        return that value from the visit method.
        */
        var root_name_length = this.name.length;
        if (!(/\/$/.test(this.name))) {
            root_name_length += 1;
        }
        // Breadth-first traversal with an explicit queue.
        var queue = this.values.slice();
        // BUGFIX: this was `while (queue)`, which is always truthy for an
        // array, so an empty queue fell through to `obj.name` on undefined.
        // Also removed a leftover debug console.log.
        while (queue.length > 0) {
            let obj = queue.shift();
            let name = obj.name.slice(root_name_length);
            let ret = func(name, obj);
            if (ret != null) {
                return ret
            }
            if (obj instanceof Group) {
                queue = queue.concat(obj.values);
            }
        }
        return null
    }

    get attrs() {
        //""" attrs attribute: cached on first access. """
        if (this._attrs == null) {
            this._attrs = this._dataobjects.get_attributes();
        }
        return this._attrs
    }

}
196 |
const groupGetHandler = {
    // Proxy trap: prefer real properties/methods on the Group instance;
    // otherwise treat the property name as a child path and look it up.
    get(target, prop, receiver) {
        return (prop in target) ? target[prop] : target.get(prop);
    }
};
205 |
206 |
export class File extends Group {
    /*
    Open a HDF5 file.
    Note in addition to having file specific methods the File object also
    inherit the full interface of **Group**.
    File is also a context manager and therefore supports the with statement.
    Files opened by the class will be closed after the with block, file-like
    object are not closed.
    Parameters
    ----------
    filename : str or file-like
        Name of file (string or unicode) or file like object which has read
        and seek methods which behaved like a Python file object.
    Attributes
    ----------
    filename : str
        Name of the file on disk, None if not available.
    mode : str
        String indicating that the file is open readonly ("r").
    userblock_size : int
        Size of the user block in bytes (currently always 0).
    */

    /**
     * @param {*} fh - buffer-like object holding the file contents
     * @param {string} [filename] - optional display name for the file
     */
    constructor (fh, filename) {
        //""" initalize. """
        // Locate the root group's data objects via the superblock.
        var superblock = new SuperBlock(fh, 0);
        var offset = superblock.offset_to_dataobjects;
        var dataobjects = new DataObjects(fh, offset);
        super('/', dataobjects, null);
        this.parent = this;

        this._fh = fh
        this.filename = filename || '';

        this.file = this;
        this.mode = 'r';
        this.userblock_size = 0;
    }

    _get_object_by_address(obj_addr) {
        //""" Return the object pointed to by a given address. """
        if (this._dataobjects.offset == obj_addr) {
            return this
        }
        // BUGFIX: the previous callback used a braced arrow body without a
        // `return` (so it always yielded undefined) and treated the first
        // argument — the name string — as the object.  visititems invokes
        // func(name, obj); return the object whose header offset matches
        // (a non-null return stops the traversal).
        return this.visititems(
            (name, obj) => (obj._dataobjects.offset == obj_addr) ? obj : null
        );
    }
}
261 |
export class Dataset extends Array {
    /*
    A HDF5 Dataset containing an n-dimensional array and meta-data attributes.
    Attributes
    ----------
    shape : tuple
        Dataset dimensions.
    dtype : dtype
        Dataset's type.
    size : int
        Total number of elements in the dataset.
    chunks : tuple or None
        Chunk shape, or None if chunked storage is not used.
    compression : str or None
        Compression filter used on dataset. None if compression is not enabled
        for this dataset.
    compression_opts : dict or None
        Options for the compression filter.
    scaleoffset : dict or None
        Setting for the HDF5 scale-offset filter, or None if scale-offset
        compression is not used for this dataset.
    shuffle : bool
        Whether the shuffle filter is applied for this dataset.
    fletcher32 : bool
        Whether the Fletcher32 checksumming is enabled for this dataset.
    fillvalue : float or None
        Value indicating uninitialized portions of the dataset. None if no
        fill value has been defined.
    dim : int
        Number of dimensions.
    dims : None
        Dimension scales.
    attrs : dict
        Attributes for this dataset.
    name : str
        Full path to this dataset.
    file : File
        File instance where this dataset resides.
    parent : Group
        Group instance containing this dataset.
    */

    /**
     *
     *
     * @memberof Dataset
     * @member {Group|File} parent;
     * @member {File} file;
     * @member {string} name;
     * @member {DataObjects} _dataobjects;
     * @member {Object} _attrs;
     * @member {string} _astype;
     */

    /**
     * @param {string} name - full path of the dataset
     * @param {DataObjects} dataobjects - low-level data objects for the dataset
     * @param {Group} parent - group that contains this dataset
     */
    constructor(name, dataobjects, parent) {
        //""" initalize. """
        super();
        this.parent = parent;
        this.file = parent.file
        this.name = name;

        this._dataobjects = dataobjects
        this._attrs = null;  // lazily-filled cache used by the attrs getter
        this._astype = null;
    }

    /** The dataset's values, optionally converted via astype(). */
    get value() {
        var data = this._dataobjects.get_data();
        if (this._astype == null) {
            return data
        }
        // NOTE(review): assumes get_data() returns an object with an
        // astype method when _astype is set — confirm against dataobjects.
        return data.astype(this._astype);
    }

    get shape() {
        return this._dataobjects.shape;
    }

    get attrs() {
        // BUGFIX/consistency: cache attributes in the declared _attrs field
        // on first access, matching Group.attrs (previously every access
        // re-read the attributes from the data objects).
        if (this._attrs == null) {
            this._attrs = this._dataobjects.get_attributes();
        }
        return this._attrs;
    }

    get dtype() {
        return this._dataobjects.dtype;
    }

    get fillvalue() {
        return this._dataobjects.fillvalue;
    }
}
358 |
359 |
function posix_dirname(p) {
    // Equivalent of Python's posixpath.dirname: everything up to the last
    // '/', with a trailing slash stripped unless the head is all slashes
    // (i.e. the root).
    const sep = '/';
    const cut = p.lastIndexOf(sep) + 1;
    let head = p.slice(0, cut);
    const only_separators = new RegExp('^' + sep + '+$');
    const trailing_separator = new RegExp(sep + '$');
    if (head && !only_separators.test(head)) {
        head = head.replace(trailing_separator, '');
    }
    return head;
}
371 |
function normpath(path) {
    // Collapse runs of consecutive slashes into a single slash.
    // (Does not resolve '.' or '..' components.)
    return path.replace(/\/{2,}/g, '/');
}
376 |
--------------------------------------------------------------------------------
/esm/misc-low-level.js:
--------------------------------------------------------------------------------
1 | import { _structure_size, _padded_size, _unpack_struct_from, struct, assert, _unpack_integer, bitSize } from './core.js';
2 |
3 | export class SuperBlock {
4 | constructor(fh, offset) {
5 | let version_hint = struct.unpack_from(' 0) {
191 | throw "Filter info size not supported on FractalHeap";
192 | }
193 |
194 | if (header.get("btree_address_huge_objects") == UNDEFINED_ADDRESS) {
195 | header.set("btree_address_huge_objects", null);
196 | }
197 | else {
198 | throw "Huge objects not implemented in FractalHeap";
199 | }
200 |
201 | if (header.get("root_block_address") == UNDEFINED_ADDRESS) {
202 | header.set("root_block_address", null);
203 | }
204 |
205 | let nbits = header.get("log2_maximum_heap_size");
206 | let block_offset_size = this._min_size_nbits(nbits);
207 | let h = new Map([
208 | ['signature', '4s'],
209 | ['version', 'B'],
210 | ['heap_header_adddress', 'Q'],
211 | //['block_offset', `${block_offset_size}s`]
212 | ['block_offset', `${block_offset_size}B`]
213 | //this._int_format(block_offset_size)]
214 | ]);
215 | this.indirect_block_header = new Map(h); // make shallow copy;
216 | this.indirect_block_header_size = _structure_size(h);
217 | if ((header.get("flags") & 2) == 2) {
218 | h.set('checksum', 'I');
219 | }
220 | this.direct_block_header = h;
221 | this.direct_block_header_size = _structure_size(h);
222 |
223 | let maximum_dblock_size = header.get('maximum_direct_block_size');
224 | this._managed_object_offset_size = this._min_size_nbits(nbits);
225 | let value = Math.min(maximum_dblock_size, header.get('max_managed_object_size'));
226 | this._managed_object_length_size = this._min_size_integer(value);
227 |
228 | let start_block_size = header.get('starting_block_size');
229 | let table_width = header.get('table_width');
230 | if (!(start_block_size > 0)) {
231 | throw "Starting block size == 0 not implemented";
232 | }
233 |
234 | let log2_maximum_dblock_size = Number(Math.floor(Math.log2(maximum_dblock_size)));
235 | assert(1n << BigInt(log2_maximum_dblock_size) == maximum_dblock_size);
236 |
237 | let log2_start_block_size = Number(Math.floor(Math.log2(start_block_size)));
238 | assert(1n << BigInt(log2_start_block_size) == start_block_size);
239 |
240 | this._max_direct_nrows = log2_maximum_dblock_size - log2_start_block_size + 2;
241 |
242 | let log2_table_width = Math.floor(Math.log2(table_width)); // regular number (H, not Q format)
243 | assert(1 << log2_table_width == table_width);
244 | this._indirect_nrows_sub = log2_table_width + log2_start_block_size - 1;
245 |
246 | this.header = header;
247 | this.nobjects = header.get("managed_object_count") + header.get("huge_object_count") + header.get("tiny_object_count");
248 |
249 | let managed = [];
250 | let root_address = header.get("root_block_address");
251 | let nrows = 0;
252 | if (root_address != null) {
253 | nrows = header.get("indirect_current_rows_count");
254 | }
255 | if (nrows > 0) {
256 | for (let data of this._iter_indirect_block(fh, root_address, nrows)) {
257 | managed.push(data);
258 | }
259 | }
260 | else {
261 | let data = this._read_direct_block(fh, root_address, start_block_size);
262 | managed.push(data);
263 | }
264 | let data_size = managed.reduce((p, c) => p + c.byteLength, 0);
265 | let combined = new Uint8Array(data_size);
266 | let moffset = 0;
267 | managed.forEach((m) => {combined.set(new Uint8Array(m), moffset); moffset += m.byteLength});
268 | this.managed = combined.buffer;
269 | }
270 |
271 | _read_direct_block(fh, offset, block_size) {
272 | let data = fh.slice(offset, offset + block_size);
273 | let header = _unpack_struct_from(this.direct_block_header, data)
274 | assert(header.get("signature") == "FHDB");
275 | return data;
276 | }
277 |
get_data(heapid) {
    // Return the bytes stored in this fractal heap for the given heap ID.
    // NOTE(review): the next line appears garbled in the provided source
    // (original lines seem to have been collapsed together, and the
    // `idtype` variable used below has no visible declaration here);
    // code left byte-for-byte untouched — reconcile against the original
    // file before editing.
    let firstbyte = struct.unpack_from('> 4) & 3; // bit 4-5
    let version = firstbyte >> 6 // bit 6-7
    let data_offset = 1;
    if (idtype == 0) { // managed
        assert(version == 0);
        // Managed IDs encode (offset, size) into the combined managed
        // block buffer built by the constructor.
        let nbytes = this._managed_object_offset_size;
        let offset = _unpack_integer(nbytes, heapid, data_offset);
        // add heap offset:
        //offset += this.offset;
        data_offset += nbytes;

        nbytes = this._managed_object_length_size;
        let size = _unpack_integer(nbytes, heapid, data_offset);

        return this.managed.slice(offset, offset + size);
    }
    else if (idtype == 1) { // tiny
        throw "tiny objectID not supported in FractalHeap"
    }
    else if (idtype == 2) { // huge
        throw "huge objectID not supported in FractalHeap"
    }
    else {
        throw "unknown objectID type in FractalHeap"
    }
}
308 |
309 | _min_size_integer(integer) {
310 | // """ Calculate the minimal required bytes to contain an integer. """
311 | return this._min_size_nbits(bitSize(integer));
312 | }
313 |
314 | _min_size_nbits(nbits) {
315 | //""" Calculate the minimal required bytes to contain a number of bits. """
316 | return Math.ceil(nbits / 8);
317 | }
318 |
319 | * _iter_indirect_block(fh, offset, nrows) {
320 | let header = _unpack_struct_from(this.indirect_block_header, fh, offset);
321 | offset += this.indirect_block_header_size;
322 | assert(header.get("signature") == "FHIB");
323 | let block_offset_bytes = header.get("block_offset");
324 | // equivalent to python int.from_bytes with byteorder="little":
325 | let block_offset = block_offset_bytes.reduce((p, c, i) => p + (c << (i * 8)), 0);
326 | header.set("block_offset", block_offset);
327 |
328 | let [ndirect, nindirect] = this._indirect_info(nrows);
329 |
330 | let direct_blocks = [];
331 | for (let i = 0; i < ndirect; i++) {
332 | let address = struct.unpack_from('=18"
33 | }
34 | },
35 | "node_modules/@esbuild/android-arm": {
36 | "version": "0.25.0",
37 | "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.0.tgz",
38 | "integrity": "sha512-PTyWCYYiU0+1eJKmw21lWtC+d08JDZPQ5g+kFyxP0V+es6VPPSUhM6zk8iImp2jbV6GwjX4pap0JFbUQN65X1g==",
39 | "cpu": [
40 | "arm"
41 | ],
42 | "dev": true,
43 | "license": "MIT",
44 | "optional": true,
45 | "os": [
46 | "android"
47 | ],
48 | "engines": {
49 | "node": ">=18"
50 | }
51 | },
52 | "node_modules/@esbuild/android-arm64": {
53 | "version": "0.25.0",
54 | "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.0.tgz",
55 | "integrity": "sha512-grvv8WncGjDSyUBjN9yHXNt+cq0snxXbDxy5pJtzMKGmmpPxeAmAhWxXI+01lU5rwZomDgD3kJwulEnhTRUd6g==",
56 | "cpu": [
57 | "arm64"
58 | ],
59 | "dev": true,
60 | "license": "MIT",
61 | "optional": true,
62 | "os": [
63 | "android"
64 | ],
65 | "engines": {
66 | "node": ">=18"
67 | }
68 | },
69 | "node_modules/@esbuild/android-x64": {
70 | "version": "0.25.0",
71 | "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.0.tgz",
72 | "integrity": "sha512-m/ix7SfKG5buCnxasr52+LI78SQ+wgdENi9CqyCXwjVR2X4Jkz+BpC3le3AoBPYTC9NHklwngVXvbJ9/Akhrfg==",
73 | "cpu": [
74 | "x64"
75 | ],
76 | "dev": true,
77 | "license": "MIT",
78 | "optional": true,
79 | "os": [
80 | "android"
81 | ],
82 | "engines": {
83 | "node": ">=18"
84 | }
85 | },
86 | "node_modules/@esbuild/darwin-arm64": {
87 | "version": "0.25.0",
88 | "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.0.tgz",
89 | "integrity": "sha512-mVwdUb5SRkPayVadIOI78K7aAnPamoeFR2bT5nszFUZ9P8UpK4ratOdYbZZXYSqPKMHfS1wdHCJk1P1EZpRdvw==",
90 | "cpu": [
91 | "arm64"
92 | ],
93 | "dev": true,
94 | "license": "MIT",
95 | "optional": true,
96 | "os": [
97 | "darwin"
98 | ],
99 | "engines": {
100 | "node": ">=18"
101 | }
102 | },
103 | "node_modules/@esbuild/darwin-x64": {
104 | "version": "0.25.0",
105 | "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.0.tgz",
106 | "integrity": "sha512-DgDaYsPWFTS4S3nWpFcMn/33ZZwAAeAFKNHNa1QN0rI4pUjgqf0f7ONmXf6d22tqTY+H9FNdgeaAa+YIFUn2Rg==",
107 | "cpu": [
108 | "x64"
109 | ],
110 | "dev": true,
111 | "license": "MIT",
112 | "optional": true,
113 | "os": [
114 | "darwin"
115 | ],
116 | "engines": {
117 | "node": ">=18"
118 | }
119 | },
120 | "node_modules/@esbuild/freebsd-arm64": {
121 | "version": "0.25.0",
122 | "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.0.tgz",
123 | "integrity": "sha512-VN4ocxy6dxefN1MepBx/iD1dH5K8qNtNe227I0mnTRjry8tj5MRk4zprLEdG8WPyAPb93/e4pSgi1SoHdgOa4w==",
124 | "cpu": [
125 | "arm64"
126 | ],
127 | "dev": true,
128 | "license": "MIT",
129 | "optional": true,
130 | "os": [
131 | "freebsd"
132 | ],
133 | "engines": {
134 | "node": ">=18"
135 | }
136 | },
137 | "node_modules/@esbuild/freebsd-x64": {
138 | "version": "0.25.0",
139 | "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.0.tgz",
140 | "integrity": "sha512-mrSgt7lCh07FY+hDD1TxiTyIHyttn6vnjesnPoVDNmDfOmggTLXRv8Id5fNZey1gl/V2dyVK1VXXqVsQIiAk+A==",
141 | "cpu": [
142 | "x64"
143 | ],
144 | "dev": true,
145 | "license": "MIT",
146 | "optional": true,
147 | "os": [
148 | "freebsd"
149 | ],
150 | "engines": {
151 | "node": ">=18"
152 | }
153 | },
154 | "node_modules/@esbuild/linux-arm": {
155 | "version": "0.25.0",
156 | "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.0.tgz",
157 | "integrity": "sha512-vkB3IYj2IDo3g9xX7HqhPYxVkNQe8qTK55fraQyTzTX/fxaDtXiEnavv9geOsonh2Fd2RMB+i5cbhu2zMNWJwg==",
158 | "cpu": [
159 | "arm"
160 | ],
161 | "dev": true,
162 | "license": "MIT",
163 | "optional": true,
164 | "os": [
165 | "linux"
166 | ],
167 | "engines": {
168 | "node": ">=18"
169 | }
170 | },
171 | "node_modules/@esbuild/linux-arm64": {
172 | "version": "0.25.0",
173 | "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.0.tgz",
174 | "integrity": "sha512-9QAQjTWNDM/Vk2bgBl17yWuZxZNQIF0OUUuPZRKoDtqF2k4EtYbpyiG5/Dk7nqeK6kIJWPYldkOcBqjXjrUlmg==",
175 | "cpu": [
176 | "arm64"
177 | ],
178 | "dev": true,
179 | "license": "MIT",
180 | "optional": true,
181 | "os": [
182 | "linux"
183 | ],
184 | "engines": {
185 | "node": ">=18"
186 | }
187 | },
188 | "node_modules/@esbuild/linux-ia32": {
189 | "version": "0.25.0",
190 | "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.0.tgz",
191 | "integrity": "sha512-43ET5bHbphBegyeqLb7I1eYn2P/JYGNmzzdidq/w0T8E2SsYL1U6un2NFROFRg1JZLTzdCoRomg8Rvf9M6W6Gg==",
192 | "cpu": [
193 | "ia32"
194 | ],
195 | "dev": true,
196 | "license": "MIT",
197 | "optional": true,
198 | "os": [
199 | "linux"
200 | ],
201 | "engines": {
202 | "node": ">=18"
203 | }
204 | },
205 | "node_modules/@esbuild/linux-loong64": {
206 | "version": "0.25.0",
207 | "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.0.tgz",
208 | "integrity": "sha512-fC95c/xyNFueMhClxJmeRIj2yrSMdDfmqJnyOY4ZqsALkDrrKJfIg5NTMSzVBr5YW1jf+l7/cndBfP3MSDpoHw==",
209 | "cpu": [
210 | "loong64"
211 | ],
212 | "dev": true,
213 | "license": "MIT",
214 | "optional": true,
215 | "os": [
216 | "linux"
217 | ],
218 | "engines": {
219 | "node": ">=18"
220 | }
221 | },
222 | "node_modules/@esbuild/linux-mips64el": {
223 | "version": "0.25.0",
224 | "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.0.tgz",
225 | "integrity": "sha512-nkAMFju7KDW73T1DdH7glcyIptm95a7Le8irTQNO/qtkoyypZAnjchQgooFUDQhNAy4iu08N79W4T4pMBwhPwQ==",
226 | "cpu": [
227 | "mips64el"
228 | ],
229 | "dev": true,
230 | "license": "MIT",
231 | "optional": true,
232 | "os": [
233 | "linux"
234 | ],
235 | "engines": {
236 | "node": ">=18"
237 | }
238 | },
239 | "node_modules/@esbuild/linux-ppc64": {
240 | "version": "0.25.0",
241 | "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.0.tgz",
242 | "integrity": "sha512-NhyOejdhRGS8Iwv+KKR2zTq2PpysF9XqY+Zk77vQHqNbo/PwZCzB5/h7VGuREZm1fixhs4Q/qWRSi5zmAiO4Fw==",
243 | "cpu": [
244 | "ppc64"
245 | ],
246 | "dev": true,
247 | "license": "MIT",
248 | "optional": true,
249 | "os": [
250 | "linux"
251 | ],
252 | "engines": {
253 | "node": ">=18"
254 | }
255 | },
256 | "node_modules/@esbuild/linux-riscv64": {
257 | "version": "0.25.0",
258 | "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.0.tgz",
259 | "integrity": "sha512-5S/rbP5OY+GHLC5qXp1y/Mx//e92L1YDqkiBbO9TQOvuFXM+iDqUNG5XopAnXoRH3FjIUDkeGcY1cgNvnXp/kA==",
260 | "cpu": [
261 | "riscv64"
262 | ],
263 | "dev": true,
264 | "license": "MIT",
265 | "optional": true,
266 | "os": [
267 | "linux"
268 | ],
269 | "engines": {
270 | "node": ">=18"
271 | }
272 | },
273 | "node_modules/@esbuild/linux-s390x": {
274 | "version": "0.25.0",
275 | "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.0.tgz",
276 | "integrity": "sha512-XM2BFsEBz0Fw37V0zU4CXfcfuACMrppsMFKdYY2WuTS3yi8O1nFOhil/xhKTmE1nPmVyvQJjJivgDT+xh8pXJA==",
277 | "cpu": [
278 | "s390x"
279 | ],
280 | "dev": true,
281 | "license": "MIT",
282 | "optional": true,
283 | "os": [
284 | "linux"
285 | ],
286 | "engines": {
287 | "node": ">=18"
288 | }
289 | },
290 | "node_modules/@esbuild/linux-x64": {
291 | "version": "0.25.0",
292 | "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.0.tgz",
293 | "integrity": "sha512-9yl91rHw/cpwMCNytUDxwj2XjFpxML0y9HAOH9pNVQDpQrBxHy01Dx+vaMu0N1CKa/RzBD2hB4u//nfc+Sd3Cw==",
294 | "cpu": [
295 | "x64"
296 | ],
297 | "dev": true,
298 | "license": "MIT",
299 | "optional": true,
300 | "os": [
301 | "linux"
302 | ],
303 | "engines": {
304 | "node": ">=18"
305 | }
306 | },
307 | "node_modules/@esbuild/netbsd-arm64": {
308 | "version": "0.25.0",
309 | "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.0.tgz",
310 | "integrity": "sha512-RuG4PSMPFfrkH6UwCAqBzauBWTygTvb1nxWasEJooGSJ/NwRw7b2HOwyRTQIU97Hq37l3npXoZGYMy3b3xYvPw==",
311 | "cpu": [
312 | "arm64"
313 | ],
314 | "dev": true,
315 | "license": "MIT",
316 | "optional": true,
317 | "os": [
318 | "netbsd"
319 | ],
320 | "engines": {
321 | "node": ">=18"
322 | }
323 | },
324 | "node_modules/@esbuild/netbsd-x64": {
325 | "version": "0.25.0",
326 | "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.0.tgz",
327 | "integrity": "sha512-jl+qisSB5jk01N5f7sPCsBENCOlPiS/xptD5yxOx2oqQfyourJwIKLRA2yqWdifj3owQZCL2sn6o08dBzZGQzA==",
328 | "cpu": [
329 | "x64"
330 | ],
331 | "dev": true,
332 | "license": "MIT",
333 | "optional": true,
334 | "os": [
335 | "netbsd"
336 | ],
337 | "engines": {
338 | "node": ">=18"
339 | }
340 | },
341 | "node_modules/@esbuild/openbsd-arm64": {
342 | "version": "0.25.0",
343 | "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.0.tgz",
344 | "integrity": "sha512-21sUNbq2r84YE+SJDfaQRvdgznTD8Xc0oc3p3iW/a1EVWeNj/SdUCbm5U0itZPQYRuRTW20fPMWMpcrciH2EJw==",
345 | "cpu": [
346 | "arm64"
347 | ],
348 | "dev": true,
349 | "license": "MIT",
350 | "optional": true,
351 | "os": [
352 | "openbsd"
353 | ],
354 | "engines": {
355 | "node": ">=18"
356 | }
357 | },
358 | "node_modules/@esbuild/openbsd-x64": {
359 | "version": "0.25.0",
360 | "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.0.tgz",
361 | "integrity": "sha512-2gwwriSMPcCFRlPlKx3zLQhfN/2WjJ2NSlg5TKLQOJdV0mSxIcYNTMhk3H3ulL/cak+Xj0lY1Ym9ysDV1igceg==",
362 | "cpu": [
363 | "x64"
364 | ],
365 | "dev": true,
366 | "license": "MIT",
367 | "optional": true,
368 | "os": [
369 | "openbsd"
370 | ],
371 | "engines": {
372 | "node": ">=18"
373 | }
374 | },
375 | "node_modules/@esbuild/sunos-x64": {
376 | "version": "0.25.0",
377 | "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.0.tgz",
378 | "integrity": "sha512-bxI7ThgLzPrPz484/S9jLlvUAHYMzy6I0XiU1ZMeAEOBcS0VePBFxh1JjTQt3Xiat5b6Oh4x7UC7IwKQKIJRIg==",
379 | "cpu": [
380 | "x64"
381 | ],
382 | "dev": true,
383 | "license": "MIT",
384 | "optional": true,
385 | "os": [
386 | "sunos"
387 | ],
388 | "engines": {
389 | "node": ">=18"
390 | }
391 | },
392 | "node_modules/@esbuild/win32-arm64": {
393 | "version": "0.25.0",
394 | "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.0.tgz",
395 | "integrity": "sha512-ZUAc2YK6JW89xTbXvftxdnYy3m4iHIkDtK3CLce8wg8M2L+YZhIvO1DKpxrd0Yr59AeNNkTiic9YLf6FTtXWMw==",
396 | "cpu": [
397 | "arm64"
398 | ],
399 | "dev": true,
400 | "license": "MIT",
401 | "optional": true,
402 | "os": [
403 | "win32"
404 | ],
405 | "engines": {
406 | "node": ">=18"
407 | }
408 | },
409 | "node_modules/@esbuild/win32-ia32": {
410 | "version": "0.25.0",
411 | "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.0.tgz",
412 | "integrity": "sha512-eSNxISBu8XweVEWG31/JzjkIGbGIJN/TrRoiSVZwZ6pkC6VX4Im/WV2cz559/TXLcYbcrDN8JtKgd9DJVIo8GA==",
413 | "cpu": [
414 | "ia32"
415 | ],
416 | "dev": true,
417 | "license": "MIT",
418 | "optional": true,
419 | "os": [
420 | "win32"
421 | ],
422 | "engines": {
423 | "node": ">=18"
424 | }
425 | },
426 | "node_modules/@esbuild/win32-x64": {
427 | "version": "0.25.0",
428 | "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.0.tgz",
429 | "integrity": "sha512-ZENoHJBxA20C2zFzh6AI4fT6RraMzjYw4xKWemRTRmRVtN9c5DcH9r/f2ihEkMjOW5eGgrwCslG/+Y/3bL+DHQ==",
430 | "cpu": [
431 | "x64"
432 | ],
433 | "dev": true,
434 | "license": "MIT",
435 | "optional": true,
436 | "os": [
437 | "win32"
438 | ],
439 | "engines": {
440 | "node": ">=18"
441 | }
442 | },
443 | "node_modules/esbuild": {
444 | "version": "0.25.0",
445 | "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.0.tgz",
446 | "integrity": "sha512-BXq5mqc8ltbaN34cDqWuYKyNhX8D/Z0J1xdtdQ8UcIIIyJyz+ZMKUt58tF3SrZ85jcfN/PZYhjR5uDQAYNVbuw==",
447 | "dev": true,
448 | "hasInstallScript": true,
449 | "license": "MIT",
450 | "bin": {
451 | "esbuild": "bin/esbuild"
452 | },
453 | "engines": {
454 | "node": ">=18"
455 | },
456 | "optionalDependencies": {
457 | "@esbuild/aix-ppc64": "0.25.0",
458 | "@esbuild/android-arm": "0.25.0",
459 | "@esbuild/android-arm64": "0.25.0",
460 | "@esbuild/android-x64": "0.25.0",
461 | "@esbuild/darwin-arm64": "0.25.0",
462 | "@esbuild/darwin-x64": "0.25.0",
463 | "@esbuild/freebsd-arm64": "0.25.0",
464 | "@esbuild/freebsd-x64": "0.25.0",
465 | "@esbuild/linux-arm": "0.25.0",
466 | "@esbuild/linux-arm64": "0.25.0",
467 | "@esbuild/linux-ia32": "0.25.0",
468 | "@esbuild/linux-loong64": "0.25.0",
469 | "@esbuild/linux-mips64el": "0.25.0",
470 | "@esbuild/linux-ppc64": "0.25.0",
471 | "@esbuild/linux-riscv64": "0.25.0",
472 | "@esbuild/linux-s390x": "0.25.0",
473 | "@esbuild/linux-x64": "0.25.0",
474 | "@esbuild/netbsd-arm64": "0.25.0",
475 | "@esbuild/netbsd-x64": "0.25.0",
476 | "@esbuild/openbsd-arm64": "0.25.0",
477 | "@esbuild/openbsd-x64": "0.25.0",
478 | "@esbuild/sunos-x64": "0.25.0",
479 | "@esbuild/win32-arm64": "0.25.0",
480 | "@esbuild/win32-ia32": "0.25.0",
481 | "@esbuild/win32-x64": "0.25.0"
482 | }
483 | },
484 | "node_modules/pako": {
485 | "version": "2.1.0",
486 | "resolved": "https://registry.npmjs.org/pako/-/pako-2.1.0.tgz",
487 | "integrity": "sha512-w+eufiZ1WuJYgPXbV/PO3NCMEc3xqylkKHzp8bxp1uW4qaSNQUkwmLLEc3kKsfz8lpV1F8Ht3U1Cm+9Srog2ug=="
488 | }
489 | },
490 | "dependencies": {
491 | "@esbuild/aix-ppc64": {
492 | "version": "0.25.0",
493 | "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.0.tgz",
494 | "integrity": "sha512-O7vun9Sf8DFjH2UtqK8Ku3LkquL9SZL8OLY1T5NZkA34+wG3OQF7cl4Ql8vdNzM6fzBbYfLaiRLIOZ+2FOCgBQ==",
495 | "dev": true,
496 | "optional": true
497 | },
498 | "@esbuild/android-arm": {
499 | "version": "0.25.0",
500 | "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.0.tgz",
501 | "integrity": "sha512-PTyWCYYiU0+1eJKmw21lWtC+d08JDZPQ5g+kFyxP0V+es6VPPSUhM6zk8iImp2jbV6GwjX4pap0JFbUQN65X1g==",
502 | "dev": true,
503 | "optional": true
504 | },
505 | "@esbuild/android-arm64": {
506 | "version": "0.25.0",
507 | "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.0.tgz",
508 | "integrity": "sha512-grvv8WncGjDSyUBjN9yHXNt+cq0snxXbDxy5pJtzMKGmmpPxeAmAhWxXI+01lU5rwZomDgD3kJwulEnhTRUd6g==",
509 | "dev": true,
510 | "optional": true
511 | },
512 | "@esbuild/android-x64": {
513 | "version": "0.25.0",
514 | "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.0.tgz",
515 | "integrity": "sha512-m/ix7SfKG5buCnxasr52+LI78SQ+wgdENi9CqyCXwjVR2X4Jkz+BpC3le3AoBPYTC9NHklwngVXvbJ9/Akhrfg==",
516 | "dev": true,
517 | "optional": true
518 | },
519 | "@esbuild/darwin-arm64": {
520 | "version": "0.25.0",
521 | "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.0.tgz",
522 | "integrity": "sha512-mVwdUb5SRkPayVadIOI78K7aAnPamoeFR2bT5nszFUZ9P8UpK4ratOdYbZZXYSqPKMHfS1wdHCJk1P1EZpRdvw==",
523 | "dev": true,
524 | "optional": true
525 | },
526 | "@esbuild/darwin-x64": {
527 | "version": "0.25.0",
528 | "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.0.tgz",
529 | "integrity": "sha512-DgDaYsPWFTS4S3nWpFcMn/33ZZwAAeAFKNHNa1QN0rI4pUjgqf0f7ONmXf6d22tqTY+H9FNdgeaAa+YIFUn2Rg==",
530 | "dev": true,
531 | "optional": true
532 | },
533 | "@esbuild/freebsd-arm64": {
534 | "version": "0.25.0",
535 | "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.0.tgz",
536 | "integrity": "sha512-VN4ocxy6dxefN1MepBx/iD1dH5K8qNtNe227I0mnTRjry8tj5MRk4zprLEdG8WPyAPb93/e4pSgi1SoHdgOa4w==",
537 | "dev": true,
538 | "optional": true
539 | },
540 | "@esbuild/freebsd-x64": {
541 | "version": "0.25.0",
542 | "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.0.tgz",
543 | "integrity": "sha512-mrSgt7lCh07FY+hDD1TxiTyIHyttn6vnjesnPoVDNmDfOmggTLXRv8Id5fNZey1gl/V2dyVK1VXXqVsQIiAk+A==",
544 | "dev": true,
545 | "optional": true
546 | },
547 | "@esbuild/linux-arm": {
548 | "version": "0.25.0",
549 | "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.0.tgz",
550 | "integrity": "sha512-vkB3IYj2IDo3g9xX7HqhPYxVkNQe8qTK55fraQyTzTX/fxaDtXiEnavv9geOsonh2Fd2RMB+i5cbhu2zMNWJwg==",
551 | "dev": true,
552 | "optional": true
553 | },
554 | "@esbuild/linux-arm64": {
555 | "version": "0.25.0",
556 | "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.0.tgz",
557 | "integrity": "sha512-9QAQjTWNDM/Vk2bgBl17yWuZxZNQIF0OUUuPZRKoDtqF2k4EtYbpyiG5/Dk7nqeK6kIJWPYldkOcBqjXjrUlmg==",
558 | "dev": true,
559 | "optional": true
560 | },
561 | "@esbuild/linux-ia32": {
562 | "version": "0.25.0",
563 | "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.0.tgz",
564 | "integrity": "sha512-43ET5bHbphBegyeqLb7I1eYn2P/JYGNmzzdidq/w0T8E2SsYL1U6un2NFROFRg1JZLTzdCoRomg8Rvf9M6W6Gg==",
565 | "dev": true,
566 | "optional": true
567 | },
568 | "@esbuild/linux-loong64": {
569 | "version": "0.25.0",
570 | "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.0.tgz",
571 | "integrity": "sha512-fC95c/xyNFueMhClxJmeRIj2yrSMdDfmqJnyOY4ZqsALkDrrKJfIg5NTMSzVBr5YW1jf+l7/cndBfP3MSDpoHw==",
572 | "dev": true,
573 | "optional": true
574 | },
575 | "@esbuild/linux-mips64el": {
576 | "version": "0.25.0",
577 | "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.0.tgz",
578 | "integrity": "sha512-nkAMFju7KDW73T1DdH7glcyIptm95a7Le8irTQNO/qtkoyypZAnjchQgooFUDQhNAy4iu08N79W4T4pMBwhPwQ==",
579 | "dev": true,
580 | "optional": true
581 | },
582 | "@esbuild/linux-ppc64": {
583 | "version": "0.25.0",
584 | "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.0.tgz",
585 | "integrity": "sha512-NhyOejdhRGS8Iwv+KKR2zTq2PpysF9XqY+Zk77vQHqNbo/PwZCzB5/h7VGuREZm1fixhs4Q/qWRSi5zmAiO4Fw==",
586 | "dev": true,
587 | "optional": true
588 | },
589 | "@esbuild/linux-riscv64": {
590 | "version": "0.25.0",
591 | "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.0.tgz",
592 | "integrity": "sha512-5S/rbP5OY+GHLC5qXp1y/Mx//e92L1YDqkiBbO9TQOvuFXM+iDqUNG5XopAnXoRH3FjIUDkeGcY1cgNvnXp/kA==",
593 | "dev": true,
594 | "optional": true
595 | },
596 | "@esbuild/linux-s390x": {
597 | "version": "0.25.0",
598 | "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.0.tgz",
599 | "integrity": "sha512-XM2BFsEBz0Fw37V0zU4CXfcfuACMrppsMFKdYY2WuTS3yi8O1nFOhil/xhKTmE1nPmVyvQJjJivgDT+xh8pXJA==",
600 | "dev": true,
601 | "optional": true
602 | },
603 | "@esbuild/linux-x64": {
604 | "version": "0.25.0",
605 | "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.0.tgz",
606 | "integrity": "sha512-9yl91rHw/cpwMCNytUDxwj2XjFpxML0y9HAOH9pNVQDpQrBxHy01Dx+vaMu0N1CKa/RzBD2hB4u//nfc+Sd3Cw==",
607 | "dev": true,
608 | "optional": true
609 | },
610 | "@esbuild/netbsd-arm64": {
611 | "version": "0.25.0",
612 | "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.0.tgz",
613 | "integrity": "sha512-RuG4PSMPFfrkH6UwCAqBzauBWTygTvb1nxWasEJooGSJ/NwRw7b2HOwyRTQIU97Hq37l3npXoZGYMy3b3xYvPw==",
614 | "dev": true,
615 | "optional": true
616 | },
617 | "@esbuild/netbsd-x64": {
618 | "version": "0.25.0",
619 | "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.0.tgz",
620 | "integrity": "sha512-jl+qisSB5jk01N5f7sPCsBENCOlPiS/xptD5yxOx2oqQfyourJwIKLRA2yqWdifj3owQZCL2sn6o08dBzZGQzA==",
621 | "dev": true,
622 | "optional": true
623 | },
624 | "@esbuild/openbsd-arm64": {
625 | "version": "0.25.0",
626 | "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.0.tgz",
627 | "integrity": "sha512-21sUNbq2r84YE+SJDfaQRvdgznTD8Xc0oc3p3iW/a1EVWeNj/SdUCbm5U0itZPQYRuRTW20fPMWMpcrciH2EJw==",
628 | "dev": true,
629 | "optional": true
630 | },
631 | "@esbuild/openbsd-x64": {
632 | "version": "0.25.0",
633 | "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.0.tgz",
634 | "integrity": "sha512-2gwwriSMPcCFRlPlKx3zLQhfN/2WjJ2NSlg5TKLQOJdV0mSxIcYNTMhk3H3ulL/cak+Xj0lY1Ym9ysDV1igceg==",
635 | "dev": true,
636 | "optional": true
637 | },
638 | "@esbuild/sunos-x64": {
639 | "version": "0.25.0",
640 | "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.0.tgz",
641 | "integrity": "sha512-bxI7ThgLzPrPz484/S9jLlvUAHYMzy6I0XiU1ZMeAEOBcS0VePBFxh1JjTQt3Xiat5b6Oh4x7UC7IwKQKIJRIg==",
642 | "dev": true,
643 | "optional": true
644 | },
645 | "@esbuild/win32-arm64": {
646 | "version": "0.25.0",
647 | "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.0.tgz",
648 | "integrity": "sha512-ZUAc2YK6JW89xTbXvftxdnYy3m4iHIkDtK3CLce8wg8M2L+YZhIvO1DKpxrd0Yr59AeNNkTiic9YLf6FTtXWMw==",
649 | "dev": true,
650 | "optional": true
651 | },
652 | "@esbuild/win32-ia32": {
653 | "version": "0.25.0",
654 | "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.0.tgz",
655 | "integrity": "sha512-eSNxISBu8XweVEWG31/JzjkIGbGIJN/TrRoiSVZwZ6pkC6VX4Im/WV2cz559/TXLcYbcrDN8JtKgd9DJVIo8GA==",
656 | "dev": true,
657 | "optional": true
658 | },
659 | "@esbuild/win32-x64": {
660 | "version": "0.25.0",
661 | "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.0.tgz",
662 | "integrity": "sha512-ZENoHJBxA20C2zFzh6AI4fT6RraMzjYw4xKWemRTRmRVtN9c5DcH9r/f2ihEkMjOW5eGgrwCslG/+Y/3bL+DHQ==",
663 | "dev": true,
664 | "optional": true
665 | },
666 | "esbuild": {
667 | "version": "0.25.0",
668 | "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.0.tgz",
669 | "integrity": "sha512-BXq5mqc8ltbaN34cDqWuYKyNhX8D/Z0J1xdtdQ8UcIIIyJyz+ZMKUt58tF3SrZ85jcfN/PZYhjR5uDQAYNVbuw==",
670 | "dev": true,
671 | "requires": {
672 | "@esbuild/aix-ppc64": "0.25.0",
673 | "@esbuild/android-arm": "0.25.0",
674 | "@esbuild/android-arm64": "0.25.0",
675 | "@esbuild/android-x64": "0.25.0",
676 | "@esbuild/darwin-arm64": "0.25.0",
677 | "@esbuild/darwin-x64": "0.25.0",
678 | "@esbuild/freebsd-arm64": "0.25.0",
679 | "@esbuild/freebsd-x64": "0.25.0",
680 | "@esbuild/linux-arm": "0.25.0",
681 | "@esbuild/linux-arm64": "0.25.0",
682 | "@esbuild/linux-ia32": "0.25.0",
683 | "@esbuild/linux-loong64": "0.25.0",
684 | "@esbuild/linux-mips64el": "0.25.0",
685 | "@esbuild/linux-ppc64": "0.25.0",
686 | "@esbuild/linux-riscv64": "0.25.0",
687 | "@esbuild/linux-s390x": "0.25.0",
688 | "@esbuild/linux-x64": "0.25.0",
689 | "@esbuild/netbsd-arm64": "0.25.0",
690 | "@esbuild/netbsd-x64": "0.25.0",
691 | "@esbuild/openbsd-arm64": "0.25.0",
692 | "@esbuild/openbsd-x64": "0.25.0",
693 | "@esbuild/sunos-x64": "0.25.0",
694 | "@esbuild/win32-arm64": "0.25.0",
695 | "@esbuild/win32-ia32": "0.25.0",
696 | "@esbuild/win32-x64": "0.25.0"
697 | }
698 | },
699 | "pako": {
700 | "version": "2.1.0",
701 | "resolved": "https://registry.npmjs.org/pako/-/pako-2.1.0.tgz",
702 | "integrity": "sha512-w+eufiZ1WuJYgPXbV/PO3NCMEc3xqylkKHzp8bxp1uW4qaSNQUkwmLLEc3kKsfz8lpV1F8Ht3U1Cm+9Srog2ug=="
703 | }
704 | }
705 | }
706 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "jsfive",
3 | "version": "0.4.0",
4 | "description": "A pure javascript HDF5 file reader, based on pyfive",
5 | "main": "./dist/cjs/index.js",
6 | "module": "./dist/esm/index.mjs",
7 | "exports": {
8 | ".": {
9 | "import": "./dist/esm/index.mjs",
10 | "require": "./dist/cjs/index.js"
11 | }
12 | },
13 | "browser": "./dist/browser/hdf5.js",
14 | "scripts": {
15 | "build": "npm run build_browser && npm run build_esm && npm run build_commonjs",
16 | "build_browser": "esbuild index.js --bundle --sourcemap --target=es2020 --outfile=dist/browser/hdf5.js --format=iife --global-name=hdf5",
17 | "build_commonjs": "esbuild index.js --bundle --sourcemap --target=es2020 --outfile=dist/cjs/index.js --format=cjs",
18 | "build_esm": "esbuild index.js --bundle --format=esm --outfile=dist/esm/index.mjs"
19 | },
20 | "repository": {
21 | "type": "git",
22 | "url": "https://github.com/usnistgov/jsfive"
23 | },
24 | "keywords": [
25 | "hdf5",
26 | "javascript",
27 | "es6",
28 | "browser"
29 | ],
30 | "author": "Brian B. Maranville",
31 | "license": "SEE LICENSE IN LICENSE.txt",
32 | "bugs": {
33 | "url": "https://github.com/usnistgov/jsfive/issues"
34 | },
35 | "homepage": "https://github.com/usnistgov/jsfive#readme",
36 | "dependencies": {
37 | "pako": "^2.0.4"
38 | },
39 | "devDependencies": {
40 | "esbuild": "^0.25.0"
41 | },
42 | "publishConfig": {
43 | "access": "public"
44 | }
45 | }
46 |
--------------------------------------------------------------------------------
/test/create_test_files.py:
--------------------------------------------------------------------------------
1 | import h5py
2 | import numpy as np
3 |
4 | with h5py.File('test.h5', 'w') as f:
5 | # create datasets with different dtypes: f2, f4, f8, i1, i2, i4
6 | data = [3.0, 4.0, 5.0]
7 | dtypes = ['f2', 'f4', 'f8', 'i1', 'i2', 'i4']
8 | for dtype in dtypes:
9 | f.create_dataset(dtype, data=data, dtype=dtype)
10 |
11 | # create a dataset with a string dtype
12 | f.create_dataset('string', data='hello', dtype='S5')
13 |
14 | # create a dataset with a vlen string dtype
15 | f.create_dataset('vlen_string', data='hello')
16 |
17 |
--------------------------------------------------------------------------------
/test/test.h5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/usnistgov/jsfive/3f57900f9f5cfe911042d4d69cc0f4ba8c4c3d0c/test/test.h5
--------------------------------------------------------------------------------
/test/tests.mjs:
--------------------------------------------------------------------------------
1 | import assert from 'node:assert';
2 | import { test } from 'node:test';
3 | import { read, readFileSync } from "node:fs";
4 |
5 | import * as hdf5 from "jsfive";
6 |
7 | function loadFile(filename) {
8 | const ab = readFileSync(filename);
9 | return new hdf5.File(ab.buffer, filename);
10 | }
11 |
12 | test('check dtypes', () => {
13 | const dtypes = ['f2', 'f4', 'f8', 'i1', 'i2', 'i4'];
14 | const values = [3.0, 4.0, 5.0];
15 | const f = loadFile("test/test.h5");
16 |
17 | for (const dtype of dtypes) {
18 | const dset = f.get(dtype);
19 | assert.strictEqual(dset.dtype, `<${dtype}`);
20 | assert.deepEqual(dset.value, values);
21 | }
22 | });
23 |
24 | test('strings', () => {
25 | const f = loadFile("test/test.h5");
26 | const dset = f.get('string');
27 |
28 | assert.strictEqual(dset.dtype, 'S5');
29 | assert.deepEqual(dset.value, ['hello']);
30 |
31 | const vlen_dset = f.get('vlen_string');
32 | assert.deepEqual(vlen_dset.dtype, ['VLEN_STRING', 0, 1]);
33 | assert.deepEqual(vlen_dset.value, ['hello']);
34 | });
--------------------------------------------------------------------------------