├── .eslintignore ├── .eslintrc.js ├── .github └── workflows │ └── test.yml ├── .gitignore ├── .mocharc.js ├── .npmignore ├── .prettierrc.js ├── .travis.yml ├── LICENSE ├── README.md ├── RELEASE-NOTES.md ├── config ├── webpack.config-base.js ├── webpack.config-dev.js └── webpack.config-prod.js ├── example ├── data-model.png ├── detour-example.js ├── image │ ├── example-eb.dcm │ ├── example-ed.dcm │ ├── example-el-delimited.dcm │ ├── example-el.dcm │ └── example-il.dcm ├── parse-log.js ├── parse-modify-log.js └── parse-modify-write.js ├── package-lock.json ├── package.json ├── src ├── base.ts ├── byte-parser.ts ├── character-sets.ts ├── collect-flow.ts ├── detour.ts ├── dicom-elements.ts ├── dicom-flow.ts ├── dicom-flows.ts ├── dicom-parts.ts ├── element-flows.ts ├── element-sink.ts ├── elements-builder.ts ├── elements.ts ├── flows.ts ├── index.ts ├── lookup.ts ├── modify-flow.ts ├── parse-flow.ts ├── parser.ts ├── parsing.ts ├── person-name.ts ├── sinks.ts ├── sources.ts ├── tag-path-like.ts ├── tag-path.ts ├── tag-to-vr.ts ├── tag-tree.ts ├── tag.ts ├── uid-to-name.ts ├── uid.ts ├── value.ts └── vr.ts ├── test ├── base-test.ts ├── character-sets-test.ts ├── chunker.ts ├── collect-flow-test.ts ├── dicom-flow-test.ts ├── dicom-flows-test.ts ├── element-flows-test.ts ├── element-sink-test.ts ├── elements-test.ts ├── lookup-test.ts ├── modify-flow-test.ts ├── parse-flow-test.ts ├── parser-test.ts ├── sinks-test.ts ├── tag-path-like-test.ts ├── tag-path-test.ts ├── tag-tree-test.ts ├── test-data.ts ├── test-util.ts └── value-test.ts ├── tsconfig.json └── typings ├── dicom-character-set └── index.d.ts └── multipipe └── index.d.ts /.eslintignore: -------------------------------------------------------------------------------- 1 | example/* 2 | config/* -------------------------------------------------------------------------------- /.eslintrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | parser: 
'@typescript-eslint/parser', // Specifies the ESLint parser 3 | parserOptions: { 4 | ecmaVersion: 2020, // Allows for the parsing of modern ECMAScript features 5 | sourceType: 'module', // Allows for the use of imports 6 | }, 7 | extends: [ 8 | 'prettier' 9 | ] 10 | }; 11 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | # This workflow will do a clean installation of node dependencies, cache/restore them, build the source code and run tests across different versions of node 2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-nodejs 3 | 4 | name: Node.js CI 5 | 6 | on: 7 | push: 8 | branches: [ "develop" ] 9 | pull_request: 10 | branches: [ "develop" ] 11 | 12 | jobs: 13 | build: 14 | 15 | runs-on: ubuntu-latest 16 | 17 | strategy: 18 | matrix: 19 | node-version: [18.x, 20.x, 22.x] 20 | # See supported Node.js release schedule at https://nodejs.org/en/about/releases/ 21 | 22 | steps: 23 | - uses: actions/checkout@v4 24 | - name: Use Node.js ${{ matrix.node-version }} 25 | uses: actions/setup-node@v4 26 | with: 27 | node-version: ${{ matrix.node-version }} 28 | cache: 'npm' 29 | - run: npm ci 30 | - run: npm run build --if-present 31 | - run: npm test 32 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | .fleet 3 | .idea 4 | .vscode 5 | .nyc_output 6 | dist 7 | npm-debug.log 8 | node_modules 9 | coverage 10 | -------------------------------------------------------------------------------- /.mocharc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | require: ['ts-node/register/transpile-only', 'source-map-support/register'], 3 | recursive: true, 4 | spec: 
['test/*-test.ts'], 5 | watchFiles: ['src/**/*.ts', 'test/**/*.ts'], 6 | }; 7 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | .* 2 | **/tsconfig.json 3 | **/.eslintrc.js 4 | **/.eslintignore 5 | **/.mocharc.js 6 | **/.prettierrc.js 7 | **/webpack.config-*.js 8 | src 9 | test 10 | build 11 | node_modules 12 | -------------------------------------------------------------------------------- /.prettierrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | semi: true, 3 | trailingComma: 'all', 4 | singleQuote: true, 5 | printWidth: 120, 6 | tabWidth: 4, 7 | }; 8 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: node_js 2 | sudo: false 3 | node_js: 4 | - 14 5 | script: 6 | - npm run test 7 | after_success: npm run coverage 8 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. 
For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 
47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # dicom-streams-js 2 | 3 | [![Build Status](https://travis-ci.org/exini/dicom-streams-js.svg?branch=develop)](https://travis-ci.org/exini/dicom-streams-js) 4 | [![Coverage Status](https://coveralls.io/repos/exini/dicom-streams-js/badge.svg)](https://coveralls.io/r/exini/dicom-streams-js) 5 | 6 | The purpose of this project is to create a streaming API for reading and processing DICOM data using [node-streams](https://nodejs.org/api/stream.html). It can be used both on the backend using Node, as well as on the frontend. Advantages of streaming DICOM data include better control over resource allocation such as memory via strict bounds on DICOM data chunk size and network utilization using back-pressure. 7 | 8 | This project is a port of the [dicom-streams](https://github.com/exini/dicom-streams) project which is written in Scala using Akka-streams. 9 | 10 | ### Setup 11 | 12 | The dicom-streams-js library is deployed to NPM. 
Install it using `npm install -s @exini/dicom-streams-js`. Time and date handling using `js-joda` is marked as an external dependency in dicom-streams-js. If you want to read and modify times and dates, install this using `npm install -s js-joda`. 13 | 14 | ### Data Model 15 | 16 | Streaming binary DICOM data may originate from many different sources such as files, an HTTP POST request (on the server side), or a read from a database. Streaming data arrives in chunks (`Buffer`s). In the Node Streams nomenclature, chunks originate from _readables_, they are processed in _transforms_ and folded into non-streaming plain objects using _writables_. Synonyms for these terms are _sources_, _flows_ and _sinks_. These latter terms are used in the docs and throughout the code. 17 | 18 | This library provides flows for parsing binary DICOM data into DICOM parts (represented by the `DicomPart` abstraction) - small objects representing a part of a data element. These DICOM parts are bounded in size by a user specified chunk size parameter. Flows of DICOM parts can be processed using a series of flows in this library. There are flows for filtering based on tag path conditions, flows for converting between transfer syntaxes, flows for re-encoding sequences and items, etc. 19 | 20 | The `Element` interface provides a set of higher level data classes, each roughly corresponding to one row in a textual dump of a DICOM file. Here, chunks are aggregated into complete data elements. There are representations for standard tag-value elements, sequence and item start elements, sequence and item delimitation elements, fragments start elements, etc. A `DicomPart` stream is transformed into an `Element` stream via the `elementFlow` flow. 21 | 22 | A flow of `Element`s can be materialized into a representation of a dataset called an `Elements` using the `elementSink` sink. 
For processing of large sets of data, one should strive for a fully streaming DICOM pipeline, however, in some cases it can be convenient to work with a plain dataset; `Elements` serves this purpose. Internally, the sink aggregates 23 | `Element`s into `ElementSet`s, each with an associated tag number (value elements, sequences and fragments). `Elements` implements a straight-forward data hierarchy: 24 | 25 | - An `Elements` holds a list of `ElementSet`s (`ValueElement`, `Sequence` and `Fragments`) 26 | - A `ValueElement` is a standard attribute with tag number and binary value 27 | - A `Sequence` holds a list of `Item`s 28 | - An `Item` contains zero or one `Elements` (note the recursion) 29 | - A `Fragments` holds a list of `Fragment`s 30 | - A `Fragment` holds a binary value. 31 | 32 | The following diagram shows an overview of the data model at the `DicomPart`, `Element` and `ElementSet` levels. 33 | 34 | ![Data model](example/data-model.png) 35 | 36 | As seen, a standard attribute, represented by the `ValueElement` class is composed by one `HeaderPart` followed by zero, 37 | one or more `ValueChunk`s of data. Likewise, encapsulated data such as a jpeg image is composed by one `FragmentsPart` 38 | followed by, for each fragment, one `ItemPart` followed by `ValueChunk`s of data, and ends with a 39 | `SequenceDelimitationPart`. 
40 | 41 | ### Examples 42 | 43 | The following example reads the DICOM file provided as input argument, folds its contents first into a stream of `Element`s, and then into a concrete `Elements` object and logs the result to the console 44 | 45 | ```javascript 46 | const fs = require('fs'); 47 | const { parseFlow, elementFlow, elementSink, pipe, VR } = require('@exini/dicom-streams-js'); 48 | 49 | const src = fs.createReadStream(process.argv[2]); 50 | 51 | pipe( 52 | src, 53 | parseFlow(), 54 | elementFlow(), 55 | elementSink((elements) => { 56 | console.log(elements.toString()); 57 | }), 58 | ); 59 | ``` 60 | 61 | The next, longer, example reads the file specified by the first input argument then passes the data through the following flows: 62 | 63 | 1. Parsing the binary data into `DicomPart`s for further processing 64 | 2. Re-encoding the data to always use indeterminate length sequences and items with explicit sequence and item delimitations 65 | 3. Re-encoding the data to use the UTF-8 character set 66 | 4. Filtering of the elements to preserve only those on an allow list specified as an array of `TagTree`s (trees of pointers into a dataset) 67 | 5. Filtering of the remaining elements according to a deny list of tag trees 68 | 6. Modification of the remaining elements to set Patient Name to `Anon 001`, add or modify the attribute Patient Identity Removed to `YES`, and leave other elements unmodified 69 | 7. Map the resulting elements to their corresponding byte representations 70 | 8. Write the results to disk using the file name specified by the second input argument. 
71 | 72 | ```javascript 73 | const fs = require('fs'); 74 | const { 75 | TagPath, 76 | TagTree, 77 | parseFlow, 78 | toBytesFlow, 79 | allowFilter, 80 | denyFilter, 81 | toUtf8Flow, 82 | toIndeterminateLengthSequences, 83 | modifyFlow, 84 | TagModification, 85 | TagInsertion, 86 | pipe, 87 | } = require('@exini/dicom-streams-js'); 88 | 89 | const src = fs.createReadStream(process.argv[2]); 90 | const dest = fs.createWriteStream(process.argv[3]); 91 | 92 | pipe( 93 | src, 94 | parseFlow(), 95 | toIndeterminateLengthSequences(), 96 | toUtf8Flow(), 97 | allowFilter([ 98 | TagTree.fromTag(Tag.SpecificCharacterSet), 99 | TagTree.fromTag(Tag.PatientName), 100 | TagTree.fromTag(Tag.PatientName), 101 | TagTree.fromTag(Tag.StudyDescription), 102 | TagTree.fromTag(Tag.SeriesDate), 103 | TagTree.fromAnyItem(Tag.MACParametersSequence), 104 | ]), 105 | denyFilter([TagTree.fromAnyItem(Tag.MACParametersSequence).thenTag(Tag.DataElementsSigned)]), 106 | modifyFlow( 107 | [TagModification.equals(TagPath.fromTag(Tag.PatientName), () => Buffer.from('Anon 001'))], 108 | [new TagInsertion(TagPath.fromTag(Tag.PatientIdentityRemoved), () => Buffer.from('YES'))], 109 | ), 110 | toBytesFlow(), 111 | dest, 112 | ); 113 | ``` 114 | 115 | ### Custom Processing 116 | 117 | New non-trivial DICOM flows can be built using a modular system of capabilities that are mixed in as appropriate with a core class implementing a common base interface. The base abstraction for DICOM flows is `DicomFlow` and new flows are created using the `DicomFlow.createFlow` method. The `DicomFlow` interface defines a series of events, one for each type of `DicomPart` that is produced when parsing DICOM data with `DicomParseFlow`. 
The core events are: 118 | 119 | ```javascript 120 | public onPreamble(part: PreamblePart): DicomPart[] 121 | public onHeader(part: HeaderPart): DicomPart[] 122 | public onValueChunk(part: ValueChunk): DicomPart[] 123 | public onSequence(part: SequencePart): DicomPart[] 124 | public onSequenceDelimitation(part: SequenceDelimitationPart): DicomPart[] 125 | public onFragments(part: FragmentsPart): DicomPart[] 126 | public onItem(part: ItemPart): DicomPart[] 127 | public onItemDelimitation(part: ItemDelimitationPart): DicomPart[] 128 | public onDeflatedChunk(part: DeflatedChunk): DicomPart[] 129 | public onUnknown(part: UnknownPart): DicomPart[] 130 | public onPart(part: DicomPart): DicomPart[] 131 | ``` 132 | 133 | Default behavior to these events are implemented in core classes. The most natural behavior is to simply pass parts on 134 | down the stream, e.g. 135 | 136 | ```javascript 137 | public onPreamble(part: PreamblePart): DicomPart[] { return [part]; } 138 | public onHeader(part: HeaderPart): DicomPart[] { return [part]; } 139 | ... 140 | ``` 141 | 142 | This behavior is implemented in the `IdentityFlow` core class. Another option is to defer handling to the `onPart` method which is implemented in the `DeferToPartFlow` core class. This is appropriate for flows which define a common behavior for all part types. 143 | 144 | To give an example of a custom flow, here is the implementation of a filter that removes nested sequences from a dataset. We define a nested dataset as a sequence with `depth > 1` given that the root dataset 145 | has `depth = 0`. 146 | 147 | ```javascript 148 | public nestedSequencesFilter() = createFlow(new class extends TagPathTracking(GuaranteedValueEvent(GuaranteedDelimitationEvents(InFragments(DeferToPartFlow)))) { 149 | public onPart(part: DicomPart): DicomPart[] { return this.tagPath.depth() > 1 ? 
[] : [part]; 150 | }()); 151 | ``` 152 | 153 | In this example, we chose to use `DeferToPartFlow` as the core class and mixed in the `TagPathTracking` capability (along with its dependencies `GuaranteedValueEvent`, `GuaranteedDelimitationEvents` and `InFragments`) which gives access to a `tagPath: TagPath` variable at all times which is automatically updated as the flow progresses. 154 | 155 | ### License 156 | 157 | This project is released under the [Apache License, version 2.0](./LICENSE). 158 | -------------------------------------------------------------------------------- /RELEASE-NOTES.md: -------------------------------------------------------------------------------- 1 | # Dicom Streams JS RELEASE NOTES 2 | 3 | ## Release 3.1.0 4 | 5 | - Fixed parsing of deflated objects in chunked, non-streaming parsing. 6 | 7 | 8 | ## Release 3.0.10 9 | 10 | - Fixed bug where switching to indeterminate length sequences and items in big endian files led to inserted delimitations with the wrong endianess (little endian) 11 | -------------------------------------------------------------------------------- /config/webpack.config-base.js: -------------------------------------------------------------------------------- 1 | const path = require('path'); 2 | const nodeExternals = require('webpack-node-externals'); 3 | const { merge } = require('webpack-merge'); 4 | 5 | const commonConfig = { 6 | entry: { 7 | main: [path.resolve(__dirname, '../src/index.ts')], 8 | }, 9 | resolve: { 10 | extensions: ['.ts', '.tsx', '.js'], 11 | fallback: { 12 | stream: require.resolve('stream-browserify'), 13 | zlib: require.resolve('browserify-zlib'), 14 | buffer: require.resolve('buffer/'), 15 | assert: require.resolve('assert/'), 16 | }, 17 | }, 18 | module: { 19 | rules: [ 20 | { 21 | test: /\.ts?$/, 22 | loader: 'ts-loader', 23 | exclude: /node_modules/, 24 | }, 25 | ], 26 | }, 27 | }; 28 | 29 | const nodeConfig = { 30 | target: 'node', 31 | output: { 32 | path: path.resolve(__dirname, 
'../dist/node'), 33 | libraryTarget: 'commonjs2', 34 | }, 35 | externals: [nodeExternals()], 36 | }; 37 | 38 | const webConfig = { 39 | target: 'web', 40 | output: { 41 | path: path.resolve(__dirname, '../dist/web'), 42 | libraryTarget: 'umd', 43 | }, 44 | externals: ['js-joda'], 45 | }; 46 | 47 | module.exports = [merge(commonConfig, nodeConfig), merge(commonConfig, webConfig)]; 48 | -------------------------------------------------------------------------------- /config/webpack.config-dev.js: -------------------------------------------------------------------------------- 1 | const { merge } = require('webpack-merge'); 2 | const configs = require('./webpack.config-base'); 3 | 4 | const devConfig = { 5 | output: { 6 | filename: 'index.js', 7 | }, 8 | mode: 'development', 9 | devtool: 'source-map', 10 | }; 11 | 12 | module.exports = [merge(configs[0], devConfig), merge(configs[1], devConfig)]; 13 | -------------------------------------------------------------------------------- /config/webpack.config-prod.js: -------------------------------------------------------------------------------- 1 | const { merge } = require('webpack-merge'); 2 | const configs = require('./webpack.config-base'); 3 | 4 | const prodConfig = { 5 | output: { 6 | filename: 'index.min.js', 7 | }, 8 | mode: 'production', 9 | devtool: false, 10 | }; 11 | 12 | module.exports = [merge(configs[0], prodConfig), merge(configs[1], prodConfig)]; 13 | -------------------------------------------------------------------------------- /example/data-model.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/exini/dicom-streams-js/74e0620a898d2c1245b51adba033505e7b6a85f1/example/data-model.png -------------------------------------------------------------------------------- /example/detour-example.js: -------------------------------------------------------------------------------- 1 | const { Readable, Transform } = require('stream'); 2 | const 
{ Detour, objectToStringFlow } = require('../dist'); 3 | 4 | class Inner extends Transform { 5 | constructor(prefix) { 6 | super({ 7 | highWaterMark: 10, 8 | }); 9 | this.prefix = prefix || ''; 10 | } 11 | 12 | _transform(chunk, _encoding, callback) { 13 | const out = this.prefix + chunk; 14 | setTimeout((obj) => callback(null, obj), 200, out); 15 | } 16 | } 17 | 18 | class Source extends Readable { 19 | constructor() { 20 | super({}); 21 | this.i = 1; 22 | } 23 | 24 | _read(size) { 25 | this.push(this.i + ''); 26 | this.i = this.i + 1; 27 | if (this.i > 30) { 28 | this.push(null); 29 | } 30 | } 31 | } 32 | 33 | class WithDetour extends Detour { 34 | constructor() { 35 | super({ readableObjectMode: true }, new Inner('Detour - ')); 36 | } 37 | 38 | process(chunk) { 39 | const obj = { result: chunk.toString() }; 40 | if (obj.result === '10') this.setDetour(true); 41 | this.push(obj); 42 | } 43 | 44 | cleanup() { 45 | this.push({ result: 'end' }); 46 | } 47 | } 48 | 49 | const source = new Source(); 50 | const detour = new WithDetour(); 51 | 52 | source.pipe(detour).pipe(objectToStringFlow(JSON.stringify)).pipe(process.stdout); 53 | -------------------------------------------------------------------------------- /example/image/example-eb.dcm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/exini/dicom-streams-js/74e0620a898d2c1245b51adba033505e7b6a85f1/example/image/example-eb.dcm -------------------------------------------------------------------------------- /example/image/example-ed.dcm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/exini/dicom-streams-js/74e0620a898d2c1245b51adba033505e7b6a85f1/example/image/example-ed.dcm -------------------------------------------------------------------------------- /example/image/example-el-delimited.dcm: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/exini/dicom-streams-js/74e0620a898d2c1245b51adba033505e7b6a85f1/example/image/example-el-delimited.dcm -------------------------------------------------------------------------------- /example/image/example-el.dcm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/exini/dicom-streams-js/74e0620a898d2c1245b51adba033505e7b6a85f1/example/image/example-el.dcm -------------------------------------------------------------------------------- /example/image/example-il.dcm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/exini/dicom-streams-js/74e0620a898d2c1245b51adba033505e7b6a85f1/example/image/example-il.dcm -------------------------------------------------------------------------------- /example/parse-log.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs'); 2 | const { parseFlow, elementFlow, elementSink, pipe, VR } = require('../dist'); 3 | 4 | const src = fs.createReadStream(process.argv[2]); 5 | 6 | pipe( 7 | src, 8 | parseFlow(), 9 | elementFlow(), 10 | elementSink((elements) => { 11 | console.log(elements.toString()); 12 | }), 13 | ); 14 | -------------------------------------------------------------------------------- /example/parse-modify-log.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs'); 2 | const { 3 | TagPath, 4 | TagTree, 5 | parseFlow, 6 | allowFilter, 7 | denyFilter, 8 | toUtf8Flow, 9 | toIndeterminateLengthSequences, 10 | modifyFlow, 11 | TagModification, 12 | TagInsertion, 13 | elementFlow, 14 | elementSink, 15 | Tag, 16 | pipe, 17 | } = require('../dist'); 18 | 19 | const src = fs.createReadStream(process.argv[2]); 20 | 21 | pipe( 22 | src, 23 | parseFlow(), 24 | toIndeterminateLengthSequences(), 25 | toUtf8Flow(), 26 | allowFilter([ 27 | 
TagTree.fromTag(Tag.SpecificCharacterSet), 28 | TagTree.fromTag(Tag.PatientName), 29 | TagTree.fromTag(Tag.PatientName), 30 | TagTree.fromTag(Tag.StudyDescription), 31 | TagTree.fromTag(Tag.SeriesDate), 32 | TagTree.fromAnyItem(Tag.MACParametersSequence), 33 | ]), 34 | denyFilter([TagTree.fromAnyItem(Tag.MACParametersSequence).thenTag(Tag.DataElementsSigned)]), 35 | modifyFlow( 36 | [TagModification.equals(TagPath.fromTag(Tag.PatientName), () => Buffer.from('Anon 001'))], 37 | [new TagInsertion(TagPath.fromTag(Tag.PatientIdentityRemoved), () => Buffer.from('YES'))], 38 | ), 39 | elementFlow(), 40 | elementSink((elements) => { 41 | console.log(elements.toString()); 42 | }), 43 | ); 44 | -------------------------------------------------------------------------------- /example/parse-modify-write.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs'); 2 | const { 3 | TagPath, 4 | TagTree, 5 | parseFlow, 6 | toBytesFlow, 7 | allowFilter, 8 | denyFilter, 9 | toUtf8Flow, 10 | toIndeterminateLengthSequences, 11 | modifyFlow, 12 | TagModification, 13 | TagInsertion, 14 | pipe, 15 | } = require('../dist'); 16 | 17 | const src = fs.createReadStream(process.argv[2]); 18 | const dest = fs.createWriteStream(process.argv[3]); 19 | 20 | pipe( 21 | src, 22 | parseFlow(), 23 | toIndeterminateLengthSequences(), 24 | toUtf8Flow(), 25 | allowFilter([ 26 | TagTree.fromTag(Tag.SpecificCharacterSet), 27 | TagTree.fromTag(Tag.PatientName), 28 | TagTree.fromTag(Tag.PatientName), 29 | TagTree.fromTag(Tag.StudyDescription), 30 | TagTree.fromTag(Tag.SeriesDate), 31 | TagTree.fromAnyItem(Tag.MACParametersSequence), 32 | ]), 33 | denyFilter([TagTree.fromAnyItem(Tag.MACParametersSequence).thenTag(Tag.DataElementsSigned)]), 34 | modifyFlow( 35 | [TagModification.equals(TagPath.fromTag(Tag.PatientName), () => Buffer.from('Anon 001'))], 36 | [new TagInsertion(TagPath.fromTag(Tag.PatientIdentityRemoved), () => Buffer.from('YES'))], 37 | ), 38 
| toBytesFlow(), 39 | dest, 40 | ); 41 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@exini/dicom-streams-js", 3 | "version": "3.1.0", 4 | "description": "Streaming parsing and processing of DICOM data", 5 | "main": "node/index.js", 6 | "browser": "web/index.js", 7 | "types": "lib/index.d.ts", 8 | "scripts": { 9 | "clean": "shx rm -rf build dist coverage", 10 | "lint": "eslint \"*/**/*.ts\" --quiet --fix", 11 | "test": "nyc mocha", 12 | "test-watch": "nyc mocha --watch", 13 | "coverage": "nyc report --reporter=text-lcov | coveralls", 14 | "build-lib": "tsc", 15 | "build-dev": "webpack --config config/webpack.config-dev.js", 16 | "build-prod": "webpack --config config/webpack.config-prod.js", 17 | "build-copy-files": "shx cp package.json LICENSE README.md dist", 18 | "build": "npm run clean && npm run lint && npm run test && npm run build-lib && npm run build-dev && npm run build-prod && npm run build-copy-files" 19 | }, 20 | "nyc": { 21 | "extension": [ 22 | ".ts" 23 | ], 24 | "exclude": [ 25 | "**/tag.ts", 26 | "**/tag-to-vr.ts" 27 | ], 28 | "reporter": [ 29 | "html" 30 | ], 31 | "sourceMap": true, 32 | "cache": false, 33 | "require": [ 34 | "ts-node/register", 35 | "source-map-support/register" 36 | ] 37 | }, 38 | "husky": { 39 | "hooks": { 40 | "pre-commit": "pretty-quick --staged" 41 | } 42 | }, 43 | "keywords": [ 44 | "dicom", 45 | "streams" 46 | ], 47 | "author": "karl.sostrand@exini.com", 48 | "license": "Apache-2.0", 49 | "repository": { 50 | "type": "git", 51 | "url": "https://github.com/exini/dicom-streams-js" 52 | }, 53 | "bugs": { 54 | "url": "https://github.com/exini/dicom-streams-js/issues" 55 | }, 56 | "dependencies": { 57 | "dicom-character-set": "^1.0.5", 58 | "js-joda": "^1.11.0", 59 | "multipipe": "^4.0.0", 60 | "uuid": "^9.0.0" 61 | }, 62 | "devDependencies": { 63 | "@types/mocha": "^10.0.1", 64 
| "@types/node": "^18.15.11", 65 | "@types/uuid": "^9.0.1", 66 | "@typescript-eslint/eslint-plugin": "^5.57.1", 67 | "@typescript-eslint/parser": "^5.57.1", 68 | "assert": "^2.0.0", 69 | "browserify-zlib": "^0.2.0", 70 | "buffer": "^6.0.3", 71 | "coveralls": "^3.1.1", 72 | "eslint": "^8.37.0", 73 | "eslint-config-prettier": "^8.8.0", 74 | "eslint-plugin-prettier": "^4.2.1", 75 | "husky": "^8.0.3", 76 | "istanbul": "^0.4.5", 77 | "mocha": "^10.2.0", 78 | "nyc": "^15.1.0", 79 | "prettier": "2.8.7", 80 | "pretty-quick": "^3.1.3", 81 | "shx": "^0.3.4", 82 | "source-map-support": "^0.5.21", 83 | "stream-browserify": "^3.0.0", 84 | "ts-loader": "^9.4.2", 85 | "ts-node": "^10.9.1", 86 | "typescript": "^5.4.5", 87 | "webpack": "^5.77.0", 88 | "webpack-cli": "^5.0.1", 89 | "webpack-merge": "^5.8.0", 90 | "webpack-node-externals": "^3.0.0" 91 | } 92 | } 93 | -------------------------------------------------------------------------------- /src/base.ts: -------------------------------------------------------------------------------- 1 | import { ZoneId } from 'js-joda'; 2 | import mpipe from 'multipipe'; 3 | import { v4 as uuidv4, v5 as uuidv5 } from 'uuid'; 4 | import * as CS from './character-sets'; 5 | import { Lookup } from './lookup'; 6 | import { Tag } from './tag'; 7 | import { UID } from './uid'; 8 | import { VR } from './vr'; 9 | 10 | // eslint:disable: no-bitwise 11 | 12 | export const indeterminateLength = 0xffffffff; 13 | export const zero4Bytes = Buffer.from([0, 0, 0, 0]); 14 | 15 | export function concat(a: Buffer, b: Buffer): Buffer { 16 | return Buffer.concat([a, b], a.length + b.length); 17 | } 18 | export function concatv(...buffers: Buffer[]): Buffer { 19 | return Buffer.concat(buffers); 20 | } 21 | export function flatten(array: T[][]): T[] { 22 | return [].concat(...array); 23 | } 24 | export function appendToArray(object: T, array: T[]): T[] { 25 | const newArray = array.slice(); 26 | newArray.push(object); 27 | return newArray; 28 | } 29 | export 
function prependToArray(object: T, array: T[]): T[] { 30 | const newArray = array.slice(); 31 | newArray.unshift(object); 32 | return newArray; 33 | } 34 | export function concatArrays(array1: T[], array2: T[]): T[] { 35 | const newArray = array1.slice(); 36 | array2.forEach((i) => newArray.push(i)); 37 | return newArray; 38 | } 39 | 40 | const uidRoot = '2.25'; 41 | const uuidNamespace = 'd181d67b-0a1c-45bf-8616-070f1bb0d0cf'; 42 | 43 | export function hexToDec(s: string): string { 44 | const digits = [0]; 45 | let carry: number; 46 | for (let i = 0; i < s.length; i++) { 47 | carry = parseInt(s.charAt(i), 16); 48 | for (let j = 0; j < digits.length; j++) { 49 | digits[j] = digits[j] * 16 + carry; 50 | carry = (digits[j] / 10) | 0; 51 | digits[j] %= 10; 52 | } 53 | while (carry > 0) { 54 | digits.push(carry % 10); 55 | carry = (carry / 10) | 0; 56 | } 57 | } 58 | return digits.reverse().join(''); 59 | } 60 | 61 | export function toUID(root: string, uuid: string): string { 62 | const hexStr = uuid.replace(/-/g, ''); 63 | const docStr = hexToDec(hexStr).replace(/^0+/, ''); 64 | return (root + '.' 
+ docStr).substring(0, 64); 65 | } 66 | 67 | export function nameBasedUID(name: string, root: string): string { 68 | return toUID(root, uuidv5(name, uuidNamespace)); 69 | } 70 | export function randomUID(root: string): string { 71 | return toUID(root, uuidv4()); 72 | } 73 | 74 | export const multiValueDelimiter = '\\'; 75 | 76 | export const emptyBuffer = Buffer.alloc(0); 77 | 78 | export function toUInt32(num: number): number { 79 | return num >>> 0; 80 | } 81 | export function toInt32(num: number): number { 82 | return num >> 0; 83 | } 84 | export function shiftLeftUnsigned(num: number, n: number): number { 85 | return toUInt32(num << n); 86 | } 87 | 88 | export function groupNumber(tag: number): number { 89 | return tag >>> 16; 90 | } 91 | export function elementNumber(tag: number): number { 92 | return tag & 0xffff; 93 | } 94 | export function bytesToShortBE(bytes: Buffer): number { 95 | return bytes.readInt16BE(0); 96 | } 97 | export function bytesToShortLE(bytes: Buffer): number { 98 | return bytes.readInt16LE(0); 99 | } 100 | export function bytesToShort(bytes: Buffer, bigEndian = false): number { 101 | return bigEndian ? bytesToShortBE(bytes) : bytesToShortLE(bytes); 102 | } 103 | export function bytesToUShortBE(bytes: Buffer): number { 104 | return bytes.readUInt16BE(0); 105 | } 106 | export function bytesToUShortLE(bytes: Buffer): number { 107 | return bytes.readUInt16LE(0); 108 | } 109 | export function bytesToUShort(bytes: Buffer, bigEndian = false): number { 110 | return bigEndian ? bytesToUShortBE(bytes) : bytesToUShortLE(bytes); 111 | } 112 | export function bytesToVR(bytes: Buffer): number { 113 | return bytesToUShortBE(bytes); 114 | } 115 | export function bytesToIntBE(bytes: Buffer): number { 116 | return bytes.readInt32BE(0); 117 | } 118 | export function bytesToIntLE(bytes: Buffer): number { 119 | return bytes.readInt32LE(0); 120 | } 121 | export function bytesToInt(bytes: Buffer, bigEndian = false): number { 122 | return bigEndian ? 
bytesToIntBE(bytes) : bytesToIntLE(bytes); 123 | } 124 | export function bytesToUIntBE(bytes: Buffer): number { 125 | return bytes.readUInt32BE(0); 126 | } 127 | export function bytesToUIntLE(bytes: Buffer): number { 128 | return bytes.readUInt32LE(0); 129 | } 130 | export function bytesToUInt(bytes: Buffer, bigEndian = false): number { 131 | return bigEndian ? bytesToUIntBE(bytes) : bytesToUIntLE(bytes); 132 | } 133 | export function bytesToTagBE(bytes: Buffer): number { 134 | return bytesToUIntBE(bytes); 135 | } 136 | export function bytesToTagLE(bytes: Buffer): number { 137 | return shiftLeftUnsigned(bytes.readUInt16LE(0), 16) + bytes.readUInt16LE(2); 138 | } 139 | export function bytesToTag(bytes: Buffer, bigEndian = false): number { 140 | return bigEndian ? bytesToTagBE(bytes) : bytesToTagLE(bytes); 141 | } 142 | export function bytesToFloatBE(bytes: Buffer): number { 143 | return bytes.readFloatBE(0); 144 | } 145 | export function bytesToFloatLE(bytes: Buffer): number { 146 | return bytes.readFloatLE(0); 147 | } 148 | export function bytesToFloat(bytes: Buffer, bigEndian = false): number { 149 | return bigEndian ? bytesToFloatBE(bytes) : bytesToFloatLE(bytes); 150 | } 151 | export function bytesToDoubleBE(bytes: Buffer): number { 152 | return bytes.readDoubleBE(0); 153 | } 154 | export function bytesToDoubleLE(bytes: Buffer): number { 155 | return bytes.readDoubleLE(0); 156 | } 157 | export function bytesToDouble(bytes: Buffer, bigEndian = false): number { 158 | return bigEndian ? 
bytesToDoubleBE(bytes) : bytesToDoubleLE(bytes); 159 | } 160 | 161 | export function intToBytesBE(i: number): Buffer { 162 | return Buffer.from([i >> 24, i >> 16, i >> 8, i]); 163 | } 164 | export function intToBytesLE(i: number): Buffer { 165 | return Buffer.from([i, i >> 8, i >> 16, i >> 24]); 166 | } 167 | export function shortToBytesBE(i: number): Buffer { 168 | return Buffer.from([i >> 8, i]); 169 | } 170 | export function shortToBytesLE(i: number): Buffer { 171 | return Buffer.from([i, i >> 8]); 172 | } 173 | export function shortToBytes(i: number, bigEndian = false): Buffer { 174 | return bigEndian ? shortToBytesBE(i) : shortToBytesLE(i); 175 | } 176 | export function intToBytes(i: number, bigEndian = false): Buffer { 177 | return bigEndian ? intToBytesBE(i) : intToBytesLE(i); 178 | } 179 | export function tagToBytesBE(tag: number): Buffer { 180 | return intToBytesBE(tag); 181 | } 182 | export function tagToBytesLE(tag: number): Buffer { 183 | return Buffer.from([tag >> 16, tag >> 24, tag, tag >> 8]); 184 | } 185 | export function tagToBytes(tag: number, bigEndian = false): Buffer { 186 | return bigEndian ? 
tagToBytesBE(tag) : tagToBytesLE(tag); 187 | } 188 | 189 | export function floatToBytes(f: number, bigEndian = false): Buffer { 190 | const buf = Buffer.allocUnsafe(4); 191 | if (bigEndian) { 192 | buf.writeFloatBE(f, 0); 193 | } else { 194 | buf.writeFloatLE(f, 0); 195 | } 196 | return buf; 197 | } 198 | export function doubleToBytes(f: number, bigEndian = false): Buffer { 199 | const buf = Buffer.allocUnsafe(8); 200 | if (bigEndian) { 201 | buf.writeDoubleBE(f, 0); 202 | } else { 203 | buf.writeDoubleLE(f, 0); 204 | } 205 | return buf; 206 | } 207 | 208 | export function tagToString(tag: number): string { 209 | const hex = ('00000000' + tag.toString(16)).slice(-8); 210 | return '(' + hex.slice(0, 4) + ',' + hex.slice(4, 8) + ')'; 211 | } 212 | 213 | export function trim(s: string): string { 214 | return s.replace(/^[\x00-\x20]*/g, '').replace(/[\x00-\x20]*$/g, ''); 215 | } 216 | 217 | export function padToEvenLength(bytes: Buffer, tagOrVR: number | VR): Buffer { 218 | const vr = isNaN(tagOrVR as number) ? (tagOrVR as VR) : Lookup.vrOf(tagOrVR as number); 219 | return (bytes.length & 1) !== 0 ? concat(bytes, Buffer.from([vr.paddingByte])) : bytes; 220 | } 221 | 222 | export const itemLE = concat(tagToBytesLE(Tag.Item), intToBytesLE(indeterminateLength)); 223 | export const itemBE = concat(tagToBytesBE(Tag.Item), intToBytesBE(indeterminateLength)); 224 | export function item(length: number = indeterminateLength, bigEndian = false): Buffer { 225 | return length === indeterminateLength 226 | ? bigEndian 227 | ? itemBE 228 | : itemLE 229 | : concat(tagToBytes(Tag.Item, bigEndian), intToBytes(length, bigEndian)); 230 | } 231 | 232 | export const itemDelimitationLE = concat(tagToBytesLE(Tag.ItemDelimitationItem), zero4Bytes); 233 | export const itemDelimitationBE = concat(tagToBytesBE(Tag.ItemDelimitationItem), zero4Bytes); 234 | export function itemDelimitation(bigEndian = false): Buffer { 235 | return bigEndian ? 
itemDelimitationBE : itemDelimitationLE; 236 | } 237 | 238 | export const sequenceDelimitationLE = concat(tagToBytesLE(Tag.SequenceDelimitationItem), zero4Bytes); 239 | export const sequenceDelimitationBE = concat(tagToBytesBE(Tag.SequenceDelimitationItem), zero4Bytes); 240 | export function sequenceDelimitation(bigEndian = false): Buffer { 241 | return bigEndian ? sequenceDelimitationBE : sequenceDelimitationLE; 242 | } 243 | export function sequenceDelimitationNonZeroLength(bigEndian = false): Buffer { 244 | return concatv(tagToBytes(Tag.SequenceDelimitationItem, bigEndian), intToBytes(0x00000010, bigEndian)); 245 | } 246 | 247 | export function isFileMetaInformation(tag: number): boolean { 248 | return (tag & 0xffff0000) === 0x00020000; 249 | } 250 | export function isGroupLength(tag: number): boolean { 251 | return elementNumber(tag) === 0; 252 | } 253 | export function isDeflated(transferSyntaxUid: string): boolean { 254 | return transferSyntaxUid === UID.DeflatedExplicitVRLittleEndian || transferSyntaxUid === UID.JPIPReferencedDeflate; 255 | } 256 | 257 | export const systemZone = ZoneId.SYSTEM; 258 | export const defaultCharacterSet = CS.defaultCharacterSet; 259 | 260 | export function createUID(): string { 261 | return randomUID(uidRoot); 262 | } 263 | export function createUIDFromRoot(root: string): string { 264 | return randomUID(root); 265 | } 266 | export function createNameBasedUID(name: string): string { 267 | return nameBasedUID(name, uidRoot); 268 | } 269 | export function createNameBasedUIDFromRoot(name: string, root: string): string { 270 | return nameBasedUID(name, root); 271 | } 272 | 273 | export function pipe(...streams: any[]): any { 274 | return mpipe(streams); 275 | } 276 | -------------------------------------------------------------------------------- /src/byte-parser.ts: -------------------------------------------------------------------------------- 1 | import { concat, emptyBuffer } from './base'; 2 | 3 | export interface 
ByteParserTarget { 4 | next(result: any): void; 5 | fail(error?: any): void; 6 | complete(): void; 7 | } 8 | 9 | const recurse = true; 10 | const dontRecurse = false; 11 | const needMoreData = new Error(); 12 | 13 | export abstract class ParseStep { 14 | public abstract parse(reader: ByteReader): ParseResult; 15 | 16 | public onTruncation(reader: ByteReader): void { 17 | throw Error(reader.remainingSize() + ' bytes remain after finished parsing'); 18 | } 19 | } 20 | 21 | class FinishedParser extends ParseStep { 22 | public parse(): ParseResult { 23 | throw Error('No initial parser installed: you must use startWith(...)'); 24 | } 25 | } 26 | export const finishedParser = new FinishedParser(); 27 | 28 | export class ParseResult { 29 | constructor(public result: any, public nextStep: ParseStep) {} 30 | } 31 | 32 | export class ByteReader { 33 | private input = emptyBuffer; 34 | private off = 0; 35 | 36 | constructor(input: Buffer) { 37 | this.setInput(input); 38 | } 39 | 40 | public setInput(input: Buffer): void { 41 | this.input = input; 42 | this.off = 0; 43 | } 44 | 45 | public hasRemaining(): boolean { 46 | return this.off < this.input.length; 47 | } 48 | 49 | public remainingSize(): number { 50 | return this.input.length - this.off; 51 | } 52 | 53 | public remainingData(): Buffer { 54 | return this.hasRemaining() ? 
this.input.slice(this.off) : emptyBuffer; 55 | } 56 | 57 | public ensure(n: number): void { 58 | if (this.remainingSize() < n) { 59 | throw needMoreData; 60 | } 61 | } 62 | 63 | public take(n: number): Buffer { 64 | if (this.off + n <= this.input.length) { 65 | const o = this.off; 66 | this.off = o + n; 67 | return this.input.slice(o, this.off); 68 | } else { 69 | throw needMoreData; 70 | } 71 | } 72 | } 73 | export class ByteParser { 74 | public current: ParseStep = finishedParser; 75 | public isCompleted = false; 76 | public hasData = false; 77 | 78 | private reader = new ByteReader(emptyBuffer); 79 | private buffer: Buffer = emptyBuffer; 80 | 81 | constructor(public readonly out: ByteParserTarget) {} 82 | 83 | public parse(chunk: any): void { 84 | this.buffer = concat(this.buffer, chunk); 85 | this.hasData = chunk.length > 0; 86 | 87 | while (this.hasData && !this.isCompleted) { 88 | this.doParse(1000); 89 | } 90 | } 91 | 92 | public flush(): void { 93 | if (!this.isCompleted) { 94 | if (this.buffer.length > 0) { 95 | try { 96 | this.reader.setInput(this.buffer); 97 | this.current.onTruncation(this.reader); 98 | this.complete(); 99 | } catch (error) { 100 | this.fail(error); 101 | } 102 | } else { 103 | this.complete(); 104 | } 105 | } 106 | } 107 | 108 | public startWith(step: ParseStep): void { 109 | this.current = step; 110 | } 111 | 112 | protected complete(): void { 113 | this.isCompleted = true; 114 | this.buffer = emptyBuffer; 115 | this.reader = null; 116 | this.out.complete(); 117 | } 118 | 119 | protected fail(error?: any): void { 120 | error.message = 'Parsing failed: ' + (error && error.message ? 
error.message : ''); 121 | this.isCompleted = true; 122 | this.buffer = emptyBuffer; 123 | this.reader = null; 124 | this.out.fail(error); 125 | } 126 | 127 | private doParseInner(): boolean { 128 | if (this.buffer.length > 0) { 129 | this.reader.setInput(this.buffer); 130 | try { 131 | const parseResult = this.current.parse(this.reader); 132 | if (parseResult.result) { 133 | this.out.next(parseResult.result); 134 | } 135 | 136 | if (parseResult.nextStep === finishedParser) { 137 | this.complete(); 138 | return dontRecurse; 139 | } else { 140 | this.buffer = this.reader.remainingData(); 141 | this.current = parseResult.nextStep; 142 | if (!this.reader.hasRemaining()) { 143 | this.hasData = false; 144 | } 145 | 146 | // If this step didn't produce a result, continue parsing. 147 | if (!parseResult.result) { 148 | return recurse; 149 | } else { 150 | return dontRecurse; 151 | } 152 | } 153 | } catch (error) { 154 | if (error === needMoreData) { 155 | this.hasData = false; 156 | return dontRecurse; 157 | } 158 | 159 | this.fail(error); 160 | return dontRecurse; 161 | } 162 | } else { 163 | this.hasData = false; 164 | return dontRecurse; 165 | } 166 | } 167 | 168 | private doParse(remainingRecursions: number): void { 169 | if (remainingRecursions === 0) { 170 | this.fail( 171 | new Error("Parsing logic didn't produce result. 
Aborting processing to avoid infinite cycles."), 172 | ); 173 | } else { 174 | const doRecurse = this.doParseInner(); 175 | if (doRecurse) { 176 | this.doParse(remainingRecursions - 1); 177 | } 178 | } 179 | } 180 | } 181 | -------------------------------------------------------------------------------- /src/character-sets.ts: -------------------------------------------------------------------------------- 1 | import { convertBytes } from 'dicom-character-set'; 2 | import { VR } from './vr'; 3 | 4 | export class CharacterSets { 5 | public static isVrAffectedBySpecificCharacterSet(vr: VR): boolean { 6 | return vr === VR.LO || vr === VR.LT || vr === VR.PN || vr === VR.SH || vr === VR.ST || vr === VR.UT; 7 | } 8 | 9 | public static fromNames(names: string): CharacterSets { 10 | return new CharacterSets(names); 11 | } 12 | 13 | public static fromBytes(specificCharacterSetBytes: Buffer): CharacterSets { 14 | return !specificCharacterSetBytes || specificCharacterSetBytes.length === 0 15 | ? defaultCharacterSet 16 | : new CharacterSets(specificCharacterSetBytes.toString().trim()); 17 | } 18 | 19 | public static encode(s: string): Buffer { 20 | return Buffer.from(s, 'utf8'); 21 | } 22 | 23 | public static defaultOnly(): CharacterSets { 24 | return new CharacterSets(''); 25 | } 26 | 27 | constructor(public readonly charsets: string) {} 28 | 29 | public decode(bytes: Buffer, vr: VR): string { 30 | try { 31 | return convertBytes(this.charsets, bytes, { vr: vr.name }); 32 | } catch (err) { 33 | console.warn('Cannot decode using character set: ' + this.charsets + ', using default instead: ' + err); 34 | return defaultCharacterSet.decode(bytes, vr); 35 | } 36 | } 37 | 38 | public toString(): string { 39 | return 'CharacterSets [' + this.charsets.split('\\').join(',') + ']'; 40 | } 41 | } 42 | 43 | export const defaultCharacterSet = CharacterSets.defaultOnly(); 44 | -------------------------------------------------------------------------------- /src/collect-flow.ts: 
-------------------------------------------------------------------------------- 1 | import { Transform } from 'stream'; 2 | import { concat, prependToArray, emptyBuffer } from './base'; 3 | import { 4 | createFlow, 5 | DeferToPartFlow, 6 | EndEvent, 7 | GuaranteedDelimitationEvents, 8 | GuaranteedValueEvent, 9 | InFragments, 10 | ItemDelimitationPartMarker, 11 | SequenceDelimitationPartMarker, 12 | TagPathTracking, 13 | ValueChunkMarker, 14 | } from './dicom-flow'; 15 | import { 16 | Element, 17 | ValueElement, 18 | FragmentElement, 19 | SequenceElement, 20 | FragmentsElement, 21 | ItemElement, 22 | ItemDelimitationElement, 23 | SequenceDelimitationElement, 24 | } from './dicom-elements'; 25 | import { 26 | DicomPart, 27 | ElementsPart, 28 | HeaderPart, 29 | ValueChunk, 30 | ItemPart, 31 | SequencePart, 32 | FragmentsPart, 33 | ItemDelimitationPart, 34 | SequenceDelimitationPart, 35 | } from './dicom-parts'; 36 | import { TagPath } from './tag-path'; 37 | import { Value } from './value'; 38 | import { ElementsBuilder } from './elements-builder'; 39 | import { TagTree } from './tag-tree'; 40 | 41 | export function collectFlow( 42 | tagCondition: (t: TagPath) => boolean, 43 | stopCondition: (t: TagPath) => boolean, 44 | label: string, 45 | maxBufferSize = 1000000, 46 | ): Transform { 47 | return createFlow( 48 | new (class extends EndEvent( 49 | TagPathTracking(GuaranteedDelimitationEvents(GuaranteedValueEvent(InFragments(DeferToPartFlow)))), 50 | ) { 51 | private buffer: DicomPart[] = []; 52 | private currentBufferSize = 0; 53 | private hasEmitted = false; 54 | private bytes: Buffer = emptyBuffer; 55 | private currentValue: ValueElement = undefined; 56 | private currentFragment: FragmentElement = undefined; 57 | 58 | private builder = new ElementsBuilder(); 59 | 60 | private elementsAndBuffer(): DicomPart[] { 61 | const parts = prependToArray(new ElementsPart(label, this.builder.build()), this.buffer); 62 | 63 | this.hasEmitted = true; 64 | this.buffer = []; 65 | 
this.currentBufferSize = 0; 66 | 67 | return parts; 68 | } 69 | 70 | private maybeAdd(element: Element): ElementsBuilder { 71 | return tagCondition(this.tagPath) 72 | ? this.builder.addElement(element) 73 | : this.builder.noteElement(element); 74 | } 75 | 76 | public onEnd(): DicomPart[] { 77 | return this.hasEmitted ? [] : this.elementsAndBuffer(); 78 | } 79 | 80 | public onPart(part: DicomPart): DicomPart[] { 81 | if (this.hasEmitted) { 82 | return [part]; 83 | } else { 84 | if (maxBufferSize > 0 && this.currentBufferSize > maxBufferSize) { 85 | throw Error('Error collecting elements: max buffer size exceeded'); 86 | } 87 | 88 | if ( 89 | !(part instanceof ValueChunkMarker) && 90 | !(part instanceof SequenceDelimitationPartMarker) && 91 | !(part instanceof ItemDelimitationPartMarker) 92 | ) { 93 | this.buffer.push(part); 94 | this.currentBufferSize += part.bytes.length; 95 | } 96 | 97 | if ('tag' in part && stopCondition(this.tagPath)) { 98 | return this.elementsAndBuffer(); 99 | } 100 | 101 | if (part instanceof HeaderPart) { 102 | this.currentValue = new ValueElement( 103 | part.tag, 104 | part.vr, 105 | Value.empty(), 106 | part.bigEndian, 107 | part.explicitVR, 108 | ); 109 | this.bytes = emptyBuffer; 110 | return []; 111 | } 112 | 113 | if (part instanceof ItemPart && this.inFragments) { 114 | this.currentFragment = new FragmentElement(part.length, Value.empty(), part.bigEndian); 115 | this.bytes = emptyBuffer; 116 | return []; 117 | } 118 | 119 | if (part instanceof ValueChunk) { 120 | this.bytes = concat(this.bytes, part.bytes); 121 | if (part.last) { 122 | if (this.inFragments && this.currentFragment) { 123 | this.maybeAdd( 124 | new FragmentElement( 125 | this.currentFragment.length, 126 | new Value(this.bytes), 127 | this.currentFragment.bigEndian, 128 | ), 129 | ); 130 | } else if (this.currentValue) { 131 | this.maybeAdd( 132 | new ValueElement( 133 | this.currentValue.tag, 134 | this.currentValue.vr, 135 | new Value(this.bytes), 136 | 
this.currentValue.bigEndian, 137 | this.currentValue.explicitVR, 138 | ), 139 | ); 140 | } 141 | this.currentFragment = undefined; 142 | this.currentValue = undefined; 143 | } 144 | 145 | return []; 146 | } 147 | 148 | if (part instanceof SequencePart) { 149 | this.maybeAdd(new SequenceElement(part.tag, part.length, part.bigEndian, part.explicitVR)); 150 | return []; 151 | } 152 | if (part instanceof FragmentsPart) { 153 | this.maybeAdd(new FragmentsElement(part.tag, part.vr, part.bigEndian, part.explicitVR)); 154 | return []; 155 | } 156 | if (part instanceof ItemPart) { 157 | this.maybeAdd(new ItemElement(part.length, part.bigEndian)); 158 | return []; 159 | } 160 | if (part instanceof ItemDelimitationPartMarker) { 161 | return []; 162 | } 163 | if (part instanceof ItemDelimitationPart) { 164 | this.maybeAdd(new ItemDelimitationElement(part.bigEndian)); 165 | return []; 166 | } 167 | if (part instanceof SequenceDelimitationPartMarker) { 168 | return []; 169 | } 170 | if (part instanceof SequenceDelimitationPart) { 171 | this.maybeAdd(new SequenceDelimitationElement(part.bigEndian)); 172 | return []; 173 | } 174 | return []; 175 | } 176 | } 177 | })(), 178 | ); 179 | } 180 | 181 | export function collectFromTagPathsFlow(allowlist: TagTree[], label: string, maxBufferSize?: number): Transform { 182 | const maxTag = allowlist.length > 0 ? 
Math.max(...allowlist.map((t) => t.head().tag())) : 0; 183 | const tagCondition = (currentPath: TagPath): boolean => 184 | allowlist.find((t) => t.hasTrunk(currentPath) || t.isTrunkOf(currentPath)) !== undefined; 185 | const stopCondition = (tagPath: TagPath): boolean => 186 | allowlist.length === 0 || (tagPath.isRoot() && tagPath.tag() > maxTag); 187 | 188 | return collectFlow(tagCondition, stopCondition, label, maxBufferSize); 189 | } 190 | -------------------------------------------------------------------------------- /src/detour.ts: -------------------------------------------------------------------------------- 1 | import { Transform, TransformOptions } from 'stream'; 2 | 3 | export abstract class Detour extends Transform { 4 | private detour = false; 5 | 6 | constructor(private readonly options: TransformOptions, private detourFlow?: Transform) { 7 | super(options); 8 | if (detourFlow) { 9 | this.setDetourFlow(detourFlow); 10 | } 11 | } 12 | 13 | public setDetourFlow(detourFlow: Transform): void { 14 | this.detourFlow = detourFlow; 15 | } 16 | 17 | public setDetour(detour = true, initialChunk?: any): void { 18 | this.detour = detour; 19 | if (this.detourFlow !== undefined) { 20 | if (this.detour) { 21 | this.detourFlow.on('data', (chunk) => this.process(chunk)); 22 | this.detourFlow.once('end', () => this.cleanup()); 23 | this.detourFlow.once('error', (error) => this.emit('error', error)); 24 | } else { 25 | this.detourFlow.end(); 26 | } 27 | } 28 | if (initialChunk !== undefined && (initialChunk.length === undefined || initialChunk.length > 0)) { 29 | if (detour && this.detourFlow !== undefined) { 30 | this.detourFlow.write(initialChunk); 31 | } else { 32 | this.write(initialChunk); 33 | } 34 | } 35 | } 36 | 37 | public abstract process(chunk: any): void; 38 | 39 | public cleanup(): void { 40 | // override to add custom cleanup code 41 | } 42 | 43 | public _transform(chunk: any, encoding: string, callback: (error?: Error, data?: any) => void): void { 44 | 
if (this.detour !== undefined && this.detourFlow !== undefined) { 45 | if (!this.detourFlow.write(chunk)) { 46 | this.detourFlow.once('drain', callback); 47 | } else { 48 | process.nextTick(() => callback()); 49 | } 50 | } else { 51 | this.process(chunk); 52 | callback(); 53 | } 54 | } 55 | 56 | public _flush(callback: (error?: Error, data?: any) => void): void { 57 | if (this.detour && this.detourFlow) { 58 | this.detourFlow.once('end', callback); 59 | this.detourFlow.end(); 60 | } else { 61 | this.cleanup(); 62 | process.nextTick(() => callback()); 63 | } 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /src/dicom-elements.ts: -------------------------------------------------------------------------------- 1 | import { 2 | appendToArray, 3 | bytesToUInt, 4 | concat, 5 | defaultCharacterSet, 6 | emptyBuffer, 7 | indeterminateLength, 8 | intToBytes, 9 | multiValueDelimiter, 10 | tagToBytes, 11 | tagToString, 12 | } from './base'; 13 | import { Lookup } from './lookup'; 14 | import { 15 | DicomPart, 16 | FragmentsPart, 17 | HeaderPart, 18 | ItemDelimitationPart, 19 | ItemPart, 20 | PreamblePart, 21 | SequenceDelimitationPart, 22 | SequencePart, 23 | ValueChunk, 24 | } from './dicom-parts'; 25 | import { Tag } from './tag'; 26 | import { Value } from './value'; 27 | import { VR } from './vr'; 28 | import { Elements } from './elements'; 29 | 30 | export class Element { 31 | constructor(public readonly bigEndian: boolean = false) {} 32 | 33 | public toBytes(): Buffer { 34 | return emptyBuffer; 35 | } 36 | public toParts(): DicomPart[] { 37 | return []; 38 | } 39 | } 40 | 41 | export class ElementSet { 42 | constructor( 43 | public readonly tag: number, 44 | public readonly vr: VR, 45 | public readonly bigEndian: boolean = false, 46 | public readonly explicitVR: boolean = true, 47 | ) {} 48 | 49 | public toBytes(): Buffer { 50 | return emptyBuffer; 51 | } 52 | public toElements(): Element[] { 53 | return []; 54 | } 55 | 
} 56 | 57 | export class UnknownElement extends Element { 58 | constructor(bigEndian?: boolean) { 59 | super(bigEndian); 60 | } 61 | } 62 | 63 | class PreambleElement extends Element { 64 | constructor() { 65 | super(false); 66 | } 67 | public toBytes(): Buffer { 68 | return concat(Buffer.from(new Array(128).fill(0)), Buffer.from('DICM')); 69 | } 70 | public toString(): string { 71 | return 'PreambleElement(0, ..., 0, D, I, C, M)'; 72 | } 73 | public toParts(): DicomPart[] { 74 | return [new PreamblePart(this.toBytes())]; 75 | } 76 | } 77 | export const preambleElement = new PreambleElement(); 78 | 79 | export class ValueElement extends ElementSet { 80 | public length: number; 81 | 82 | constructor(tag: number, vr: VR, public readonly value: Value, bigEndian?: boolean, explicitVR?: boolean) { 83 | super(tag, vr, bigEndian, explicitVR); 84 | this.length = value.length; 85 | } 86 | 87 | public setValue(value: Value): ValueElement { 88 | return new ValueElement(this.tag, this.vr, value.ensurePadding(this.vr), this.bigEndian, this.explicitVR); 89 | } 90 | public toBytes(): Buffer { 91 | return this.toParts() 92 | .map((p) => p.bytes) 93 | .reduce(concat); 94 | } 95 | public toParts(): DicomPart[] { 96 | const headerPart = HeaderPart.create(this.tag, this.vr, this.length, this.bigEndian, this.explicitVR); 97 | if (this.length > 0) { 98 | return [headerPart, new ValueChunk(this.bigEndian, this.value.bytes, true)]; 99 | } else { 100 | return [headerPart]; 101 | } 102 | } 103 | public toElements(): Element[] { 104 | return [this]; 105 | } 106 | public toString(): string { 107 | const strings = this.value.toStrings(this.vr, this.bigEndian, defaultCharacterSet); 108 | const s = strings.join(multiValueDelimiter); 109 | const vm = strings.length + ''; 110 | return ( 111 | 'ValueElement(' + 112 | tagToString(this.tag) + 113 | ' ' + 114 | this.vr.name + 115 | ' [' + 116 | s + 117 | '] # ' + 118 | this.length + 119 | ', ' + 120 | vm + 121 | ' ' + 122 | Lookup.keywordOf(this.tag) 
|| '' + ')' 123 | ); 124 | } 125 | } 126 | 127 | export class SequenceElement extends Element { 128 | public indeterminate: boolean; 129 | 130 | constructor( 131 | public readonly tag: number, 132 | public readonly length: number = indeterminateLength, 133 | bigEndian?: boolean, 134 | public readonly explicitVR: boolean = true, 135 | ) { 136 | super(bigEndian); 137 | this.tag = tag; 138 | this.indeterminate = this.length === indeterminateLength; 139 | } 140 | 141 | public toBytes(): Buffer { 142 | return HeaderPart.create(this.tag, VR.SQ, this.length, this.bigEndian, this.explicitVR).bytes; 143 | } 144 | public toParts(): DicomPart[] { 145 | return [new SequencePart(this.tag, this.length, this.bigEndian, this.explicitVR, this.toBytes())]; 146 | } 147 | public toString(): string { 148 | return ( 149 | 'SequenceElement(' + tagToString(this.tag) + ' SQ # ' + this.length + ' ' + Lookup.keywordOf(this.tag) || 150 | '' + ')' 151 | ); 152 | } 153 | } 154 | 155 | export class FragmentsElement extends Element { 156 | constructor( 157 | public readonly tag: number, 158 | public readonly vr: VR, 159 | bigEndian?: boolean, 160 | public readonly explicitVR: boolean = true, 161 | ) { 162 | super(bigEndian); 163 | } 164 | 165 | public toBytes(): Buffer { 166 | return this.toParts()[0].bytes; 167 | } 168 | public toParts(): DicomPart[] { 169 | return [ 170 | new FragmentsPart( 171 | this.tag, 172 | indeterminateLength, 173 | this.vr, 174 | this.bigEndian, 175 | this.explicitVR, 176 | HeaderPart.create(this.tag, this.vr, indeterminateLength, this.bigEndian, this.explicitVR).bytes, 177 | ), 178 | ]; 179 | } 180 | public toString(): string { 181 | return ( 182 | 'FragmentsElement(' + tagToString(this.tag) + ' ' + this.vr.name + ' # ' + Lookup.keywordOf(this.tag) || 183 | '' + ')' 184 | ); 185 | } 186 | } 187 | 188 | export class ItemElement extends Element { 189 | public indeterminate: boolean; 190 | 191 | constructor(public readonly length = indeterminateLength, bigEndian?: boolean) 
{ 192 | super(bigEndian); 193 | this.indeterminate = this.length === indeterminateLength; 194 | } 195 | 196 | public toBytes(): Buffer { 197 | return concat(tagToBytes(Tag.Item, this.bigEndian), intToBytes(this.length, this.bigEndian)); 198 | } 199 | public toParts(): DicomPart[] { 200 | return [new ItemPart(this.length, this.bigEndian, this.toBytes())]; 201 | } 202 | public toString(): string { 203 | return 'ItemElement(length = ' + this.length + ')'; 204 | } 205 | } 206 | 207 | export class FragmentElement extends Element { 208 | constructor(public readonly length: number, public readonly value: Value, bigEndian?: boolean) { 209 | super(bigEndian); 210 | } 211 | 212 | public toBytes(): Buffer { 213 | return this.toParts() 214 | .map((p) => p.bytes) 215 | .reduce(concat); 216 | } 217 | public toParts(): DicomPart[] { 218 | const itemParts: DicomPart[] = new ItemElement(this.value.length, this.bigEndian).toParts(); 219 | if (this.value.length !== 0) { 220 | itemParts.push(new ValueChunk(this.bigEndian, this.value.bytes, true)); 221 | } 222 | return itemParts; 223 | } 224 | public toString(): string { 225 | return 'FragmentElement(length = ' + this.length + ')'; 226 | } 227 | } 228 | 229 | export class ItemDelimitationElement extends Element { 230 | constructor(bigEndian?: boolean) { 231 | super(bigEndian); 232 | } 233 | 234 | public toBytes(): Buffer { 235 | return concat(tagToBytes(Tag.ItemDelimitationItem, this.bigEndian), Buffer.from([0, 0, 0, 0])); 236 | } 237 | public toParts(): DicomPart[] { 238 | return [new ItemDelimitationPart(this.bigEndian, this.toBytes())]; 239 | } 240 | public toString(): string { 241 | return 'ItemDelimitationElement'; 242 | } 243 | } 244 | 245 | export class SequenceDelimitationElement extends Element { 246 | constructor(bigEndian?: boolean) { 247 | super(bigEndian); 248 | } 249 | 250 | public toBytes(): Buffer { 251 | return concat(tagToBytes(Tag.SequenceDelimitationItem, this.bigEndian), Buffer.from([0, 0, 0, 0])); 252 | } 253 | 
public toParts(): DicomPart[] { 254 | return [new SequenceDelimitationPart(this.bigEndian, this.toBytes())]; 255 | } 256 | public toString(): string { 257 | return 'SequenceDelimitationElement'; 258 | } 259 | } 260 | 261 | export class Sequence extends ElementSet { 262 | public indeterminate: boolean; 263 | public size: number; 264 | 265 | constructor( 266 | public readonly tag: number, 267 | public readonly length: number = indeterminateLength, 268 | public readonly items: Item[] = [], 269 | bigEndian?: boolean, 270 | explicitVR?: boolean, 271 | ) { 272 | super(tag, VR.SQ, bigEndian, explicitVR); 273 | this.indeterminate = length === indeterminateLength; 274 | this.size = items.length; 275 | } 276 | 277 | public item(index: number): Item { 278 | return this.items.length >= index ? this.items[index - 1] : undefined; 279 | } 280 | public addItem(item: Item): Sequence { 281 | const newItems = appendToArray(item, this.items); 282 | const newLength = this.indeterminate ? this.length : this.length + item.toBytes().length; 283 | return new Sequence(this.tag, newLength, newItems, this.bigEndian, this.explicitVR); 284 | } 285 | public removeItem(index: number): Sequence { 286 | const newItems = this.items.slice(); 287 | newItems.splice(index - 1, 1); 288 | const newLength = this.indeterminate ? 
this.length : this.length - this.item(index).toBytes().length; 289 | return new Sequence(this.tag, newLength, newItems, this.bigEndian, this.explicitVR); 290 | } 291 | public toBytes(): Buffer { 292 | return this.toElements() 293 | .map((e) => e.toBytes()) 294 | .reduce(concat, emptyBuffer); 295 | } 296 | public toElements(): Element[] { 297 | const elements = []; 298 | elements.push(new SequenceElement(this.tag, this.length, this.bigEndian, this.explicitVR)); 299 | for (let i = 1; i <= this.items.length; i++) { 300 | const itemElements = this.item(i).toElements(); 301 | itemElements.forEach((e) => elements.push(e)); 302 | } 303 | if (this.indeterminate) { 304 | elements.push(new SequenceDelimitationElement(this.bigEndian)); 305 | } 306 | return elements; 307 | } 308 | public setItem(index: number, item: Item): Sequence { 309 | const newItems = this.items.slice(); 310 | newItems[index - 1] = item; 311 | return new Sequence(this.tag, this.length, newItems, this.bigEndian, this.explicitVR); 312 | } 313 | public toString(): string { 314 | return ( 315 | 'Sequence(' + 316 | tagToString(this.tag) + 317 | ' SQ # ' + 318 | this.length + 319 | ' ' + 320 | this.size + 321 | ' ' + 322 | Lookup.keywordOf(this.tag) || '' + ')' 323 | ); 324 | } 325 | } 326 | 327 | export class Item { 328 | public indeterminate: boolean; 329 | 330 | constructor( 331 | public readonly elements: Elements, 332 | public readonly length: number = indeterminateLength, 333 | public readonly bigEndian: boolean = false, 334 | ) { 335 | this.indeterminate = length === indeterminateLength; 336 | } 337 | 338 | public toElements(): Element[] { 339 | const elements: Element[] = []; 340 | elements.push(new ItemElement(this.length, this.bigEndian)); 341 | this.elements.toElements(false).forEach((e) => elements.push(e)); 342 | if (this.indeterminate) { 343 | elements.push(new ItemDelimitationElement(this.bigEndian)); 344 | } 345 | return elements; 346 | } 347 | public toBytes(): Buffer { 348 | return 
this.toElements() 349 | .map((e) => e.toBytes()) 350 | .reduce(concat); 351 | } 352 | public setElements(elements: Elements): Item { 353 | const newLength = this.indeterminate ? indeterminateLength : elements.toBytes(false).length; 354 | return new Item(elements, newLength, this.bigEndian); 355 | } 356 | public toString(): string { 357 | return 'Item(length = ' + this.length + ', elements size = ' + this.elements.size + ')'; 358 | } 359 | } 360 | 361 | export class Fragment { 362 | constructor( 363 | public readonly length: number, 364 | public readonly value: Value, 365 | public readonly bigEndian: boolean = false, 366 | ) {} 367 | 368 | public toElement(): Element { 369 | return new FragmentElement(this.length, this.value, this.bigEndian); 370 | } 371 | public toString(): string { 372 | return 'Fragment(length = ' + this.length + ', value length = ' + this.value.length + ')'; 373 | } 374 | } 375 | 376 | export class Fragments extends ElementSet { 377 | public size: number; 378 | 379 | constructor( 380 | public readonly tag: number, 381 | public readonly vr: VR, 382 | public readonly offsets: number[], 383 | public readonly fragments: Fragment[] = [], 384 | bigEndian?: boolean, 385 | explicitVR?: boolean, 386 | ) { 387 | super(tag, vr, bigEndian, explicitVR); 388 | this.size = fragments.length; 389 | } 390 | 391 | public fragment(index: number): Fragment { 392 | return this.fragments.length > index ? undefined : this.fragments[index - 1]; 393 | } 394 | public frameCount(): number { 395 | return this.offsets === undefined && this.fragments.length === 0 396 | ? 0 397 | : this.offsets === undefined 398 | ? 
1 399 | : this.offsets.length; 400 | } 401 | public addFragment(fragment: Fragment): Fragments { 402 | if (this.size === 0 && this.offsets === undefined) { 403 | const bytes = fragment.value.bytes; 404 | const offsets = []; 405 | for (let i = 0; i < bytes.length; i += 4) { 406 | offsets.push(bytesToUInt(bytes.slice(i), fragment.bigEndian)); 407 | } 408 | return new Fragments(this.tag, this.vr, offsets, this.fragments, this.bigEndian, this.explicitVR); 409 | } else { 410 | return new Fragments( 411 | this.tag, 412 | this.vr, 413 | this.offsets, 414 | appendToArray(fragment, this.fragments), 415 | this.bigEndian, 416 | this.explicitVR, 417 | ); 418 | } 419 | } 420 | public toBytes(): Buffer { 421 | return this.toElements() 422 | .map((e) => e.toBytes()) 423 | .reduce(concat); 424 | } 425 | 426 | public toElements(): Element[] { 427 | const elements: Element[] = []; 428 | elements.push(new FragmentsElement(this.tag, this.vr, this.bigEndian, this.explicitVR)); 429 | if (this.offsets !== undefined) { 430 | elements.push( 431 | new FragmentElement( 432 | 4 * this.offsets.length, 433 | new Value( 434 | this.offsets 435 | .map((offset) => intToBytes(offset, this.bigEndian), this.bigEndian) 436 | .reduce(concat, emptyBuffer), 437 | ), 438 | this.bigEndian, 439 | ), 440 | ); 441 | } else { 442 | elements.push(new FragmentElement(0, Value.empty())); 443 | } 444 | for (let i = 1; i <= this.fragments.length; i++) { 445 | elements.push(this.fragment(i).toElement()); 446 | } 447 | elements.push(new SequenceDelimitationElement(this.bigEndian)); 448 | return elements; 449 | } 450 | public setFragment(index: number, fragment: Fragment): Fragments { 451 | const newFragments = this.fragments.slice(); 452 | newFragments[index - 1] = fragment; 453 | return new Fragments(this.tag, this.vr, this.offsets, newFragments, this.bigEndian, this.explicitVR); 454 | } 455 | public toString(): string { 456 | return `Fragments(${tagToString(this.tag)} ${this.vr.name} # ${this.fragments.length} ${ 
457 | Lookup.keywordOf(this.tag) || '' 458 | })`; 459 | } 460 | } 461 | -------------------------------------------------------------------------------- /src/dicom-parts.ts: -------------------------------------------------------------------------------- 1 | import { 2 | concat, 3 | emptyBuffer, 4 | indeterminateLength, 5 | intToBytes, 6 | isFileMetaInformation, 7 | shortToBytes, 8 | tagToBytes, 9 | tagToString, 10 | trim, 11 | } from './base'; 12 | import { Elements } from './elements'; 13 | import { VR } from './vr'; 14 | 15 | export class DicomPart { 16 | constructor(public readonly bigEndian: boolean, public readonly bytes: Buffer) {} 17 | } 18 | 19 | export class MetaPart extends DicomPart { 20 | constructor() { 21 | super(false, emptyBuffer); 22 | } 23 | } 24 | 25 | export class PreamblePart extends DicomPart { 26 | constructor(bytes: Buffer) { 27 | super(false, bytes); 28 | } 29 | 30 | public toString(): string { 31 | return 'Preamble []'; 32 | } 33 | } 34 | 35 | export class HeaderPart extends DicomPart { 36 | public static create(tag: number, vr: VR, length: number, bigEndian = false, explicitVR = true): HeaderPart { 37 | const bytes = explicitVR 38 | ? vr.headerLength === 8 39 | ? 
Buffer.concat([tagToBytes(tag, bigEndian), Buffer.from(vr.name), shortToBytes(length, bigEndian)], 8) 40 | : Buffer.concat( 41 | [ 42 | tagToBytes(tag, bigEndian), 43 | Buffer.from(vr.name), 44 | Buffer.from([0, 0]), 45 | intToBytes(length, bigEndian), 46 | ], 47 | 12, 48 | ) 49 | : Buffer.concat([tagToBytes(tag, bigEndian), intToBytes(length, bigEndian)], 8); 50 | return new HeaderPart(tag, vr, length, isFileMetaInformation(tag), bigEndian, explicitVR, bytes); 51 | } 52 | 53 | constructor( 54 | public readonly tag: number, 55 | public readonly vr: VR, 56 | public readonly length: number, 57 | public readonly isFmi: boolean, 58 | public readonly bigEndian: boolean, 59 | public readonly explicitVR: boolean, 60 | public readonly bytes: Buffer, 61 | ) { 62 | super(bigEndian, bytes); 63 | if (!this.bytes) { 64 | this.bytes = this.explicitVR 65 | ? vr.headerLength === 8 66 | ? Buffer.concat( 67 | [tagToBytes(tag, bigEndian), Buffer.from(vr.name), shortToBytes(length, bigEndian)], 68 | 8, 69 | ) 70 | : Buffer.concat( 71 | [ 72 | tagToBytes(tag, bigEndian), 73 | Buffer.from(vr.name), 74 | Buffer.from([0, 0]), 75 | intToBytes(length, bigEndian), 76 | ], 77 | 12, 78 | ) 79 | : Buffer.concat([tagToBytes(tag, bigEndian), intToBytes(length, bigEndian)], 8); 80 | } 81 | } 82 | 83 | public withUpdatedLength(newLength: number): HeaderPart { 84 | if (newLength === this.length) { 85 | return this; 86 | } else { 87 | let updated = null; 88 | if (this.bytes.length >= 8 && this.explicitVR && this.vr.headerLength === 8) { 89 | // explicit vr 90 | updated = concat(this.bytes.slice(0, 6), shortToBytes(newLength, this.bigEndian)); 91 | } else if (this.bytes.length >= 12 && this.explicitVR && this.vr.headerLength === 12) { 92 | // explicit vr 93 | updated = concat(this.bytes.slice(0, 8), intToBytes(newLength, this.bigEndian)); 94 | } else { 95 | // implicit vr 96 | updated = concat(this.bytes.slice(0, 4), intToBytes(newLength, this.bigEndian)); 97 | } 98 | 99 | return new 
HeaderPart(this.tag, this.vr, newLength, this.isFmi, this.bigEndian, this.explicitVR, updated); 100 | } 101 | } 102 | 103 | public toString(): string { 104 | return ( 105 | 'Header [tag = ' + 106 | tagToString(this.tag) + 107 | ', vr = ' + 108 | this.vr.name + 109 | ', length = ' + 110 | this.length + 111 | ', bigEndian = ' + 112 | this.bigEndian + 113 | ', explicitVR = ' + 114 | this.explicitVR + 115 | ']' 116 | ); 117 | } 118 | } 119 | 120 | export class ValueChunk extends DicomPart { 121 | constructor(bigEndian: boolean, bytes: Buffer, public readonly last: boolean) { 122 | super(bigEndian, bytes); 123 | } 124 | 125 | public toString(): string { 126 | let ascii = trim( 127 | this.bytes 128 | .slice(0, 100) 129 | .toString('ascii') 130 | .replace(/[^\x20-\x7E]/g, ''), 131 | ); 132 | if (this.bytes.length > 100) { 133 | ascii = ascii + '...'; 134 | } 135 | return 'ValueChunk [length = ' + this.bytes.length + ', last = ' + this.last + ', ascii = ' + ascii + ']'; 136 | } 137 | } 138 | 139 | export class DeflatedChunk extends DicomPart { 140 | constructor(bigEndian: boolean, bytes: Buffer) { 141 | super(bigEndian, bytes); 142 | } 143 | 144 | public toString(): string { 145 | return 'DeflatedChunk [length = ' + this.bytes.length + ']'; 146 | } 147 | } 148 | 149 | export class ItemPart extends DicomPart { 150 | public indeterminate = false; 151 | 152 | constructor(public readonly length: number, bigEndian: boolean, bytes: Buffer) { 153 | super(bigEndian, bytes); 154 | this.indeterminate = length === indeterminateLength; 155 | } 156 | 157 | public toString(): string { 158 | return 'Item [length = ' + this.length + ']'; 159 | } 160 | } 161 | 162 | export class ItemDelimitationPart extends DicomPart { 163 | constructor(bigEndian: boolean, bytes: Buffer) { 164 | super(bigEndian, bytes); 165 | } 166 | 167 | public toString(): string { 168 | return 'ItemDelimitation'; 169 | } 170 | } 171 | 172 | export class SequencePart extends DicomPart { 173 | public indeterminate = 
false; 174 | 175 | constructor( 176 | public readonly tag: number, 177 | public readonly length: number, 178 | bigEndian: boolean, 179 | public readonly explicitVR: boolean, 180 | bytes: Buffer, 181 | ) { 182 | super(bigEndian, bytes); 183 | this.indeterminate = length === indeterminateLength; 184 | } 185 | 186 | public toString(): string { 187 | return 'Sequence [tag = ' + tagToString(this.tag) + ', length = ' + this.length + ']'; 188 | } 189 | } 190 | 191 | export class SequenceDelimitationPart extends DicomPart { 192 | constructor(bigEndian: boolean, bytes: Buffer) { 193 | super(bigEndian, bytes); 194 | } 195 | 196 | public toString(): string { 197 | return 'SequenceDelimitation []'; 198 | } 199 | } 200 | 201 | export class FragmentsPart extends DicomPart { 202 | constructor( 203 | public readonly tag: number, 204 | public readonly length: number, 205 | public readonly vr: VR, 206 | bigEndian: boolean, 207 | public readonly explicitVR: boolean, 208 | bytes: Buffer, 209 | ) { 210 | super(bigEndian, bytes); 211 | } 212 | 213 | public toString(): string { 214 | return ( 215 | 'Fragments [tag = ' + tagToString(this.tag) + ', vr = ' + this.vr.name + ', length = ' + this.length + ']' 216 | ); 217 | } 218 | } 219 | 220 | export class UnknownPart extends DicomPart { 221 | constructor(bigEndian: boolean, bytes: Buffer) { 222 | super(bigEndian, bytes); 223 | } 224 | 225 | public toString(): string { 226 | return 'Unknown []'; 227 | } 228 | } 229 | 230 | export class ElementsPart extends MetaPart { 231 | constructor(public readonly label: string, public readonly elements: Elements) { 232 | super(); 233 | } 234 | } 235 | -------------------------------------------------------------------------------- /src/element-flows.ts: -------------------------------------------------------------------------------- 1 | import { concat, emptyBuffer } from './base'; 2 | import { createFlow, DeferToPartFlow, GuaranteedValueEvent, InFragments } from './dicom-flow'; 3 | import { 4 | Element, 
5 | FragmentElement, 6 | FragmentsElement, 7 | ItemDelimitationElement, 8 | ItemElement, 9 | preambleElement, 10 | SequenceDelimitationElement, 11 | SequenceElement, 12 | ValueElement, 13 | } from './dicom-elements'; 14 | import { 15 | DicomPart, 16 | FragmentsPart, 17 | HeaderPart, 18 | ItemDelimitationPart, 19 | ItemPart, 20 | PreamblePart, 21 | SequenceDelimitationPart, 22 | SequencePart, 23 | ValueChunk, 24 | } from './dicom-parts'; 25 | import { Value } from './value'; 26 | 27 | export function elementFlow(): any { 28 | return createFlow( 29 | new (class extends GuaranteedValueEvent(InFragments(DeferToPartFlow)) { 30 | private bytes: Buffer = emptyBuffer; 31 | private currentValue: ValueElement; 32 | private currentFragment: FragmentElement; 33 | 34 | public onPart(part: DicomPart): Element[] { 35 | if (part instanceof PreamblePart) { 36 | return [preambleElement]; 37 | } 38 | 39 | if (part instanceof HeaderPart) { 40 | this.currentValue = new ValueElement( 41 | part.tag, 42 | part.vr, 43 | Value.empty(), 44 | part.bigEndian, 45 | part.explicitVR, 46 | ); 47 | this.bytes = emptyBuffer; 48 | return []; 49 | } 50 | 51 | if (part instanceof ItemPart && this.inFragments) { 52 | this.currentFragment = new FragmentElement(part.length, Value.empty(), part.bigEndian); 53 | this.bytes = emptyBuffer; 54 | return []; 55 | } 56 | 57 | if (part instanceof ValueChunk) { 58 | this.bytes = concat(this.bytes, part.bytes); 59 | if (part.last) { 60 | if (this.inFragments) { 61 | if (this.currentFragment === undefined) { 62 | return []; 63 | } else { 64 | return [ 65 | new FragmentElement( 66 | this.currentFragment.length, 67 | new Value(this.bytes), 68 | this.currentFragment.bigEndian, 69 | ), 70 | ]; 71 | } 72 | } else { 73 | return [ 74 | new ValueElement( 75 | this.currentValue.tag, 76 | this.currentValue.vr, 77 | new Value(this.bytes), 78 | this.currentValue.bigEndian, 79 | this.currentValue.explicitVR, 80 | ), 81 | ]; 82 | } 83 | } else { 84 | return []; 85 | } 86 | } 87 | 
88 | if (part instanceof SequencePart) { 89 | return [new SequenceElement(part.tag, part.length, part.bigEndian, part.explicitVR)]; 90 | } 91 | 92 | if (part instanceof FragmentsPart) { 93 | return [new FragmentsElement(part.tag, part.vr, part.bigEndian, part.explicitVR)]; 94 | } 95 | 96 | if (part instanceof ItemPart) { 97 | return [new ItemElement(part.length, part.bigEndian)]; 98 | } 99 | 100 | if (part instanceof ItemDelimitationPart) { 101 | return [new ItemDelimitationElement(part.bigEndian)]; 102 | } 103 | 104 | if (part instanceof SequenceDelimitationPart) { 105 | return [new SequenceDelimitationElement(part.bigEndian)]; 106 | } 107 | 108 | return []; 109 | } 110 | })(), 111 | ); 112 | } 113 | -------------------------------------------------------------------------------- /src/element-sink.ts: -------------------------------------------------------------------------------- 1 | import { Writable } from 'stream'; 2 | import { Elements } from './elements'; 3 | import { ElementsBuilder } from './elements-builder'; 4 | 5 | export function elementSink(callback: (e: Elements) => void): Writable { 6 | const builder = new ElementsBuilder(); 7 | const sink = new Writable({ 8 | objectMode: true, 9 | write(element, encoding, cb): void { 10 | try { 11 | builder.addElement(element); 12 | process.nextTick(() => cb()); 13 | } catch (error) { 14 | process.nextTick(() => this.emit('error', error)); 15 | } 16 | }, 17 | }); 18 | sink.once('finish', () => { 19 | callback(builder.build()); 20 | }); 21 | return sink; 22 | } 23 | -------------------------------------------------------------------------------- /src/elements-builder.ts: -------------------------------------------------------------------------------- 1 | import { ZoneId } from 'js-joda'; 2 | import { defaultCharacterSet, systemZone } from './base'; 3 | import { CharacterSets } from './character-sets'; 4 | import { 5 | Element, 6 | ElementSet, 7 | Fragment, 8 | FragmentElement, 9 | Fragments, 10 | FragmentsElement, 
11 | Item, 12 | ItemDelimitationElement, 13 | ItemElement, 14 | Sequence, 15 | SequenceDelimitationElement, 16 | SequenceElement, 17 | ValueElement, 18 | preambleElement, 19 | } from './dicom-elements'; 20 | import { Tag } from './tag'; 21 | import { VR } from './vr'; 22 | import { Elements, parseZoneOffset } from './elements'; 23 | 24 | class DatasetBuilder { 25 | private data = new Array(64); 26 | private pos = 0; 27 | 28 | constructor(public characterSets: CharacterSets, public zoneOffset: ZoneId) {} 29 | 30 | public addElementSet(elementSet: ElementSet): DatasetBuilder { 31 | if (elementSet instanceof ValueElement && elementSet.tag === Tag.SpecificCharacterSet) { 32 | this.characterSets = CharacterSets.fromBytes(elementSet.value.bytes); 33 | } else if (elementSet instanceof ValueElement && elementSet.tag === Tag.TimezoneOffsetFromUTC) { 34 | const newOffset = parseZoneOffset( 35 | elementSet.value.toSingleString(VR.SH, elementSet.bigEndian, this.characterSets), 36 | ); 37 | this.zoneOffset = newOffset || this.zoneOffset; 38 | } 39 | 40 | if (this.data.length <= this.pos) { 41 | this.data.length *= 2; 42 | } 43 | this.data[this.pos++] = elementSet; 44 | 45 | return this; 46 | } 47 | 48 | public isEmpty(): boolean { 49 | return this.data.length == 0; 50 | } 51 | 52 | public build(): Elements { 53 | return new Elements(this.characterSets, this.zoneOffset, this.data.slice(0, this.pos)); 54 | } 55 | } 56 | 57 | export class ElementsBuilder { 58 | private builderStack: DatasetBuilder[] = [new DatasetBuilder(defaultCharacterSet, systemZone)]; 59 | private sequenceStack: Sequence[] = []; 60 | private lengthStack: { element: Element; bytesLeft: number }[] = []; 61 | private fragments: Fragments; 62 | 63 | public addElement(element: Element): ElementsBuilder { 64 | if (element == preambleElement && this.builderStack.length == 1 && this.builderStack[0].isEmpty()) { 65 | return this; 66 | } 67 | if (element instanceof ValueElement) { 68 | this.subtractLength(element.length 
+ (element.explicitVR ? element.vr.headerLength : 8)); 69 | const builder = this.builderStack[0]; 70 | builder.addElementSet(element); 71 | return this.maybeDelimit(); 72 | } 73 | if (element instanceof FragmentsElement) { 74 | this.subtractLength(element.explicitVR ? element.vr.headerLength : 8); 75 | this.updateFragments( 76 | new Fragments(element.tag, element.vr, undefined, [], element.bigEndian, element.explicitVR), 77 | ); 78 | return this.maybeDelimit(); 79 | } 80 | if (element instanceof FragmentElement) { 81 | this.subtractLength(8 + element.length); 82 | if (this.fragments !== undefined) { 83 | const updatedFragments = this.fragments.addFragment( 84 | new Fragment(element.length, element.value, element.bigEndian), 85 | ); 86 | this.updateFragments(updatedFragments); 87 | } 88 | return this.maybeDelimit(); 89 | } 90 | if (element instanceof SequenceDelimitationElement && this.hasFragments()) { 91 | this.subtractLength(8); 92 | const builder = this.builderStack[0]; 93 | builder.addElementSet(this.fragments); 94 | this.updateFragments(undefined); 95 | return this.maybeDelimit(); 96 | } 97 | if (element instanceof SequenceElement) { 98 | this.subtractLength(element.explicitVR ? 12 : 8); 99 | if (!element.indeterminate) { 100 | this.pushLength(element, element.length); 101 | } 102 | this.pushSequence( 103 | new Sequence( 104 | element.tag, 105 | element.indeterminate ? element.length : 0, 106 | [], 107 | element.bigEndian, 108 | element.explicitVR, 109 | ), 110 | ); 111 | return this.maybeDelimit(); 112 | } 113 | if (element instanceof ItemElement && this.hasSequence()) { 114 | this.subtractLength(8); 115 | const builder = this.builderStack[0]; 116 | const sequence = this.sequenceStack[0].addItem( 117 | new Item(Elements.empty(), element.indeterminate ? 
element.length : 0, element.bigEndian), 118 | ); 119 | if (!element.indeterminate) { 120 | this.pushLength(element, element.length); 121 | } 122 | this.pushBuilder(new DatasetBuilder(builder.characterSets, builder.zoneOffset)); 123 | this.updateSequence(sequence); 124 | return this.maybeDelimit(); 125 | } 126 | if (element instanceof ItemDelimitationElement && this.hasSequence()) { 127 | this.subtractLength(8); 128 | if (!this.itemIsIndeterminate() && this.lengthStack.length > 0) { 129 | this.lengthStack.shift(); // determinate length item with delimitation - handle gracefully 130 | } 131 | this.endItem(); 132 | return this.maybeDelimit(); 133 | } 134 | if (element instanceof SequenceDelimitationElement && this.hasSequence()) { 135 | this.subtractLength(8); 136 | if (!this.sequenceIsIndeterminate() && this.lengthStack.length > 0) { 137 | this.lengthStack.shift(); // determinate length sequence with delimitation - handle gracefully 138 | } 139 | this.endSequence(); 140 | return this.maybeDelimit(); 141 | } 142 | console.warn(`Unexpected element ${element}`); 143 | this.subtractLength(element.toBytes().length); 144 | return this.maybeDelimit(); 145 | } 146 | 147 | public noteElement(element: Element): ElementsBuilder { 148 | this.subtractLength(element.toBytes().length); 149 | return this.maybeDelimit(); 150 | } 151 | 152 | public currentDepth(): number { 153 | return this.sequenceStack.length; 154 | } 155 | 156 | public build(): Elements { 157 | return this.builderStack.length === 0 ? 
Elements.empty() : this.builderStack[0].build(); 158 | } 159 | private updateSequence(sequence: Sequence): void { 160 | if (this.sequenceStack.length === 0) { 161 | this.sequenceStack = [sequence]; 162 | } else { 163 | this.sequenceStack[0] = sequence; 164 | } 165 | } 166 | private updateFragments(fragments: Fragments): void { 167 | this.fragments = fragments; 168 | } 169 | private subtractLength(length: number): void { 170 | this.lengthStack.forEach((l) => (l.bytesLeft -= length)); 171 | } 172 | private pushBuilder(builder: DatasetBuilder): void { 173 | this.builderStack.unshift(builder); 174 | } 175 | private pushSequence(sequence: Sequence): void { 176 | this.sequenceStack.unshift(sequence); 177 | } 178 | private pushLength(element: Element, length: number): void { 179 | this.lengthStack.unshift({ element, bytesLeft: length }); 180 | } 181 | private popBuilder(): void { 182 | this.builderStack.shift(); 183 | } 184 | private popSequence(): void { 185 | this.sequenceStack.shift(); 186 | } 187 | private hasSequence(): boolean { 188 | return this.sequenceStack.length > 0; 189 | } 190 | private hasFragments(): boolean { 191 | return this.fragments !== undefined; 192 | } 193 | private sequenceIsIndeterminate(): boolean { 194 | return this.sequenceStack.length > 0 && this.sequenceStack[0].indeterminate; 195 | } 196 | private itemIsIndeterminate(): boolean { 197 | return ( 198 | this.sequenceStack.length > 0 && 199 | this.sequenceStack[0].items.length > 0 && 200 | this.sequenceStack[0].items[this.sequenceStack[0].items.length - 1].indeterminate 201 | ); 202 | } 203 | private endItem(): void { 204 | const builder = this.builderStack[0]; 205 | const sequence = this.sequenceStack[0]; 206 | const elements = builder.build(); 207 | const items = sequence.items; 208 | if (items.length > 0) { 209 | items[items.length - 1] = items[items.length - 1].setElements(elements); 210 | const updatedSequence = new Sequence( 211 | sequence.tag, 212 | sequence.length, 213 | items, 214 | 
sequence.bigEndian, 215 | sequence.explicitVR, 216 | ); 217 | this.popBuilder(); 218 | this.updateSequence(updatedSequence); 219 | } 220 | } 221 | private endSequence(): void { 222 | const sequence = this.sequenceStack[0]; 223 | const sequenceLength = sequence.indeterminate 224 | ? sequence.length 225 | : sequence.toBytes().length - (sequence.explicitVR ? 12 : 8); 226 | const updatedSequence = new Sequence( 227 | sequence.tag, 228 | sequenceLength, 229 | sequence.items, 230 | sequence.bigEndian, 231 | sequence.explicitVR, 232 | ); 233 | const builder = this.builderStack[0]; 234 | builder.addElementSet(updatedSequence); 235 | this.popSequence(); 236 | } 237 | private maybeDelimit(): ElementsBuilder { 238 | const delimits = this.lengthStack.filter((e) => e.bytesLeft <= 0); 239 | if (delimits.length > 0) { 240 | this.lengthStack = this.lengthStack.filter((e) => e.bytesLeft > 0); 241 | delimits.forEach((e) => { 242 | if (e.element instanceof ItemElement) { 243 | this.endItem(); 244 | } else { 245 | this.endSequence(); 246 | } 247 | }); 248 | } 249 | return this; 250 | } 251 | } 252 | -------------------------------------------------------------------------------- /src/flows.ts: -------------------------------------------------------------------------------- 1 | import { Transform } from 'stream'; 2 | 3 | export function identityFlow(objectMode = false): Transform { 4 | return new Transform({ 5 | objectMode, 6 | transform(chunk, encoding, callback): void { 7 | this.push(chunk); 8 | process.nextTick(() => callback()); 9 | }, 10 | }); 11 | } 12 | 13 | export function printFlow(objectMode = false): Transform { 14 | return new Transform({ 15 | objectMode, 16 | transform(chunk, encoding, callback): void { 17 | console.log(chunk); 18 | this.push(chunk); 19 | process.nextTick(() => callback()); 20 | }, 21 | }); 22 | } 23 | 24 | export function prependFlow(prependChunk: any, objectMode = false): Transform { 25 | let hasEmitted = false; 26 | return new Transform({ 27 | 
objectMode, 28 | transform(chunk, encoding, callback): void { 29 | if (!hasEmitted) { 30 | this.push(prependChunk); 31 | hasEmitted = true; 32 | } 33 | this.push(chunk); 34 | process.nextTick(() => callback()); 35 | }, 36 | }); 37 | } 38 | 39 | export function appendFlow(appendChunk: any, objectMode = false): Transform { 40 | return new Transform({ 41 | objectMode, 42 | transform(chunk, encoding, callback): void { 43 | this.push(chunk); 44 | process.nextTick(() => callback()); 45 | }, 46 | flush(callback): void { 47 | process.nextTick(() => callback(null, appendChunk)); 48 | }, 49 | }); 50 | } 51 | 52 | export function objectToStringFlow(toStringFunction: (a: any) => string): Transform { 53 | return new Transform({ 54 | writableObjectMode: true, 55 | transform(chunk, encoding, callback): void { 56 | this.push(toStringFunction(chunk) + '\n'); 57 | process.nextTick(() => callback()); 58 | }, 59 | }); 60 | } 61 | 62 | export function mapFlow(f: (a: any) => any): Transform { 63 | return new Transform({ 64 | objectMode: true, 65 | transform(chunk, encoding, callback): void { 66 | try { 67 | this.push(f(chunk)); 68 | process.nextTick(() => callback()); 69 | } catch (error) { 70 | process.nextTick(() => this.emit('error', error)); 71 | } 72 | }, 73 | }); 74 | } 75 | 76 | export function filterFlow(f: (a: any) => boolean): Transform { 77 | return new Transform({ 78 | objectMode: true, 79 | transform(chunk, encoding, callback): void { 80 | try { 81 | if (f(chunk) === true) { 82 | this.push(chunk); 83 | } 84 | process.nextTick(() => callback()); 85 | } catch (error) { 86 | process.nextTick(() => this.emit('error', error)); 87 | } 88 | }, 89 | }); 90 | } 91 | 92 | export function flatMapFlow(toChunks: (a: any) => any[]): Transform { 93 | return new Transform({ 94 | objectMode: true, 95 | transform(chunk, encoding, callback): void { 96 | try { 97 | for (const outChunk of toChunks(chunk)) { 98 | this.push(outChunk); 99 | } 100 | process.nextTick(() => callback()); 101 | } catch 
(error) { 102 | process.nextTick(() => this.emit('error', error)); 103 | } 104 | }, 105 | }); 106 | } 107 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | export * from './detour'; 2 | export * from './base'; 3 | export * from './vr'; 4 | export * from './uid'; 5 | export * from './tag'; 6 | export * from './lookup'; 7 | export * from './tag-path'; 8 | export * from './tag-tree'; 9 | export * from './person-name'; 10 | export * from './dicom-parts'; 11 | export * from './dicom-elements'; 12 | export { CharacterSets } from './character-sets'; 13 | export * from './sources'; 14 | export * from './flows'; 15 | export * from './sinks'; 16 | export * from './parse-flow'; 17 | export * from './dicom-flow'; 18 | export * from './dicom-flows'; 19 | export * from './collect-flow'; 20 | export * from './modify-flow'; 21 | export * from './parser'; 22 | export * from './value'; 23 | export * from './elements'; 24 | export * from './elements-builder'; 25 | export * from './element-flows'; 26 | export * from './element-sink'; 27 | -------------------------------------------------------------------------------- /src/lookup.ts: -------------------------------------------------------------------------------- 1 | import { Tag } from './tag'; 2 | import { TagToVR } from './tag-to-vr'; 3 | import { UIDToName } from './uid-to-name'; 4 | import { VR } from './vr'; 5 | 6 | export class Lookup { 7 | public static keywords = Object.keys(Tag); 8 | 9 | public static keywordOf(tag: number): string { 10 | if ((tag & 0x0000ffff) === 0 && (tag & 0xfffd0000) !== 0) { 11 | return 'GroupLength'; 12 | } 13 | if ((tag & 0x00010000) !== 0) { 14 | if ((tag & 0x0000ff00) === 0 && (tag & 0x000000f0) !== 0) { 15 | return 'PrivateCreatorID'; 16 | } 17 | return ''; 18 | } 19 | if ((tag & 0xffffff00) === Tag.SourceImageIDs) { 20 | return 'SourceImageIDs'; 21 | } 22 | let tag2 
= tag; 23 | if ((tag & 0xffe00000) === 0x50000000 || (tag & 0xffe00000) === 0x60000000) { 24 | tag2 = tag & 0xffe0ffff; 25 | } else if ((tag & 0xff000000) === 0x7f000000 && (tag & 0xffff0000) !== 0x7fe00000) { 26 | tag2 = tag & 0xff00ffff; 27 | } 28 | return Lookup.keywords.find((key) => Tag[key] === tag2); 29 | } 30 | 31 | public static vrOf(tag: number): VR { 32 | return TagToVR.vrOf(tag); 33 | } 34 | 35 | public static tagOf(keyword: string): number { 36 | return Tag[keyword] as number; 37 | } 38 | 39 | public static nameOf(uid: string): string { 40 | return UIDToName.nameOf(uid); 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /src/modify-flow.ts: -------------------------------------------------------------------------------- 1 | import { appendToArray, concat, concatArrays, emptyBuffer, flatten, padToEvenLength, prependToArray } from './base'; 2 | import { 3 | createFlow, 4 | DeferToPartFlow, 5 | EndEvent, 6 | GroupLengthWarnings, 7 | GuaranteedDelimitationEvents, 8 | GuaranteedValueEvent, 9 | InFragments, 10 | TagPathTracking, 11 | } from './dicom-flow'; 12 | import { Lookup } from './lookup'; 13 | import { DicomPart, HeaderPart, MetaPart, SequencePart, ValueChunk } from './dicom-parts'; 14 | import { emptyTagPath, TagPath } from './tag-path'; 15 | import { VR } from './vr'; 16 | 17 | export class TagModification { 18 | public static equals(tagPath: TagPath, modification: (b: Buffer) => Buffer): TagModification { 19 | return new TagModification(tagPath.isEqualTo.bind(tagPath), modification); 20 | } 21 | 22 | public static endsWith(tagPath: TagPath, modification: (b: Buffer) => Buffer): TagModification { 23 | return new TagModification((tp) => tp.endsWith(tagPath), modification); 24 | } 25 | 26 | constructor( 27 | public readonly matches: (t: TagPath) => boolean, 28 | public readonly modification: (b: Buffer) => Buffer, 29 | ) {} 30 | } 31 | 32 | export class TagInsertion { 33 | constructor(public readonly 
/**
 * Meta part used to reconfigure a running modify flow from within the stream.
 * When this part passes through the flow, its modifications and insertions are
 * merged into the flow's current sets — or, when `replace` is true, substituted
 * for them entirely. The part itself is consumed and not emitted downstream.
 */
export class TagModificationsPart extends MetaPart {
    constructor(
        public readonly modifications: TagModification[] = [],
        public readonly insertions: TagInsertion[] = [],
        public readonly replace: boolean = false,
    ) {
        super();
    }
}
            // Active modifications/insertions; may be extended or replaced at
            // runtime via TagModificationsPart (see onPart).
            private currentModifications: TagModification[] = mods;
            private currentInsertions: TagInsertion[] = organizeInsertions(irts.slice());

            // The modification currently being applied, while its value chunks
            // are accumulated; undefined between elements.
            private currentModification: TagModification;
            private currentHeader: HeaderPart;
            // Tag path of the most recently emitted element, used to decide
            // where pending insertions belong.
            private latestTagPath: TagPath = emptyTagPath;
            // Accumulated value bytes for the element under modification.
            private value: Buffer = emptyBuffer;
            // Transfer syntax properties, updated from each header seen.
            private bigEndian = false;
            private explicitVR = true;

            constructor() {
                super();
                // Suppress group length warnings when the caller asked for silence.
                this.setSilent(!wrns);
            }
            /**
             * Called at end of stream. Emits header/value parts for any
             * remaining root-level insertions whose tag paths sort after the
             * last element that was emitted; returns nothing when no element
             * was ever seen.
             */
            public onEnd(): DicomPart[] {
                if (this.latestTagPath.isEmpty()) {
                    return [];
                } else {
                    return flatten(
                        this.currentInsertions
                            .filter((i) => i.tagPath.isRoot())
                            .filter((m) => this.latestTagPath.isBelow(m.tagPath))
                            .map((m) =>
                                this.headerAndValueParts(
                                    m.tagPath,
                                    // insertion(undefined): no existing value to transform
                                    padToEvenLength(m.insertion(undefined), m.tagPath.tag()),
                                ),
                            ),
                    );
                }
            }
            /**
             * Builds the header part (and value chunk, when non-empty) for an
             * element inserted at `tagPath`, using the dictionary VR for the tag
             * and the current transfer syntax.
             *
             * Throws when the tag's VR cannot be determined (UN) or when the
             * tag denotes a sequence (SQ), since sequences cannot be inserted
             * this way.
             */
            private headerAndValueParts(tagPath: TagPath, valueBytes: Buffer): DicomPart[] {
                const vr = Lookup.vrOf(tagPath.tag());
                if (vr === VR.UN) {
                    throw Error('Tag is not present in dictionary, cannot determine value representation');
                }
                if (vr === VR.SQ) {
                    throw Error('Cannot insert sequences');
                }
                const header = HeaderPart.create(tagPath.tag(), vr, valueBytes.length, this.bigEndian, this.explicitVR);
                return prependToArray(header, this.valueOrNot(valueBytes));
            }
216 | } else { 217 | return [header]; 218 | } 219 | } 220 | } 221 | })(), 222 | ); 223 | } 224 | -------------------------------------------------------------------------------- /src/parsing.ts: -------------------------------------------------------------------------------- 1 | import { 2 | bytesToTag, 3 | bytesToUInt, 4 | bytesToUShort, 5 | bytesToVR, 6 | groupNumber, 7 | indeterminateLength, 8 | isFileMetaInformation, 9 | tagToString, 10 | } from './base'; 11 | import { ByteReader } from './byte-parser'; 12 | import { Lookup } from './lookup'; 13 | import { VR } from './vr'; 14 | 15 | export const dicomPreambleLength = 132; 16 | 17 | export function isDICM(bytes: Buffer): boolean { 18 | return bytes[0] === 68 && bytes[1] === 73 && bytes[2] === 67 && bytes[3] === 77; 19 | } 20 | 21 | export class HeaderInfo { 22 | constructor( 23 | public readonly bigEndian: boolean, 24 | public readonly explicitVR: boolean, 25 | public readonly hasFmi: boolean, 26 | ) {} 27 | } 28 | 29 | export function tryReadHeader(data: Buffer): HeaderInfo { 30 | const info = headerInfo(data, false); 31 | return info === undefined ? 
headerInfo(data, true) : info; 32 | } 33 | 34 | export function headerInfo(data: Buffer, assumeBigEndian: boolean): HeaderInfo { 35 | const tag = bytesToTag(data, assumeBigEndian); 36 | const vr = Lookup.vrOf(tag); 37 | if (vr === VR.UN || (groupNumber(tag) !== 2 && groupNumber(tag) < 8)) { 38 | return undefined; 39 | } 40 | if (bytesToVR(data.slice(4, 6)) === vr.code) { 41 | return { 42 | bigEndian: assumeBigEndian, 43 | explicitVR: true, 44 | hasFmi: isFileMetaInformation(tag), 45 | }; 46 | } 47 | if (bytesToUInt(data.slice(4, 8), assumeBigEndian) >= 0) { 48 | if (assumeBigEndian) { 49 | throw Error('Implicit VR Big Endian encoded DICOM Stream'); 50 | } else { 51 | return { 52 | bigEndian: false, 53 | explicitVR: false, 54 | hasFmi: isFileMetaInformation(tag), 55 | }; 56 | } 57 | } 58 | return undefined; 59 | } 60 | 61 | export function isPreamble(data: Buffer): boolean { 62 | return data.length >= dicomPreambleLength && isDICM(data.slice(dicomPreambleLength - 4, dicomPreambleLength)); 63 | } 64 | 65 | export class TagVr { 66 | constructor(public readonly tag: number, public readonly vr: VR) {} 67 | } 68 | 69 | export function isSpecial(tag: number): boolean { 70 | return tag === 0xfffee000 || tag === 0xfffee00d || tag === 0xfffee0dd; 71 | } 72 | 73 | export function readTagVr(data: Buffer, bigEndian: boolean, explicitVr: boolean): TagVr { 74 | const tag = bytesToTag(data, bigEndian); 75 | if (isSpecial(tag)) { 76 | return new TagVr(tag, undefined); 77 | } 78 | if (explicitVr) { 79 | return new TagVr(tag, VR.valueOf(bytesToVR(data.slice(4, 6)))); 80 | } 81 | return new TagVr(tag, Lookup.vrOf(tag)); 82 | } 83 | 84 | export class AttributeInfo { 85 | constructor( 86 | public readonly tag: number, 87 | public readonly vr: VR, 88 | public readonly headerLength: number, 89 | public readonly valueLength: number, 90 | ) {} 91 | } 92 | 93 | export function readHeader(reader: ByteReader, state: any): AttributeInfo { 94 | reader.ensure(8); 95 | const tagVrBytes = 
reader.remainingData().slice(0, 8); 96 | const tagVr = readTagVr(tagVrBytes, state.bigEndian, state.explicitVR); 97 | if (tagVr.vr && state.explicitVR) { 98 | if (tagVr.vr.headerLength === 8) { 99 | return new AttributeInfo(tagVr.tag, tagVr.vr, 8, bytesToUShort(tagVrBytes.slice(6), state.bigEndian)); 100 | } 101 | reader.ensure(12); 102 | return new AttributeInfo( 103 | tagVr.tag, 104 | tagVr.vr, 105 | 12, 106 | bytesToUInt(reader.remainingData().slice(8), state.bigEndian), 107 | ); 108 | } 109 | return new AttributeInfo(tagVr.tag, tagVr.vr, 8, bytesToUInt(tagVrBytes.slice(4), state.bigEndian)); 110 | } 111 | 112 | export function warnIfOdd(tag: number, vr: VR, valueLength: number): void { 113 | if (valueLength % 2 > 0 && valueLength != indeterminateLength && vr != null && vr != VR.SQ) { 114 | console.warn(`Element ${tagToString(tag)} has odd length`); 115 | } 116 | } 117 | -------------------------------------------------------------------------------- /src/person-name.ts: -------------------------------------------------------------------------------- 1 | import { parsePersonName } from './value'; 2 | import { multiValueDelimiter, trim } from './base'; 3 | 4 | export class ComponentGroup { 5 | constructor(public alphabetic: string, public ideographic: string = '', public phonetic: string = '') {} 6 | } 7 | 8 | export class PersonName { 9 | constructor( 10 | public familyName: ComponentGroup, 11 | public givenName: ComponentGroup, 12 | public middleName: ComponentGroup = new ComponentGroup(''), 13 | public prefix: ComponentGroup = new ComponentGroup(''), 14 | public suffix: ComponentGroup = new ComponentGroup(''), 15 | ) {} 16 | 17 | static parse(s: string): PersonName[] { 18 | return s 19 | .split(multiValueDelimiter) 20 | .map(trim) 21 | .map((s1) => parsePersonName(s1)); 22 | } 23 | 24 | toString(): string { 25 | const components = [this.familyName, this.givenName, this.middleName, this.prefix, this.suffix]; 26 | const representations = ['alphabetic', 
'ideographic', 'phonetic'] as const; 27 | return representations 28 | .map((repr) => { 29 | return components 30 | .map((c) => c[repr]) 31 | .join('^') 32 | .replace(/\^+$/, ''); // Trim trailing ^ separators 33 | }) 34 | .join('=') 35 | .replace(/=+$/, ''); // Trim trailing = separators 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /src/sinks.ts: -------------------------------------------------------------------------------- 1 | import { Writable } from 'stream'; 2 | import { concat, emptyBuffer } from './base'; 3 | 4 | export function byteSink(callback: (b: Buffer) => any): Writable { 5 | let buffer = emptyBuffer; 6 | 7 | const sink = new Writable({ 8 | write(chunk, encoding, cb): void { 9 | buffer = concat(buffer, chunk); 10 | process.nextTick(() => cb()); 11 | }, 12 | }); 13 | 14 | sink.once('finish', () => { 15 | callback(buffer); 16 | }); 17 | 18 | return sink; 19 | } 20 | 21 | export function ignoreSink(objectMode = false): Writable { 22 | return new Writable({ 23 | objectMode, 24 | write(chunk, encoding, callback): void { 25 | process.nextTick(() => callback()); 26 | }, 27 | }); 28 | } 29 | 30 | export function arraySink(arrayCallback: (a: any[]) => any): Writable { 31 | const array: any[] = []; 32 | const sink = new Writable({ 33 | objectMode: true, 34 | write(chunk, encoding, callback): void { 35 | array.push(chunk); 36 | process.nextTick(() => callback()); 37 | }, 38 | }); 39 | sink.once('finish', () => arrayCallback(array)); 40 | return sink; 41 | } 42 | -------------------------------------------------------------------------------- /src/sources.ts: -------------------------------------------------------------------------------- 1 | import { Readable } from 'stream'; 2 | 3 | export function singleSource(element: any, objectMode = false): Readable { 4 | return new Readable({ 5 | objectMode, 6 | read(): void { 7 | this.push(element); 8 | this.push(null); 9 | }, 10 | }); 11 | } 12 | 13 | export 
function arraySource(array: any[], objectMode = false): Readable { 14 | let pos = 0; 15 | return new Readable({ 16 | highWaterMark: 1, 17 | objectMode, 18 | read(size: number): void { 19 | size = size || 1; 20 | const maxPos = Math.min(pos + size, array.length); 21 | let i = pos; 22 | while (i < maxPos && this.push(array[i++])) { 23 | // do nothing 24 | } 25 | if (i === array.length) { 26 | this.push(null); 27 | } 28 | pos = i; 29 | }, 30 | }); 31 | } 32 | -------------------------------------------------------------------------------- /src/tag-path-like.ts: -------------------------------------------------------------------------------- 1 | export abstract class TagPathLike> { 2 | public abstract tag(): number; 3 | 4 | public abstract previous(): T; 5 | 6 | public abstract isEmpty(): boolean; 7 | 8 | public isRoot(): boolean { 9 | return this.previous().isEmpty(); 10 | } 11 | 12 | public toList(): T[] { 13 | const toListRec = (path: any, list: T[]): T[] => { 14 | if (!path.isRoot()) { 15 | toListRec(path.previous(), list); 16 | } 17 | list.push(path); 18 | return list; 19 | }; 20 | return toListRec(this, []); 21 | } 22 | 23 | public contains(tag: number): boolean { 24 | return ( 25 | this.toList() 26 | .map((tp) => tp.tag()) 27 | .indexOf(tag) >= 0 28 | ); 29 | } 30 | 31 | public depth(): number { 32 | const depthRec = (path: any, d: number): number => { 33 | if (path.isRoot()) { 34 | return d; 35 | } else { 36 | return depthRec(path.previous(), d + 1); 37 | } 38 | }; 39 | return this.isEmpty() ? 0 : depthRec(this, 1); 40 | } 41 | 42 | public head(): T { 43 | return this.take(1); 44 | } 45 | 46 | public tail(): T { 47 | return this.drop(1); 48 | } 49 | 50 | public take(n: number): T { 51 | const takeRec = (path: any, i: number): T => { 52 | return i <= 0 ? 
    /** Creates a path pointing to the single root-level element `tag`. */
    public static fromTag(tag: number): TagPathTag {
        return emptyTagPath.thenTag(tag);
    }
    /** Creates a path pointing to the root-level sequence `tag`. */
    public static fromSequence(tag: number): TagPathSequence {
        return emptyTagPath.thenSequence(tag);
    }
    /** Creates a path marking the end of the root-level sequence `tag`. */
    public static fromSequenceEnd(tag: number): TagPathSequenceEnd {
        return emptyTagPath.thenSequenceEnd(tag);
    }
    /** Creates a path pointing to item number `item` of the root-level sequence `tag`. */
    public static fromItem(tag: number, item: number): TagPathItem {
        return emptyTagPath.thenItem(tag, item);
    }
    /** Creates a path marking the end of item number `item` of the root-level sequence `tag`. */
    public static fromItemEnd(tag: number, item: number): TagPathItemEnd {
        return emptyTagPath.thenItemEnd(tag, item);
    }
    // Backing fields for the tag/previous accessors below.
    private tagVal: number;
    private previousVal: TagPathTrunk;

    constructor(tag: number, previous: TagPathTrunk) {
        super();
        this.tagVal = tag;
        this.previousVal = previous;
    }

    /** The tag number of the last step of this path. */
    public tag(): number {
        return this.tagVal;
    }

    /** The path up to, but not including, the last step. */
    public previous(): TagPathTrunk {
        return this.previousVal;
    }

    /** True only for the empty tag path singleton (identity comparison). */
    public isEmpty(): boolean {
        return this === (emptyTagPath as TagPath);
    }
105 | return true; 106 | } 107 | if (thisPath instanceof TagPathSequence && thatPath instanceof TagPathSequenceEnd) { 108 | return true; 109 | } 110 | if (thisPath instanceof TagPathSequenceEnd && 'item' in thatPath) { 111 | return false; 112 | } 113 | if (thisPath instanceof TagPathSequenceEnd && thatPath instanceof TagPathSequence) { 114 | return false; 115 | } 116 | if ('item' in thisPath && thatPath instanceof TagPathSequence) { 117 | return false; 118 | } 119 | if ('item' in thisPath && thatPath instanceof TagPathSequenceEnd) { 120 | return true; 121 | } 122 | if ( 123 | 'item' in thisPath && 124 | 'item' in thatPath && 125 | (thisPath as unknown as ItemIndex).item !== (thatPath as unknown as ItemIndex).item 126 | ) { 127 | return (thisPath as unknown as ItemIndex).item < (thatPath as unknown as ItemIndex).item; 128 | } 129 | if (thisPath instanceof TagPathItem && thatPath instanceof TagPathItemEnd) { 130 | return true; 131 | } 132 | if (thisPath instanceof TagPathItemEnd && thatPath instanceof TagPathItem) { 133 | return false; 134 | } 135 | } 136 | return thisList.length < thatList.length; 137 | } 138 | 139 | public isEqualTo(that: TagPath): boolean { 140 | if (this.isEmpty() && that.isEmpty()) { 141 | return true; 142 | } 143 | if (this instanceof TagPathTag && that instanceof TagPathTag) { 144 | return this.tag() === that.tag() && this.previous().isEqualTo(that.previous()); 145 | } 146 | if (this instanceof TagPathSequence && that instanceof TagPathSequence) { 147 | return this.tag() === that.tag() && this.previous().isEqualTo(that.previous()); 148 | } 149 | if (this instanceof TagPathSequenceEnd && that instanceof TagPathSequenceEnd) { 150 | return this.tag() === that.tag() && this.previous().isEqualTo(that.previous()); 151 | } 152 | if (this instanceof TagPathItem && that instanceof TagPathItem) { 153 | return this.tag() === that.tag() && this.item === that.item && this.previous().isEqualTo(that.previous()); 154 | } 155 | if (this instanceof 
TagPathItemEnd && that instanceof TagPathItemEnd) { 156 | return this.tag() === that.tag() && this.item === that.item && this.previous().isEqualTo(that.previous()); 157 | } 158 | return false; 159 | } 160 | 161 | public startsWith(that: TagPath): boolean { 162 | const thisDepth = this.depth(); 163 | const thatDepth = that.depth(); 164 | if (thisDepth >= thatDepth) { 165 | const n = Math.min(thisDepth, thatDepth); 166 | return this.take(n).isEqualTo(that.take(n)); 167 | } else { 168 | return false; 169 | } 170 | } 171 | 172 | public endsWith(that: TagPath): boolean { 173 | const n = this.depth() - that.depth(); 174 | return n >= 0 ? this.drop(n).isEqualTo(that) : false; 175 | } 176 | 177 | public drop(n: number): TagPath { 178 | const dropRec = (path: TagPath, i: number): TagPath => { 179 | if (i < 0) { 180 | return emptyTagPath; 181 | } 182 | if (i === 0) { 183 | if (path.isEmpty()) { 184 | return emptyTagPath; 185 | } 186 | if (path instanceof TagPathItem) { 187 | return TagPath.fromItem(path.tag(), path.item); 188 | } 189 | if (path instanceof TagPathItemEnd) { 190 | return TagPath.fromItemEnd(path.tag(), path.item); 191 | } 192 | if (path instanceof TagPathSequence) { 193 | return TagPath.fromSequence(path.tag()); 194 | } 195 | if (path instanceof TagPathSequenceEnd) { 196 | return TagPath.fromSequenceEnd(path.tag()); 197 | } 198 | return TagPath.fromTag(path.tag()); 199 | } 200 | const p = dropRec(path.previous(), i - 1) as TagPathTrunk; 201 | if (path instanceof TagPathItem) { 202 | return p.thenItem(path.tag(), path.item); 203 | } 204 | if (path instanceof TagPathItemEnd) { 205 | return p.thenItemEnd(path.tag(), path.item); 206 | } 207 | if (path instanceof TagPathSequence) { 208 | return p.thenSequence(path.tag()); 209 | } 210 | if (path instanceof TagPathSequenceEnd) { 211 | return p.thenSequenceEnd(path.tag()); 212 | } 213 | if (path instanceof TagPathTag) { 214 | return p.thenTag(path.tag()); 215 | } 216 | return emptyTagPath; 217 | }; 218 | return 
    /**
     * Renders this path as a dot-separated string, e.g.
     * `(0008,9215)[1].(0008,0100)`. When `lookup` is true, tags that have a
     * dictionary keyword are rendered by keyword instead of in (gggg,eeee)
     * form. Item steps carry their one-based index in square brackets.
     */
    public toNamedString(lookup: boolean): string {
        // Render a single tag: keyword when requested and known, else (gggg,eeee).
        const toTagString = (tag: number): string => {
            if (lookup) {
                const keyword = Lookup.keywordOf(tag);
                if (keyword) {
                    return keyword;
                }
            }
            return tagToString(tag);
        };
        // Walk from leaf to root, prepending each step onto the accumulated tail.
        const toTagPathString = (path: TagPath, tail: string): string => {
            const itemIndexSuffix = 'item' in path ? '[' + (path as unknown as ItemIndex).item + ']' : '';
            const head = toTagString(path.tag()) + itemIndexSuffix;
            const part = head + tail;
            return path.isRoot() ? part : toTagPathString(path.previous(), '.' + part);
        };
        return this.isEmpty() ? '' : toTagPathString(this, '');
    }
    /**
     * Converts a concrete TagPath into the corresponding TagTree. Tag steps map
     * to tree tags, item and item-end steps map to indexed items, and sequence
     * and sequence-end steps map to "any item" wildcards.
     */
    public static fromPath(tagPath: TagPath): TagTree {
        // Translate the first (root) step of the path into the tree root.
        let root: TagTree;
        const p = tagPath.head();
        if (p instanceof TagPathTag) {
            root = TagTree.fromTag(p.tag());
        } else if (p instanceof TagPathItem) {
            root = TagTree.fromItem(p.tag(), p.item);
        } else if (p instanceof TagPathItemEnd) {
            root = TagTree.fromItem(p.tag(), p.item);
        } else if (p instanceof TagPathSequence) {
            root = TagTree.fromAnyItem(p.tag());
        } else if (p instanceof TagPathSequenceEnd) {
            root = TagTree.fromAnyItem(p.tag());
        } else {
            root = emptyTagTree;
        }
        // Fold the remaining steps onto the root, one tree node per path step.
        return tagPath
            .drop(1)
            .toList()
            .reduce((t, p1) => {
                if (t instanceof TagTreeTrunk && p1 instanceof TagPathTag) {
                    return t.thenTag(p1.tag());
                }
                if (t instanceof TagTreeTrunk && p1 instanceof TagPathItem) {
                    return t.thenItem(p1.tag(), p1.item);
                }
                if (t instanceof TagTreeTrunk && p1 instanceof TagPathItemEnd) {
                    return t.thenItem(p1.tag(), p1.item);
                }
                if (t instanceof TagTreeTrunk && p1 instanceof TagPathSequence) {
                    return t.thenAnyItem(p1.tag());
                }
                if (t instanceof TagTreeTrunk && p1 instanceof TagPathSequenceEnd) {
                    return t.thenAnyItem(p1.tag());
                }
                // A non-trunk tree cannot be extended; keep it unchanged.
                return t;
            }, root);
    }
parseTag(tagPart(s)); 89 | const index = parseIndex(indexPart(s)); 90 | return index === undefined ? TagTree.fromAnyItem(tag) : TagTree.fromItem(tag, index); 91 | }; 92 | const addSeq = (s: string, path: TagTreeTrunk): TagTreeTrunk => { 93 | const tag = parseTag(tagPart(s)); 94 | const index = parseIndex(indexPart(s)); 95 | return index === undefined ? path.thenAnyItem(tag) : path.thenItem(tag, index); 96 | }; 97 | 98 | const tags = str.indexOf('.') > 0 ? str.split('.') : [str]; 99 | const seqTags = tags.length > 1 ? tags.slice(0, tags.length - 1) : []; // list of sequence tags, if any 100 | const lastTag = tags[tags.length - 1]; // tag or sequence 101 | try { 102 | const first = seqTags.length > 0 ? seqTags[0] : undefined; 103 | if (first) { 104 | const tree = seqTags 105 | .slice(1, seqTags.length) 106 | .reduce((tr: TagTreeTrunk, tag: string) => addSeq(tag, tr), createSeq(first)); 107 | if (tree !== undefined) { 108 | return isSeq(lastTag) ? addSeq(lastTag, tree) : addTag(lastTag, tree); 109 | } 110 | return isSeq(lastTag) ? 
createSeq(lastTag) : createTag(lastTag); 111 | } 112 | return createTag(lastTag); 113 | } catch (error) { 114 | throw Error('Tag tree could not be parsed: ' + error.message); 115 | } 116 | } 117 | 118 | private tagVal: number; 119 | private previousVal: TagTreeTrunk; 120 | constructor(tag: number, previous: TagTreeTrunk) { 121 | super(); 122 | this.tagVal = tag; 123 | this.previousVal = previous; 124 | } 125 | 126 | public tag(): number { 127 | return this.tagVal; 128 | } 129 | 130 | public previous(): TagTreeTrunk { 131 | return this.previousVal; 132 | } 133 | 134 | public isEmpty(): boolean { 135 | return this === (emptyTagTree as TagTree); 136 | } 137 | 138 | public isEqualTo(that: TagTree): boolean { 139 | if (this.isEmpty() && that.isEmpty()) { 140 | return true; 141 | } 142 | if (this instanceof TagTreeTag && that instanceof TagTreeTag) { 143 | return this.tag() === that.tag() && this.previous().isEqualTo(that.previous()); 144 | } 145 | if (this instanceof TagTreeItem && that instanceof TagTreeItem) { 146 | return this.tag() === that.tag() && this.item === that.item && this.previous().isEqualTo(that.previous()); 147 | } 148 | if (this instanceof TagTreeAnyItem && that instanceof TagTreeAnyItem) { 149 | return this.tag() === that.tag() && this.previous().isEqualTo(that.previous()); 150 | } 151 | return false; 152 | } 153 | 154 | public isPath(): boolean { 155 | if (this.isEmpty()) { 156 | return true; 157 | } 158 | if (this instanceof TagTreeAnyItem) { 159 | return false; 160 | } 161 | return this.previous().isPath(); 162 | } 163 | 164 | public hasPath(tagPath: TagPath): boolean { 165 | if (this.isEmpty() && tagPath.isEmpty()) { 166 | return true; 167 | } 168 | if (this instanceof TagTreeTag && tagPath instanceof TagPathTag) { 169 | return this.tag() === tagPath.tag() && this.previous().hasPath(tagPath.previous()); 170 | } 171 | if (this instanceof TagTreeItem && 'item' in tagPath) { 172 | return ( 173 | this.item === (tagPath as ItemPath).item && 174 | 
this.tag() === (tagPath as ItemPath).tag() && 175 | this.previous().hasPath((tagPath as ItemPath).previous()) 176 | ); 177 | } 178 | if (this instanceof TagTreeAnyItem && 'item' in tagPath) { 179 | return ( 180 | this.tag() === (tagPath as ItemPath).tag() && this.previous().hasPath((tagPath as ItemPath).previous()) 181 | ); 182 | } 183 | if (this instanceof TagTreeAnyItem && tagPath instanceof TagPathSequence) { 184 | return this.tag() === tagPath.tag() && this.previous().hasPath(tagPath.previous()); 185 | } 186 | if (this instanceof TagTreeAnyItem && tagPath instanceof TagPathSequenceEnd) { 187 | return this.tag() === tagPath.tag() && this.previous().hasPath(tagPath.previous()); 188 | } 189 | return false; 190 | } 191 | 192 | public hasTrunk(tagPath: TagPath): boolean { 193 | if (this.depth() >= tagPath.depth()) { 194 | const thisList = this.toList(); 195 | const thatList = tagPath.toList(); 196 | 197 | for (let i = 0; i < Math.min(thisList.length, thatList.length); i++) { 198 | const t = thisList[i]; 199 | const p = thatList[i]; 200 | 201 | let check = false; 202 | if (p.isEmpty()) { 203 | check = true; 204 | } else if (t instanceof TagTreeItem && 'item' in p) { 205 | check = t.tag() === (p as ItemPath).tag() && t.item === (p as ItemPath).item; 206 | } else if (t instanceof TagTreeItem && p instanceof TagPathSequence) { 207 | check = t.tag() === p.tag(); 208 | } else if (t instanceof TagTreeItem && p instanceof TagPathSequenceEnd) { 209 | check = t.tag() === p.tag(); 210 | } else if (t instanceof TagTreeAnyItem && (p as ItemPath).item !== undefined) { 211 | check = t.tag() === p.tag(); 212 | } else if (t instanceof TagTreeAnyItem && p instanceof TagPathSequence) { 213 | check = t.tag() === p.tag(); 214 | } else if (t instanceof TagTreeAnyItem && p instanceof TagPathSequenceEnd) { 215 | check = t.tag() === p.tag(); 216 | } else if (t instanceof TagTreeTag && p instanceof TagPathTag) { 217 | check = t.tag() === p.tag(); 218 | } 219 | if (!check) { 220 | return 
false; 221 | } 222 | } 223 | return true; 224 | } else { 225 | return false; 226 | } 227 | } 228 | 229 | public isTrunkOf(tagPath: TagPath): boolean { 230 | if (this.depth() <= tagPath.depth()) { 231 | const thisList = this.toList(); 232 | const thatList = tagPath.toList(); 233 | 234 | for (let i = 0; i < Math.min(thisList.length, thatList.length); i++) { 235 | const t = thisList[i]; 236 | const p = thatList[i]; 237 | 238 | let check = false; 239 | if (p.isEmpty()) { 240 | check = true; 241 | } else if (t instanceof TagTreeItem && 'item' in p) { 242 | check = t.tag() === (p as ItemPath).tag() && t.item === (p as ItemPath).item; 243 | } else if (t instanceof TagTreeAnyItem && 'item' in p) { 244 | check = t.tag() === (p as ItemPath).tag(); 245 | } else if (t instanceof TagTreeAnyItem && p instanceof TagPathSequence) { 246 | check = t.tag() === p.tag(); 247 | } else if (t instanceof TagTreeAnyItem && p instanceof TagPathSequenceEnd) { 248 | check = t.tag() === p.tag(); 249 | } else if (t instanceof TagTreeTag && p instanceof TagPathTag) { 250 | check = t.tag() === p.tag(); 251 | } 252 | if (!check) { 253 | return false; 254 | } 255 | } 256 | return true; 257 | } else { 258 | return false; 259 | } 260 | } 261 | 262 | public hasTwig(tagPath: TagPath): boolean { 263 | let check = false; 264 | if (this.isEmpty() && tagPath.isEmpty()) { 265 | check = true; 266 | } else if (this instanceof TagTreeAnyItem && 'item' in tagPath) { 267 | check = this.tag() === (tagPath as ItemPath).tag(); 268 | } else if (this instanceof TagTreeAnyItem && tagPath instanceof TagPathSequence) { 269 | check = this.tag() === tagPath.tag(); 270 | } else if (this instanceof TagTreeAnyItem && tagPath instanceof TagPathSequenceEnd) { 271 | check = this.tag() === tagPath.tag(); 272 | } else if (this instanceof TagTreeItem && 'item' in tagPath) { 273 | check = this.tag() === (tagPath as ItemPath).tag() && this.item === (tagPath as ItemPath).item; 274 | } else if (this instanceof TagTreeTag && tagPath 
instanceof TagPathTag) { 275 | check = this.tag() === tagPath.tag(); 276 | } 277 | 278 | if (tagPath.previous().isEmpty()) { 279 | return check; 280 | } else if (this.previous().isEmpty()) { 281 | return false; 282 | } 283 | return check && this.previous().hasTwig(tagPath.previous()); 284 | } 285 | 286 | public drop(n: number): TagTree { 287 | const dropRec = (tree: TagTree, i: number): TagTree => { 288 | if (i < 0) { 289 | return emptyTagTree; 290 | } 291 | if (i === 0) { 292 | if (tree.isEmpty()) { 293 | return emptyTagTree; 294 | } 295 | if (tree instanceof TagTreeItem) { 296 | return TagTree.fromItem(tree.tag(), tree.item); 297 | } 298 | if (tree instanceof TagTreeAnyItem) { 299 | return TagTree.fromAnyItem(tree.tag()); 300 | } 301 | return TagTree.fromTag(tree.tag()); 302 | } 303 | const t = dropRec(tree.previous(), i - 1); 304 | if (tree instanceof TagTreeItem) { 305 | return (t as TagTreeTrunk).thenItem(tree.tag(), tree.item); 306 | } 307 | if (tree instanceof TagTreeAnyItem) { 308 | return (t as TagTreeTrunk).thenAnyItem(tree.tag()); 309 | } 310 | if (tree instanceof TagTreeTag) { 311 | return (t as TagTreeTrunk).thenTag(tree.tag()); 312 | } 313 | return emptyTagTree; 314 | }; 315 | return dropRec(this, this.depth() - n - 1); 316 | } 317 | 318 | public toNamedString(lookup: boolean): string { 319 | const toTagString = (tag: number): string => { 320 | if (lookup) { 321 | const keyword = Lookup.keywordOf(tag); 322 | if (keyword) { 323 | return keyword; 324 | } 325 | } 326 | return tagToString(tag); 327 | }; 328 | const toTagTreeString = (tree: TagTree, tail: string): string => { 329 | const itemIndexSuffix = 330 | tree instanceof TagTreeAnyItem ? '[*]' : 'item' in tree ? '[' + (tree as unknown as ItemPath).item + ']' : ''; 331 | const head = toTagString(tree.tag()) + itemIndexSuffix; 332 | const part = head + tail; 333 | return tree.isRoot() ? part : toTagTreeString(tree.previous(), '.' + part); 334 | }; 335 | return this.isEmpty() ? 
'' : toTagTreeString(this, ''); 336 | } 337 | } 338 | 339 | export class TagTreeTrunk extends TagTree { 340 | public thenTag(tag: number): TagTreeTag { 341 | return new TagTreeTag(tag, this); 342 | } 343 | public thenAnyItem(tag: number): TagTreeAnyItem { 344 | return new TagTreeAnyItem(tag, this); 345 | } 346 | public thenItem(tag: number, item: number): TagTreeItem { 347 | return new TagTreeItem(tag, item, this); 348 | } 349 | } 350 | 351 | class EmptyTagTree extends TagTreeTrunk { 352 | public tag(): number { 353 | throw Error('Empty tag tree'); 354 | } 355 | public previous(): TagTreeTrunk { 356 | return emptyTagTree; 357 | } 358 | } 359 | export const emptyTagTree = new EmptyTagTree(-1, null); 360 | 361 | export class TagTreeTag extends TagTree { 362 | constructor(tag: number, previous: TagTreeTrunk) { 363 | super(tag, previous); 364 | } 365 | } 366 | 367 | export class TagTreeAnyItem extends TagTreeTrunk { 368 | constructor(tag: number, previous: TagTreeTrunk) { 369 | super(tag, previous); 370 | } 371 | } 372 | 373 | export class TagTreeItem extends TagTreeTrunk { 374 | constructor(tag: number, public readonly item: number, previous: TagTreeTrunk) { 375 | super(tag, previous); 376 | } 377 | } 378 | -------------------------------------------------------------------------------- /src/vr.ts: -------------------------------------------------------------------------------- 1 | export class VR { 2 | public static AE = new VR('AE', 0x4145, 8, 0x20); 3 | public static AS = new VR('AS', 0x4153, 8, 0x20); 4 | public static AT = new VR('AT', 0x4154, 8, 0); 5 | public static CS = new VR('CS', 0x4353, 8, 0x20); 6 | public static DA = new VR('DA', 0x4441, 8, 0x20); 7 | public static DS = new VR('DS', 0x4453, 8, 0x20); 8 | public static DT = new VR('DT', 0x4454, 8, 0x20); 9 | public static FD = new VR('FD', 0x4644, 8, 0); 10 | public static FL = new VR('FL', 0x464c, 8, 0); 11 | public static IS = new VR('IS', 0x4953, 8, 0x20); 12 | public static LO = new VR('LO', 0x4c4f, 
8, 0x20); 13 | public static LT = new VR('LT', 0x4c54, 8, 0x20); 14 | public static OB = new VR('OB', 0x4f42, 12, 0); 15 | public static OD = new VR('OD', 0x4f44, 12, 0); 16 | public static OF = new VR('OF', 0x4f46, 12, 0); 17 | public static OL = new VR('OL', 0x4f4c, 12, 0); 18 | public static OV = new VR('OV', 0x4f56, 12, 0); 19 | public static OW = new VR('OW', 0x4f57, 12, 0); 20 | public static PN = new VR('PN', 0x504e, 8, 0x20); 21 | public static SH = new VR('SH', 0x5348, 8, 0x20); 22 | public static SL = new VR('SL', 0x534c, 8, 0); 23 | public static SQ = new VR('SQ', 0x5351, 12, 0); 24 | public static SS = new VR('SS', 0x5353, 8, 0); 25 | public static ST = new VR('ST', 0x5354, 8, 0x20); 26 | public static SV = new VR('SV', 0x5356, 12, 0); 27 | public static TM = new VR('TM', 0x544d, 8, 0x20); 28 | public static UC = new VR('UC', 0x5543, 12, 0x20); 29 | public static UI = new VR('UI', 0x5549, 8, 0); 30 | public static UL = new VR('UL', 0x554c, 8, 0); 31 | public static UN = new VR('UN', 0x554e, 12, 0); 32 | public static UR = new VR('UR', 0x5552, 12, 0x20); 33 | public static US = new VR('US', 0x5553, 8, 0); 34 | public static UT = new VR('UT', 0x5554, 12, 0x20); 35 | public static UV = new VR('UV', 0x5556, 12, 0); 36 | 37 | public static values = [ 38 | VR.AE, 39 | VR.AS, 40 | VR.AT, 41 | VR.CS, 42 | VR.DA, 43 | VR.DS, 44 | VR.DT, 45 | VR.FD, 46 | VR.FL, 47 | VR.IS, 48 | VR.LO, 49 | VR.LT, 50 | VR.OB, 51 | VR.OD, 52 | VR.OF, 53 | VR.OL, 54 | VR.OV, 55 | VR.OW, 56 | VR.PN, 57 | VR.SH, 58 | VR.SL, 59 | VR.SQ, 60 | VR.SS, 61 | VR.ST, 62 | VR.SV, 63 | VR.TM, 64 | VR.UC, 65 | VR.UI, 66 | VR.UL, 67 | VR.UN, 68 | VR.UR, 69 | VR.US, 70 | VR.UT, 71 | VR.UV, 72 | ]; 73 | 74 | public static valueOf(code: number): VR { 75 | return VR.map.get(code); 76 | } 77 | 78 | private static map: Map = VR.values.reduce((m: Map, vr: VR) => { 79 | m.set(vr.code, vr); 80 | return m; 81 | }, new Map()); 82 | 83 | constructor( 84 | public readonly name: string, 85 | public readonly 
code: number, 86 | public readonly headerLength: number, 87 | public readonly paddingByte: number, 88 | ) {} 89 | } 90 | -------------------------------------------------------------------------------- /test/base-test.ts: -------------------------------------------------------------------------------- 1 | import assert from 'assert'; 2 | import { createNameBasedUID, createNameBasedUIDFromRoot, createUID, createUIDFromRoot } from '../src/base'; 3 | 4 | describe('Creating a UID', () => { 5 | it('should create a random UID', () => { 6 | const uid = createUID(); 7 | assert.strictEqual(uid.substring(0, 4), '2.25'); 8 | assert(/([0-9]+\.)+[0-9]+/.test(uid)); 9 | assert.notStrictEqual(createUID(), uid); 10 | }); 11 | 12 | it('should create a random UID with specified root', () => { 13 | const uid = createUIDFromRoot('6.66.666'); 14 | assert.strictEqual(uid.substring(0, 8), '6.66.666'); 15 | }); 16 | 17 | it('should create a name based UID', () => { 18 | const uid1 = createNameBasedUID('name'); 19 | const uid2 = createNameBasedUID('name'); 20 | assert.strictEqual(uid1.substring(0, 4), '2.25'); 21 | assert.strictEqual(uid1, uid2); 22 | }); 23 | 24 | it('should create a name based UID with specified root', () => { 25 | const uid1 = createNameBasedUIDFromRoot('name', '6.66.666'); 26 | const uid2 = createNameBasedUIDFromRoot('name', '6.66.666'); 27 | assert.strictEqual(uid1.substring(0, 8), '6.66.666'); 28 | assert.strictEqual(uid1, uid2); 29 | }); 30 | }); 31 | -------------------------------------------------------------------------------- /test/character-sets-test.ts: -------------------------------------------------------------------------------- 1 | import assert from 'assert'; 2 | import { CharacterSets } from '../src/character-sets'; 3 | import { VR } from '../src/vr'; 4 | 5 | describe('Parsing a DICOM file', () => { 6 | it('should parse an Arab name correctly', () => { 7 | const expectedName = 'قباني^لنزار'; 8 | const nameBytes = Buffer.from([0xe2, 0xc8, 0xc7, 0xe6, 
0xea, 0x5e, 0xe4, 0xe6, 0xd2, 0xc7, 0xd1]);
        const cs = CharacterSets.fromNames('ISO_IR 127');
        const name = cs.decode(nameBytes, VR.PN);
        assert.strictEqual(name, expectedName);
    });

    it('should parse a French name correctly', () => {
        const expectedName = 'Buc^Jérôme';
        const nameBytes = Buffer.from([0x42, 0x75, 0x63, 0x5e, 0x4a, 0xe9, 0x72, 0xf4, 0x6d, 0x65]);
        const cs = CharacterSets.fromNames('ISO_IR 100');
        const name = cs.decode(nameBytes, VR.PN);
        assert.strictEqual(name, expectedName);
    });

    it('should parse a German name correctly', () => {
        const expectedName = 'Äneas^Rüdiger';
        const nameBytes = Buffer.from([0xc4, 0x6e, 0x65, 0x61, 0x73, 0x5e, 0x52, 0xfc, 0x64, 0x69, 0x67, 0x65, 0x72]);
        const cs = CharacterSets.fromNames('ISO_IR 100');
        const name = cs.decode(nameBytes, VR.PN);
        assert.strictEqual(name, expectedName);
    });

    it('should parse a Greek name correctly', () => {
        const expectedName = 'Διονυσιος';
        const nameBytes = Buffer.from([0xc4, 0xe9, 0xef, 0xed, 0xf5, 0xf3, 0xe9, 0xef, 0xf2]);
        const cs = CharacterSets.fromNames('ISO_IR 126');
        const name = cs.decode(nameBytes, VR.PN);
        assert.strictEqual(name, expectedName);
    });

    it('should parse a Japanese name correctly (1)', () => {
        const expectedName = 'Yamada^Tarou=山田^太郎=やまだ^たろう';
        // Bytes include ISO 2022 escape sequences (0x1b ...) switching between
        // ASCII and the JIS X 0208 (IR 87) code set.
        const nameBytes = Buffer.from([
            0x59, 0x61, 0x6d, 0x61, 0x64, 0x61, 0x5e, 0x54, 0x61, 0x72, 0x6f, 0x75, 0x3d,
            0x1b, 0x24, 0x42, 0x3b, 0x33, 0x45, 0x44, 0x1b, 0x28, 0x42, 0x5e,
            0x1b, 0x24, 0x42, 0x42, 0x40, 0x4f, 0x3a, 0x1b, 0x28, 0x42, 0x3d,
            0x1b, 0x24, 0x42, 0x24, 0x64, 0x24, 0x5e, 0x24, 0x40, 0x1b, 0x28, 0x42, 0x5e,
            0x1b, 0x24, 0x42,
            0x24, 0x3f, 0x24, 0x6d, 0x24, 0x26, 0x1b, 0x28, 0x42,
        ]);
        const cs = CharacterSets.fromNames('\\ISO 2022 IR 87');
        const name = cs.decode(nameBytes, VR.PN);
        assert.strictEqual(name, expectedName);
    });

    it('should parse a Japanese name correctly (2)', () => {
        const expectedName = 'ヤマダ^タロウ=山田^太郎=やまだ^たろう';
        // First component is half-width katakana (IR 13); escapes switch to IR 87.
        const nameBytes = Buffer.from([
            0xd4, 0xcf, 0xc0, 0xde, 0x5e, 0xc0, 0xdb, 0xb3, 0x3d,
            0x1b, 0x24, 0x42, 0x3b, 0x33, 0x45, 0x44, 0x1b, 0x28, 0x4a, 0x5e,
            0x1b, 0x24, 0x42, 0x42, 0x40, 0x4f, 0x3a, 0x1b, 0x28, 0x4a, 0x3d,
            0x1b, 0x24, 0x42, 0x24, 0x64, 0x24, 0x5e, 0x24, 0x40, 0x1b, 0x28, 0x4a, 0x5e,
            0x1b, 0x24, 0x42, 0x24, 0x3f, 0x24, 0x6d, 0x24, 0x26, 0x1b, 0x28, 0x4a,
        ]);
        const cs = CharacterSets.fromNames('ISO 2022 IR 13\\ISO 2022 IR 87');
        const name = cs.decode(nameBytes, VR.PN);
        assert.strictEqual(name, expectedName);
    });

    it('should parse a Japanese name correctly (3)', () => {
        const expectedName = 'ヤマダ^タロウ';
        const nameBytes = Buffer.from([0xd4, 0xcf, 0xc0, 0xde, 0x5e, 0xc0, 0xdb, 0xb3]);
        const cs = CharacterSets.fromNames('ISO_IR 13');
        const name = cs.decode(nameBytes, VR.PN);
        assert.strictEqual(name, expectedName);
    });

    it('should parse a Hebrew name correctly', () => {
        const expectedName = 'שרון^דבורה';
        const nameBytes = Buffer.from([0xf9, 0xf8, 0xe5, 0xef, 0x5e, 0xe3, 0xe1, 0xe5, 0xf8, 0xe4]);
        const cs = CharacterSets.fromNames('ISO_IR 138');
        const name = cs.decode(nameBytes, VR.PN);
        assert.strictEqual(name, expectedName);
    });

    it('should parse a Korean name correctly', () => {
        const expectedName = 'Hong^Gildong=洪^吉洞=홍^길동';
        // Escape sequences (0x1b 0x24 0x29 0x43) select the KS X 1001 (IR 149) code set.
        const nameBytes = Buffer.from([
            0x48, 0x6f, 0x6e, 0x67, 0x5e, 0x47, 0x69, 0x6c, 0x64, 0x6f, 0x6e, 0x67, 0x3d,
            0x1b, 0x24, 0x29, 0x43, 0xfb, 0xf3, 0x5e,
            0x1b, 0x24, 0x29, 0x43, 0xd1, 0xce, 0xd4, 0xd7, 0x3d,
            0x1b, 0x24, 0x29, 0x43, 0xc8, 0xab, 0x5e,
            0x1b, 0x24, 0x29, 0x43, 0xb1, 0xe6, 0xb5, 0xbf,
        ]);
        const cs = CharacterSets.fromNames('\\ISO 2022 IR 149');
        const name = cs.decode(nameBytes, VR.PN);
        assert.strictEqual(name, expectedName);
    });

    it('should parse a Russian name correctly', () => {
        const expectedName = 'Люкceмбypг';
        const nameBytes = Buffer.from([0xbb, 0xee, 0xda, 0x63, 0x65, 0xdc, 0xd1, 0x79, 0x70, 0xd3]);
        const cs = CharacterSets.fromNames('ISO_IR 144');
        const name = cs.decode(nameBytes, VR.PN);
        assert.strictEqual(name, expectedName);
    });

    it('should parse a Chinese name correctly (1)', () => {
        const expectedName = 'Wang^XiaoDong=王^小東=';
        const nameBytes = Buffer.from([
            0x57, 0x61, 0x6e, 0x67, 0x5e, 0x58, 0x69, 0x61, 0x6f, 0x44, 0x6f, 0x6e, 0x67, 0x3d,
            0xe7, 0x8e, 0x8b, 0x5e, 0xe5, 0xb0, 0x8f, 0xe6, 0x9d, 0xb1, 0x3d,
        ]);
        const cs = CharacterSets.fromNames('ISO_IR 192');
        const name = cs.decode(nameBytes, VR.PN);
        assert.strictEqual(name, expectedName);
    });

    it('should parse a Chinese name correctly (2)', () => {
        const expectedName = 'Wang^XiaoDong=王^小东=';
        const nameBytes = Buffer.from([
            0x57, 0x61, 0x6e, 0x67, 0x5e, 0x58, 0x69, 0x61, 0x6f, 0x44, 0x6f, 0x6e, 0x67, 0x3d,
            0xcd, 0xf5, 0x5e, 0xd0, 0xa1, 0xb6, 0xab, 0x3d,
        ]);
        const cs = CharacterSets.fromNames('GB18030');
        const name = cs.decode(nameBytes, VR.PN);
        assert.strictEqual(name, expectedName);
    });
});
--------------------------------------------------------------------------------
/test/chunker.ts:
--------------------------------------------------------------------------------
import { Transform } from 'stream';
import { concat, emptyBuffer } from '../src/base';

// Test helper: a Transform stream that re-emits incoming bytes in fixed-size
// chunks, buffering any remainder; the tail (possibly shorter) is flushed at
// end of stream.
export class Chunker extends Transform {
    private buffer: Buffer = emptyBuffer;

    constructor(public readonly size: number) {
        super();
    }

    public _transform(chunk: any, encoding: string, callback: (error?: Error, data?: any) => void): void {
        this.buffer = concat(this.buffer, chunk);

        while (this.buffer.length >= this.size) {
            const newChunk = this.buffer.slice(0, this.size);
            this.buffer = this.buffer.slice(this.size);
            this.push(newChunk);
        }
        process.nextTick(() => callback());
    }

    public _flush(callback: (error?: Error, data?: any) => void): void {
        if (this.buffer.length) {
            this.push(this.buffer);
        }
        process.nextTick(() => callback());
    }
}
--------------------------------------------------------------------------------
/test/collect-flow-test.ts:
--------------------------------------------------------------------------------
import assert from 'assert';
import { ElementsPart, TagTree } from '../src';
import { concat, concatv, emptyBuffer, pipe, item, itemDelimitation, sequenceDelimitation } from '../src/base';
import { collectFlow, collectFromTagPathsFlow } from '../src/collect-flow';
import { parseFlow } from '../src/parse-flow'; 6 | import { Tag } from '../src/tag'; 7 | import * as data from './test-data'; 8 | import * as util from './test-util'; 9 | 10 | describe('A collect elements flow', () => { 11 | it('should first produce an elements part followed by the input dicom parts', () => { 12 | const bytes = concat(data.studyDate(), data.patientNameJohnDoe()); 13 | const tags = [Tag.StudyDate, Tag.PatientName].map(TagTree.fromTag); 14 | return util.testParts(bytes, pipe(parseFlow(), collectFromTagPathsFlow(tags, 'tag')), (parts) => { 15 | const e = parts.shift() as ElementsPart; 16 | assert.strictEqual(e.label, 'tag'); 17 | assert.strictEqual(e.elements.size, 2); 18 | assert(e.elements.elementByTag(Tag.StudyDate) !== undefined); 19 | assert(e.elements.elementByTag(Tag.PatientName) !== undefined); 20 | 21 | util.partProbe(parts) 22 | .expectHeader(Tag.StudyDate) 23 | .expectValueChunk() 24 | .expectHeader(Tag.PatientName) 25 | .expectValueChunk() 26 | .expectDicomComplete(); 27 | }); 28 | }); 29 | 30 | it('should produce an empty elements part when stream is empty', () => { 31 | const bytes = emptyBuffer; 32 | 33 | return util.testParts(bytes, pipe(parseFlow(), collectFromTagPathsFlow([], 'tag')), (parts) => { 34 | const e = parts.shift() as ElementsPart; 35 | assert(e.elements.isEmpty()); 36 | 37 | util.partProbe(parts).expectDicomComplete(); 38 | }); 39 | }); 40 | 41 | it('should produce an empty elements part when no relevant data elements are present', () => { 42 | const bytes = concat(data.patientNameJohnDoe(), data.studyDate()); 43 | 44 | return util.testParts( 45 | bytes, 46 | pipe( 47 | parseFlow(), 48 | collectFromTagPathsFlow([Tag.Modality, Tag.SeriesInstanceUID].map(TagTree.fromTag), 'tag'), 49 | ), 50 | (parts) => { 51 | const e = parts.shift() as ElementsPart; 52 | assert(e.elements.isEmpty()); 53 | 54 | util.partProbe(parts) 55 | .expectHeader(Tag.PatientName) 56 | .expectValueChunk() 57 | .expectHeader(Tag.StudyDate) 58 | 
.expectValueChunk() 59 | .expectDicomComplete(); 60 | }, 61 | ); 62 | }); 63 | 64 | it('should apply the stop tag appropriately', () => { 65 | const bytes = concatv(data.studyDate(), data.patientNameJohnDoe(), data.pixelData(2000)); 66 | 67 | return util.testParts( 68 | bytes, 69 | pipe(parseFlow(500), collectFromTagPathsFlow([Tag.StudyDate, Tag.PatientName].map(TagTree.fromTag), 'tag')), 70 | (parts) => { 71 | const e = parts.shift() as ElementsPart; 72 | assert.strictEqual(e.label, 'tag'); 73 | assert.strictEqual(e.elements.size, 2); 74 | assert(e.elements.elementByTag(Tag.StudyDate) !== undefined); 75 | assert(e.elements.elementByTag(Tag.PatientName) !== undefined); 76 | 77 | util.partProbe(parts) 78 | .expectHeader(Tag.StudyDate) 79 | .expectValueChunk() 80 | .expectHeader(Tag.PatientName) 81 | .expectValueChunk() 82 | .expectHeader(Tag.PixelData) 83 | .expectValueChunk() 84 | .expectValueChunk() 85 | .expectValueChunk() 86 | .expectValueChunk() 87 | .expectDicomComplete(); 88 | }, 89 | ); 90 | }); 91 | 92 | it('should fail if max buffer size is exceeded', () => { 93 | const bytes = concatv(data.studyDate(), data.patientNameJohnDoe(), data.pixelData(2000)); 94 | 95 | return util.expectDicomError(() => 96 | util.testParts( 97 | bytes, 98 | pipe( 99 | parseFlow(500), 100 | collectFlow( 101 | (tagPath) => tagPath.tag() === Tag.PatientName, 102 | (tagPath) => tagPath.tag() > Tag.PixelData, 103 | 'tag', 104 | 1000, 105 | ), 106 | ), 107 | () => { 108 | // do nothing 109 | }, 110 | ), 111 | ); 112 | }); 113 | 114 | it('should collect attributes in sequences', () => { 115 | const bytes = concatv( 116 | data.studyDate(), 117 | data.sequence(Tag.DerivationCodeSequence, 8 + 16 + 12 + 8 + 16 + 8 + 8 + 16), 118 | item(16 + 12 + 8 + 16 + 8 + 8 + 16), 119 | data.studyDate(), 120 | data.sequence(Tag.DerivationCodeSequence), 121 | item(), 122 | data.studyDate(), 123 | itemDelimitation(), 124 | sequenceDelimitation(), 125 | data.patientNameJohnDoe(), 126 | data.patientID(), 127 
| ); 128 | 129 | util.testParts( 130 | bytes, 131 | pipe( 132 | parseFlow(500), 133 | collectFromTagPathsFlow( 134 | [TagTree.fromTag(Tag.PatientID), TagTree.fromItem(Tag.DerivationCodeSequence, 1)], 135 | 'tag', 136 | ), 137 | ), 138 | (parts) => { 139 | const e = parts.shift() as ElementsPart; 140 | assert.strictEqual(e.label, 'tag'); 141 | assert.strictEqual(e.elements.size, 2); 142 | assert(e.elements.elementByTag(Tag.PatientID) !== undefined); 143 | assert(e.elements.elementByTag(Tag.DerivationCodeSequence) !== undefined); 144 | assert.strictEqual(e.elements.sequenceByTag(Tag.DerivationCodeSequence).item(1).elements.size, 3); 145 | }, 146 | ); 147 | }); 148 | 149 | it('should collect fragments', () => { 150 | const bytes = concatv( 151 | data.studyDate(), 152 | data.pixeDataFragments(), 153 | item(4), 154 | Buffer.from([1, 2, 3, 4]), 155 | item(4), 156 | Buffer.from([5, 6, 7, 8]), 157 | sequenceDelimitation(), 158 | ); 159 | 160 | util.testParts( 161 | bytes, 162 | pipe(parseFlow(500), collectFromTagPathsFlow([TagTree.fromTag(Tag.PixelData)], 'tag')), 163 | (parts) => { 164 | const e = parts.shift() as ElementsPart; 165 | assert.strictEqual(e.label, 'tag'); 166 | assert.strictEqual(e.elements.size, 1); 167 | const f = e.elements.fragmentsByTag(Tag.PixelData); 168 | assert(f !== undefined); 169 | assert.strictEqual(f.offsets.length, 1); 170 | assert.strictEqual(f.fragments.length, 1); 171 | }, 172 | ); 173 | }); 174 | }); 175 | -------------------------------------------------------------------------------- /test/element-flows-test.ts: -------------------------------------------------------------------------------- 1 | import { concat, concatv, emptyBuffer, item, pipe, sequenceDelimitation } from '../src/base'; 2 | import { elementFlow } from '../src/element-flows'; 3 | import { parseFlow } from '../src/parse-flow'; 4 | import { Tag } from '../src/tag'; 5 | import * as data from './test-data'; 6 | import * as util from './test-util'; 7 | 8 | describe('A DICOM 
 elements flow', () => {
    it('should combine headers and value chunks into elements', () => {
        const bytes = concat(data.patientNameJohnDoe(), data.studyDate());

        return util.testParts(bytes, pipe(parseFlow(), elementFlow()), (elements) => {
            util.elementProbe(elements)
                .expectElement(Tag.PatientName)
                .expectElement(Tag.StudyDate)
                .expectDicomComplete();
        });
    });

    it('should combine items in fragments into fragment elements', () => {
        const bytes = concatv(
            data.pixeDataFragments(),
            item(4),
            Buffer.from([1, 2, 3, 4]),
            item(4),
            Buffer.from([5, 6, 7, 8]),
            sequenceDelimitation(),
        );

        return util.testParts(bytes, pipe(parseFlow(), elementFlow()), (elements) => {
            util.elementProbe(elements)
                .expectFragments(Tag.PixelData)
                .expectFragment(4)
                .expectFragment(4)
                .expectSequenceDelimitation()
                .expectDicomComplete();
        });
    });

    it('should handle elements and fragments of zero length', () => {
        const bytes = concatv(
            // Raw header for a zero-length StudyDate (0008,0020) DA element.
            Buffer.from([8, 0, 32, 0, 68, 65, 0, 0]),
            data.patientNameJohnDoe(),
            data.pixeDataFragments(),
            item(0),
            item(4),
            Buffer.from([5, 6, 7, 8]),
            sequenceDelimitation(),
        );

        return util.testParts(bytes, pipe(parseFlow(), elementFlow()), (elements) => {
            util.elementProbe(elements)
                .expectElement(Tag.StudyDate, emptyBuffer)
                .expectElement(Tag.PatientName, Buffer.from('John^Doe'))
                .expectFragments(Tag.PixelData)
                .expectFragment(0)
                .expectFragment(4)
                .expectSequenceDelimitation()
                .expectDicomComplete();
        });
    });

    it('should handle determinate length sequences and items', () => {
        const bytes = concatv(data.sequence(Tag.DerivationCodeSequence, 24), item(16), data.patientNameJohnDoe());

        return util.testParts(bytes, pipe(parseFlow(), elementFlow()), (elements) => {
            util.elementProbe(elements)
                .expectSequence(Tag.DerivationCodeSequence, 24)
                .expectItem(16)
                .expectElement(Tag.PatientName)
                .expectDicomComplete();
        });
    });
});
--------------------------------------------------------------------------------
/test/element-sink-test.ts:
--------------------------------------------------------------------------------
import assert from 'assert';
import { concatv, indeterminateLength, intToBytesLE, item, itemDelimitation, sequenceDelimitation } from '../src/base';
import { elementFlow } from '../src/element-flows';
import { elementSink } from '../src/element-sink';
import {
    FragmentElement,
    FragmentsElement,
    ItemDelimitationElement,
    ItemElement,
    SequenceDelimitationElement,
    SequenceElement,
    ValueElement,
} from '../src/dicom-elements';
import { parseFlow } from '../src/parse-flow';
import { arraySource, singleSource } from '../src/sources';
import { Tag } from '../src/tag';
import { UID } from '../src/uid';
import { Value } from '../src/value';
import { VR } from '../src/vr';
import * as data from './test-data';
import * as util from './test-util';

describe('An element sink', () => {
    it('aggregate streamed elements into an Elements', () => {
        // Nested sequences/items (indeterminate length) plus fragments; the sink
        // should round-trip them unchanged via toElements(false).
        const elementList = [
            new ValueElement(Tag.TransferSyntaxUID, VR.UI, Value.fromString(VR.UI, UID.ExplicitVRLittleEndian)),
            new ValueElement(Tag.StudyDate, VR.DA, Value.fromString(VR.DA, '20040329')),
            new SequenceElement(Tag.DerivationCodeSequence),
            new ItemElement(),
            new ValueElement(Tag.StudyDate, VR.DA, Value.fromString(VR.DA, '20040329')),
            new ItemDelimitationElement(),
            new ItemElement(),
            new SequenceElement(Tag.DerivationCodeSequence),
            new ItemElement(),
            new ValueElement(Tag.StudyDate, VR.DA, Value.fromString(VR.DA, '20040329')),
            new ItemDelimitationElement(),
            new SequenceDelimitationElement(),
            new ItemDelimitationElement(),
            new SequenceDelimitationElement(),
            new ValueElement(Tag.PatientName, VR.PN, Value.fromString(VR.PN, 'Doe^John')),
            new FragmentsElement(Tag.PixelData, VR.OB),
            new FragmentElement(4, Value.fromBytes(VR.OB, [1, 2, 3, 4])),
            new FragmentElement(4, Value.fromBytes(VR.OB, [1, 2, 3, 4])),
            new SequenceDelimitationElement(),
        ];

        return util.streamPromise(
            arraySource(elementList, true),
            elementSink((elements) => {
                assert.deepStrictEqual(elements.toElements(false), elementList);
            }),
        );
    });

    it('should handle zero length values, fragments, sequences and items', () => {
        const elementList = [
            new ValueElement(Tag.StudyDate, VR.DA, Value.empty()),
            new SequenceElement(Tag.DerivationCodeSequence),
            new SequenceDelimitationElement(),
            new SequenceElement(Tag.DerivationCodeSequence, 0),
            new SequenceElement(Tag.DerivationCodeSequence),
            new ItemElement(),
            new ItemDelimitationElement(),
            new ItemElement(0),
            new SequenceDelimitationElement(),
            new FragmentsElement(Tag.PixelData, VR.OB),
            new FragmentElement(0, Value.empty()),
            new SequenceDelimitationElement(),
        ];

        return util.streamPromise(
            arraySource(elementList, true),
            elementSink((elements) => {
                assert.deepStrictEqual(elements.toElements(false), elementList);
            }),
        );
    });

    it('should handle sequences and items of determinate length', () => {
        const elementList = [
            new ValueElement(Tag.StudyDate, VR.DA, Value.fromString(VR.DA, '20040329')),
            new SequenceElement(Tag.DerivationCodeSequence, 8 + 16 + 16),
            new ItemElement(16 + 16),
            new ValueElement(Tag.StudyDate, VR.DA, Value.fromString(VR.DA, '20040329')),
            new ValueElement(Tag.PatientName, VR.DA, Value.fromString(VR.DA, 'Doe^John')),
            new ValueElement(Tag.PatientName, VR.DA, Value.fromString(VR.DA, 'Doe^John')),
        ];

        return
util.streamPromise( 90 | arraySource(elementList, true), 91 | elementSink((elements) => { 92 | assert.deepStrictEqual(elements.toElements(false), elementList); 93 | }), 94 | ); 95 | }); 96 | 97 | it('should convert an empty offsets table item to an empty list of offsets', () => { 98 | const elementList = [ 99 | new FragmentsElement(Tag.PixelData, VR.OB), 100 | new FragmentElement(0, Value.empty()), 101 | new FragmentElement(0, Value.fromBytes(VR.OB, [1, 2, 3, 4])), 102 | new SequenceDelimitationElement(), 103 | ]; 104 | 105 | return util.streamPromise( 106 | arraySource(elementList, true), 107 | elementSink((elements) => { 108 | const fragments = elements.fragmentsByTag(Tag.PixelData); 109 | assert(fragments.offsets !== undefined); 110 | assert(fragments.offsets.length === 0); 111 | }), 112 | ); 113 | }); 114 | 115 | it('should map an offsets table to a list of offsets', () => { 116 | const elementList = [ 117 | new FragmentsElement(Tag.PixelData, VR.OB), 118 | new FragmentElement( 119 | 0, 120 | Value.fromBuffer(VR.OB, concatv(intToBytesLE(1), intToBytesLE(2), intToBytesLE(3), intToBytesLE(4))), 121 | ), 122 | new SequenceDelimitationElement(), 123 | ]; 124 | 125 | return util.streamPromise( 126 | arraySource(elementList, true), 127 | elementSink((elements) => { 128 | const fragments = elements.fragmentsByTag(Tag.PixelData); 129 | assert(fragments.offsets !== undefined); 130 | assert.deepStrictEqual(fragments.offsets, [1, 2, 3, 4]); 131 | }), 132 | ); 133 | }); 134 | 135 | it('should handle determinate length items and sequences', () => { 136 | const elementList = [ 137 | new SequenceElement(Tag.DerivationCodeSequence, 68), 138 | new ItemElement(16), 139 | new ValueElement(Tag.StudyDate, VR.DA, Value.fromString(VR.DA, '20040329')), 140 | new ItemElement(36), 141 | new SequenceElement(Tag.DerivationCodeSequence, 24), 142 | new ItemElement(16), 143 | new ValueElement(Tag.StudyDate, VR.DA, Value.fromString(VR.DA, '20040329')), 144 | ]; 145 | 146 | return 
util.streamPromise( 147 | arraySource(elementList, true), 148 | elementSink((elements) => { 149 | assert.deepStrictEqual(elements.toElements(false), elementList); 150 | }), 151 | ); 152 | }); 153 | 154 | it('should "handle item and sequence delimitations in when items and sequences are of determinate length', () => { 155 | const elementList = [ 156 | new SequenceElement(Tag.DerivationCodeSequence, 108), 157 | new ItemElement(24), 158 | new ValueElement(Tag.StudyDate, VR.DA, Value.fromString(VR.DA, '20040329')), 159 | new ItemDelimitationElement(), 160 | new ItemElement(60), 161 | new SequenceElement(Tag.DerivationCodeSequence, 40), 162 | new ItemElement(24), 163 | new ValueElement(Tag.StudyDate, VR.DA, Value.fromString(VR.DA, '20040329')), 164 | new ItemDelimitationElement(), 165 | new SequenceDelimitationElement(), 166 | new ItemDelimitationElement(), 167 | new SequenceDelimitationElement(), 168 | ]; 169 | 170 | const expectedElementList = [ 171 | new SequenceElement(Tag.DerivationCodeSequence, 68), 172 | new ItemElement(16), 173 | new ValueElement(Tag.StudyDate, VR.DA, Value.fromString(VR.DA, '20040329')), 174 | new ItemElement(36), 175 | new SequenceElement(Tag.DerivationCodeSequence, 24), 176 | new ItemElement(16), 177 | new ValueElement(Tag.StudyDate, VR.DA, Value.fromString(VR.DA, '20040329')), 178 | ]; 179 | 180 | return util.streamPromise( 181 | arraySource(elementList, true), 182 | elementSink((elements) => { 183 | assert.deepStrictEqual(elements.toElements(false), expectedElementList); 184 | }), 185 | ); 186 | }); 187 | 188 | it('should "handle implicit VR encoding', () => { 189 | const bytes = concatv( 190 | data.preamble, 191 | data.fmiGroupLength(data.transferSyntaxUID(UID.ImplicitVRLittleEndian)), 192 | data.transferSyntaxUID(UID.ImplicitVRLittleEndian), 193 | data.patientNameJohnDoe(false, false), 194 | data.sequence(Tag.DerivationCodeSequence, indeterminateLength, false, false), 195 | item(), 196 | data.patientNameJohnDoe(false, false), 197 | 
data.studyDate(false, false), 198 | itemDelimitation(), 199 | item(), 200 | data.sequence(Tag.DerivationCodeSequence, 24, false, false), 201 | item(16), 202 | data.patientNameJohnDoe(false, false), 203 | itemDelimitation(), 204 | sequenceDelimitation(), 205 | ); 206 | 207 | return util.streamPromise( 208 | singleSource(bytes, true), 209 | parseFlow(), 210 | elementFlow(), 211 | elementSink((elements) => { 212 | assert.deepStrictEqual(elements.toBytes(), bytes); 213 | }), 214 | ); 215 | }); 216 | }); 217 | 218 | describe('Fragments', () => { 219 | it('should be empty', () => { 220 | const bytes = concatv(data.pixeDataFragments(), sequenceDelimitation()); 221 | 222 | return util.streamPromise( 223 | singleSource(bytes), 224 | parseFlow(), 225 | elementFlow(), 226 | elementSink((elements) => { 227 | const fragments = elements.fragmentsByTag(Tag.PixelData); 228 | assert.strictEqual(fragments.size, 0); 229 | assert(fragments.offsets === undefined); 230 | }), 231 | ); 232 | }); 233 | 234 | it('should convert an empty first item to an empty offsets list', () => { 235 | const bytes = concatv( 236 | data.pixeDataFragments(), 237 | item(0), 238 | item(4), 239 | Buffer.from([1, 2, 3, 4]), 240 | sequenceDelimitation(), 241 | ); 242 | 243 | return util.streamPromise( 244 | singleSource(bytes), 245 | parseFlow(), 246 | elementFlow(), 247 | elementSink((elements) => { 248 | const fragments = elements.fragmentsByTag(Tag.PixelData); 249 | assert(fragments.offsets !== undefined); 250 | assert(fragments.offsets.length === 0); 251 | assert.strictEqual(fragments.size, 1); 252 | }), 253 | ); 254 | }); 255 | 256 | it('should convert first item to offsets', () => { 257 | const bytes = concatv( 258 | data.pixeDataFragments(), 259 | item(8), 260 | intToBytesLE(0), 261 | intToBytesLE(456), 262 | item(4), 263 | Buffer.from([1, 2, 3, 4]), 264 | sequenceDelimitation(), 265 | ); 266 | 267 | return util.streamPromise( 268 | singleSource(bytes), 269 | parseFlow(), 270 | elementFlow(), 271 | 
elementSink((elements) => { 272 | const fragments = elements.fragmentsByTag(Tag.PixelData); 273 | assert(fragments.offsets !== undefined); 274 | assert.deepStrictEqual(fragments.offsets, [0, 456]); 275 | }), 276 | ); 277 | }); 278 | }); 279 | -------------------------------------------------------------------------------- /test/lookup-test.ts: -------------------------------------------------------------------------------- 1 | import assert from 'assert'; 2 | import { Lookup } from '../src/lookup'; 3 | import { Tag } from '../src/tag'; 4 | import { UID } from '../src/uid'; 5 | import { VR } from '../src/vr'; 6 | 7 | describe('The DICOM dictionary', () => { 8 | it('should support getting the value representation for a tag', () => { 9 | assert.strictEqual(Lookup.vrOf(Tag.PatientName), VR.PN); 10 | }); 11 | 12 | it('should support getting the keyword for a tag', () => { 13 | assert.strictEqual(Lookup.keywordOf(Tag.PatientName), 'PatientName'); 14 | assert.strictEqual(Lookup.keywordOf(0x00031141), ''); 15 | }); 16 | 17 | it('should support getting the tag for a keyword', () => { 18 | assert.strictEqual(Lookup.tagOf('PatientName'), Tag.PatientName); 19 | assert.strictEqual(Lookup.tagOf('not-a-keyword'), undefined); 20 | }); 21 | 22 | it('should support listing all keywords', () => { 23 | assert(Lookup.keywords.length > 4000); 24 | assert(Lookup.keywords.includes('PatientName')); 25 | }); 26 | 27 | it('should support getting the name for a UID', () => { 28 | assert.strictEqual(Lookup.nameOf(UID.NuclearMedicineImageStorage), 'Nuclear Medicine Image Storage'); 29 | assert.strictEqual(Lookup.nameOf('not a UID'), undefined); 30 | }); 31 | }); 32 | -------------------------------------------------------------------------------- /test/sinks-test.ts: -------------------------------------------------------------------------------- 1 | import assert from 'assert'; 2 | import { byteSink } from '../src/sinks'; 3 | import { singleSource } from '../src/sources'; 4 | import { Chunker 
/**
 * Minimal concrete TagPathLike used to exercise the abstract base class.
 * Paths are singly linked lists built through the `previousVal` pointer,
 * terminated by the shared `emptyTagPath` sentinel.
 */
class TestTagPath extends TagPathLike {
    // Default previous is the empty sentinel, evaluated at call time.
    constructor(private readonly tagVal: number, private readonly previousVal: TestTagPath = emptyTagPath) {
        super();
    }
    public tag(): number {
        return this.tagVal;
    }
    public previous(): TestTagPath {
        return this.previousVal;
    }
    public isEmpty(): boolean {
        return false;
    }
    // Not needed by these tests; deliberately unimplemented.
    public drop(): TestTagPath {
        throw new Error('Method not implemented.');
    }
}

/**
 * Sentinel terminating every path. previous() points back to itself so that
 * walking past the root is harmless.
 */
class EmptyTagPath extends TestTagPath {
    constructor() {
        // `null` rather than the `emptyTagPath` default: the singleton below
        // is still in its temporal dead zone while this constructor runs, and
        // previous() is overridden so the stored value is never read.
        super(-1, null);
    }
    public tag(): number {
        throw Error('Empty tag path');
    }
    public previous(): TestTagPath {
        return this;
    }
    public isEmpty(): boolean {
        return true;
    }
}
// Shared sentinel instance used as the default `previous` for all paths.
const emptyTagPath = new EmptyTagPath();
TestTagPath( 51 | Tag.PatientID, 52 | new TestTagPath( 53 | Tag.DerivationCodeSequence, 54 | new TestTagPath(Tag.DerivationCodeSequence, new TestTagPath(Tag.DerivationCodeSequence)), 55 | ), 56 | ); 57 | assert.strictEqual(path.depth(), 4); 58 | }); 59 | }); 60 | 61 | describe('A tag path', () => { 62 | it('should be root when pointing to root dataset', () => { 63 | const path = new TestTagPath(Tag.PatientID); 64 | assert(path.isRoot()); 65 | }); 66 | 67 | it('should not be root when pointing to a tag in a sequence', () => { 68 | const path = new TestTagPath(Tag.PatientID, new TestTagPath(Tag.DerivationCodeSequence)); 69 | assert(!path.isRoot()); 70 | }); 71 | }); 72 | 73 | describe('A list representation of tag path tags', () => { 74 | it('should contain a single entry for a tag in the root dataset', () => { 75 | const path = new TestTagPath(Tag.PatientID); 76 | assert.deepStrictEqual(path.toList(), [path]); 77 | }); 78 | 79 | it('should contain four entries for a path of depth 3', () => { 80 | const path = new TestTagPath( 81 | Tag.PatientID, 82 | new TestTagPath( 83 | Tag.DerivationCodeSequence, 84 | new TestTagPath(Tag.DerivationCodeSequence, new TestTagPath(Tag.DerivationCodeSequence)), 85 | ), 86 | ); 87 | assert.deepStrictEqual(path.toList(), [ 88 | path.previous().previous().previous(), 89 | path.previous().previous(), 90 | path.previous(), 91 | path, 92 | ]); 93 | }); 94 | }); 95 | 96 | describe('The tag path contains test', () => { 97 | it('should return for any tag number on the tag path', () => { 98 | const path = new TestTagPath(3, new TestTagPath(2, new TestTagPath(1))); 99 | assert(path.contains(1)); 100 | assert(path.contains(2)); 101 | assert(path.contains(3)); 102 | assert(!path.contains(4)); 103 | }); 104 | }); 105 | 106 | describe('The tag path take operation', () => { 107 | it('should preserve elements from the left', () => { 108 | const path = new TestTagPath(4, new TestTagPath(3, new TestTagPath(2, new TestTagPath(1)))); 109 | 
assert.strictEqual(path.take(-100), emptyTagPath); 110 | assert.strictEqual(path.take(0), emptyTagPath); 111 | assert.strictEqual(path.take(1), path.previous().previous().previous()); 112 | assert.strictEqual(path.take(2), path.previous().previous()); 113 | assert.strictEqual(path.take(3), path.previous()); 114 | assert.strictEqual(path.take(4), path); 115 | assert.strictEqual(path.take(100), path); 116 | }); 117 | }); 118 | 119 | describe('The head of a tag path', () => { 120 | it('should be the root element of the path', () => { 121 | assert.deepStrictEqual(new TestTagPath(1).head(), new TestTagPath(1)); 122 | assert.deepStrictEqual(new TestTagPath(2, new TestTagPath(1)).head(), new TestTagPath(1)); 123 | assert.deepStrictEqual(new TestTagPath(3, new TestTagPath(2, new TestTagPath(1))).head(), new TestTagPath(1)); 124 | }); 125 | }); 126 | -------------------------------------------------------------------------------- /test/test-data.ts: -------------------------------------------------------------------------------- 1 | import { 2 | concat, 3 | concatv, 4 | emptyBuffer, 5 | indeterminateLength, 6 | intToBytes, 7 | intToBytesLE, 8 | padToEvenLength, 9 | shortToBytes, 10 | tagToBytes, 11 | } from '../src/base'; 12 | import { Lookup } from '../src/lookup'; 13 | import { HeaderPart } from '../src/dicom-parts'; 14 | import { Tag } from '../src/tag'; 15 | import { UID } from '../src/uid'; 16 | 17 | export const preamble = concat(Buffer.from(new Array(128).fill(0)), Buffer.from('DICM')); 18 | 19 | export function element(tag: number, value: Buffer | string, bigEndian = false, explicitVR = true): Buffer { 20 | const bytes = value instanceof Buffer ? 
Buffer.from(value as Buffer) : Buffer.from(value as string); 21 | const valueBytes = padToEvenLength(bytes, tag); 22 | const headerBytes = HeaderPart.create(tag, Lookup.vrOf(tag), valueBytes.length, bigEndian, explicitVR).bytes; 23 | return concat(headerBytes, valueBytes); 24 | } 25 | 26 | export function fmiGroupLength(...fmis: Buffer[]): Buffer { 27 | return element( 28 | Tag.FileMetaInformationGroupLength, 29 | intToBytesLE(fmis.map((fmi) => fmi.length + (fmi.length % 2)).reduce((p, c) => p + c)), 30 | ); 31 | } 32 | 33 | export function fmiGroupLengthImplicit(...fmis: Buffer[]): Buffer { 34 | return element( 35 | Tag.FileMetaInformationGroupLength, 36 | intToBytesLE(fmis.map((fmi) => fmi.length + (fmi.length % 2)).reduce((p, c) => p + c)), 37 | false, 38 | false, 39 | ); 40 | } 41 | 42 | export function fmiVersion(bigEndian?: boolean, explicitVR?: boolean): Buffer { 43 | return element(Tag.FileMetaInformationVersion, Buffer.from([0x00, 0x01]), bigEndian, explicitVR); 44 | } 45 | 46 | export function transferSyntaxUID( 47 | uid: string = UID.ExplicitVRLittleEndian, 48 | bigEndian?: boolean, 49 | explicitVR?: boolean, 50 | ): Buffer { 51 | uid = uid || UID.ExplicitVRLittleEndian; 52 | return element(Tag.TransferSyntaxUID, uid, bigEndian, explicitVR); 53 | } 54 | 55 | export function mediaStorageSOPClassUID(bigEndian?: boolean, explicitVR?: boolean): Buffer { 56 | return element(Tag.MediaStorageSOPClassUID, UID.CTImageStorage, bigEndian, explicitVR); 57 | } 58 | 59 | export function mediaStorageSOPInstanceUID(bigEndian?: boolean, explicitVR?: boolean): Buffer { 60 | return element( 61 | Tag.MediaStorageSOPInstanceUID, 62 | '1.2.276.0.7230010.3.1.4.1536491920.17152.1480884676.735', 63 | bigEndian, 64 | explicitVR, 65 | ); 66 | } 67 | 68 | export function sopClassUID(bigEndian?: boolean, explicitVR?: boolean): Buffer { 69 | return element(Tag.SOPClassUID, UID.CTImageStorage, bigEndian, explicitVR); 70 | } 71 | 72 | export function groupLength(groupNumber: number, 
/** PatientName element encoding the fixed test subject "John^Doe". */
export function patientNameJohnDoe(bigEndian?: boolean, explicitVR?: boolean): Buffer {
    return element(Tag.PatientName, 'John^Doe', bigEndian, explicitVR);
}
/** PatientName element with a zero-length value. */
export function emptyPatientName(bigEndian?: boolean, explicitVR?: boolean): Buffer {
    return element(Tag.PatientName, '', bigEndian, explicitVR);
}

/** PatientID element with a fixed eight-digit identifier. */
export function patientID(bigEndian?: boolean, explicitVR?: boolean): Buffer {
    return element(Tag.PatientID, '12345678', bigEndian, explicitVR);
}

/** StudyDate element pinned to 1970-01-01. */
export function studyDate(bigEndian?: boolean, explicitVR?: boolean): Buffer {
    return element(Tag.StudyDate, '19700101', bigEndian, explicitVR);
}
// CTDIPhantomTypeCodeSequence encoded with VR "UN" and indeterminate length
// (0xffffffff). The name presumably refers to DICOM CP-264 handling of
// unknown-VR sequences — TODO confirm against the parser tests that use it.
export const cp264Sequence = concatv(
    tagToBytes(Tag.CTDIPhantomTypeCodeSequence),
    Buffer.from('UN'),
    Buffer.from([0, 0, 0xff, 0xff, 0xff, 0xff]),
);

/** PixelData element with a zero-filled value of the given length. */
export function pixelData(length: number, bigEndian?: boolean, explicitVR?: boolean): Buffer {
    return element(Tag.PixelData, Buffer.from(new Array(length).fill(0)), bigEndian, explicitVR);
}
/**
 * Header for encapsulated (fragmented) pixel data: OW VR with indeterminate
 * length (0xffffffff); fragment items must follow. NOTE(review): the export
 * name is missing an 'l' ("pixeData") but is kept as-is — tests throughout
 * the suite import it under this spelling.
 */
export function pixeDataFragments(bigEndian?: boolean): Buffer {
    return concatv(
        tagToBytes(Tag.PixelData, bigEndian),
        Buffer.from('OW'),
        Buffer.from([0, 0]),
        Buffer.from([0xff, 0xff, 0xff, 0xff]),
    );
}
return 'TestPart: ' + this.id; 42 | } 43 | } 44 | 45 | export class PartProbe { 46 | private offset = 0; 47 | 48 | constructor(public readonly array: DicomPart[]) {} 49 | 50 | public expectPreamble(): PartProbe { 51 | assert(this.array[this.offset] instanceof PreamblePart); 52 | this.offset++; 53 | return this; 54 | } 55 | 56 | public expectHeader(tag?: number, vr?: VR, length?: number): PartProbe { 57 | assert(this.array[this.offset] instanceof HeaderPart); 58 | const part = this.array[this.offset] as HeaderPart; 59 | if (length !== undefined) { 60 | assert.strictEqual(part.length, length); 61 | } 62 | if (vr !== undefined) { 63 | assert.strictEqual(part.vr.name, vr.name); 64 | } 65 | if (tag !== undefined) { 66 | assert.strictEqual(part.tag, tag); 67 | } 68 | this.offset++; 69 | return this; 70 | } 71 | 72 | public expectValueChunk(data?: Buffer): PartProbe { 73 | const part = this.array[this.offset]; 74 | assert(part instanceof ValueChunk); 75 | if (data !== undefined) { 76 | if (data instanceof Buffer) { 77 | assert.deepStrictEqual(part.bytes, data); 78 | } else { 79 | assert.strictEqual(part.bytes.length, data); 80 | } 81 | } 82 | this.offset++; 83 | return this; 84 | } 85 | 86 | public expectDeflatedChunk(): PartProbe { 87 | assert(this.array[this.offset] instanceof DeflatedChunk); 88 | this.offset++; 89 | return this; 90 | } 91 | 92 | public expectFragments(): PartProbe { 93 | assert(this.array[this.offset] instanceof FragmentsPart); 94 | this.offset++; 95 | return this; 96 | } 97 | 98 | public expectSequence(tag?: number, length?: number): PartProbe { 99 | assert(this.array[this.offset] instanceof SequencePart); 100 | const part = this.array[this.offset] as SequencePart; 101 | if (length !== undefined) { 102 | assert.equal(part.length, length); 103 | } 104 | if (tag !== undefined) { 105 | assert.strictEqual(part.tag, tag); 106 | } 107 | this.offset++; 108 | return this; 109 | } 110 | 111 | public expectItem(length?: number): PartProbe { 112 | 
assert(this.array[this.offset] instanceof ItemPart); 113 | const part = this.array[this.offset] as ItemPart; 114 | if (length !== undefined) { 115 | assert.equal(part.length, length); 116 | } 117 | this.offset++; 118 | return this; 119 | } 120 | 121 | public expectItemDelimitation(): PartProbe { 122 | assert(this.array[this.offset] instanceof ItemDelimitationPart); 123 | this.offset++; 124 | return this; 125 | } 126 | 127 | public expectSequenceDelimitation(): PartProbe { 128 | assert(this.array[this.offset] instanceof SequenceDelimitationPart); 129 | this.offset++; 130 | return this; 131 | } 132 | 133 | public expectFragment(length?: number): PartProbe { 134 | assert(this.array[this.offset] instanceof ItemPart); 135 | const part = this.array[this.offset] as ItemPart; 136 | if (length !== undefined) { 137 | assert.strictEqual(part.length, length); 138 | } 139 | this.offset++; 140 | return this; 141 | } 142 | 143 | public expectFragmentsDelimitation(): PartProbe { 144 | return this.expectSequenceDelimitation(); 145 | } 146 | 147 | public expectUnknownPart(): PartProbe { 148 | assert(this.array[this.offset] instanceof UnknownPart); 149 | this.offset++; 150 | return this; 151 | } 152 | 153 | public expectElements(elementsPart: ElementsPart): PartProbe { 154 | const part = this.array[this.offset]; 155 | assert(part instanceof ElementsPart); 156 | assert.deepStrictEqual(part, elementsPart); 157 | this.offset++; 158 | return this; 159 | } 160 | 161 | public expectTestPart(id?: string): PartProbe { 162 | assert(this.array[this.offset] instanceof TestPart); 163 | const part = this.array[this.offset] as TestPart; 164 | if (id !== undefined) { 165 | assert.equal(part.id, id); 166 | } 167 | this.offset++; 168 | return this; 169 | } 170 | 171 | public expectDicomComplete(): PartProbe { 172 | assert(this.offset >= this.array.length); 173 | this.offset++; 174 | return this; 175 | } 176 | } 177 | 178 | class ElementProbe { 179 | private offset = 0; 180 | 181 | constructor(public 
readonly array: Element[]) {} 182 | 183 | public expectElement(tag?: number, value?: Buffer): ElementProbe { 184 | const part: Element = this.array[this.offset]; 185 | assert(part instanceof ValueElement || part instanceof SequenceElement || part instanceof FragmentsElement); 186 | if (part instanceof ValueElement || part instanceof SequenceElement || part instanceof FragmentsElement) { 187 | if (value !== undefined && part instanceof ValueElement) { 188 | assert.deepStrictEqual(part.value.bytes, value); 189 | } 190 | if (tag !== undefined) { 191 | assert.strictEqual(part.tag, tag); 192 | } 193 | this.offset++; 194 | } 195 | return this; 196 | } 197 | 198 | public expectPreamble(): ElementProbe { 199 | assert.strictEqual(this.array[this.offset], preambleElement); 200 | this.offset++; 201 | return this; 202 | } 203 | 204 | public expectFragments(tag?: number): ElementProbe { 205 | const part = this.array[this.offset]; 206 | assert(part instanceof FragmentsElement); 207 | if (part instanceof FragmentsElement) { 208 | if (tag !== undefined) { 209 | assert.strictEqual(part.tag, tag); 210 | } 211 | this.offset++; 212 | } 213 | return this; 214 | } 215 | 216 | public expectFragment(length?: number): ElementProbe { 217 | const part = this.array[this.offset]; 218 | assert(part instanceof FragmentElement); 219 | if (part instanceof FragmentElement) { 220 | if (length !== undefined) { 221 | assert.strictEqual(part.length, length); 222 | } 223 | this.offset++; 224 | } 225 | return this; 226 | } 227 | 228 | public expectSequence(tag?: number, length?: number): ElementProbe { 229 | const part = this.array[this.offset]; 230 | assert(part instanceof SequenceElement); 231 | if (part instanceof SequenceElement) { 232 | if (length !== undefined) { 233 | assert.strictEqual(part.length, length); 234 | } 235 | if (tag !== undefined) { 236 | assert.strictEqual(part.tag, tag); 237 | } 238 | this.offset++; 239 | } 240 | return this; 241 | } 242 | 243 | public expectItem(length?: number): 
ElementProbe { 244 | const part = this.array[this.offset]; 245 | assert(part instanceof ItemElement); 246 | if (part instanceof ItemElement) { 247 | if (length !== undefined) { 248 | assert.strictEqual(part.length, length); 249 | } 250 | this.offset++; 251 | } 252 | return this; 253 | } 254 | 255 | public expectItemDelimitation(): ElementProbe { 256 | const part = this.array[this.offset]; 257 | assert(part instanceof ItemDelimitationElement); 258 | if (part instanceof ItemDelimitationElement) { 259 | this.offset++; 260 | } 261 | return this; 262 | } 263 | 264 | public expectSequenceDelimitation(): ElementProbe { 265 | const part = this.array[this.offset]; 266 | assert(part instanceof SequenceDelimitationElement); 267 | this.offset++; 268 | return this; 269 | } 270 | 271 | public expectDicomComplete(): ElementProbe { 272 | assert(this.offset >= this.array.length); 273 | this.offset++; 274 | return this; 275 | } 276 | } 277 | 278 | export const streamPromise = promisify(pipeline); 279 | export function partProbe(array: DicomPart[]): PartProbe { 280 | return new PartProbe(array); 281 | } 282 | export function elementProbe(array: Element[]): ElementProbe { 283 | return new ElementProbe(array); 284 | } 285 | export function testParts(bytes: Buffer, flow: Transform, assertParts: (parts: any[]) => void): Promise { 286 | return streamPromise(singleSource(bytes), flow, arraySink(assertParts)); 287 | } 288 | export function expectDicomError(asyncFunction: () => Promise): Promise { 289 | return assert.rejects(asyncFunction); 290 | } 291 | export function deflate(buffer: Buffer, gzip = false): Buffer { 292 | return gzip ? 
zlib.deflateSync(buffer) : zlib.deflateRawSync(buffer); 293 | } 294 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "module": "commonjs", 4 | "target": "es6", 5 | "lib": ["es2015", "dom"], 6 | "sourceMap": true, 7 | "declaration": true, 8 | "noImplicitAny": true, 9 | "esModuleInterop": true, 10 | "rootDir": "src", 11 | "outDir": "dist/lib", 12 | "typeRoots" : ["./node_modules/@types", "./typings"] 13 | }, 14 | "files": [ 15 | "./src/index.ts" 16 | ] 17 | } -------------------------------------------------------------------------------- /typings/dicom-character-set/index.d.ts: -------------------------------------------------------------------------------- 1 | /// 2 | declare module 'dicom-character-set' { 3 | export const characterSets: Record[]; 4 | export function convertBytes(specificCharacterSet: any, bytes: any, options: any): string; 5 | } 6 | -------------------------------------------------------------------------------- /typings/multipipe/index.d.ts: -------------------------------------------------------------------------------- 1 | /// 2 | declare module 'multipipe' { 3 | export default function pipe(...streams: any[]): any; 4 | } 5 | --------------------------------------------------------------------------------