├── .gitignore ├── .jsdoc.json ├── .npmignore ├── .prettierignore ├── DOC.md ├── LICENSE.md ├── README.md ├── docs ├── chunks.js.html ├── constants.js.html ├── crc32.js.html ├── decode.js.html ├── demo │ ├── assets │ │ ├── generate-D99VbjkE.js │ │ ├── index-wognUqBc.js │ │ └── worker-Cs3gA_uN.js │ └── index.html ├── encode.js.html ├── fonts │ ├── OpenSans-Bold-webfont.eot │ ├── OpenSans-Bold-webfont.svg │ ├── OpenSans-Bold-webfont.woff │ ├── OpenSans-BoldItalic-webfont.eot │ ├── OpenSans-BoldItalic-webfont.svg │ ├── OpenSans-BoldItalic-webfont.woff │ ├── OpenSans-Italic-webfont.eot │ ├── OpenSans-Italic-webfont.svg │ ├── OpenSans-Italic-webfont.woff │ ├── OpenSans-Light-webfont.eot │ ├── OpenSans-Light-webfont.svg │ ├── OpenSans-Light-webfont.woff │ ├── OpenSans-LightItalic-webfont.eot │ ├── OpenSans-LightItalic-webfont.svg │ ├── OpenSans-LightItalic-webfont.woff │ ├── OpenSans-Regular-webfont.eot │ ├── OpenSans-Regular-webfont.svg │ └── OpenSans-Regular-webfont.woff ├── global.html ├── index.html ├── scripts │ ├── linenumber.js │ └── prettify │ │ ├── Apache-License-2.0.txt │ │ ├── lang-css.js │ │ └── prettify.js ├── styles │ ├── jsdoc-default.css │ ├── prettify-jsdoc.css │ └── prettify-tomorrow.css └── util.js.html ├── examples ├── bun-encode.js ├── deno-encode.js ├── deno-parallel-encode.js ├── encode-ancillary.js ├── encode-color-space.js ├── encode-simple.js ├── encode-stream.js ├── index.html ├── inspect.js ├── node-encode.js ├── profiles │ ├── AdobeRGB1998.icc │ ├── Display P3.icc │ └── sRGB IEC61966-2.1.icc ├── read-ihdr.js ├── util │ ├── adler32.js │ ├── icc-transform.js │ ├── parallel-encode-worker.js │ ├── pixels.js │ └── save.js └── web │ ├── generate.js │ ├── main.js │ └── worker.js ├── index.js ├── package-lock.json ├── package.json ├── src ├── chunks.js ├── constants.js ├── crc32.js ├── decode.js ├── encode.js └── util.js └── test ├── bench.js ├── benchmark ├── generate.js ├── main.js └── worker.js ├── deprecated ├── demo copy.js ├── 
demo-16bpp-display-p3.js ├── demo.js ├── export-png.js ├── inspect.js ├── png-for-print.js ├── save-png.js └── splice.js ├── encoded ├── generated-0.png ├── generated-1.png ├── generated-2.png ├── generated-3.png └── generated-4.png ├── generate-png-fixtures.js ├── index.html ├── png-fixtures.js └── test.js /.gitignore: -------------------------------------------------------------------------------- 1 | bower_components 2 | node_modules 3 | *.log 4 | .DS_Store 5 | 6 | /profiles/ 7 | test/fixtures/icc 8 | test/fixtures/ 9 | 10 | /tmp/ 11 | /dist/ 12 | /examples/dist/ -------------------------------------------------------------------------------- /.jsdoc.json: -------------------------------------------------------------------------------- 1 | { 2 | "plugins": [], 3 | "source": { 4 | "include": ["index.js", "src"] 5 | }, 6 | "sourceType": "module" 7 | } 8 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | bower_components 2 | node_modules 3 | *.log 4 | .DS_Store 5 | .npmignore 6 | LICENSE.md 7 | 8 | /profiles/ 9 | /examples/ 10 | /test/ 11 | /tmp/ 12 | /docs/ 13 | .jsdoc.json -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | src/png-metadata-writer/crc32.js 2 | test/png-fixtures.js -------------------------------------------------------------------------------- /DOC.md: -------------------------------------------------------------------------------- 1 | `png-tools` is a suite of low level tools for working with PNG files in JavaScript. 
2 | 3 | ## Source Code 4 | 5 | The source code is on GitHub: 6 | 7 | [https://github.com/mattdesl/png-tools/](https://github.com/mattdesl/png-tools/) 8 | 9 | ## Docs 10 | 11 | The documentation is held here: 12 | 13 | [https://mattdesl.github.io/png-tools](https://mattdesl.github.io/png-tools) 14 | 15 | ## Usage 16 | 17 | For installation and usage, see the [README.md page](https://github.com/mattdesl/png-tools/) on the GitHub repository. 18 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | Copyright (c) 2024 Matt DesLauriers 3 | 4 | Permission is hereby granted, free of charge, to any person obtaining a copy 5 | of this software and associated documentation files (the "Software"), to deal 6 | in the Software without restriction, including without limitation the rights 7 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 8 | copies of the Software, and to permit persons to whom the Software is 9 | furnished to do so, subject to the following conditions: 10 | 11 | The above copyright notice and this permission notice shall be included in all 12 | copies or substantial portions of the Software. 13 | 14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 15 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 16 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 17 | IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, 18 | DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR 19 | OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE 20 | OR OTHER DEALINGS IN THE SOFTWARE. 
21 | 22 | -------------------------------------------------------------------------------- /docs/constants.js.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | JSDoc: Source: constants.js 6 | 7 | 8 | 9 | 12 | 13 | 14 | 15 | 16 | 17 | 18 |
19 | 20 |

Source: constants.js

21 | 22 | 23 | 24 | 25 | 26 | 27 |
28 |
29 |
// The fixed 8-byte signature that must begin every PNG stream:
// \x89 'P' 'N' 'G' CR LF SUB LF.
export const PNG_HEADER = Uint8Array.from("\x89PNG\r\n\x1a\n", (ch) =>
  ch.charCodeAt(0)
);
 32 | 
 33 | /**
 34 |  * An enum for Intent when specifying sRGB chunk.
 35 |  *
 36 |  * @enum {Intent}
 37 |  * @property {number} Perceptual (0x00)
 38 |  * @property {number} Relative relative colorimetric (0x01)
 39 |  * @property {number} Saturation (0x02)
 40 |  * @property {number} Absolute absolute colorimetric (0x03)
 41 |  **/
 42 | export const Intent = Object.freeze({
 43 |   Perceptual: 0,
 44 |   Relative: 1, // Relative colorimetric
 45 |   Saturation: 2,
 46 |   Absolute: 3, // Absolute colorimetric
 47 | });
 48 | 
 49 | /**
 50 |  * An enum for standard PNG scanline filter methods.
 51 |  *
 52 |  * @enum {FilterMethod}
 53 |  * @property {number} None No filter (0x00)
 54 |  * @property {number} Sub Compute from left (0x01)
 55 |  * @property {number} Up Compute from above scanline (0x02)
 56 |  * @property {number} Average Compute from average of up and left (0x03)
 57 |  * @property {number} Paeth Compute the PNG 'paeth' predictor from up & left (0x04)
 58 |  **/
 59 | export const FilterMethod = Object.freeze({
 60 |   None: 0x00,
 61 |   Sub: 0x01,
 62 |   Up: 0x02,
 63 |   Average: 0x03,
 64 |   Paeth: 0x04,
 65 | });
 66 | 
 67 | /**
 68 |  * An enum for standard PNG color types, such as RGB or RGBA.
 69 |  *
 70 |  * @enum {ColorType}
 71 |  * @property {number} GRAYSCALE (1)
 72 |  * @property {number} RGB (2)
 73 |  * @property {number} INDEXED (3)
 74 |  * @property {number} GRAYSCALE_ALPHA (4)
 75 |  * @property {number} RGBA (6)
 76 |  **/
 77 | export const ColorType = Object.freeze({
 78 |   GRAYSCALE: 1,
 79 |   RGB: 2,
 80 |   INDEXED: 3,
 81 |   GRAYSCALE_ALPHA: 4,
 82 |   RGBA: 6,
 83 | });
 84 | 
 85 | /**
 86 |  * An enum for standard PNG chunk type codes (4-byte Uint32 decimal), including critical and ancillary chunks.
 87 |  *
 88 |  * @enum {ChunkType}
 89 |  * @property {number} IHDR
 90 |  * @property {number} PLTE
 91 |  * @property {number} IDAT
 92 |  * @property {number} IEND
 93 |  * @property {number} (...) - see source for full list
 94 |  * */
 95 | export const ChunkType = Object.freeze({
 96 |   // Critical chunks
 97 |   IHDR: 0x49484452,
 98 |   PLTE: 0x504c5445,
 99 |   IDAT: 0x49444154,
100 |   IEND: 0x49454e44,
101 |   // Ancillary Chunks
102 |   cHRM: 0x6348524d,
103 |   gAMA: 0x67414d41,
104 |   iCCP: 0x69434350,
105 |   sBIT: 0x73424954,
106 |   sRGB: 0x73524742,
107 |   bKGD: 0x624b4744,
108 |   hIST: 0x68495354,
109 |   tRNS: 0x74524e53,
110 |   pHYs: 0x70485973,
111 |   sPLT: 0x73504c54,
112 |   tIME: 0x74494d45,
113 |   iTXt: 0x69545874,
114 |   tEXt: 0x74455874,
115 |   zTXt: 0x7a545874,
116 | });
117 | 
118 |
119 |
120 | 121 | 122 | 123 | 124 |
125 | 126 | 129 | 130 |
131 | 132 | 135 | 136 | 137 | 138 | 139 | 140 | -------------------------------------------------------------------------------- /docs/crc32.js.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | JSDoc: Source: crc32.js 6 | 7 | 8 | 9 | 12 | 13 | 14 | 15 | 16 | 17 | 18 |
19 | 20 |

Source: crc32.js

21 | 22 | 23 | 24 | 25 | 26 | 27 |
28 |
29 |
// Fast CRC32 in JavaScript
 30 | // 101arrowz (https://github.com/101arrowz)
 31 | // License: MIT
 32 | 
 33 | // Modified from 101arrowz's gist:
 34 | // https://gist.github.com/101arrowz/e58695f7ccfdf74f60ba22018093edea
 35 | 
 36 | // This code uses the Slice-by-16 algorithm to achieve performance
 37 | // roughly 2x greater than all other JS CRC32 implementations (e.g.
 38 | // crc32-js).
 39 | 
 40 | // Per local testing, Slice-by-16 outperforms Slice-by-4 by around 50%
 41 | // and Slice-by-8/Slice-by-32/Slice-by-64 by 10-30%
 42 | 
 43 | // This CRC implementation can compete with WASM CRC implementations
 44 | // as well, and it tends to perform between 30% faster and 10% slower
 45 | // than WASM CRC32 (>1MB input chunks is faster on WASM).
 46 | 
 47 | // CRC32 table
 48 | // perf: signed integers are 2x more likely to be Smi
 49 | // Smi is a V8 datatype in (-2**30, 2**30-1)
 50 | // Smi operations are much faster
 51 | 
 52 | function CRC32() {
 53 |   const crct = new Int32Array(4096);
 54 |   for (let i = 0; i < 256; ++i) {
 55 |     let c = i,
 56 |       k = 9;
 57 |     while (--k) c = (c & 1 && -306674912) ^ (c >>> 1);
 58 |     crct[i] = c;
 59 |   }
 60 |   for (let i = 0; i < 256; ++i) {
 61 |     let lv = crct[i];
 62 |     for (let j = 256; j < 4096; j += 256)
 63 |       lv = crct[i | j] = (lv >>> 8) ^ crct[lv & 255];
 64 |   }
 65 | 
 66 |   const crcts = [];
 67 |   for (let i = 0; i < 16; ) {
 68 |     crcts[i] = crct.subarray(i << 8, ++i << 8);
 69 |   }
 70 | 
 71 |   // prettier-ignore
 72 |   const [t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16] =
 73 |     crcts;
 74 | 
 75 |   // raw CRC function
 76 |   // stream by passing in previous CRC output as second parameter
 77 |   return function crc32(d, c = -1) {
 78 |     let i = 0;
 79 |     const len = d.length;
 80 |     const max = len - 16;
 81 |     for (; i < max; ) {
 82 |       c =
 83 |         t16[d[i++] ^ (c & 255)] ^
 84 |         t15[d[i++] ^ ((c >> 8) & 255)] ^
 85 |         t14[d[i++] ^ ((c >> 16) & 255)] ^
 86 |         t13[d[i++] ^ (c >>> 24)] ^
 87 |         t12[d[i++]] ^
 88 |         t11[d[i++]] ^
 89 |         t10[d[i++]] ^
 90 |         t9[d[i++]] ^
 91 |         t8[d[i++]] ^
 92 |         t7[d[i++]] ^
 93 |         t6[d[i++]] ^
 94 |         t5[d[i++]] ^
 95 |         t4[d[i++]] ^
 96 |         t3[d[i++]] ^
 97 |         t2[d[i++]] ^
 98 |         t1[d[i++]];
 99 |     }
100 |     for (; i < len; ++i) {
101 |       c = t1[(c & 255) ^ d[i]] ^ (c >>> 8);
102 |     }
103 |     return ~c;
104 |   };
105 | }
106 | 
107 | /**
108 |  * Calculate the CRC32 checksum of an array-like buffer.
109 | 
110 |  * @function crc32
111 |  * @param {ArrayLike} buf the array-like buffer to calculate the CRC32 of
112 |  * @param {number} [c=-1] the initial CRC32 value
113 |  * @returns {number} the CRC32 checksum
114 |  */
115 | export default (() => {
116 |   // Avoid allocating global memory unless necessary
117 |   let init = false;
118 |   let crc32_instance;
119 |   return function crc32(d, c = -1) {
120 |     if (!init) {
121 |       crc32_instance = CRC32();
122 |       init = true;
123 |     }
124 |     return crc32_instance(d, c);
125 |   };
126 | })();
127 | 
128 |
129 |
130 | 131 | 132 | 133 | 134 |
135 | 136 | 139 | 140 |
141 | 142 | 145 | 146 | 147 | 148 | 149 | 150 | -------------------------------------------------------------------------------- /docs/decode.js.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | JSDoc: Source: decode.js 6 | 7 | 8 | 9 | 12 | 13 | 14 | 15 | 16 | 17 | 18 |
19 | 20 |

Source: decode.js

21 | 22 | 23 | 24 | 25 | 26 | 27 |
28 |
29 |
import crc32 from "./crc32.js";
 30 | import { ChunkType, PNG_HEADER } from "./constants.js";
 31 | import { chunkTypeToName, decode_IHDR } from "./chunks.js";
 32 | 
 33 | /**
 34 |  * @typedef {Object} PNGReaderOptions
 35 |  * @property {boolean} [checkCRC=false] whether to check and verify CRC values of each chunk (slower but can detect errors and corruption earlier during parsing)
 36 |  * @property {boolean} [copy=true] whether to return a sliced copy of each chunk data instead of a shallow subarray view into the input buffer
 37 |  **/
 38 | 
 39 | /**
 40 |  * Reads a PNG buffer up to the end of the IHDR chunk and returns this metadata, giving its width, height, bit depth, and color type.
 41 |  *
 42 |  * @param {ArrayBufferView} buf the PNG buffer to read
 43 |  * @param {PNGReaderOptions} [opts={}] optional parameters for reading
 44 |  * @returns {IHDRData}
 45 |  **/
 46 | export function readIHDR(buf, opts = {}) {
 47 |   let meta = {};
 48 |   reader(buf, { ...opts, copy: false }, (type, view) => {
 49 |     if (type === ChunkType.IHDR) {
 50 |       meta = decode_IHDR(view);
 51 |       return false; // stop reading the rest of PNG
 52 |     }
 53 |   });
 54 |   return meta;
 55 | }
 56 | 
 57 | /**
 58 |  * Parses a PNG buffer and returns an array of chunks, each containing a type code and its data.
 59 |  * The individual chunks are not decoded, but left as raw Uint8Array data. If `copy` option is `false`,
 60 |  * the chunk data is a view into the original ArrayBufferView (no copy involved), which is more memory efficient
 61 |  * for large files.
 62 |  *
 63 |  * @param {ArrayBufferView} buf
 64 |  * @param {PNGReaderOptions} [opts={}] optional parameters for reading PNG chunks
 65 |  * @returns {Chunk[]} an array of chunks
 66 |  */
 67 | export function readChunks(buf, opts = {}) {
 68 |   const chunks = [];
 69 |   reader(buf, opts, (type, data) => chunks.push({ type, data }));
 70 |   return chunks;
 71 | }
 72 | 
 73 | /**
 74 |  * A low-level interface for stream reading a PNG file. With the specified buffer, this function reads
 75 |  * each chunk and calls the `read(type, data)` function, which is expected to do something with the chunk data.
 76 |  * If the `read()` function returns `false`, the stream will stop reading the rest of the PNG file and safely end early,
 77 |  * otherwise it will expect to end on an IEND type chunk to form a valid PNG file.
 78 |  *
 79 |  * @param {ArrayBufferView} buf
 80 |  * @param {PNGReaderOptions} [opts={}] optional parameters for reading PNG chunks
  81 |  * @returns {void} nothing is returned; chunk data is delivered to the read() callback
 82 |  */
 83 | export function reader(buf, opts = {}, read = () => {}) {
 84 |   if (!ArrayBuffer.isView(buf)) {
 85 |     throw new Error("Expected a typed array such as Uint8Array");
 86 |   }
 87 | 
      // Support the optional-opts call form: reader(buf, readFn).
 88 |   if (typeof opts === "function") {
 89 |     read = opts;
 90 |     opts = {};
 91 |   }
 92 | 
      // DataView + Uint8Array views over the same underlying memory (no copy).
 93 |   const dv = new DataView(buf.buffer, buf.byteOffset, buf.byteLength);
 94 |   const data = new Uint8Array(dv.buffer, dv.byteOffset, dv.byteLength);
 95 | 
 96 |   if (data.length < PNG_HEADER.length) {
 97 |     throw new Error(`Buffer too small to contain PNG header`);
 98 |   }
 99 | 
100 |   const { checkCRC = false, copy = true } = opts;
101 | 
102 |   for (let i = 0; i < PNG_HEADER.length; i++) {
103 |     if (data[i] !== PNG_HEADER[i]) throw new Error(`Invalid PNG file header`);
104 |   }
105 | 
      // Each chunk is laid out big-endian as [length:4][type:4][data:length][CRC:4];
      // reading starts at offset 8, just past the PNG signature.
      // NOTE(review): chunkLength is not validated against data.length; a
      // truncated buffer makes getUint32/getInt32 throw a RangeError rather
      // than reporting a clean parse error — confirm whether that is intended.
106 |   let ended = false;
107 |   let hasMetIHDR = false;
108 |   let idx = 8;
109 |   while (idx < data.length) {
110 |     // Length of current chunk
111 |     const chunkLength = dv.getUint32(idx);
112 |     idx += 4;
113 | 
114 |     // Extract 4-byte type code
115 |     const type = dv.getUint32(idx);
116 | 
117 |     // First chunk must be IHDR
118 |     if (!hasMetIHDR) {
119 |       if (type !== ChunkType.IHDR) throw new Error("Invalid PNG: IHDR missing");
120 |       hasMetIHDR = true;
121 |     }
122 | 
123 |     const chunkDataIdx = idx + 4;
124 |     if (checkCRC) {
125 |       // Get the chunk contents including the type code but not CRC code
126 |       const chunkBuffer = data.subarray(idx, chunkDataIdx + chunkLength);
127 | 
128 |       // Int32 CRC value that comes after the chunk data
129 |       const crcCode = dv.getInt32(chunkDataIdx + chunkLength);
130 |       let crcExpect = crc32(chunkBuffer);
131 |       if (crcExpect !== crcCode) {
132 |         throw new Error(
133 |           `CRC value for ${chunkTypeToName(
134 |             type
135 |           )} does not match, PNG file may be corrupted`
136 |         );
137 |       }
138 |     }
139 | 
140 |     // parse the current chunk
      // copy=true hands the callback an independent slice; copy=false hands it
      // a subarray view into the caller's buffer (cheaper, but aliased).
141 |     const v = read(
142 |       type,
143 |       copy
144 |         ? data.slice(chunkDataIdx, chunkDataIdx + chunkLength)
145 |         : data.subarray(chunkDataIdx, chunkDataIdx + chunkLength)
146 |     );
      // The callback may return false to end the stream early; IEND always ends it.
147 |     if (v === false || type === ChunkType.IEND) {
148 |       // safely end the stream
149 |       ended = true;
150 |       break;
151 |     }
152 | 
153 |     // Skip past the chunk data and CRC value
154 |     idx = chunkDataIdx + chunkLength + 4;
155 |   }
156 | 
157 |   if (!ended) {
158 |     throw new Error("PNG ended without IEND chunk");
159 |   }
160 | }
161 | 
162 |
163 |
164 | 165 | 166 | 167 | 168 |
169 | 170 | 173 | 174 |
175 | 176 | 179 | 180 | 181 | 182 | 183 | 184 | -------------------------------------------------------------------------------- /docs/demo/assets/generate-D99VbjkE.js: -------------------------------------------------------------------------------- 1 | (function(){"use strict";self.onmessage=async u=>{const{width:r,height:l,depth:h,channels:a}=u.data,i=h===16?Uint16Array:Uint8ClampedArray,f=h===16?65535:255;let s=new i(r*l*a).fill(f);const p=[1,0,0],x=[0,0,1];for(let t=0;tg(m));s[t*a+0]=n,s[t*a+1]=c,s[t*a+2]=e}for(let t=1;t 2 | 3 | 4 | 5 | 6 | benchmark 7 | 20 | 21 | 22 | 23 |
24 |

canvas params:

25 |

26 |     
27 | 34 | 35 | 36 | 37 | 38 |
39 | 40 |
41 | 42 |
43 | 44 | 45 | -------------------------------------------------------------------------------- /docs/encode.js.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | JSDoc: Source: encode.js 6 | 7 | 8 | 9 | 12 | 13 | 14 | 15 | 16 | 17 | 18 |
19 | 20 |

Source: encode.js

21 | 22 | 23 | 24 | 25 | 26 | 27 |
28 |
29 |
import crc32 from "./crc32.js";
 30 | import { ChunkType, ColorType, PNG_HEADER } from "./constants.js";
 31 | import { encode_IHDR, encode_IDAT_raw } from "./chunks.js";
 32 | 
 33 | /**
 34 |  * @typedef {Object} EncodeOptions
 35 |  * @property {Uint8Array} data the raw pixel data to encode
 36 |  * @property {number} width the width of the image
 37 |  * @property {number} height the height of the image
 38 |  * @property {ColorType} [colorType=ColorType.RGBA] the color type of the pixel data
 39 |  * @property {number} [depth=8] the bit depth of the image
 40 |  * @property {number} [filterMethod=FilterMethod.Paeth] the filter method to use
 41 |  * @property {number} [firstFilter=filter] the first scanline filter method to use
 42 |  * @property {Chunk[]} [ancillary=[]] additional chunks to include in the PNG
 43 |  */
 44 | 
 45 | /**
 46 |  * Encodes a PNG buffer from the given image and options, using the specified `deflate` algorithm and optional compression options.
 47 |  * The deflate function should have the signature `(buf, [deflateOptions]) => Uint8Array`.
 48 |  *
 49 |  * @param {EncodeOptions} options the encoding options
 50 |  * @param {Function} deflate the sync deflate function to use
 51 |  * @param {Object} [deflateOptions] optional deflate options passed to the deflate() function
 52 |  */
 53 | export function encode(options = {}, deflate, deflateOptions) {
 54 |   const { data, ancillary = [], colorType = ColorType.RGBA } = options;
 55 |   if (!data) throw new Error(`must specify { data }`);
 56 |   if (!deflate) throw new Error(`must specify a deflate function`);
 57 |   if (colorType !== ColorType.RGB && colorType !== ColorType.RGBA) {
 58 |     throw new Error(
 59 |       "only RGB or RGBA colorType encoding is currently supported"
 60 |     );
 61 |   }
 62 |   return writeChunks([
 63 |     { type: ChunkType.IHDR, data: encode_IHDR(options) },
 64 |     ...ancillary,
 65 |     {
 66 |       type: ChunkType.IDAT,
 67 |       data: deflate(encode_IDAT_raw(data, options), deflateOptions),
 68 |     },
 69 |     { type: ChunkType.IEND },
 70 |   ]);
 71 | }
 72 | 
 73 | /**
 74 |  * Encodes just the raw PNG header into a Uint8Array buffer.
 75 |  * @returns {Uint8Array} the PNG header
 76 |  */
 77 | export function encodeHeader() {
 78 |   return PNG_HEADER.slice();
 79 | }
 80 | 
 81 | /**
 82 |  * Encodes a single PNG chunk into a Uint8Array buffer, by writing the chunk length, type, data, and CRC value.
 83 |  * @param {Chunk} chunk the chunk to encode
 84 |  * @returns {Uint8Array} the encoded chunk buffer
 85 |  */
 86 | export function encodeChunk(chunk) {
 87 |   const length = chunk.data ? chunk.data.length : 0;
 88 |   const output = new Uint8Array(4 + length + 4 + 4);
 89 |   const dv = new DataView(output.buffer, output.byteOffset, output.byteLength);
 90 |   // Write chunk length
 91 |   let idx = 0;
 92 |   encodeChunkRaw(output, dv, chunk, idx);
 93 |   return output;
 94 | }
 95 | 
 96 | /**
 97 |  * Writes and formats an array of PNG chunks into a complete PNG buffer, including the PNG header.
 98 |  *
 99 |  * @param {Chunk[]} chunks the array of chunks to encode
100 |  * @returns {Uint8Array} the encoded PNG buffer
101 |  */
102 | export function writeChunks(chunks) {
103 |   let totalSize = PNG_HEADER.length; // start with header
104 |   let idx = totalSize;
105 |   for (let chunk of chunks) {
106 |     totalSize += chunk.data ? chunk.data.length : 0;
107 |     totalSize += 12; // length, code, CRC value (4 bytes each)
108 |   }
109 | 
110 |   const output = new Uint8Array(totalSize);
111 |   const dv = new DataView(output.buffer);
112 | 
113 |   // write header
114 |   output.set(PNG_HEADER, 0);
115 | 
116 |   for (let chunk of chunks) {
117 |     idx = encodeChunkRaw(output, dv, chunk, idx);
118 |   }
119 | 
120 |   return output;
121 | }
122 | 
// Serializes a single chunk (length, type, data, CRC) into `output` at `idx`,
// using the supplied DataView for the multi-byte big-endian writes.
// Returns the offset just past the chunk's trailing CRC.
function encodeChunkRaw(output, dv, chunk, idx = 0) {
  const length = chunk.data ? chunk.data.length : 0;

  // 4-byte big-endian payload length
  dv.setUint32(idx, length);

  const typeIdx = idx + 4; // where the 4-byte type code starts
  const dataIdx = typeIdx + 4; // where the chunk payload starts
  const crcIdx = dataIdx + length; // where the trailing CRC goes

  // Write chunk type code, then payload (if any)
  dv.setUint32(typeIdx, chunk.type);
  if (chunk.data) output.set(chunk.data, dataIdx);

  // The CRC covers the type code and payload, but not the length field.
  dv.setInt32(crcIdx, crc32(output.subarray(typeIdx, crcIdx)));

  // next write offset
  return crcIdx + 4;
}
151 | 
152 |
153 |
154 | 155 | 156 | 157 | 158 |
159 | 160 | 163 | 164 |
165 | 166 |
167 | Documentation generated by JSDoc 4.0.3 on Tue May 28 2024 13:40:52 GMT+0100 (British Summer Time) 168 |
169 | 170 | 171 | 172 | 173 | 174 | -------------------------------------------------------------------------------- /docs/fonts/OpenSans-Bold-webfont.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mattdesl/png-tools/88c409f4edb749812e37f8d4c6e76407934e0494/docs/fonts/OpenSans-Bold-webfont.eot -------------------------------------------------------------------------------- /docs/fonts/OpenSans-Bold-webfont.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mattdesl/png-tools/88c409f4edb749812e37f8d4c6e76407934e0494/docs/fonts/OpenSans-Bold-webfont.woff -------------------------------------------------------------------------------- /docs/fonts/OpenSans-BoldItalic-webfont.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mattdesl/png-tools/88c409f4edb749812e37f8d4c6e76407934e0494/docs/fonts/OpenSans-BoldItalic-webfont.eot -------------------------------------------------------------------------------- /docs/fonts/OpenSans-BoldItalic-webfont.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mattdesl/png-tools/88c409f4edb749812e37f8d4c6e76407934e0494/docs/fonts/OpenSans-BoldItalic-webfont.woff -------------------------------------------------------------------------------- /docs/fonts/OpenSans-Italic-webfont.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mattdesl/png-tools/88c409f4edb749812e37f8d4c6e76407934e0494/docs/fonts/OpenSans-Italic-webfont.eot -------------------------------------------------------------------------------- /docs/fonts/OpenSans-Italic-webfont.woff: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/mattdesl/png-tools/88c409f4edb749812e37f8d4c6e76407934e0494/docs/fonts/OpenSans-Italic-webfont.woff -------------------------------------------------------------------------------- /docs/fonts/OpenSans-Light-webfont.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mattdesl/png-tools/88c409f4edb749812e37f8d4c6e76407934e0494/docs/fonts/OpenSans-Light-webfont.eot -------------------------------------------------------------------------------- /docs/fonts/OpenSans-Light-webfont.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mattdesl/png-tools/88c409f4edb749812e37f8d4c6e76407934e0494/docs/fonts/OpenSans-Light-webfont.woff -------------------------------------------------------------------------------- /docs/fonts/OpenSans-LightItalic-webfont.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mattdesl/png-tools/88c409f4edb749812e37f8d4c6e76407934e0494/docs/fonts/OpenSans-LightItalic-webfont.eot -------------------------------------------------------------------------------- /docs/fonts/OpenSans-LightItalic-webfont.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mattdesl/png-tools/88c409f4edb749812e37f8d4c6e76407934e0494/docs/fonts/OpenSans-LightItalic-webfont.woff -------------------------------------------------------------------------------- /docs/fonts/OpenSans-Regular-webfont.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mattdesl/png-tools/88c409f4edb749812e37f8d4c6e76407934e0494/docs/fonts/OpenSans-Regular-webfont.eot -------------------------------------------------------------------------------- /docs/fonts/OpenSans-Regular-webfont.woff: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/mattdesl/png-tools/88c409f4edb749812e37f8d4c6e76407934e0494/docs/fonts/OpenSans-Regular-webfont.woff -------------------------------------------------------------------------------- /docs/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | JSDoc: Home 6 | 7 | 8 | 9 | 12 | 13 | 14 | 15 | 16 | 17 | 18 |
19 | 20 |

Home

21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 |

30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 |
46 | 55 |
56 | 57 | 58 | 59 | 60 | 61 | 62 |
63 | 64 | 67 | 68 |
69 | 70 |
71 | Documentation generated by JSDoc 4.0.3 on Tue May 28 2024 13:40:52 GMT+0100 (British Summer Time) 72 |
73 | 74 | 75 | 76 | 77 | -------------------------------------------------------------------------------- /docs/scripts/linenumber.js: -------------------------------------------------------------------------------- 1 | /*global document */ 2 | (() => { 3 | const source = document.getElementsByClassName('prettyprint source linenums'); 4 | let i = 0; 5 | let lineNumber = 0; 6 | let lineId; 7 | let lines; 8 | let totalLines; 9 | let anchorHash; 10 | 11 | if (source && source[0]) { 12 | anchorHash = document.location.hash.substring(1); 13 | lines = source[0].getElementsByTagName('li'); 14 | totalLines = lines.length; 15 | 16 | for (; i < totalLines; i++) { 17 | lineNumber++; 18 | lineId = `line${lineNumber}`; 19 | lines[i].id = lineId; 20 | if (lineId === anchorHash) { 21 | lines[i].className += ' selected'; 22 | } 23 | } 24 | } 25 | })(); 26 | -------------------------------------------------------------------------------- /docs/scripts/prettify/Apache-License-2.0.txt: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. 
For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 
48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. 
Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. 
You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 
123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. 
In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. 
We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright [yyyy] [name of copyright owner] 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 203 | -------------------------------------------------------------------------------- /docs/scripts/prettify/lang-css.js: -------------------------------------------------------------------------------- 1 | PR.registerLangHandler(PR.createSimpleLexer([["pln",/^[\t\n\f\r ]+/,null," \t\r\n "]],[["str",/^"(?:[^\n\f\r"\\]|\\(?:\r\n?|\n|\f)|\\[\S\s])*"/,null],["str",/^'(?:[^\n\f\r'\\]|\\(?:\r\n?|\n|\f)|\\[\S\s])*'/,null],["lang-css-str",/^url\(([^"')]*)\)/i],["kwd",/^(?:url|rgb|!important|@import|@page|@media|@charset|inherit)(?=[^\w-]|$)/i,null],["lang-css-kw",/^(-?(?:[_a-z]|\\[\da-f]+ ?)(?:[\w-]|\\\\[\da-f]+ ?)*)\s*:/i],["com",/^\/\*[^*]*\*+(?:[^*/][^*]*\*+)*\//],["com", 2 | /^(?:<\!--|--\>)/],["lit",/^(?:\d+|\d*\.\d+)(?:%|[a-z]+)?/i],["lit",/^#[\da-f]{3,6}/i],["pln",/^-?(?:[_a-z]|\\[\da-f]+ ?)(?:[\w-]|\\\\[\da-f]+ ?)*/i],["pun",/^[^\s\w"']+/]]),["css"]);PR.registerLangHandler(PR.createSimpleLexer([],[["kwd",/^-?(?:[_a-z]|\\[\da-f]+ ?)(?:[\w-]|\\\\[\da-f]+ ?)*/i]]),["css-kw"]);PR.registerLangHandler(PR.createSimpleLexer([],[["str",/^[^"')]+/]]),["css-str"]); 3 | 
-------------------------------------------------------------------------------- /docs/scripts/prettify/prettify.js: -------------------------------------------------------------------------------- 1 | var q=null;window.PR_SHOULD_USE_CONTINUATION=!0; 2 | (function(){function L(a){function m(a){var f=a.charCodeAt(0);if(f!==92)return f;var b=a.charAt(1);return(f=r[b])?f:"0"<=b&&b<="7"?parseInt(a.substring(1),8):b==="u"||b==="x"?parseInt(a.substring(2),16):a.charCodeAt(1)}function e(a){if(a<32)return(a<16?"\\x0":"\\x")+a.toString(16);a=String.fromCharCode(a);if(a==="\\"||a==="-"||a==="["||a==="]")a="\\"+a;return a}function h(a){for(var f=a.substring(1,a.length-1).match(/\\u[\dA-Fa-f]{4}|\\x[\dA-Fa-f]{2}|\\[0-3][0-7]{0,2}|\\[0-7]{1,2}|\\[\S\s]|[^\\]/g),a= 3 | [],b=[],o=f[0]==="^",c=o?1:0,i=f.length;c122||(d<65||j>90||b.push([Math.max(65,j)|32,Math.min(d,90)|32]),d<97||j>122||b.push([Math.max(97,j)&-33,Math.min(d,122)&-33]))}}b.sort(function(a,f){return a[0]-f[0]||f[1]-a[1]});f=[];j=[NaN,NaN];for(c=0;ci[0]&&(i[1]+1>i[0]&&b.push("-"),b.push(e(i[1])));b.push("]");return b.join("")}function y(a){for(var f=a.source.match(/\[(?:[^\\\]]|\\[\S\s])*]|\\u[\dA-Fa-f]{4}|\\x[\dA-Fa-f]{2}|\\\d+|\\[^\dux]|\(\?[!:=]|[()^]|[^()[\\^]+/g),b=f.length,d=[],c=0,i=0;c=2&&a==="["?f[c]=h(j):a!=="\\"&&(f[c]=j.replace(/[A-Za-z]/g,function(a){a=a.charCodeAt(0);return"["+String.fromCharCode(a&-33,a|32)+"]"}));return f.join("")}for(var t=0,s=!1,l=!1,p=0,d=a.length;p=5&&"lang-"===b.substring(0,5))&&!(o&&typeof o[1]==="string"))c=!1,b="src";c||(r[f]=b)}i=d;d+=f.length;if(c){c=o[1];var j=f.indexOf(c),k=j+c.length;o[2]&&(k=f.length-o[2].length,j=k-c.length);b=b.substring(5);B(l+i,f.substring(0,j),e,p);B(l+i+j,c,C(b,c),p);B(l+i+k,f.substring(k),e,p)}else p.push(l+i,b)}a.e=p}var h={},y;(function(){for(var e=a.concat(m), 9 | l=[],p={},d=0,g=e.length;d=0;)h[n.charAt(k)]=r;r=r[1];n=""+r;p.hasOwnProperty(n)||(l.push(r),p[n]=q)}l.push(/[\S\s]/);y=L(l)})();var t=m.length;return e}function u(a){var 
m=[],e=[];a.tripleQuotedStrings?m.push(["str",/^(?:'''(?:[^'\\]|\\[\S\s]|''?(?=[^']))*(?:'''|$)|"""(?:[^"\\]|\\[\S\s]|""?(?=[^"]))*(?:"""|$)|'(?:[^'\\]|\\[\S\s])*(?:'|$)|"(?:[^"\\]|\\[\S\s])*(?:"|$))/,q,"'\""]):a.multiLineStrings?m.push(["str",/^(?:'(?:[^'\\]|\\[\S\s])*(?:'|$)|"(?:[^"\\]|\\[\S\s])*(?:"|$)|`(?:[^\\`]|\\[\S\s])*(?:`|$))/, 10 | q,"'\"`"]):m.push(["str",/^(?:'(?:[^\n\r'\\]|\\.)*(?:'|$)|"(?:[^\n\r"\\]|\\.)*(?:"|$))/,q,"\"'"]);a.verbatimStrings&&e.push(["str",/^@"(?:[^"]|"")*(?:"|$)/,q]);var h=a.hashComments;h&&(a.cStyleComments?(h>1?m.push(["com",/^#(?:##(?:[^#]|#(?!##))*(?:###|$)|.*)/,q,"#"]):m.push(["com",/^#(?:(?:define|elif|else|endif|error|ifdef|include|ifndef|line|pragma|undef|warning)\b|[^\n\r]*)/,q,"#"]),e.push(["str",/^<(?:(?:(?:\.\.\/)*|\/?)(?:[\w-]+(?:\/[\w-]+)+)?[\w-]+\.h|[a-z]\w*)>/,q])):m.push(["com",/^#[^\n\r]*/, 11 | q,"#"]));a.cStyleComments&&(e.push(["com",/^\/\/[^\n\r]*/,q]),e.push(["com",/^\/\*[\S\s]*?(?:\*\/|$)/,q]));a.regexLiterals&&e.push(["lang-regex",/^(?:^^\.?|[!+-]|!=|!==|#|%|%=|&|&&|&&=|&=|\(|\*|\*=|\+=|,|-=|->|\/|\/=|:|::|;|<|<<|<<=|<=|=|==|===|>|>=|>>|>>=|>>>|>>>=|[?@[^]|\^=|\^\^|\^\^=|{|\||\|=|\|\||\|\|=|~|break|case|continue|delete|do|else|finally|instanceof|return|throw|try|typeof)\s*(\/(?=[^*/])(?:[^/[\\]|\\[\S\s]|\[(?:[^\\\]]|\\[\S\s])*(?:]|$))+\/)/]);(h=a.types)&&e.push(["typ",h]);a=(""+a.keywords).replace(/^ | $/g, 12 | "");a.length&&e.push(["kwd",RegExp("^(?:"+a.replace(/[\s,]+/g,"|")+")\\b"),q]);m.push(["pln",/^\s+/,q," \r\n\t\xa0"]);e.push(["lit",/^@[$_a-z][\w$@]*/i,q],["typ",/^(?:[@_]?[A-Z]+[a-z][\w$@]*|\w+_t\b)/,q],["pln",/^[$_a-z][\w$@]*/i,q],["lit",/^(?:0x[\da-f]+|(?:\d(?:_\d+)*\d*(?:\.\d*)?|\.\d\+)(?:e[+-]?\d+)?)[a-z]*/i,q,"0123456789"],["pln",/^\\[\S\s]?/,q],["pun",/^.[^\s\w"-$'./@\\`]*/,q]);return x(m,e)}function D(a,m){function e(a){switch(a.nodeType){case 1:if(k.test(a.className))break;if("BR"===a.nodeName)h(a), 13 | a.parentNode&&a.parentNode.removeChild(a);else 
for(a=a.firstChild;a;a=a.nextSibling)e(a);break;case 3:case 4:if(p){var b=a.nodeValue,d=b.match(t);if(d){var c=b.substring(0,d.index);a.nodeValue=c;(b=b.substring(d.index+d[0].length))&&a.parentNode.insertBefore(s.createTextNode(b),a.nextSibling);h(a);c||a.parentNode.removeChild(a)}}}}function h(a){function b(a,d){var e=d?a.cloneNode(!1):a,f=a.parentNode;if(f){var f=b(f,1),g=a.nextSibling;f.appendChild(e);for(var h=g;h;h=g)g=h.nextSibling,f.appendChild(h)}return e} 14 | for(;!a.nextSibling;)if(a=a.parentNode,!a)return;for(var a=b(a.nextSibling,0),e;(e=a.parentNode)&&e.nodeType===1;)a=e;d.push(a)}var k=/(?:^|\s)nocode(?:\s|$)/,t=/\r\n?|\n/,s=a.ownerDocument,l;a.currentStyle?l=a.currentStyle.whiteSpace:window.getComputedStyle&&(l=s.defaultView.getComputedStyle(a,q).getPropertyValue("white-space"));var p=l&&"pre"===l.substring(0,3);for(l=s.createElement("LI");a.firstChild;)l.appendChild(a.firstChild);for(var d=[l],g=0;g=0;){var h=m[e];A.hasOwnProperty(h)?window.console&&console.warn("cannot override language handler %s",h):A[h]=a}}function C(a,m){if(!a||!A.hasOwnProperty(a))a=/^\s*=o&&(h+=2);e>=c&&(a+=2)}}catch(w){"console"in window&&console.log(w&&w.stack?w.stack:w)}}var v=["break,continue,do,else,for,if,return,while"],w=[[v,"auto,case,char,const,default,double,enum,extern,float,goto,int,long,register,short,signed,sizeof,static,struct,switch,typedef,union,unsigned,void,volatile"], 18 | "catch,class,delete,false,import,new,operator,private,protected,public,this,throw,true,try,typeof"],F=[w,"alignof,align_union,asm,axiom,bool,concept,concept_map,const_cast,constexpr,decltype,dynamic_cast,explicit,export,friend,inline,late_check,mutable,namespace,nullptr,reinterpret_cast,static_assert,static_cast,template,typeid,typename,using,virtual,where"],G=[w,"abstract,boolean,byte,extends,final,finally,implements,import,instanceof,null,native,package,strictfp,super,synchronized,throws,transient"], 19 | 
H=[G,"as,base,by,checked,decimal,delegate,descending,dynamic,event,fixed,foreach,from,group,implicit,in,interface,internal,into,is,lock,object,out,override,orderby,params,partial,readonly,ref,sbyte,sealed,stackalloc,string,select,uint,ulong,unchecked,unsafe,ushort,var"],w=[w,"debugger,eval,export,function,get,null,set,undefined,var,with,Infinity,NaN"],I=[v,"and,as,assert,class,def,del,elif,except,exec,finally,from,global,import,in,is,lambda,nonlocal,not,or,pass,print,raise,try,with,yield,False,True,None"], 20 | J=[v,"alias,and,begin,case,class,def,defined,elsif,end,ensure,false,in,module,next,nil,not,or,redo,rescue,retry,self,super,then,true,undef,unless,until,when,yield,BEGIN,END"],v=[v,"case,done,elif,esac,eval,fi,function,in,local,set,then,until"],K=/^(DIR|FILE|vector|(de|priority_)?queue|list|stack|(const_)?iterator|(multi)?(set|map)|bitset|u?(int|float)\d*)/,N=/\S/,O=u({keywords:[F,H,w,"caller,delete,die,do,dump,elsif,eval,exit,foreach,for,goto,if,import,last,local,my,next,no,our,print,package,redo,require,sub,undef,unless,until,use,wantarray,while,BEGIN,END"+ 21 | I,J,v],hashComments:!0,cStyleComments:!0,multiLineStrings:!0,regexLiterals:!0}),A={};k(O,["default-code"]);k(x([],[["pln",/^[^]*(?:>|$)/],["com",/^<\!--[\S\s]*?(?:--\>|$)/],["lang-",/^<\?([\S\s]+?)(?:\?>|$)/],["lang-",/^<%([\S\s]+?)(?:%>|$)/],["pun",/^(?:<[%?]|[%?]>)/],["lang-",/^]*>([\S\s]+?)<\/xmp\b[^>]*>/i],["lang-js",/^]*>([\S\s]*?)(<\/script\b[^>]*>)/i],["lang-css",/^]*>([\S\s]*?)(<\/style\b[^>]*>)/i],["lang-in.tag",/^(<\/?[a-z][^<>]*>)/i]]), 22 | ["default-markup","htm","html","mxml","xhtml","xml","xsl"]);k(x([["pln",/^\s+/,q," 
\t\r\n"],["atv",/^(?:"[^"]*"?|'[^']*'?)/,q,"\"'"]],[["tag",/^^<\/?[a-z](?:[\w-.:]*\w)?|\/?>$/i],["atn",/^(?!style[\s=]|on)[a-z](?:[\w:-]*\w)?/i],["lang-uq.val",/^=\s*([^\s"'>]*(?:[^\s"'/>]|\/(?=\s)))/],["pun",/^[/<->]+/],["lang-js",/^on\w+\s*=\s*"([^"]+)"/i],["lang-js",/^on\w+\s*=\s*'([^']+)'/i],["lang-js",/^on\w+\s*=\s*([^\s"'>]+)/i],["lang-css",/^style\s*=\s*"([^"]+)"/i],["lang-css",/^style\s*=\s*'([^']+)'/i],["lang-css", 23 | /^style\s*=\s*([^\s"'>]+)/i]]),["in.tag"]);k(x([],[["atv",/^[\S\s]+/]]),["uq.val"]);k(u({keywords:F,hashComments:!0,cStyleComments:!0,types:K}),["c","cc","cpp","cxx","cyc","m"]);k(u({keywords:"null,true,false"}),["json"]);k(u({keywords:H,hashComments:!0,cStyleComments:!0,verbatimStrings:!0,types:K}),["cs"]);k(u({keywords:G,cStyleComments:!0}),["java"]);k(u({keywords:v,hashComments:!0,multiLineStrings:!0}),["bsh","csh","sh"]);k(u({keywords:I,hashComments:!0,multiLineStrings:!0,tripleQuotedStrings:!0}), 24 | ["cv","py"]);k(u({keywords:"caller,delete,die,do,dump,elsif,eval,exit,foreach,for,goto,if,import,last,local,my,next,no,our,print,package,redo,require,sub,undef,unless,until,use,wantarray,while,BEGIN,END",hashComments:!0,multiLineStrings:!0,regexLiterals:!0}),["perl","pl","pm"]);k(u({keywords:J,hashComments:!0,multiLineStrings:!0,regexLiterals:!0}),["rb"]);k(u({keywords:w,cStyleComments:!0,regexLiterals:!0}),["js"]);k(u({keywords:"all,and,by,catch,class,else,extends,false,finally,for,if,in,is,isnt,loop,new,no,not,null,of,off,on,or,return,super,then,true,try,unless,until,when,while,yes", 25 | hashComments:3,cStyleComments:!0,multilineStrings:!0,tripleQuotedStrings:!0,regexLiterals:!0}),["coffee"]);k(x([],[["str",/^[\S\s]+/]]),["regex"]);window.prettyPrintOne=function(a,m,e){var h=document.createElement("PRE");h.innerHTML=a;e&&D(h,e);E({g:m,i:e,h:h});return h.innerHTML};window.prettyPrint=function(a){function m(){for(var e=window.PR_SHOULD_USE_CONTINUATION?l.now()+250:Infinity;p=0){var k=k.match(g),f,b;if(b= 26 | !k){b=n;for(var o=void 
0,c=b.firstChild;c;c=c.nextSibling)var i=c.nodeType,o=i===1?o?b:c:i===3?N.test(c.nodeValue)?b:o:o;b=(f=o===b?void 0:o)&&"CODE"===f.tagName}b&&(k=f.className.match(g));k&&(k=k[1]);b=!1;for(o=n.parentNode;o;o=o.parentNode)if((o.tagName==="pre"||o.tagName==="code"||o.tagName==="xmp")&&o.className&&o.className.indexOf("prettyprint")>=0){b=!0;break}b||((b=(b=n.className.match(/\blinenums\b(?::(\d+))?/))?b[1]&&b[1].length?+b[1]:!0:!1)&&D(n,b),d={g:k,h:n,i:b},E(d))}}p th:last-child { border-right: 1px solid #ddd; } 224 | 225 | .ancestors, .attribs { color: #999; } 226 | .ancestors a, .attribs a 227 | { 228 | color: #999 !important; 229 | text-decoration: none; 230 | } 231 | 232 | .clear 233 | { 234 | clear: both; 235 | } 236 | 237 | .important 238 | { 239 | font-weight: bold; 240 | color: #950B02; 241 | } 242 | 243 | .yes-def { 244 | text-indent: -1000px; 245 | } 246 | 247 | .type-signature { 248 | color: #aaa; 249 | } 250 | 251 | .name, .signature { 252 | font-family: Consolas, Monaco, 'Andale Mono', monospace; 253 | } 254 | 255 | .details { margin-top: 14px; border-left: 2px solid #DDD; } 256 | .details dt { width: 120px; float: left; padding-left: 10px; padding-top: 6px; } 257 | .details dd { margin-left: 70px; } 258 | .details ul { margin: 0; } 259 | .details ul { list-style-type: none; } 260 | .details li { margin-left: 30px; padding-top: 6px; } 261 | .details pre.prettyprint { margin: 0 } 262 | .details .object-value { padding-top: 0; } 263 | 264 | .description { 265 | margin-bottom: 1em; 266 | margin-top: 1em; 267 | } 268 | 269 | .code-caption 270 | { 271 | font-style: italic; 272 | font-size: 107%; 273 | margin: 0; 274 | } 275 | 276 | .source 277 | { 278 | border: 1px solid #ddd; 279 | width: 80%; 280 | overflow: auto; 281 | } 282 | 283 | .prettyprint.source { 284 | width: inherit; 285 | } 286 | 287 | .source code 288 | { 289 | font-size: 100%; 290 | line-height: 18px; 291 | display: block; 292 | padding: 4px 12px; 293 | margin: 0; 294 | background-color: #fff; 
295 | color: #4D4E53; 296 | } 297 | 298 | .prettyprint code span.line 299 | { 300 | display: inline-block; 301 | } 302 | 303 | .prettyprint.linenums 304 | { 305 | padding-left: 70px; 306 | -webkit-user-select: none; 307 | -moz-user-select: none; 308 | -ms-user-select: none; 309 | user-select: none; 310 | } 311 | 312 | .prettyprint.linenums ol 313 | { 314 | padding-left: 0; 315 | } 316 | 317 | .prettyprint.linenums li 318 | { 319 | border-left: 3px #ddd solid; 320 | } 321 | 322 | .prettyprint.linenums li.selected, 323 | .prettyprint.linenums li.selected * 324 | { 325 | background-color: lightyellow; 326 | } 327 | 328 | .prettyprint.linenums li * 329 | { 330 | -webkit-user-select: text; 331 | -moz-user-select: text; 332 | -ms-user-select: text; 333 | user-select: text; 334 | } 335 | 336 | .params .name, .props .name, .name code { 337 | color: #4D4E53; 338 | font-family: Consolas, Monaco, 'Andale Mono', monospace; 339 | font-size: 100%; 340 | } 341 | 342 | .params td.description > p:first-child, 343 | .props td.description > p:first-child 344 | { 345 | margin-top: 0; 346 | padding-top: 0; 347 | } 348 | 349 | .params td.description > p:last-child, 350 | .props td.description > p:last-child 351 | { 352 | margin-bottom: 0; 353 | padding-bottom: 0; 354 | } 355 | 356 | .disabled { 357 | color: #454545; 358 | } 359 | -------------------------------------------------------------------------------- /docs/styles/prettify-jsdoc.css: -------------------------------------------------------------------------------- 1 | /* JSDoc prettify.js theme */ 2 | 3 | /* plain text */ 4 | .pln { 5 | color: #000000; 6 | font-weight: normal; 7 | font-style: normal; 8 | } 9 | 10 | /* string content */ 11 | .str { 12 | color: #006400; 13 | font-weight: normal; 14 | font-style: normal; 15 | } 16 | 17 | /* a keyword */ 18 | .kwd { 19 | color: #000000; 20 | font-weight: bold; 21 | font-style: normal; 22 | } 23 | 24 | /* a comment */ 25 | .com { 26 | font-weight: normal; 27 | font-style: italic; 28 | 
} 29 | 30 | /* a type name */ 31 | .typ { 32 | color: #000000; 33 | font-weight: normal; 34 | font-style: normal; 35 | } 36 | 37 | /* a literal value */ 38 | .lit { 39 | color: #006400; 40 | font-weight: normal; 41 | font-style: normal; 42 | } 43 | 44 | /* punctuation */ 45 | .pun { 46 | color: #000000; 47 | font-weight: bold; 48 | font-style: normal; 49 | } 50 | 51 | /* lisp open bracket */ 52 | .opn { 53 | color: #000000; 54 | font-weight: bold; 55 | font-style: normal; 56 | } 57 | 58 | /* lisp close bracket */ 59 | .clo { 60 | color: #000000; 61 | font-weight: bold; 62 | font-style: normal; 63 | } 64 | 65 | /* a markup tag name */ 66 | .tag { 67 | color: #006400; 68 | font-weight: normal; 69 | font-style: normal; 70 | } 71 | 72 | /* a markup attribute name */ 73 | .atn { 74 | color: #006400; 75 | font-weight: normal; 76 | font-style: normal; 77 | } 78 | 79 | /* a markup attribute value */ 80 | .atv { 81 | color: #006400; 82 | font-weight: normal; 83 | font-style: normal; 84 | } 85 | 86 | /* a declaration */ 87 | .dec { 88 | color: #000000; 89 | font-weight: bold; 90 | font-style: normal; 91 | } 92 | 93 | /* a variable name */ 94 | .var { 95 | color: #000000; 96 | font-weight: normal; 97 | font-style: normal; 98 | } 99 | 100 | /* a function name */ 101 | .fun { 102 | color: #000000; 103 | font-weight: bold; 104 | font-style: normal; 105 | } 106 | 107 | /* Specify class=linenums on a pre to get line numbering */ 108 | ol.linenums { 109 | margin-top: 0; 110 | margin-bottom: 0; 111 | } 112 | -------------------------------------------------------------------------------- /docs/styles/prettify-tomorrow.css: -------------------------------------------------------------------------------- 1 | /* Tomorrow Theme */ 2 | /* Original theme - https://github.com/chriskempson/tomorrow-theme */ 3 | /* Pretty printing styles. Used with prettify.js. */ 4 | /* SPAN elements with the classes below are added by prettyprint. 
*/ 5 | /* plain text */ 6 | .pln { 7 | color: #4d4d4c; } 8 | 9 | @media screen { 10 | /* string content */ 11 | .str { 12 | color: #718c00; } 13 | 14 | /* a keyword */ 15 | .kwd { 16 | color: #8959a8; } 17 | 18 | /* a comment */ 19 | .com { 20 | color: #8e908c; } 21 | 22 | /* a type name */ 23 | .typ { 24 | color: #4271ae; } 25 | 26 | /* a literal value */ 27 | .lit { 28 | color: #f5871f; } 29 | 30 | /* punctuation */ 31 | .pun { 32 | color: #4d4d4c; } 33 | 34 | /* lisp open bracket */ 35 | .opn { 36 | color: #4d4d4c; } 37 | 38 | /* lisp close bracket */ 39 | .clo { 40 | color: #4d4d4c; } 41 | 42 | /* a markup tag name */ 43 | .tag { 44 | color: #c82829; } 45 | 46 | /* a markup attribute name */ 47 | .atn { 48 | color: #f5871f; } 49 | 50 | /* a markup attribute value */ 51 | .atv { 52 | color: #3e999f; } 53 | 54 | /* a declaration */ 55 | .dec { 56 | color: #f5871f; } 57 | 58 | /* a variable name */ 59 | .var { 60 | color: #c82829; } 61 | 62 | /* a function name */ 63 | .fun { 64 | color: #4271ae; } } 65 | /* Use higher contrast and text-weight for printable form. 
*/ 66 | @media print, projection { 67 | .str { 68 | color: #060; } 69 | 70 | .kwd { 71 | color: #006; 72 | font-weight: bold; } 73 | 74 | .com { 75 | color: #600; 76 | font-style: italic; } 77 | 78 | .typ { 79 | color: #404; 80 | font-weight: bold; } 81 | 82 | .lit { 83 | color: #044; } 84 | 85 | .pun, .opn, .clo { 86 | color: #440; } 87 | 88 | .tag { 89 | color: #006; 90 | font-weight: bold; } 91 | 92 | .atn { 93 | color: #404; } 94 | 95 | .atv { 96 | color: #060; } } 97 | /* Style */ 98 | /* 99 | pre.prettyprint { 100 | background: white; 101 | font-family: Consolas, Monaco, 'Andale Mono', monospace; 102 | font-size: 12px; 103 | line-height: 1.5; 104 | border: 1px solid #ccc; 105 | padding: 10px; } 106 | */ 107 | 108 | /* Specify class=linenums on a pre to get line numbering */ 109 | ol.linenums { 110 | margin-top: 0; 111 | margin-bottom: 0; } 112 | 113 | /* IE indents via margin-left */ 114 | li.L0, 115 | li.L1, 116 | li.L2, 117 | li.L3, 118 | li.L4, 119 | li.L5, 120 | li.L6, 121 | li.L7, 122 | li.L8, 123 | li.L9 { 124 | /* */ } 125 | 126 | /* Alternate shading for lines */ 127 | li.L1, 128 | li.L3, 129 | li.L5, 130 | li.L7, 131 | li.L9 { 132 | /* */ } 133 | -------------------------------------------------------------------------------- /docs/util.js.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | JSDoc: Source: util.js 6 | 7 | 8 | 9 | 12 | 13 | 14 | 15 | 16 | 17 | 18 |
19 | 20 |

Source: util.js

21 | 22 | 23 | 24 | 25 | 26 | 27 |
28 |
29 |
import { ColorType, FilterMethod } from "./constants.js";
 30 | 
 31 | /**
 32 |  * Concatenates a given array of array-like data (array buffers, typed arrays) into a single Uint8Array.
 33 |  *
 34 |  * @param {ArrayLike[]} chunks
 35 |  * @returns Uint8Array concatenated data
 36 |  */
 37 | export function flattenBuffers(chunks) {
 38 |   let totalSize = 0;
 39 |   for (let chunk of chunks) {
 40 |     totalSize += chunk.length;
 41 |   }
 42 | 
 43 |   const result = new Uint8Array(totalSize);
 44 |   for (let i = 0, pos = 0; i < chunks.length; i++) {
 45 |     let chunk = chunks[i];
 46 |     result.set(chunk, pos);
 47 |     pos += chunk.length;
 48 |   }
 49 |   return result;
 50 | }
 51 | 
 52 | export function decodeNULTerminatedString(
 53 |   data,
 54 |   offset = 0,
 55 |   maxLength = Infinity
 56 | ) {
 57 |   const dv = new DataView(data.buffer, data.byteOffset, data.byteLength);
 58 |   let str = "";
 59 |   for (let i = 0; offset < data.length && i < maxLength; offset++, i++) {
 60 |     const b = dv.getUint8(offset);
 61 |     if (b === 0x00) {
 62 |       break;
 63 |     } else {
 64 |       const chr = String.fromCharCode(b);
 65 |       str += chr;
 66 |     }
 67 |   }
 68 |   // String is always terminated with NUL so we can move forward one more
 69 |   offset++;
 70 |   return [str, offset];
 71 | }
 72 | 
 73 | export function mergeData(...arrays) {
 74 |   // convert to byte arrays
 75 |   arrays = arrays.map((a) => {
 76 |     if (typeof a === "number") return new Uint8Array([a]);
 77 |     if (typeof a === "string") return convertStringToBytes(a);
 78 |     return a;
 79 |   });
 80 | 
 81 |   // Get the total length of all arrays.
 82 |   let length = 0;
 83 |   for (let array of arrays) length += array.length;
 84 | 
 85 |   // Create a new array with total length and merge all source arrays.
 86 |   let mergedArray = new Uint8Array(length);
 87 |   let offset = 0;
 88 |   for (let item of arrays) {
 89 |     mergedArray.set(item, offset);
 90 |     offset += item.length;
 91 |   }
 92 |   return mergedArray;
 93 | }
 94 | 
 95 | export function convertStringToBytes(val) {
 96 |   const data = new Uint8Array(val.length);
 97 |   for (let i = 0; i < data.length; i++) {
 98 |     data[i] = val.charCodeAt(i);
 99 |   }
100 |   return data;
101 | }
102 | 
/**
 * Applies a PNG scanline filter (Sub, Up, Average, or Paeth) to one row of
 * `data`, writing the filtered bytes into `out`.
 *
 * @param {Uint8Array} out destination buffer
 * @param {Uint8Array} data raw (unfiltered) pixel bytes
 * @param {number} i scanline index (row 0 has no row above it)
 * @param {FilterMethod} filter filter type to apply
 * @param {number} bytesPerPixel bytes per pixel
 * @param {number} bytesPerScanline bytes per scanline
 * @param {number} srcIdxInBytes byte offset of this scanline within `data`
 * @param {number} dstIdxInBytesPlusOne byte offset within `out` (already past the filter-type byte)
 */
export function applyFilter(
  out,
  data,
  i,
  filter,
  bytesPerPixel,
  bytesPerScanline,
  srcIdxInBytes,
  dstIdxInBytesPlusOne
) {
  const firstRow = i === 0;
  switch (filter) {
    case FilterMethod.Paeth:
      for (let j = 0; j < bytesPerScanline; j++) {
        // a = left neighbor, b = byte above, c = upper-left neighbor;
        // all read as 0 at the left/top edges.
        const a =
          j < bytesPerPixel ? 0 : data[srcIdxInBytes + j - bytesPerPixel];
        const b = firstRow ? 0 : data[srcIdxInBytes + j - bytesPerScanline];
        const c =
          firstRow || j < bytesPerPixel
            ? 0
            : data[srcIdxInBytes + j - bytesPerScanline - bytesPerPixel];
        out[dstIdxInBytesPlusOne + j] =
          data[srcIdxInBytes + j] - paethPredictor(a, b, c);
      }
      break;
    case FilterMethod.Sub:
      for (let j = 0; j < bytesPerScanline; j++) {
        const left =
          j < bytesPerPixel ? 0 : data[srcIdxInBytes + j - bytesPerPixel];
        out[dstIdxInBytesPlusOne + j] = data[srcIdxInBytes + j] - left;
      }
      break;
    case FilterMethod.Up:
      for (let j = 0; j < bytesPerScanline; j++) {
        const above = firstRow ? 0 : data[srcIdxInBytes + j - bytesPerScanline];
        out[dstIdxInBytesPlusOne + j] = data[srcIdxInBytes + j] - above;
      }
      break;
    case FilterMethod.Average:
      for (let j = 0; j < bytesPerScanline; j++) {
        const left =
          j < bytesPerPixel ? 0 : data[srcIdxInBytes + j - bytesPerPixel];
        const above = firstRow ? 0 : data[srcIdxInBytes + j - bytesPerScanline];
        // Integer average of left and above, per the PNG Average filter.
        out[dstIdxInBytesPlusOne + j] =
          data[srcIdxInBytes + j] - ((left + above) >> 1);
      }
      break;
    default:
      // FilterMethod.None is never passed here in this version; callers only
      // invoke applyFilter for a non-None filter.
      break;
  }
}
154 | 
/**
 * The Paeth predictor from the PNG filtering spec: picks whichever of the
 * three neighbouring bytes is closest to the initial estimate
 * `left + above - upLeft`. Ties favour left, then above.
 *
 * @param {number} left byte to the left of the current byte
 * @param {number} above byte directly above the current byte
 * @param {number} upLeft byte above and to the left
 * @returns {number} the chosen predictor value
 */
function paethPredictor(left, above, upLeft) {
  const estimate = left + above - upLeft;
  const distLeft = Math.abs(estimate - left);
  const distAbove = Math.abs(estimate - above);
  const distUpLeft = Math.abs(estimate - upLeft);
  return distLeft <= distAbove && distLeft <= distUpLeft
    ? left
    : distAbove <= distUpLeft
    ? above
    : upLeft;
}
164 | 
/**
 * Converts a ColorType enum to a human readable string, for example ColorType.RGBA (= 6) becomes "RGBA".
 * Although these numerical constants are defined in the PNG spec, the exact string for each is not.
 *
 * @param {ColorType} colorType the type to convert
 * @returns {string} a readable string
 * @throws {Error} if the value does not match any known ColorType
 */
export function colorTypeToString(colorType) {
  const entry = Object.entries(ColorType).find(
    ([, value]) => value === colorType
  );
  // Fail with a descriptive message (matching colorTypeToChannels) rather
  // than the opaque TypeError from indexing [0] of undefined.
  if (!entry) throw new Error(`Invalid colorType ${colorType}`);
  return entry[0];
}
176 | 
/**
 * Returns the number of channels (samples per pixel) for a given ColorType,
 * e.g. ColorType.RGBA has 4 channels and ColorType.GRAYSCALE has 1.
 *
 * @param {ColorType} colorType the type to look up
 * @returns {number} the channel count (1, 2, 3 or 4)
 * @throws {Error} if the value does not match any known ColorType
 */
export function colorTypeToChannels(colorType) {
  // Strict-equality guard chain; unknown (or non-numeric) values fall
  // through to the error below.
  if (colorType === ColorType.GRAYSCALE) return 1; // grayscale
  if (colorType === ColorType.INDEXED) return 1; // indexed
  if (colorType === ColorType.GRAYSCALE_ALPHA) return 2; // grayscale + alpha
  if (colorType === ColorType.RGB) return 3; // RGB
  if (colorType === ColorType.RGBA) return 4; // RGBA
  throw new Error(`Invalid colorType ${colorType}`);
}
193 | 
194 |
195 |
196 | 197 | 198 | 199 | 200 |
201 | 202 | 205 | 206 |
207 | 208 |
209 | Documentation generated by JSDoc 4.0.3 on Tue May 28 2024 13:40:52 GMT+0100 (British Summer Time) 210 |
211 | 212 | 213 | 214 | 215 | 216 | -------------------------------------------------------------------------------- /examples/bun-encode.js: -------------------------------------------------------------------------------- 1 | import { ColorType, colorTypeToChannels, encode } from "../index.js"; 2 | import { deflate } from "pako"; 3 | 4 | const output = Bun.argv[2]; 5 | if (!output) 6 | throw new Error( 7 | "Must specify an output, example:\n bun run examples/bun-encode.js myfile.png" 8 | ); 9 | 10 | const width = 4096; 11 | const height = 4096; 12 | const colorType = ColorType.RGB; 13 | const depth = 8; 14 | const channels = colorTypeToChannels(colorType); 15 | 16 | const ArrType = depth === 16 ? Uint16Array : Uint8ClampedArray; 17 | const maxValue = depth === 16 ? 0xffff : 0xff; 18 | 19 | const data = new ArrType(width * height * channels).fill(maxValue); 20 | 21 | // create the first scanline of a gradient 22 | for (let x = 0; x < width; x++) { 23 | const u = width <= 1 ? 1 : x / (width - 1); 24 | const color = Math.round(u * maxValue); 25 | for (let c = 0; c < channels; c++) { 26 | data[x * channels + c] = color; 27 | } 28 | } 29 | 30 | // now quickly repeat this across the rest of the height 31 | for (let y = 1; y < height; y++) { 32 | const x = 0; 33 | const idx = x + y * width; 34 | data.copyWithin(idx * channels, 0, width * channels); 35 | } 36 | 37 | // encode an image 38 | console.time("encode"); 39 | const buf = encode( 40 | { 41 | width, 42 | height, 43 | data, 44 | colorType, 45 | depth, 46 | }, 47 | deflate 48 | ); 49 | console.timeEnd("encode"); 50 | 51 | // await Deno.mkdir(dirname(output), { recursive: true }); 52 | await Bun.write(output, buf); 53 | -------------------------------------------------------------------------------- /examples/deno-encode.js: -------------------------------------------------------------------------------- 1 | import { ColorType, colorTypeToChannels, encode } from "../index.js"; 2 | import { dirname } from 
"https://deno.land/std/path/mod.ts"; 3 | import { deflate } from "npm:pako"; 4 | 5 | const output = Deno.args[0]; 6 | if (!output) 7 | throw new Error( 8 | "Must specify an output, example:\n deno run -A deno-encode.js myfile.png" 9 | ); 10 | 11 | const width = 4096; 12 | const height = 4096; 13 | const colorType = ColorType.RGB; 14 | const depth = 8; 15 | const channels = colorTypeToChannels(colorType); 16 | 17 | const ArrType = depth === 16 ? Uint16Array : Uint8ClampedArray; 18 | const maxValue = depth === 16 ? 0xffff : 0xff; 19 | 20 | const data = new ArrType(width * height * channels).fill(maxValue); 21 | 22 | // create the first scanline of a gradient 23 | for (let x = 0; x < width; x++) { 24 | const u = width <= 1 ? 1 : x / (width - 1); 25 | const color = Math.round(u * maxValue); 26 | for (let c = 0; c < channels; c++) { 27 | data[x * channels + c] = color; 28 | } 29 | } 30 | 31 | // now quickly repeat this across the rest of the height 32 | for (let y = 1; y < height; y++) { 33 | const x = 0; 34 | const idx = x + y * width; 35 | data.copyWithin(idx * channels, 0, width * channels); 36 | } 37 | 38 | // encode an image 39 | console.time("encode"); 40 | const buf = encode( 41 | { 42 | width, 43 | height, 44 | data, 45 | colorType, 46 | depth, 47 | }, 48 | deflate 49 | ); 50 | console.timeEnd("encode"); 51 | 52 | await Deno.mkdir(dirname(output), { recursive: true }); 53 | await Deno.writeFile(output, buf); 54 | -------------------------------------------------------------------------------- /examples/deno-parallel-encode.js: -------------------------------------------------------------------------------- 1 | // Reference: 2 | // https://github.com/DavidBuchanan314/parallel-png-proposal 3 | 4 | import { 5 | ColorType, 6 | FilterMethod, 7 | colorTypeToChannels, 8 | encodeChunk, 9 | encodeHeader, 10 | ChunkType, 11 | encode_IHDR, 12 | colorTypeToString, 13 | flattenBuffers, 14 | } from "../index.js"; 15 | import { splitPixels } from "./util/pixels.js"; 16 | 
import { MultiProgressBar } from "https://deno.land/x/progress@v1.4.9/mod.ts"; 17 | import { adler32_combine } from "./util/adler32.js"; 18 | 19 | const output = Deno.args[0]; 20 | if (!output) 21 | throw new Error( 22 | "Must specify an output, example:\n deno run deno-parallel-encode.js myfile.png" 23 | ); 24 | 25 | const width = 16000; 26 | const height = 16000; 27 | const colorType = ColorType.RGB; 28 | const depth = 8; 29 | const channels = colorTypeToChannels(colorType); 30 | const filter = FilterMethod.Up; 31 | const pageCount = 16; 32 | console.log("Workers:", pageCount); 33 | 34 | const ArrType = depth === 16 ? Uint16Array : Uint8ClampedArray; 35 | const maxValue = depth === 16 ? 0xffff : 0xff; 36 | 37 | const data = new ArrType(width * height * channels); 38 | 39 | // quickly generate some image data 40 | const tileSize = Math.floor(width * 0.1); 41 | for (let y = 0; y < tileSize; y++) { 42 | for (let x = 0; x < width; x++) { 43 | for (let c = 0; c < channels; c++) { 44 | const idx = x + y * height; 45 | const px = Math.floor(x / tileSize); 46 | const py = Math.floor(y / (tileSize / 2)); 47 | const v = (px + py) % 2 === 0 ? 
maxValue : 0x00; 48 | data[idx * channels + c] = v; 49 | } 50 | } 51 | } 52 | 53 | // copy data across rest of buffer 54 | const tileChunkSize = tileSize * width * channels; 55 | let i = tileChunkSize; 56 | while (i < data.length) { 57 | data.copyWithin(i, 0, tileChunkSize); 58 | i += tileChunkSize; 59 | } 60 | 61 | // our image options 62 | const options = { 63 | width, 64 | height, 65 | depth, 66 | colorType, 67 | filter, 68 | }; 69 | 70 | console.log(`Image Size: %s x %s px`, width, height); 71 | console.log(`Depth: %s bpp`, depth); 72 | console.log(`Color Type: %s`, colorTypeToString(colorType)); 73 | 74 | // show progress 75 | const progressBar = new MultiProgressBar({ title: "encoding" }); 76 | 77 | const file = await Deno.open(output, { 78 | create: true, 79 | write: true, 80 | truncate: true, 81 | }); 82 | 83 | const fileWriter = file.writable.getWriter(); 84 | await fileWriter.ready; 85 | 86 | console.time("encode"); 87 | 88 | async function writeChunk(chunk) { 89 | return fileWriter.write(encodeChunk(chunk)); 90 | } 91 | 92 | // encode PNG header 93 | await fileWriter.write(encodeHeader()); 94 | 95 | // encode metadata 96 | await writeChunk({ 97 | type: ChunkType.IHDR, 98 | data: encode_IHDR(options), 99 | }); 100 | 101 | // number of pages i.e. 
number of threads that will be run 102 | // await progressBar.render(Array(pageCount)); 103 | 104 | const deflateOptions = { level: 3 }; 105 | 106 | const results = await processWorkers( 107 | data, 108 | options, 109 | pageCount, 110 | deflateOptions, 111 | (progresses) => 112 | progressBar.render( 113 | progresses.map((p) => ({ 114 | completed: p * 100, 115 | total: 100, 116 | })) 117 | ) 118 | ); 119 | 120 | let adler; 121 | for (let i = 0; i < results.length; i++) { 122 | const { result, adler: chunkAdler, size } = results[i]; 123 | adler = adler32_combine(adler, chunkAdler, size); 124 | 125 | let compressed = result; 126 | if (i === results.length - 1) { 127 | // last chunk, concat with adler32 128 | const adlerBytes = new Uint8Array(4); 129 | const dv = new DataView(adlerBytes.buffer); 130 | dv.setUint32(0, adler); 131 | compressed = flattenBuffers([result, adlerBytes]); 132 | } 133 | 134 | // encode the current IDAT chunk 135 | await writeChunk({ type: ChunkType.IDAT, data: compressed }); 136 | } 137 | 138 | // write ending chunk 139 | await writeChunk({ type: ChunkType.IEND }); 140 | 141 | // stop progress 142 | await progressBar.render(results.map((p) => ({ completed: 100, total: 100 }))); 143 | await progressBar.end(); 144 | 145 | // // end stream 146 | await fileWriter.close(); 147 | console.timeEnd("encode"); 148 | 149 | async function processWorkers( 150 | data, 151 | options, 152 | pageCount, 153 | deflateOptions, 154 | progress = () => {} 155 | ) { 156 | const { width, height, colorType = ColorType.RGBA } = options; 157 | const channels = colorTypeToChannels(colorType); 158 | 159 | const workerResults = Array(pageCount).fill(null); 160 | let remaining = pageCount; 161 | return new Promise((resolve) => { 162 | // split whole stream into smaller sections 163 | for (let { index, view, isFirst, isLast } of splitPixels( 164 | data, 165 | width, 166 | height, 167 | channels, 168 | pageCount 169 | )) { 170 | const worker = new Worker( 171 | new 
URL("./util/parallel-encode-worker.js", import.meta.url), 172 | { 173 | type: "module", 174 | } 175 | ); 176 | 177 | // we need to create a slice to pass it off to the worker 178 | // otherwise subarray view gets detached 179 | const sliced = view.slice(); 180 | 181 | worker.postMessage( 182 | { 183 | ...options, // image encoding options and data 184 | view: sliced, 185 | index, 186 | isFirst, 187 | isLast, 188 | deflateOptions, 189 | }, 190 | [sliced.buffer] 191 | ); 192 | const handler = async (ev) => { 193 | const r = ev.data; 194 | workerResults[r.index] = r; 195 | if (r.result) { 196 | worker.removeEventListener("message", handler); 197 | worker.terminate(); 198 | remaining--; 199 | const progresses = workerResults.map((r) => { 200 | return r ? r.progress || 0 : 0; 201 | }); 202 | await progress(progresses); 203 | if (remaining === 0) { 204 | resolve(workerResults); 205 | } else if (remaining < 0) { 206 | throw new Error("Worker received too many events"); 207 | } 208 | } 209 | }; 210 | worker.addEventListener("message", handler); 211 | } 212 | }); 213 | } 214 | -------------------------------------------------------------------------------- /examples/encode-ancillary.js: -------------------------------------------------------------------------------- 1 | import { 2 | ChunkType, 3 | ColorType, 4 | colorTypeToChannels, 5 | encode, 6 | encode_iTXt, 7 | encode_pHYs_PPI, 8 | } from "../index.js"; 9 | import { deflate } from "pako"; 10 | import fs from "node:fs/promises"; 11 | import { dirname } from "node:path"; 12 | import getDocument from "canvas-dimensions"; 13 | 14 | const output = process.argv[2]; 15 | if (!output) 16 | throw new Error( 17 | "Must specify an output, example:\n node encode-ancillary.js myfile.png" 18 | ); 19 | 20 | const pixelsPerInch = 300; 21 | const { canvasWidth: width, canvasHeight: height } = getDocument({ 22 | dimensions: "A4", 23 | pixelsPerInch, 24 | units: "cm", 25 | }); 26 | 27 | const colorType = ColorType.RGB; 28 | const channels 
= colorTypeToChannels(colorType); 29 | const data = new Uint8ClampedArray(width * height * channels); 30 | 31 | // fill with pure black pixels 32 | data.fill(0x00); 33 | 34 | // encode an image 35 | const buf = encode( 36 | { 37 | width, 38 | height, 39 | data, 40 | colorType, 41 | ancillary: [ 42 | { type: ChunkType.pHYs, data: encode_pHYs_PPI(pixelsPerInch) }, 43 | { 44 | // encode some JSON into the PNG as well for fun 45 | type: ChunkType.iTXt, 46 | data: encode_iTXt({ 47 | keyword: "metadata", 48 | text: JSON.stringify({ seed: "some-random-seed" }), 49 | }), 50 | }, 51 | ], 52 | }, 53 | deflate 54 | ); 55 | 56 | // mkdirp and write file 57 | try { 58 | await fs.mkdir(dirname(output), { recursive: true }); 59 | } catch (err) {} 60 | await fs.writeFile(output, buf); 61 | -------------------------------------------------------------------------------- /examples/encode-color-space.js: -------------------------------------------------------------------------------- 1 | import { deflate } from "pako"; 2 | import { parse as parseICC } from "icc"; 3 | 4 | import iccTransform from "./util/icc-transform.js"; 5 | import getDocument from "canvas-dimensions"; 6 | import fs from "node:fs/promises"; 7 | import { 8 | encode, 9 | ColorType, 10 | FilterMethod, 11 | colorTypeToChannels, 12 | encode_pHYs_PPI, 13 | ChunkType, 14 | encode_iCCP, 15 | colorTypeToString, 16 | } from "../index.js"; 17 | 18 | import * as path from "node:path"; 19 | import { fileURLToPath } from "node:url"; 20 | import { getTimestamp } from "./util/save.js"; 21 | 22 | const __dirname = path.dirname(fileURLToPath(import.meta.url)); 23 | 24 | const output = process.argv[2]; 25 | if (!output) 26 | throw new Error( 27 | "Must specify an output, example:\n node encode-color-space.js tmp/out/dir" 28 | ); 29 | 30 | const r = await fs.stat(output); 31 | if (!r.isDirectory()) 32 | throw new Error(`Expected output to be a directory: ${output}`); 33 | 34 | const size = { 35 | // Print size 36 | dimensions: "A2", 37 
| pixelsPerInch: 300, 38 | units: "cm", 39 | }; 40 | 41 | const { 42 | width: physicalWidth, 43 | height: physicalHeight, 44 | canvasWidth: width, 45 | canvasHeight: height, 46 | units, 47 | pixelsPerInch, 48 | } = getDocument(size); 49 | 50 | const options = { 51 | width, 52 | height, 53 | depth: 16, 54 | filter: FilterMethod.Paeth, 55 | colorType: ColorType.RGB, 56 | // Color space that the source pixels are in 57 | srcColorSpace: "display-p3", 58 | // Color space that the destination pixels are in 59 | dstColorSpace: "display-p3", 60 | // What PPI to embed the image with 61 | pixelsPerInch, 62 | }; 63 | 64 | await run(options); 65 | 66 | async function run(options) { 67 | const { colorType, depth, srcColorSpace, dstColorSpace } = options; 68 | 69 | console.log( 70 | "Size: %s x %s %s (%s PPI)", 71 | physicalWidth, 72 | physicalHeight, 73 | units, 74 | pixelsPerInch 75 | ); 76 | console.log("Canvas Size: %s x %s px (%s bit)", width, height, depth); 77 | console.log( 78 | "Color Type: %s (%s channels)", 79 | colorTypeToString(colorType), 80 | colorTypeToChannels(colorType) 81 | ); 82 | 83 | const data = createImage(options); 84 | 85 | const buf = await encodeForPrint(data, options, { level: 3 }); 86 | 87 | const suffix = 88 | srcColorSpace === dstColorSpace 89 | ? srcColorSpace 90 | : `${srcColorSpace}-to-${dstColorSpace}`; 91 | 92 | const fname = `${getTimestamp()}-${depth}-bit-${suffix}.png`; 93 | const outFile = path.resolve(output, fname); 94 | console.log("Writing", fname); 95 | await fs.writeFile(outFile, buf); 96 | } 97 | 98 | function createImage(imageOptions) { 99 | const { width, height, depth, colorType } = imageOptions; 100 | const channels = colorTypeToChannels(colorType); 101 | const ArrayType = depth === 8 ? Uint8ClampedArray : Uint16Array; 102 | const maxValue = depth === 8 ? 
0xff : 0xffff; 103 | let data = new ArrayType(width * height * channels).fill(maxValue); 104 | for (let y = 0, i = 0; y < height; y++) { 105 | for (let x = 0; x < width; x++, i++) { 106 | const u = (x + 1) / width; 107 | const v = (y + 1) / height; 108 | const R = u; 109 | const G = 0.5; 110 | const B = v; 111 | data[i * channels + 0] = Math.round(R * maxValue); 112 | data[i * channels + 1] = Math.round(G * maxValue); 113 | data[i * channels + 2] = Math.round(B * maxValue); 114 | } 115 | } 116 | return data; 117 | } 118 | 119 | async function encodeForPrint(data, options, deflateOptions) { 120 | const profiles = { 121 | AdobeRGB1998: "AdobeRGB1998", 122 | sRGB: "sRGB IEC61966-2.1", 123 | "display-p3": "Display P3", 124 | }; 125 | 126 | const { 127 | srcColorSpace = "sRGB", 128 | dstColorSpace = "sRGB", 129 | colorType, 130 | depth, 131 | pixelsPerInch, 132 | } = options; 133 | 134 | const channels = colorTypeToChannels(colorType); 135 | const maxValue = depth === 8 ? 0xff : 0xffff; 136 | 137 | const profileDir = path.join(__dirname, "./profiles"); 138 | 139 | const srcProfileFname = profiles[srcColorSpace]; 140 | if (!srcProfileFname) throw new Error(`no profile ${srcColorSpace}`); 141 | const dstProfileFname = profiles[dstColorSpace]; 142 | if (!dstProfileFname) throw new Error(`no profile ${dstColorSpace}`); 143 | 144 | const srcProfile = await fs.readFile( 145 | path.resolve(profileDir, `${srcProfileFname}.icc`) 146 | ); 147 | let dstProfile = srcProfile; 148 | 149 | // Color spaces do not match, we will convert A to B 150 | if (srcColorSpace !== dstColorSpace) { 151 | // Get the destination profile 152 | dstProfile = await fs.readFile( 153 | path.resolve(profileDir, `${dstProfileFname}.icc`) 154 | ); 155 | 156 | // little-cms doesn't yet directly support for 16 bit, so if using that, 157 | // we will convert to float32 data and back 158 | let inData; 159 | if (depth === 8) { 160 | inData = data; 161 | } else { 162 | inData = new Float32Array(width * height * 
channels); 163 | for (let i = 0; i < data.length; i++) { 164 | inData[i] = fromByte(data[i]); 165 | } 166 | } 167 | 168 | // Apply the actual color space transform with little-cms (WASM) 169 | console.log(`Transforming ${srcColorSpace} to ${dstColorSpace}`); 170 | const result = await iccTransform({ 171 | srcProfile: toArrayBuffer(srcProfile), 172 | dstProfile: toArrayBuffer(dstProfile), 173 | channels, 174 | width: width, 175 | height: height, 176 | data: inData, 177 | }); 178 | 179 | // bring the result from float back to 16 bit 180 | if (depth === 8) { 181 | data = result; 182 | } else { 183 | if (data.length !== result.length) 184 | throw new Error("data and transformed result size mismatch"); 185 | for (let i = 0; i < data.length; i++) { 186 | data[i] = toByte(result[i]); 187 | } 188 | } 189 | } 190 | 191 | const dstProfileCompressed = deflate(dstProfile, deflateOptions); 192 | const { description: dstProfileName } = parseICC(dstProfile); 193 | 194 | console.log("Destination Profile:", dstProfileName); 195 | 196 | console.log("Embedding ICC Profile:", dstProfileName); 197 | const iCCP = encode_iCCP({ 198 | name: dstProfileName, 199 | data: dstProfileCompressed, 200 | }); 201 | 202 | return encode( 203 | { 204 | ...options, 205 | data, 206 | ancillary: [ 207 | { type: ChunkType.iCCP, data: iCCP }, 208 | pixelsPerInch 209 | ? 
{ type: ChunkType.pHYs, data: encode_pHYs_PPI(pixelsPerInch) } 210 | : false, 211 | ].filter(Boolean), 212 | }, 213 | deflate, 214 | deflateOptions 215 | ); 216 | 217 | function toByte(v) { 218 | return Math.max(0, Math.min(maxValue, Math.round(v * maxValue))); 219 | } 220 | 221 | function fromByte(b) { 222 | return b / maxValue; 223 | } 224 | 225 | function toArrayBuffer(buffer) { 226 | const arrayBuffer = new ArrayBuffer(buffer.length); 227 | const view = new Uint8Array(arrayBuffer); 228 | for (let i = 0; i < buffer.length; ++i) { 229 | view[i] = buffer[i]; 230 | } 231 | return arrayBuffer; 232 | } 233 | } 234 | -------------------------------------------------------------------------------- /examples/encode-simple.js: -------------------------------------------------------------------------------- 1 | import { ColorType, colorTypeToChannels, encode } from "../index.js"; 2 | import { deflate } from "pako"; 3 | 4 | const width = 4096; 5 | const height = 4096; 6 | const colorType = ColorType.RGB; 7 | const depth = 8; 8 | const channels = colorTypeToChannels(colorType); 9 | 10 | const ArrType = depth === 16 ? Uint16Array : Uint8ClampedArray; 11 | const maxValue = depth === 16 ? 0xffff : 0xff; 12 | 13 | const data = new ArrType(width * height * channels).fill(maxValue); 14 | 15 | // create the first scanline of a gradient 16 | for (let x = 0; x < width; x++) { 17 | const u = width <= 1 ? 
1 : x / (width - 1); 18 | const color = Math.round(u * maxValue); 19 | for (let c = 0; c < channels; c++) { 20 | data[x * channels + c] = color; 21 | } 22 | } 23 | 24 | // now quickly repeat this across the rest of the height 25 | for (let y = 1; y < height; y++) { 26 | const x = 0; 27 | const idx = x + y * width; 28 | data.copyWithin(idx * channels, 0, width * channels); 29 | } 30 | 31 | // encode an image 32 | console.time("encode"); 33 | const buf = encode( 34 | { 35 | width, 36 | height, 37 | data, 38 | colorType, 39 | depth, 40 | }, 41 | deflate 42 | ); 43 | console.timeEnd("encode"); 44 | 45 | console.log("Encoded Bytes:", buf.byteLength); 46 | -------------------------------------------------------------------------------- /examples/encode-stream.js: -------------------------------------------------------------------------------- 1 | import { 2 | ColorType, 3 | FilterMethod, 4 | colorTypeToChannels, 5 | encodeChunk, 6 | encodeHeader, 7 | ChunkType, 8 | encode_IHDR, 9 | encode_IDAT_raw, 10 | colorTypeToString, 11 | } from "../index.js"; 12 | import { Deflate } from "pako"; 13 | import fs from "node:fs"; 14 | import { dirname } from "node:path"; 15 | import { SingleBar } from "cli-progress"; 16 | import { splitPixels } from "./util/pixels.js"; 17 | 18 | const output = process.argv[2]; 19 | if (!output) 20 | throw new Error( 21 | "Must specify an output, example:\n node encode-stream.js myfile.png" 22 | ); 23 | 24 | const width = 16000; 25 | const height = 16000; 26 | const colorType = ColorType.RGB; 27 | const depth = 16; 28 | const channels = colorTypeToChannels(colorType); 29 | const filter = FilterMethod.None; 30 | const deflateOptions = { level: 3 }; 31 | 32 | const ArrType = depth === 16 ? Uint16Array : Uint8ClampedArray; 33 | const maxValue = depth === 16 ? 
0xffff : 0xff; 34 | 35 | const data = new ArrType(width * height * channels); 36 | 37 | // quickly generate some image data 38 | const tileSize = Math.floor(width * 0.1); 39 | for (let y = 0; y < tileSize; y++) { 40 | for (let x = 0; x < width; x++) { 41 | for (let c = 0; c < channels; c++) { 42 | const idx = x + y * height; 43 | const px = Math.floor(x / tileSize); 44 | const py = Math.floor(y / (tileSize / 2)); 45 | const v = (px + py) % 2 === 0 ? maxValue : 0x00; 46 | data[idx * channels + c] = v; 47 | } 48 | } 49 | } 50 | 51 | // copy data across rest of buffer 52 | const tileChunkSize = tileSize * width * channels; 53 | let i = tileChunkSize; 54 | while (i < data.length) { 55 | data.copyWithin(i, 0, tileChunkSize); 56 | i += tileChunkSize; 57 | } 58 | 59 | // our image options 60 | const options = { 61 | width, 62 | height, 63 | depth, 64 | colorType, 65 | filter, 66 | }; 67 | 68 | console.log(`Image Size: %s x %s px`, width, height); 69 | console.log(`Depth: %s bpp`, depth); 70 | console.log(`Color Type: %s`, colorTypeToString(colorType)); 71 | 72 | // mkdirp and create write stream 73 | try { 74 | await fs.mkdir(dirname(output), { recursive: true }); 75 | } catch (err) {} 76 | 77 | // show progress 78 | const progressBar = new SingleBar(); 79 | progressBar.start(100, 0); 80 | 81 | // create write stream 82 | const stream = fs.createWriteStream(output); 83 | stream.on("close", () => { 84 | console.log("File written to", output); 85 | }); 86 | 87 | function writeChunk(chunk) { 88 | stream.write(encodeChunk(chunk)); 89 | } 90 | 91 | console.time("encode"); 92 | 93 | // encode PNG header 94 | stream.write(encodeHeader()); 95 | 96 | // encode metadata 97 | writeChunk({ 98 | type: ChunkType.IHDR, 99 | data: encode_IHDR(options), 100 | }); 101 | 102 | // ... write any ancillary chunks ... 
103 | 104 | // create and write IDAT chunk 105 | const deflator = new Deflate(deflateOptions); 106 | 107 | // Number of pages worth of data to process at a time 108 | // Note: you can simplify this code by just doing a single 109 | // page and deflator.push(idat, true) 110 | // The main benefits of splitting it up into pages: 111 | // 1. less in memory at one time (for really huge images) 112 | // 2. user isn't waiting a long time upfront 113 | const pageCount = 4; 114 | 115 | // current page and its total size 116 | let page = 0; 117 | let totalSize; 118 | 119 | // Overload the function to extract each individual compressed chunk 120 | deflator.onData = function (chunk) { 121 | // ensure the Deflator has its chunks 122 | this.chunks.push(chunk); 123 | 124 | // encode the current IDAT chunk 125 | writeChunk({ type: ChunkType.IDAT, data: chunk }); 126 | 127 | // determine total progress 128 | const strmProgress = (totalSize - this.strm.avail_in) / totalSize; 129 | const progress = Math.round((100 * (page + strmProgress)) / pageCount); 130 | progressBar.update(progress); 131 | }; 132 | 133 | // split whole stream into smaller sections 134 | for (let { view, isLast } of splitPixels( 135 | data, 136 | width, 137 | height, 138 | channels, 139 | pageCount 140 | )) { 141 | const idat = encode_IDAT_raw(view, { 142 | ...options, 143 | // Important: if you are going to do multiple separate IDAT chunks 144 | // you need to make sure the first scanline's filter is not one that 145 | // relies on the Up/Above scanline 146 | firstFilter: FilterMethod.Sub, 147 | }); 148 | totalSize = idat.byteLength; 149 | deflator.push(idat, isLast); 150 | page++; 151 | } 152 | 153 | if (deflator.err) { 154 | throw deflator.msg || msg[deflator.err]; 155 | } 156 | 157 | // write ending chunk 158 | writeChunk({ type: ChunkType.IEND }); 159 | 160 | // stop progress 161 | progressBar.stop(); 162 | 163 | // end stream 164 | stream.end(); 165 | console.timeEnd("encode"); 166 | 
-------------------------------------------------------------------------------- /examples/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | benchmark 7 | 20 | 21 | 22 |
23 |

canvas params:

24 |

25 |     
26 | 33 | 34 | 35 | 36 | 37 |
38 | 39 |
40 | 41 |
42 | 43 | 44 | 45 | -------------------------------------------------------------------------------- /examples/inspect.js: -------------------------------------------------------------------------------- 1 | import { inflate } from "pako"; 2 | import { 3 | colorTypeToChannels, 4 | colorTypeToString, 5 | readChunks, 6 | decode_iCCP, 7 | ChunkType, 8 | decode_pHYs_PPI, 9 | decode_IHDR, 10 | decode_iTXt, 11 | chunkTypeToName, 12 | } from "../index.js"; 13 | import fs from "node:fs/promises"; 14 | import { parse as parseICC } from "icc"; 15 | 16 | const input = process.argv[2]; 17 | if (!input) 18 | throw new Error( 19 | "Must specify an input, example:\n node inspect.js myfile.png [icc_out.icc]" 20 | ); 21 | 22 | const icc = process.argv[3]; 23 | 24 | const buf = await fs.readFile(input); 25 | const chunks = readChunks(buf, { copy: false }); 26 | 27 | const { width, height, colorType, depth } = decode_IHDR( 28 | chunks.find(chunkFilter(ChunkType.IHDR)).data 29 | ); 30 | 31 | console.log(`Size: ${width} x ${height} px`); 32 | console.log( 33 | `Format: ${colorTypeToString(colorType)} (${colorTypeToChannels( 34 | colorType 35 | )} channels)` 36 | ); 37 | console.log(`Depth: ${depth} bpp`); 38 | 39 | const pHYs = chunks.find(chunkFilter(ChunkType.pHYs)); 40 | if (pHYs) { 41 | console.log("pixelsPerInch:", Math.round(decode_pHYs_PPI(pHYs.data))); 42 | } 43 | 44 | const iCCP = chunks.find(chunkFilter(ChunkType.iCCP)); 45 | if (iCCP) { 46 | const { name, data } = decode_iCCP(iCCP.data); 47 | console.log("Embedded Profile:", name); 48 | 49 | // decompress to get the ICC color profile 50 | const profileDecompressed = Buffer.from(inflate(data)); 51 | 52 | // if a subsequent input is given, it will write ICC out 53 | if (icc) await fs.writeFile(icc, profileDecompressed); 54 | 55 | // parse it with the 'icc' module to get more info 56 | const profileParsed = parseICC(profileDecompressed); 57 | console.log("Color Profile:", profileParsed); 58 | } else { 59 | console.log("No color 
profile data"); 60 | } 61 | 62 | const texts = [ChunkType.iTXt]; 63 | for (let type of texts) { 64 | const texts = chunks.filter(chunkFilter(type)); 65 | if (texts.length) { 66 | console.log("%s:", chunkTypeToName(type)); 67 | } 68 | for (let { data } of texts) { 69 | const txt = decode_iTXt(data); 70 | console.log(txt); 71 | } 72 | } 73 | 74 | function chunkFilter(type) { 75 | return (chunk) => chunk.type === type; 76 | } 77 | -------------------------------------------------------------------------------- /examples/node-encode.js: -------------------------------------------------------------------------------- 1 | import { 2 | ColorType, 3 | FilterMethod, 4 | colorTypeToChannels, 5 | encode, 6 | } from "../index.js"; 7 | import { deflate } from "pako"; 8 | import fs from "node:fs/promises"; 9 | import { dirname } from "node:path"; 10 | 11 | const output = process.argv[2]; 12 | if (!output) 13 | throw new Error( 14 | "Must specify an output, example:\n node node-encode.js myfile.png" 15 | ); 16 | 17 | // Encode faster but generates bigger files 18 | const IS_FAST_MODE = true; 19 | 20 | const width = 4096; 21 | const height = 4096; 22 | const colorType = ColorType.RGB; 23 | const depth = 8; 24 | const channels = colorTypeToChannels(colorType); 25 | 26 | const ArrType = depth === 16 ? Uint16Array : Uint8ClampedArray; 27 | const maxValue = depth === 16 ? 0xffff : 0xff; 28 | 29 | const data = new ArrType(width * height * channels).fill(maxValue); 30 | 31 | // create the first scanline of a gradient 32 | for (let x = 0; x < width; x++) { 33 | const u = width <= 1 ? 
1 : x / (width - 1); 34 | const color = Math.round(u * maxValue); 35 | for (let c = 0; c < channels; c++) { 36 | data[x * channels + c] = color; 37 | } 38 | } 39 | 40 | // now quickly repeat this across the rest of the height 41 | for (let y = 1; y < height; y++) { 42 | const x = 0; 43 | const idx = x + y * width; 44 | data.copyWithin(idx * channels, 0, width * channels); 45 | } 46 | 47 | // encode an image 48 | console.time("encode"); 49 | const buf = encode( 50 | { 51 | width, 52 | height, 53 | data, 54 | colorType, 55 | depth, 56 | // speed things up a little by using no filtering, 57 | // at the cost of filesize 58 | filter: IS_FAST_MODE ? FilterMethod.None : FilterMethod.Paeth, 59 | }, 60 | deflate, 61 | { level: IS_FAST_MODE ? 3 : 6 } 62 | ); 63 | console.timeEnd("encode"); 64 | 65 | // mkdirp and write file 66 | try { 67 | await fs.mkdir(dirname(output), { recursive: true }); 68 | } catch (err) {} 69 | await fs.writeFile(output, buf); 70 | -------------------------------------------------------------------------------- /examples/profiles/AdobeRGB1998.icc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mattdesl/png-tools/88c409f4edb749812e37f8d4c6e76407934e0494/examples/profiles/AdobeRGB1998.icc -------------------------------------------------------------------------------- /examples/profiles/Display P3.icc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mattdesl/png-tools/88c409f4edb749812e37f8d4c6e76407934e0494/examples/profiles/Display P3.icc -------------------------------------------------------------------------------- /examples/profiles/sRGB IEC61966-2.1.icc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mattdesl/png-tools/88c409f4edb749812e37f8d4c6e76407934e0494/examples/profiles/sRGB IEC61966-2.1.icc 
-------------------------------------------------------------------------------- /examples/read-ihdr.js: -------------------------------------------------------------------------------- 1 | import { colorTypeToChannels, colorTypeToString, readIHDR } from "../index.js"; 2 | import fs from "node:fs"; 3 | 4 | const input = process.argv[2]; 5 | if (!input) 6 | throw new Error( 7 | "Must specify an input, example:\n node read-ihdr.js myfile.png" 8 | ); 9 | 10 | // The first 33 bytes of a PNG file are the header + IHDR 11 | const byteCount = 33; 12 | const bytes = await readBytes(input, byteCount); 13 | 14 | // read IHDR chunk data 15 | const data = readIHDR(bytes); 16 | 17 | console.log(`Size: ${data.width} x ${data.height} px`); 18 | console.log( 19 | `Format: ${colorTypeToString(data.colorType)} (${colorTypeToChannels( 20 | data.colorType 21 | )} channels)` 22 | ); 23 | console.log(`Depth: ${data.depth} bpp`); 24 | 25 | async function readBytes(path, nBytes) { 26 | const chunks = []; 27 | const stream = fs.createReadStream(path, { 28 | start: 0, 29 | end: nBytes - 1, 30 | }); 31 | for await (let chunk of stream) { 32 | chunks.push(chunk); 33 | } 34 | return Buffer.concat(chunks); 35 | } 36 | -------------------------------------------------------------------------------- /examples/util/adler32.js: -------------------------------------------------------------------------------- 1 | const BASE = 65521; 2 | 3 | export function adler32_combine(adler1, adler2, len2) { 4 | // https://github.com/DavidBuchanan314/parallel-png-proposal 5 | // https://github.com/madler/zlib/blob/cacf7f1d4e3d44d871b605da3b647f07d718623f/adler32.c#L143 6 | if (adler1 == null) return adler2; 7 | let a1hi = (adler1 >>> 16) & 0xffff; 8 | let a1lo = adler1 & 0xffff; 9 | let a2hi = (adler2 >>> 16) & 0xffff; 10 | let a2lo = adler2 & 0xffff; 11 | let sum1 = (a1lo + a2lo - 1) % BASE; 12 | if (sum1 < 0) sum1 += BASE; // Handle negative results 13 | let sum2 = (len2 * a1lo + a1hi + a2hi - len2) % BASE; 14 
| if (sum2 < 0) sum2 += BASE; // Handle negative results 15 | return (sum1 | (sum2 << 16)) >>> 0; // Ensure unsigned 32-bit result 16 | } 17 | 18 | export function adler32(buf, adler = 1, len = buf.length, pos = 0) { 19 | // from pako 20 | // https://github.com/nodeca/pako/blob/62cb729e7813176ce2d2694b89c8724680fca383/lib/zlib/adler32.js#L26 21 | let s1 = (adler & 0xffff) | 0, 22 | s2 = ((adler >>> 16) & 0xffff) | 0, 23 | n = 0; 24 | 25 | while (len !== 0) { 26 | // Set limit ~ twice less than 5552, to keep 27 | // s2 in 31-bits, because we force signed ints. 28 | // in other case %= will fail. 29 | n = len > 2000 ? 2000 : len; 30 | len -= n; 31 | 32 | do { 33 | s1 = (s1 + buf[pos++]) | 0; 34 | s2 = (s2 + s1) | 0; 35 | } while (--n); 36 | 37 | s1 %= 65521; 38 | s2 %= 65521; 39 | } 40 | 41 | return (s1 | (s2 << 16) | 0) >>> 0; 42 | } 43 | -------------------------------------------------------------------------------- /examples/util/icc-transform.js: -------------------------------------------------------------------------------- 1 | import { 2 | instantiate, 3 | cmsFLAGS_NOCACHE, 4 | cmsFLAGS_HIGHRESPRECALC, 5 | cmsFLAGS_NOOPTIMIZE, 6 | cmsFLAGS_BLACKPOINTCOMPENSATION, 7 | cmsInfoDescription, 8 | INTENT_PERCEPTUAL, 9 | } from "lcms-wasm"; 10 | 11 | export default async function iccTransform(opts = {}) { 12 | const lcms = await instantiate(); 13 | const { 14 | data, 15 | width, 16 | height, 17 | intent = INTENT_PERCEPTUAL, 18 | blackPointCompensation = true, 19 | } = opts; 20 | const nPixels = width * height; 21 | const IS_FLOAT = data instanceof Float32Array; 22 | 23 | const srcProfile = openProfile(opts.srcProfile); 24 | const dstProfile = openProfile(opts.dstProfile); 25 | 26 | function openProfile(buf) { 27 | const profile = lcms.cmsOpenProfileFromMem( 28 | new Uint8Array(buf), 29 | buf.byteLength 30 | ); 31 | if (!profile) throw new Error(`could not open profile ${path}`); 32 | return profile; 33 | } 34 | 35 | function getProfileName(profile) { 36 | return 
lcms.cmsGetProfileInfoASCII(profile, cmsInfoDescription, "en", "US"); 37 | } 38 | 39 | const profiles = [srcProfile, dstProfile]; 40 | profiles.forEach((c) => 41 | console.log( 42 | `Loaded ${getProfileName(c)} (Color Space: ${lcms.cmsGetColorSpaceASCII( 43 | c 44 | )})` 45 | ) 46 | ); 47 | 48 | let flags = cmsFLAGS_NOCACHE | cmsFLAGS_HIGHRESPRECALC | cmsFLAGS_NOOPTIMIZE; 49 | if (blackPointCompensation) { 50 | flags |= cmsFLAGS_BLACKPOINTCOMPENSATION; 51 | } 52 | 53 | const inputFormat = lcms.cmsFormatterForColorspaceOfProfile( 54 | srcProfile, 55 | IS_FLOAT ? 4 : 1, 56 | IS_FLOAT 57 | ); 58 | const outputFormat = lcms.cmsFormatterForColorspaceOfProfile( 59 | dstProfile, 60 | IS_FLOAT ? 4 : 1, 61 | IS_FLOAT 62 | ); 63 | 64 | const transform = lcms.cmsCreateTransform( 65 | srcProfile, 66 | inputFormat, 67 | dstProfile, 68 | outputFormat, 69 | intent, 70 | flags 71 | ); 72 | 73 | // Clean up the profiles once the transform is created 74 | lcms.cmsCloseProfile(srcProfile); 75 | lcms.cmsCloseProfile(dstProfile); 76 | 77 | const transformed = lcms.cmsDoTransform(transform, data, nPixels); 78 | return transformed; 79 | } 80 | -------------------------------------------------------------------------------- /examples/util/parallel-encode-worker.js: -------------------------------------------------------------------------------- 1 | import { FilterMethod, encode_IDAT_raw, flattenBuffers } from "../../index.js"; 2 | import { Deflate, constants } from "pako"; 3 | import { adler32 } from "./adler32.js"; 4 | 5 | const DEFAULT_INITIAL_FILTER = FilterMethod.Sub; 6 | 7 | self.onmessage = (msg) => { 8 | const options = msg.data; 9 | const { 10 | view, 11 | index, 12 | isFirst, 13 | isLast, 14 | deflateOptions, 15 | filter = FilterMethod.Paeth, 16 | } = options; 17 | 18 | let firstFilter; 19 | if (filter === FilterMethod.None || filter === FilterMethod.Sub) { 20 | firstFilter = filter; 21 | } else { 22 | firstFilter = DEFAULT_INITIAL_FILTER; 23 | } 24 | 25 | let idat = 
encode_IDAT_raw(view, { 26 | ...options, 27 | // Important: we need to encode the chunk with the first scanline filter being one that 28 | // is safe, i.e. not relying on any pixel data 'up' or 'above' 29 | firstFilter, 30 | }); 31 | 32 | const zChunks = []; 33 | 34 | const totalSize = idat.byteLength; 35 | 36 | let deflate = new Deflate({ ...deflateOptions, raw: true }); 37 | 38 | deflate.onData = function (zChunk) { 39 | zChunks.push(zChunk); 40 | const progress = (totalSize - this.strm.avail_in) / totalSize; 41 | self.postMessage({ progress, index }); 42 | }; 43 | deflate.push(idat, false); 44 | if (isLast) deflate.push([], constants.Z_FINISH); 45 | else deflate.push([], constants.Z_FULL_FLUSH); 46 | 47 | if (isFirst) { 48 | // push header 49 | let deflate = new Deflate(deflateOptions); 50 | deflate.push([], true); 51 | zChunks.unshift(deflate.result.slice(0, 2)); 52 | } 53 | 54 | let result = flattenBuffers(zChunks); 55 | const adler = adler32(idat); 56 | 57 | self.postMessage({ 58 | index, 59 | result, 60 | progress: 1, 61 | adler, 62 | size: idat.byteLength, 63 | }); 64 | }; 65 | -------------------------------------------------------------------------------- /examples/util/pixels.js: -------------------------------------------------------------------------------- 1 | export function* splitPixels(data, width, height, channels, splitCount) { 2 | const chunkHeight = Math.floor(height / splitCount); 3 | const chunkSize = chunkHeight * width * channels; 4 | for (let i = 0; i < splitCount; i++) { 5 | const start = i * chunkSize; 6 | const end = i === splitCount - 1 ? 
data.length : start + chunkSize; 7 | yield { 8 | index: i, 9 | start, 10 | end, 11 | view: data.subarray(start, end), 12 | isFirst: i === 0, 13 | isLast: i === splitCount - 1, 14 | }; 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /examples/util/save.js: -------------------------------------------------------------------------------- 1 | let link; 2 | 3 | export function downloadBuffer(buf, opts = {}) { 4 | const { filename = "download" } = opts; 5 | const blob = new Blob([buf], opts); 6 | return downloadBlob(blob, { filename }); 7 | } 8 | 9 | export function downloadBlob(blob, opts = {}) { 10 | return new Promise((resolve) => { 11 | const filename = opts.filename || getTimestamp(); 12 | if (!link) { 13 | link = document.createElement("a"); 14 | link.style.visibility = "hidden"; 15 | link.target = "_blank"; 16 | } 17 | link.download = filename; 18 | link.href = window.URL.createObjectURL(blob); 19 | document.body.appendChild(link); 20 | link.onclick = () => { 21 | link.onclick = () => {}; 22 | setTimeout(() => { 23 | window.URL.revokeObjectURL(blob); 24 | if (link.parentElement) link.parentElement.removeChild(link); 25 | link.removeAttribute("href"); 26 | resolve({ filename }); 27 | }); 28 | }; 29 | link.click(); 30 | }); 31 | } 32 | 33 | export function getTimestamp() { 34 | const today = new Date(); 35 | const yyyy = today.getFullYear(); 36 | let [mm, dd, hh, min, sec] = [ 37 | today.getMonth() + 1, // Months start at 0! 38 | today.getDate(), 39 | today.getHours(), 40 | today.getMinutes(), 41 | today.getSeconds(), 42 | ].map((c) => String(c).padStart(2, "0")); 43 | return `${yyyy}.${mm}.${dd}-${hh}.${min}.${sec}`; 44 | } 45 | 46 | export async function canvasToBuffer(canvas, opts = {}) { 47 | let blob; 48 | if (typeof canvas.convertToBlob === "function") { 49 | // for off screen canvas, e.g. 
worker threads 50 | blob = await canvas.convertToBlob(opts); 51 | } else { 52 | blob = await new Promise((resolve) => 53 | canvas.toBlob(resolve, opts.type, opts.quality) 54 | ); 55 | } 56 | const arrayBuf = await blob.arrayBuffer(); 57 | const buf = new Uint8Array(arrayBuf); 58 | return buf; 59 | } 60 | -------------------------------------------------------------------------------- /examples/web/generate.js: -------------------------------------------------------------------------------- 1 | self.onmessage = async (msg) => { 2 | const { width, height, depth, channels } = msg.data; 3 | const ArrType = depth === 16 ? Uint16Array : Uint8ClampedArray; 4 | const maxValue = depth === 16 ? 0xffff : 0xff; 5 | 6 | let data = new ArrType(width * height * channels).fill(maxValue); 7 | 8 | const A = [1, 0, 0]; 9 | const B = [0, 0, 1]; 10 | 11 | for (let x = 0; x < width; x++) { 12 | const u = width <= 1 ? 1 : x / (width - 1); 13 | const [r, g, b] = lerpArray(A, B, u).map((n) => toByte(n)); 14 | data[x * channels + 0] = r; 15 | data[x * channels + 1] = g; 16 | data[x * channels + 2] = b; 17 | } 18 | 19 | // quickly generate an image of expected size 20 | for (let y = 1; y < height; y++) { 21 | const x = 0; 22 | const idx = x + y * width; 23 | data.copyWithin(idx * channels, 0, width * channels); 24 | } 25 | 26 | // A much slower gradient but more pretty, using more colours 27 | // for (let y = 0, i = 0; y < height; y++) { 28 | // for (let x = 0; x < width; x++, i++) { 29 | // const u = (x + 1) / width; 30 | // const v = (y + 1) / height; 31 | // const R = u; 32 | // const G = 0.5; 33 | // const B = v; 34 | // data[i * channels + 0] = toByte(R); 35 | // data[i * channels + 1] = toByte(G); 36 | // data[i * channels + 2] = toByte(B); 37 | // if (channels === 4) data[i * channels + 3] = maxValue; 38 | // } 39 | // } 40 | self.postMessage(data); 41 | 42 | function lerp(min, max, t) { 43 | return min * (1 - t) + max * t; 44 | } 45 | 46 | function lerpArray(min, max, t, out = []) { 
47 | for (var i = 0; i < min.length; i++) { 48 | out[i] = lerp(min[i], max[i], t); 49 | } 50 | return out; 51 | } 52 | 53 | function toByte(v) { 54 | return Math.max(0, Math.min(maxValue, Math.round(v * maxValue))); 55 | } 56 | }; 57 | -------------------------------------------------------------------------------- /examples/web/main.js: -------------------------------------------------------------------------------- 1 | import getDimensions from "canvas-dimensions"; 2 | import * as pako from "pako"; 3 | import * as FastPNG from "fast-png"; 4 | 5 | import { 6 | encode, 7 | ColorType, 8 | FilterMethod, 9 | ChunkType, 10 | writeChunks, 11 | readChunks, 12 | colorTypeToChannels, 13 | encode_pHYs_PPI, 14 | flattenBuffers, 15 | } from "../../index.js"; 16 | 17 | import { canvasToBuffer, downloadBlob } from "../util/save.js"; 18 | import prettyBytes from "pretty-bytes"; 19 | 20 | const params = { 21 | dimensions: "A4", 22 | pixelsPerInch: 300, 23 | units: "cm", 24 | depth: 16, 25 | colorType: ColorType.RGBA, 26 | filter: FilterMethod.Paeth, 27 | }; 28 | 29 | const { canvasWidth: width, canvasHeight: height } = getDimensions({ 30 | ...params, 31 | }); 32 | 33 | const colorTypeToString = (n) => { 34 | const entries = Object.entries(ColorType); 35 | return entries.find((e) => e[1] === n)[0]; 36 | }; 37 | 38 | const { depth, colorType, pixelsPerInch, filter } = params; 39 | const channels = colorTypeToChannels(colorType); 40 | 41 | const status = document.querySelector(".status"); 42 | const cancel = document.querySelector(".cancel"); 43 | const download = document.querySelector(".download"); 44 | const img = document.querySelector(".image"); 45 | let curBlob; 46 | 47 | download.onclick = (ev) => { 48 | ev.preventDefault(); 49 | if (curBlob) downloadBlob(curBlob, { filename: "download.png" }); 50 | }; 51 | 52 | // let canvas = document.createElement("canvas"); 53 | // const container = document.querySelector(".canvas-container"); 54 | // container.appendChild(canvas); 55 | 
const updateStatus = (n) => { 56 | let str; 57 | if (n === "worker") str = "Use a WebWorker to encode off the main thread."; 58 | else if (n === "file") 59 | str = 60 | "Use WebWorker + File System API to stream encode directly into a file on disk (Chrome only)."; 61 | else if (n === "canvas") { 62 | str = 63 | "Use Canvas2D toBlob() to encode a PNG, which only supports 8 bits per pixel."; 64 | } else if (n === "cpu") { 65 | str = 66 | "Use the main thread (no worker) to encode, which is simpler but halts the UI, does not report progress, and cannot be cancelled."; 67 | } else if (n === "fast-png") { 68 | str = "Use the fast-png module to encode, for benchmark comparison."; 69 | } 70 | status.textContent = str; 71 | }; 72 | 73 | const typeSelect = document.querySelector("select"); 74 | typeSelect.oninput = (ev) => { 75 | updateStatus(ev.currentTarget.value); 76 | curBlob = null; 77 | img.src = ""; 78 | download.setAttribute("disabled", true); 79 | }; 80 | updateStatus(typeSelect.value); 81 | 82 | document.querySelector(".info").textContent = JSON.stringify( 83 | { 84 | ...params, 85 | colorType: colorTypeToString(colorType), 86 | depth, 87 | width, 88 | height, 89 | }, 90 | null, 91 | 2 92 | ); 93 | 94 | async function encodeWorkerBuffered(data, signal) { 95 | let buffers = []; 96 | await encodeWorker(data, (d) => buffers.push(d), signal); 97 | return flattenBuffers(buffers); 98 | } 99 | 100 | async function encodeWorker(data, write, signal) { 101 | const options = { 102 | width, 103 | height, 104 | depth, 105 | colorType, 106 | data, 107 | pixelsPerInch, 108 | filter, 109 | }; 110 | return new Promise((resolve) => { 111 | const worker = new Worker(new URL("./worker.js", import.meta.url), { 112 | type: "module", 113 | }); 114 | const close = () => { 115 | worker.terminate(); 116 | resolve(); 117 | }; 118 | if (signal) signal.addEventListener("abort", close); 119 | worker.postMessage(options, [data.buffer]); 120 | const handler = async (ev) => { 121 | const data = 
ev.data; 122 | status.textContent = `Progress: ${Math.round(data.progress * 100)}%`; 123 | write(data.chunk); 124 | if (data.finished) { 125 | worker.removeEventListener("message", handler); 126 | if (signal) signal.removeEventListener("abort", close); 127 | close(); 128 | } 129 | }; 130 | worker.addEventListener("message", handler); 131 | }); 132 | } 133 | 134 | function encodeCPU(data) { 135 | const ancillary = []; 136 | 137 | // optionally embed resolution 138 | if (pixelsPerInch) { 139 | ancillary.push({ 140 | type: ChunkType.pHYs, 141 | data: encode_pHYs_PPI(pixelsPerInch), 142 | }); 143 | } 144 | 145 | const options = { 146 | width, 147 | height, 148 | data, 149 | depth, 150 | colorType, 151 | ancillary, 152 | filter, 153 | }; 154 | return encode(options, pako.deflate, { level: 3 }); 155 | } 156 | 157 | async function encodeCanvas(data) { 158 | const canvas = create8BitCanvas(data); 159 | let buffer = await canvasToBuffer(canvas); 160 | // if we have additional metadata, we can re-encode without having to re-compress 161 | if (pixelsPerInch) { 162 | let chunks = readChunks(buffer); 163 | // strip out an existing pHYs chunk if it exists 164 | chunks = chunks.filter((c) => c.type !== ChunkType.pHYs); 165 | // include the new chunk 166 | chunks.splice(1, 0, { 167 | type: ChunkType.pHYs, 168 | data: encode_pHYs_PPI(pixelsPerInch), 169 | }); 170 | // re-encode the chunks (does not re-compress the data stream) 171 | buffer = writeChunks(chunks); 172 | } 173 | return buffer; 174 | } 175 | 176 | async function getFileStream(opts = {}) { 177 | if (!window.showSaveFilePicker) { 178 | throw new Error("Not supported on this browser"); 179 | } 180 | const { filename = "download.png" } = opts; 181 | 182 | // create a new handle 183 | const newHandle = await window.showSaveFilePicker({ 184 | excludeAcceptAllOption: true, 185 | id: "benchmark", 186 | startIn: "downloads", 187 | suggestedName: filename, 188 | types: [ 189 | { 190 | description: "PNG Image", 191 | accept: { 
"image/png": [".png"] }, 192 | }, 193 | ], 194 | }); 195 | 196 | return newHandle.createWritable(); 197 | } 198 | 199 | async function encodeFileWriter(data, writer, signal) { 200 | let chain = Promise.resolve(); 201 | 202 | // create a chain of promises that write one after another 203 | const write = (chunk) => { 204 | chain = chain.then(() => writer.write(chunk)); 205 | }; 206 | 207 | // wait for encoder to finish 208 | await encodeWorker(data, write, signal); 209 | 210 | // make sure all writes are finished too 211 | await chain; 212 | 213 | // close writer 214 | await writer.close(); 215 | } 216 | 217 | function create8BitCanvas(data) { 218 | const rgba = new Uint8ClampedArray(width * height * 4); 219 | if (depth === 8 && channels === 4) { 220 | rgba.set(data); 221 | } else { 222 | const maxOutSample = 0xff; 223 | const maxInSample = depth === 16 ? 0xffff : 0xff; 224 | for (let i = 0; i < width * height; i++) { 225 | for (let c = 0; c < 4; c++) { 226 | if (c < channels) { 227 | let v = data[i * channels + c]; 228 | v = Math.round((v / maxInSample) * maxOutSample); 229 | v = Math.max(0, Math.min(maxOutSample, v)); 230 | rgba[i * 4 + c] = v; 231 | } else rgba[i * 4 + c] = 0xff; 232 | } 233 | } 234 | } 235 | 236 | const canvas = document.createElement("canvas"); 237 | const context = canvas.getContext("2d", { 238 | willReadFrequently: true, 239 | }); 240 | canvas.width = width; 241 | canvas.height = height; 242 | const imgData = context.createImageData(width, height); 243 | imgData.data.set(rgba); 244 | context.putImageData(imgData, 0, 0); 245 | return canvas; 246 | } 247 | 248 | async function doEncode(data) { 249 | const type = document.querySelector("select").value; 250 | let enc; 251 | 252 | const ac = new AbortController(); 253 | const signal = ac.signal; 254 | 255 | let didCancel = false; 256 | const onAbort = () => { 257 | didCancel = true; 258 | }; 259 | const doAbort = () => { 260 | ac.abort(); 261 | }; 262 | signal.addEventListener("abort", onAbort, { 
once: true }); 263 | cancel.addEventListener("click", doAbort, { once: true }); 264 | 265 | let fileWriter; 266 | if (type === "file") { 267 | fileWriter = await getFileStream(); 268 | } 269 | 270 | if (type === "file" || type === "worker") cancel.removeAttribute("disabled"); 271 | else cancel.setAttribute("disabled", true); 272 | 273 | const then = performance.now(); 274 | // console.profile("encode"); 275 | if (type === "cpu") enc = encodeCPU(data); 276 | else if (type === "canvas") enc = await encodeCanvas(data); 277 | else if (type === "worker") enc = await encodeWorkerBuffered(data, signal); 278 | else if (type === "file") await encodeFileWriter(data, fileWriter, signal); 279 | else if (type === "fast-png") { 280 | enc = FastPNG.encode({ data, width, height, channels, depth }); 281 | } 282 | // console.profileEnd("encode"); 283 | const now = performance.now(); 284 | 285 | signal.removeEventListener("abort", onAbort); 286 | cancel.removeEventListener("click", doAbort); 287 | cancel.setAttribute("disabled", true); 288 | 289 | if (didCancel) { 290 | console.log("Cancelled"); 291 | status.textContent = "Cancelled"; 292 | } else { 293 | const ms = Math.round(now - then); 294 | const bytesSuffix = enc ? 
` (Bytes: ${prettyBytes(enc.byteLength)})` : ""; 295 | const timeStr = `Time: ${ms} ms` + bytesSuffix; 296 | console.log(timeStr); 297 | status.textContent = timeStr; 298 | 299 | if (enc) { 300 | curBlob = new Blob([enc], { type: "image/png" }); 301 | download.removeAttribute("disabled"); 302 | img.src = URL.createObjectURL(curBlob); 303 | } 304 | } 305 | } 306 | 307 | const worker = new Worker(new URL("./generate.js", import.meta.url), { 308 | type: "module", 309 | }); 310 | worker.postMessage({ width, height, channels, depth }); 311 | worker.addEventListener("message", (ev) => { 312 | const btn = document.querySelector(".encode"); 313 | btn.textContent = "Encode PNG"; 314 | 315 | btn.removeAttribute("disabled"); 316 | btn.onclick = async () => { 317 | btn.setAttribute("disabled", true); 318 | await new Promise((r) => setTimeout(r, 10)); 319 | try { 320 | console.log("encoding"); 321 | await doEncode(ev.data.slice()); 322 | } catch (err) { 323 | if (err.name != "AbortError") { 324 | console.error(err); 325 | alert(err.message); 326 | } 327 | } 328 | btn.removeAttribute("disabled"); 329 | }; 330 | }); 331 | -------------------------------------------------------------------------------- /examples/web/worker.js: -------------------------------------------------------------------------------- 1 | import { Deflate } from "pako"; 2 | import { 3 | ChunkType, 4 | encodeChunk, 5 | encodeHeader, 6 | encode_IDAT_raw, 7 | encode_IHDR, 8 | encode_pHYs_PPI, 9 | } from "../../index.js"; 10 | 11 | self.onmessage = async (msg) => { 12 | const options = msg.data; 13 | const deflateOptions = { level: 3 }; 14 | 15 | const postChunk = (progress, chunk) => { 16 | const finished = chunk.type === ChunkType.IEND; 17 | self.postMessage({ chunk: encodeChunk(chunk), progress, finished }); 18 | }; 19 | 20 | // 1. First post the raw header 21 | self.postMessage({ chunk: encodeHeader(), progress: 0, finished: false }); 22 | 23 | // 2. 
Now post the metadata chunk 24 | postChunk(0, { type: ChunkType.IHDR, data: encode_IHDR(options) }); 25 | 26 | // 2a (optional) Include any ancillary chunks like pixelsPerInch, text... 27 | if (options.pixelsPerInch) { 28 | postChunk(0, { 29 | type: ChunkType.pHYs, 30 | data: encode_pHYs_PPI(options.pixelsPerInch), 31 | }); 32 | } 33 | 34 | // 3. Now do deflate, and each time the deflator gets compressed data, 35 | // send it to the main thread as well for writing 36 | const deflator = new Deflate(deflateOptions); 37 | const idat = encode_IDAT_raw(options.data, options); 38 | const totalSize = idat.byteLength; 39 | 40 | // Overload the function to extract each individual compressed chunk 41 | deflator.onData = function (chunk) { 42 | // ensure the Deflator has its chunks 43 | this.chunks.push(chunk); 44 | 45 | // Also push to the PNG stream while we are at it 46 | const progress = (totalSize - this.strm.avail_in) / totalSize; 47 | postChunk(progress, { type: ChunkType.IDAT, data: chunk }); 48 | }; 49 | 50 | // Push with 'finish' parameter as true 51 | deflator.push(idat, true); 52 | 53 | if (deflator.err) { 54 | throw deflator.msg || msg[deflator.err]; 55 | } 56 | 57 | // 4. 
Finally, send the ending chunk as well 58 | postChunk(1, { type: ChunkType.IEND }); 59 | }; 60 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @project png-tools 3 | * @author Matt DesLauriers (@mattdesl) 4 | * @license MIT see LICENSE.md file in GitHub repository 5 | */ 6 | 7 | import crc32 from "./src/crc32.js"; 8 | 9 | import { 10 | flattenBuffers, 11 | colorTypeToChannels, 12 | colorTypeToString, 13 | } from "./src/util.js"; 14 | 15 | import { ChunkType, ColorType, FilterMethod, Intent } from "./src/constants.js"; 16 | 17 | import { 18 | encode, 19 | encodeHeader, 20 | encodeChunk, 21 | writeChunks, 22 | } from "./src/encode.js"; 23 | 24 | import { readChunks, readIHDR, reader } from "./src/decode.js"; 25 | 26 | import { 27 | encode_IDAT_raw, 28 | encode_pHYs, 29 | encode_pHYs_PPI, 30 | encode_sRGB, 31 | encode_standardChromatics, 32 | encode_standardGamma, 33 | encode_iTXt, 34 | encode_IHDR, 35 | encode_iCCP, 36 | decode_iCCP, 37 | decode_iTXt, 38 | decode_IHDR, 39 | decode_pHYs, 40 | decode_pHYs_PPI, 41 | chunkNameToType, 42 | chunkTypeToName, 43 | } from "./src/chunks.js"; 44 | 45 | export { 46 | // Utils 47 | crc32, 48 | flattenBuffers, 49 | colorTypeToChannels, 50 | colorTypeToString, 51 | 52 | // Export constants 53 | ChunkType, 54 | ColorType, 55 | FilterMethod, 56 | Intent, 57 | 58 | // Encoding 59 | encode, 60 | encodeHeader, 61 | encodeChunk, 62 | writeChunks, 63 | 64 | // Decoding 65 | readChunks, 66 | readIHDR, 67 | reader, 68 | 69 | // Chunk utils 70 | encode_IDAT_raw, 71 | encode_pHYs, 72 | encode_pHYs_PPI, 73 | encode_sRGB, 74 | encode_standardChromatics, 75 | encode_standardGamma, 76 | encode_iTXt, 77 | encode_IHDR, 78 | encode_iCCP, 79 | decode_iCCP, 80 | decode_iTXt, 81 | decode_IHDR, 82 | decode_pHYs, 83 | decode_pHYs_PPI, 84 | chunkNameToType, 85 | chunkTypeToName, 86 | }; 87 | 
-------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "png-tools", 3 | "version": "1.0.3", 4 | "description": "low-level PNG codec tools", 5 | "main": "./index.js", 6 | "license": "MIT", 7 | "type": "module", 8 | "author": { 9 | "name": "Matt DesLauriers", 10 | "url": "https://github.com/mattdesl" 11 | }, 12 | "devDependencies": { 13 | "buffer": "^6.0.3", 14 | "canvas-dimensions": "^1.0.0", 15 | "canvas-sketch": "^0.7.7", 16 | "cli-progress": "^3.12.0", 17 | "convert-length": "^1.0.1", 18 | "fast-png": "^6.2.0", 19 | "icc": "^3.0.0", 20 | "jsdoc": "^4.0.3", 21 | "lcms-wasm": "^1.0.2", 22 | "pako": "^2.1.0", 23 | "pretty-bytes": "^6.1.1", 24 | "tape": "^5.7.5", 25 | "vite": "^5.2.11" 26 | }, 27 | "scripts": { 28 | "test": "node test/test.js", 29 | "test:build": "node test/generate-png-fixtures.js", 30 | "demo": "vite examples/", 31 | "demo:build": "vite build ./examples/ --outDir ../docs/demo --base='./' --emptyOutDir", 32 | "docs": "jsdoc -c .jsdoc.json -d docs -R DOC.md" 33 | }, 34 | "keywords": [ 35 | "png", 36 | "parallel", 37 | "encoder", 38 | "ppi", 39 | "pixelsPerInch", 40 | "dpi", 41 | "pHYs", 42 | "iCCP", 43 | "chunk", 44 | "chunks", 45 | "metadata", 46 | "exif", 47 | "tag", 48 | "encoding", 49 | "resolution", 50 | "color", 51 | "profile", 52 | "embedded" 53 | ], 54 | "repository": { 55 | "type": "git", 56 | "url": "git://github.com/mattdesl/png-tools.git" 57 | }, 58 | "homepage": "https://github.com/mattdesl/png-tools", 59 | "bugs": { 60 | "url": "https://github.com/mattdesl/png-tools/issues" 61 | }, 62 | "dependencies": { 63 | "esdoc2-ecmascript-proposal-plugin": "^2.1.1" 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /src/constants.js: -------------------------------------------------------------------------------- 1 | export const PNG_HEADER = new Uint8Array([ 2 
| 0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a, 3 | ]); 4 | 5 | /** 6 | * An enum for Intent when specifying sRGB chunk. 7 | * 8 | * @enum {Intent} 9 | * @property {number} Perceptual (0x00) 10 | * @property {number} Relative relative colorimetric (0x01) 11 | * @property {number} Saturation (0x02) 12 | * @property {number} Absolute absolute colorimetric (0x03) 13 | **/ 14 | export const Intent = Object.freeze({ 15 | Perceptual: 0, 16 | Relative: 1, // Relative colorimetric 17 | Saturation: 2, 18 | Absolute: 3, // Aboslute colorimetric 19 | }); 20 | 21 | /** 22 | * An enum for standard PNG scanline filter methods. 23 | * 24 | * @enum {FilterMethod} 25 | * @property {number} None No filter (0x00) 26 | * @property {number} Sub Compute from left (0x01) 27 | * @property {number} Up Compute from above scanline (0x02) 28 | * @property {number} Average Compute from average of up and left (0x03) 29 | * @property {number} Paeth Compute the PNG 'paeth' predictor from up & left (0x04) 30 | **/ 31 | export const FilterMethod = Object.freeze({ 32 | None: 0x00, 33 | Sub: 0x01, 34 | Up: 0x02, 35 | Average: 0x03, 36 | Paeth: 0x04, 37 | }); 38 | 39 | /** 40 | * An enum for standard PNG color types, such as RGB or RGBA. 41 | * 42 | * @enum {ColorType} 43 | * @property {number} GRAYSCALE (1) 44 | * @property {number} RGB (2) 45 | * @property {number} INDEXED (3) 46 | * @property {number} GRAYSCALE_ALPHA (4) 47 | * @property {number} RGBA (6) 48 | **/ 49 | export const ColorType = Object.freeze({ 50 | GRAYSCALE: 1, 51 | RGB: 2, 52 | INDEXED: 3, 53 | GRAYSCALE_ALPHA: 4, 54 | RGBA: 6, 55 | }); 56 | 57 | /** 58 | * An enum for standard PNG chunk type codes (4-byte Uint32 decimal), including critical and ancillary chunks. 59 | * 60 | * @enum {ChunkType} 61 | * @property {number} IHDR 62 | * @property {number} PLTE 63 | * @property {number} IDAT 64 | * @property {number} IEND 65 | * @property {number} (...) 
- see source for full list 66 | * */ 67 | export const ChunkType = Object.freeze({ 68 | // Critical chunks 69 | IHDR: 0x49484452, 70 | PLTE: 0x504c5445, 71 | IDAT: 0x49444154, 72 | IEND: 0x49454e44, 73 | // Ancillary Chunks 74 | cHRM: 0x6348524d, 75 | gAMA: 0x67414d41, 76 | iCCP: 0x69434350, 77 | sBIT: 0x73424954, 78 | sRGB: 0x73524742, 79 | bKGD: 0x624b4744, 80 | hIST: 0x68495354, 81 | tRNS: 0x74524e53, 82 | pHYs: 0x70485973, 83 | sPLT: 0x73504c54, 84 | tIME: 0x74494d45, 85 | iTXt: 0x69545874, 86 | tEXt: 0x74455874, 87 | zTXt: 0x7a545874, 88 | }); 89 | -------------------------------------------------------------------------------- /src/crc32.js: -------------------------------------------------------------------------------- 1 | // Fast CRC32 in JavaScript 2 | // 101arrowz (https://github.com/101arrowz) 3 | // License: MIT 4 | 5 | // Modified from 101arrowz's gist: 6 | // https://gist.github.com/101arrowz/e58695f7ccfdf74f60ba22018093edea 7 | 8 | // This code uses the Slice-by-16 algorithm to achieve performance 9 | // roughly 2x greater than all other JS CRC32 implementations (e.g. 10 | // crc32-js). 11 | 12 | // Per local testing, Slice-by-16 outperforms Slice-by-4 by around 50% 13 | // and Slice-by-8/Slice-by-32/Slice-by-64 by 10-30% 14 | 15 | // This CRC implementation can compete with WASM CRC implementations 16 | // as well, and it tends to perform between 30% faster and 10% slower 17 | // than WASM CRC32 (>1MB input chunks is faster on WASM). 
// CRC32 table
// perf: signed integers are 2x more likely to be Smi
// Smi is a V8 datatype in (-2**30, 2**30-1)
// Smi operations are much faster

// Builds the Slice-by-16 lookup tables and returns the raw CRC function.
function CRC32() {
  // Base 256-entry CRC table, followed by 15 derived tables (4096 total).
  const table = new Int32Array(4096);
  for (let i = 0; i < 256; ++i) {
    let c = i;
    for (let k = 0; k < 8; ++k) {
      c = (c & 1 && -306674912) ^ (c >>> 1);
    }
    table[i] = c;
  }
  for (let i = 0; i < 256; ++i) {
    let v = table[i];
    for (let j = 256; j < 4096; j += 256) {
      v = table[i | j] = (v >>> 8) ^ table[v & 255];
    }
  }

  // Expose each 256-entry slice as its own subarray for fast lookups.
  const t = [];
  for (let i = 0; i < 16; i++) {
    t.push(table.subarray(i << 8, (i + 1) << 8));
  }

  // raw CRC function
  // stream by passing in previous CRC output as second parameter
  return function crc32(d, c = -1) {
    const len = d.length;
    const max = len - 16;
    let i = 0;
    // Consume 16 bytes per iteration (Slice-by-16).
    while (i < max) {
      c =
        t[15][d[i++] ^ (c & 255)] ^
        t[14][d[i++] ^ ((c >> 8) & 255)] ^
        t[13][d[i++] ^ ((c >> 16) & 255)] ^
        t[12][d[i++] ^ (c >>> 24)] ^
        t[11][d[i++]] ^
        t[10][d[i++]] ^
        t[9][d[i++]] ^
        t[8][d[i++]] ^
        t[7][d[i++]] ^
        t[6][d[i++]] ^
        t[5][d[i++]] ^
        t[4][d[i++]] ^
        t[3][d[i++]] ^
        t[2][d[i++]] ^
        t[1][d[i++]] ^
        t[0][d[i++]];
    }
    // Finish the remaining (< 16) bytes one at a time.
    while (i < len) {
      c = t[0][(c & 255) ^ d[i]] ^ (c >>> 8);
      ++i;
    }
    return ~c;
  };
}
81 | 82 | * @function crc32 83 | * @param {ArrayLike} buf the array-like buffer to calculate the CRC32 of 84 | * @param {number} [c=-1] the initial CRC32 value 85 | * @returns {number} the CRC32 checksum 86 | */ 87 | export default (() => { 88 | // Avoid allocating global memory unless necessary 89 | let init = false; 90 | let crc32_instance; 91 | return function crc32(d, c = -1) { 92 | if (!init) { 93 | crc32_instance = CRC32(); 94 | init = true; 95 | } 96 | return crc32_instance(d, c); 97 | }; 98 | })(); 99 | -------------------------------------------------------------------------------- /src/decode.js: -------------------------------------------------------------------------------- 1 | import crc32 from "./crc32.js"; 2 | import { ChunkType, PNG_HEADER } from "./constants.js"; 3 | import { chunkTypeToName, decode_IHDR } from "./chunks.js"; 4 | 5 | /** 6 | * @typedef {Object} PNGReaderOptions 7 | * @property {boolean} [checkCRC=false] whether to check and verify CRC values of each chunk (slower but can detect errors and corruption earlier during parsing) 8 | * @property {boolean} [copy=true] whether to return a sliced copy of each chunk data instead of a shallow subarray view into the input buffer 9 | **/ 10 | 11 | /** 12 | * Reads a PNG buffer up to the end of the IHDR chunk and returns this metadata, giving its width, height, bit depth, and color type. 13 | * 14 | * @param {ArrayBufferView} buf the PNG buffer to read 15 | * @param {PNGReaderOptions} [opts={}] optional parameters for reading 16 | * @returns {IHDRData} 17 | **/ 18 | export function readIHDR(buf, opts = {}) { 19 | let meta = {}; 20 | reader(buf, { ...opts, copy: false }, (type, view) => { 21 | if (type === ChunkType.IHDR) { 22 | meta = decode_IHDR(view); 23 | return false; // stop reading the rest of PNG 24 | } 25 | }); 26 | return meta; 27 | } 28 | 29 | /** 30 | * Parses a PNG buffer and returns an array of chunks, each containing a type code and its data. 
31 | * The individual chunks are not decoded, but left as raw Uint8Array data. If `copy` option is `false`, 32 | * the chunk data is a view into the original ArrayBufferView (no copy involved), which is more memory efficient 33 | * for large files. 34 | * 35 | * @param {ArrayBufferView} buf 36 | * @param {PNGReaderOptions} [opts={}] optional parameters for reading PNG chunks 37 | * @returns {Chunk[]} an array of chunks 38 | */ 39 | export function readChunks(buf, opts = {}) { 40 | const chunks = []; 41 | reader(buf, opts, (type, data) => chunks.push({ type, data })); 42 | return chunks; 43 | } 44 | 45 | /** 46 | * A low-level interface for stream reading a PNG file. With the speicifed buffer, this function reads 47 | * each chunk and calls the `read(type, data)` function, which is expected to do something with the chunk data. 48 | * If the `read()` function returns `false`, the stream will stop reading the rest of the PNG file and safely end early, 49 | * otherwise it will expect to end on an IEND type chunk to form a valid PNG file. 
50 | * 51 | * @param {ArrayBufferView} buf 52 | * @param {PNGReaderOptions} [opts={}] optional parameters for reading PNG chunks 53 | * @returns {Chunk[]} an array of chunks 54 | */ 55 | export function reader(buf, opts = {}, read = () => {}) { 56 | if (!ArrayBuffer.isView(buf)) { 57 | throw new Error("Expected a typed array such as Uint8Array"); 58 | } 59 | 60 | if (typeof opts === "function") { 61 | read = opts; 62 | opts = {}; 63 | } 64 | 65 | const dv = new DataView(buf.buffer, buf.byteOffset, buf.byteLength); 66 | const data = new Uint8Array(dv.buffer, dv.byteOffset, dv.byteLength); 67 | 68 | if (data.length < PNG_HEADER.length) { 69 | throw new Error(`Buffer too small to contain PNG header`); 70 | } 71 | 72 | const { checkCRC = false, copy = true } = opts; 73 | 74 | for (let i = 0; i < PNG_HEADER.length; i++) { 75 | if (data[i] !== PNG_HEADER[i]) throw new Error(`Invalid PNG file header`); 76 | } 77 | 78 | let ended = false; 79 | let hasMetIHDR = false; 80 | let idx = 8; 81 | while (idx < data.length) { 82 | // Length of current chunk 83 | const chunkLength = dv.getUint32(idx); 84 | idx += 4; 85 | 86 | // Extract 4-byte type code 87 | const type = dv.getUint32(idx); 88 | 89 | // First chunk must be IHDR 90 | if (!hasMetIHDR) { 91 | if (type !== ChunkType.IHDR) throw new Error("Invalid PNG: IHDR missing"); 92 | hasMetIHDR = true; 93 | } 94 | 95 | const chunkDataIdx = idx + 4; 96 | if (checkCRC) { 97 | // Get the chunk contents including the type code but not CRC code 98 | const chunkBuffer = data.subarray(idx, chunkDataIdx + chunkLength); 99 | 100 | // Int32 CRC value that comes after the chunk data 101 | const crcCode = dv.getInt32(chunkDataIdx + chunkLength); 102 | let crcExpect = crc32(chunkBuffer); 103 | if (crcExpect !== crcCode) { 104 | throw new Error( 105 | `CRC value for ${chunkTypeToName( 106 | type 107 | )} does not match, PNG file may be corrupted` 108 | ); 109 | } 110 | } 111 | 112 | // parse the current chunk 113 | const v = read( 114 | type, 115 
| copy 116 | ? data.slice(chunkDataIdx, chunkDataIdx + chunkLength) 117 | : data.subarray(chunkDataIdx, chunkDataIdx + chunkLength) 118 | ); 119 | if (v === false || type === ChunkType.IEND) { 120 | // safely end the stream 121 | ended = true; 122 | break; 123 | } 124 | 125 | // Skip past the chunk data and CRC value 126 | idx = chunkDataIdx + chunkLength + 4; 127 | } 128 | 129 | if (!ended) { 130 | throw new Error("PNG ended without IEND chunk"); 131 | } 132 | } 133 | -------------------------------------------------------------------------------- /src/encode.js: -------------------------------------------------------------------------------- 1 | import crc32 from "./crc32.js"; 2 | import { ChunkType, ColorType, PNG_HEADER } from "./constants.js"; 3 | import { encode_IHDR, encode_IDAT_raw } from "./chunks.js"; 4 | 5 | /** 6 | * @typedef {Object} EncodeOptions 7 | * @property {Uint8Array} data the raw pixel data to encode 8 | * @property {number} width the width of the image 9 | * @property {number} height the height of the image 10 | * @property {ColorType} [colorType=ColorType.RGBA] the color type of the pixel data 11 | * @property {number} [depth=8] the bit depth of the image 12 | * @property {number} [filterMethod=FilterMethod.Paeth] the filter method to use 13 | * @property {number} [firstFilter=filter] the first scanline filter method to use 14 | * @property {Chunk[]} [ancillary=[]] additional chunks to include in the PNG 15 | */ 16 | 17 | /** 18 | * Encodes a PNG buffer from the given image and options, using the specified `deflate` algorithm and optional compression options. 19 | * The deflate function should have the signature `(buf, [deflateOptions]) => Uint8Array`. 
20 | * 21 | * @param {EncodeOptions} options the encoding options 22 | * @param {Function} deflate the sync deflate function to use 23 | * @param {Object} [deflateOptions] optional deflate options passed to the deflate() function 24 | */ 25 | export function encode(options = {}, deflate, deflateOptions) { 26 | const { data, ancillary = [], colorType = ColorType.RGBA } = options; 27 | if (!data) throw new Error(`must specify { data }`); 28 | if (!deflate) throw new Error(`must specify a deflate function`); 29 | if (colorType !== ColorType.RGB && colorType !== ColorType.RGBA) { 30 | throw new Error( 31 | "only RGB or RGBA colorType encoding is currently supported" 32 | ); 33 | } 34 | return writeChunks([ 35 | { type: ChunkType.IHDR, data: encode_IHDR(options) }, 36 | ...ancillary, 37 | { 38 | type: ChunkType.IDAT, 39 | data: deflate(encode_IDAT_raw(data, options), deflateOptions), 40 | }, 41 | { type: ChunkType.IEND }, 42 | ]); 43 | } 44 | 45 | /** 46 | * Encodes just the raw PNG header into a Uint8Array buffer. 47 | * @returns {Uint8Array} the PNG header 48 | */ 49 | export function encodeHeader() { 50 | return PNG_HEADER.slice(); 51 | } 52 | 53 | /** 54 | * Encodes a single PNG chunk into a Uint8Array buffer, by writing the chunk length, type, data, and CRC value. 55 | * @param {Chunk} chunk the chunk to encode 56 | * @returns {Uint8Array} the encoded chunk buffer 57 | */ 58 | export function encodeChunk(chunk) { 59 | const length = chunk.data ? chunk.data.length : 0; 60 | const output = new Uint8Array(4 + length + 4 + 4); 61 | const dv = new DataView(output.buffer, output.byteOffset, output.byteLength); 62 | // Write chunk length 63 | let idx = 0; 64 | encodeChunkRaw(output, dv, chunk, idx); 65 | return output; 66 | } 67 | 68 | /** 69 | * Writes and formats an array of PNG chunks into a complete PNG buffer, including the PNG header. 
70 | * 71 | * @param {Chunk[]} chunks the array of chunks to encode 72 | * @returns {Uint8Array} the encoded PNG buffer 73 | */ 74 | export function writeChunks(chunks) { 75 | let totalSize = PNG_HEADER.length; // start with header 76 | let idx = totalSize; 77 | for (let chunk of chunks) { 78 | totalSize += chunk.data ? chunk.data.length : 0; 79 | totalSize += 12; // length, code, CRC value (4 bytes each) 80 | } 81 | 82 | const output = new Uint8Array(totalSize); 83 | const dv = new DataView(output.buffer); 84 | 85 | // write header 86 | output.set(PNG_HEADER, 0); 87 | 88 | for (let chunk of chunks) { 89 | idx = encodeChunkRaw(output, dv, chunk, idx); 90 | } 91 | 92 | return output; 93 | } 94 | 95 | function encodeChunkRaw(output, dv, chunk, idx = 0) { 96 | // Write chunk length 97 | const length = chunk.data ? chunk.data.length : 0; 98 | dv.setUint32(idx, length); 99 | idx += 4; 100 | 101 | // Where the chunk index starts (before type code) 102 | const chunkStartIdx = idx; 103 | const chunkDataStartIdx = idx + 4; 104 | const chunkDataEndIdx = chunkDataStartIdx + length; 105 | 106 | // Write chunk type code 107 | const type = chunk.type; 108 | dv.setUint32(chunkStartIdx, type); 109 | 110 | // Write chunk data 111 | if (chunk.data) output.set(chunk.data, chunkDataStartIdx); 112 | 113 | // get the whole chunk buffer including type 114 | const chunkBuf = output.subarray(chunkStartIdx, chunkDataEndIdx); 115 | 116 | // compute CRC and write it 117 | const crcValue = crc32(chunkBuf); 118 | dv.setInt32(chunkDataEndIdx, crcValue); 119 | 120 | // return next index for reading 121 | return chunkDataEndIdx + 4; 122 | } 123 | -------------------------------------------------------------------------------- /src/util.js: -------------------------------------------------------------------------------- 1 | import { ColorType, FilterMethod } from "./constants.js"; 2 | 3 | /** 4 | * Concatenates a given array of array-like data (array buffers, typed arrays) into a single Uint8Array. 
5 | * 6 | * @param {ArrayLike[]} chunks 7 | * @returns Uint8Array concatenated data 8 | */ 9 | export function flattenBuffers(chunks) { 10 | let totalSize = 0; 11 | for (let chunk of chunks) { 12 | totalSize += chunk.length; 13 | } 14 | 15 | const result = new Uint8Array(totalSize); 16 | for (let i = 0, pos = 0; i < chunks.length; i++) { 17 | let chunk = chunks[i]; 18 | result.set(chunk, pos); 19 | pos += chunk.length; 20 | } 21 | return result; 22 | } 23 | 24 | export function decodeNULTerminatedString( 25 | data, 26 | offset = 0, 27 | maxLength = Infinity 28 | ) { 29 | const dv = new DataView(data.buffer, data.byteOffset, data.byteLength); 30 | let str = ""; 31 | for (let i = 0; offset < data.length && i < maxLength; offset++, i++) { 32 | const b = dv.getUint8(offset); 33 | if (b === 0x00) { 34 | break; 35 | } else { 36 | const chr = String.fromCharCode(b); 37 | str += chr; 38 | } 39 | } 40 | // String is always terminated with NUL so we can move forward one more 41 | offset++; 42 | return [str, offset]; 43 | } 44 | 45 | export function mergeData(...arrays) { 46 | // convert to byte arrays 47 | arrays = arrays.map((a) => { 48 | if (typeof a === "number") return new Uint8Array([a]); 49 | if (typeof a === "string") return convertStringToBytes(a); 50 | return a; 51 | }); 52 | 53 | // Get the total length of all arrays. 54 | let length = 0; 55 | for (let array of arrays) length += array.length; 56 | 57 | // Create a new array with total length and merge all source arrays. 
58 | let mergedArray = new Uint8Array(length); 59 | let offset = 0; 60 | for (let item of arrays) { 61 | mergedArray.set(item, offset); 62 | offset += item.length; 63 | } 64 | return mergedArray; 65 | } 66 | 67 | export function convertStringToBytes(val) { 68 | const data = new Uint8Array(val.length); 69 | for (let i = 0; i < data.length; i++) { 70 | data[i] = val.charCodeAt(i); 71 | } 72 | return data; 73 | } 74 | 75 | export function applyFilter( 76 | out, 77 | data, 78 | i, 79 | filter, 80 | bytesPerPixel, 81 | bytesPerScanline, 82 | srcIdxInBytes, 83 | dstIdxInBytesPlusOne 84 | ) { 85 | if (filter === FilterMethod.Paeth) { 86 | for (let j = 0; j < bytesPerScanline; j++) { 87 | const left = 88 | j < bytesPerPixel ? 0 : data[srcIdxInBytes + j - bytesPerPixel]; 89 | const up = i === 0 ? 0 : data[srcIdxInBytes + j - bytesPerScanline]; 90 | const upLeft = 91 | i === 0 || j < bytesPerPixel 92 | ? 0 93 | : data[srcIdxInBytes + j - bytesPerScanline - bytesPerPixel]; 94 | out[dstIdxInBytesPlusOne + j] = 95 | data[srcIdxInBytes + j] - paethPredictor(left, up, upLeft); 96 | } 97 | } else if (filter === FilterMethod.Sub) { 98 | for (let j = 0; j < bytesPerScanline; j++) { 99 | const leftPixel = 100 | j < bytesPerPixel ? 0 : data[srcIdxInBytes + j - bytesPerPixel]; 101 | out[dstIdxInBytesPlusOne + j] = data[srcIdxInBytes + j] - leftPixel; 102 | } 103 | } else if (filter === FilterMethod.Up) { 104 | for (let j = 0; j < bytesPerScanline; j++) { 105 | const upPixel = i === 0 ? 0 : data[srcIdxInBytes + j - bytesPerScanline]; 106 | out[dstIdxInBytesPlusOne + j] = data[srcIdxInBytes + j] - upPixel; 107 | } 108 | } else if (filter === FilterMethod.Average) { 109 | for (let j = 0; j < bytesPerScanline; j++) { 110 | const left = 111 | j < bytesPerPixel ? 0 : data[srcIdxInBytes + j - bytesPerPixel]; 112 | const up = i === 0 ? 
0 : data[srcIdxInBytes + j - bytesPerScanline]; 113 | const avg = (left + up) >> 1; 114 | out[dstIdxInBytesPlusOne + j] = data[srcIdxInBytes + j] - avg; 115 | } 116 | } 117 | 118 | // Should never get here in this version as applyFilter is only called 119 | // when a non-None filter is specified 120 | // if (filter === FilterMethod.None) { 121 | // for (let j = 0; j < bytesPerScanline; j++) { 122 | // out[dstIdxInBytesPlusOne + j] = data[srcIdxInBytes + j]; 123 | // } 124 | // } 125 | } 126 | 127 | function paethPredictor(left, above, upLeft) { 128 | let paeth = left + above - upLeft; 129 | let pLeft = Math.abs(paeth - left); 130 | let pAbove = Math.abs(paeth - above); 131 | let pUpLeft = Math.abs(paeth - upLeft); 132 | if (pLeft <= pAbove && pLeft <= pUpLeft) return left; 133 | if (pAbove <= pUpLeft) return above; 134 | return upLeft; 135 | } 136 | 137 | /** 138 | * Converts a ColorType enum to a human readable string, for example ColorType.RGBA (= 6) becomes "RGBA". 139 | * Although these numerical constants are defined in the PNG spec, the exact string for each is not. 
140 | * 141 | * @param {ColorType} colorType the type to convert 142 | * @returns {string} a readable string 143 | */ 144 | export function colorTypeToString(colorType) { 145 | const entries = Object.entries(ColorType); 146 | return entries.find((e) => e[1] === colorType)[0]; 147 | } 148 | 149 | export function colorTypeToChannels(colorType) { 150 | switch (colorType) { 151 | case ColorType.GRAYSCALE: 152 | return 1; // grayscale 153 | case ColorType.RGB: 154 | return 3; // RGB 155 | case ColorType.INDEXED: 156 | return 1; // indexed 157 | case ColorType.GRAYSCALE_ALPHA: 158 | return 2; // grayscale + alpha 159 | case ColorType.RGBA: 160 | return 4; // RGBA 161 | default: 162 | throw new Error(`Invalid colorType ${colorType}`); 163 | } 164 | } 165 | -------------------------------------------------------------------------------- /test/bench.js: -------------------------------------------------------------------------------- 1 | import { 2 | ColorType, 3 | FilterMethod, 4 | colorTypeToChannels, 5 | encode, 6 | } from "../index.js"; 7 | import { deflate } from "pako"; 8 | 9 | const image = createImage({ 10 | width: 4096 * 2, 11 | height: 4096 * 2, 12 | depth: 8, 13 | filter: FilterMethod.Up, 14 | }); 15 | 16 | // encode an image 17 | console.time("encode"); 18 | encode(image, deflate, { level: 3 }); 19 | console.timeEnd("encode"); 20 | 21 | function createImage(opts = {}) { 22 | const { width, height, colorType = ColorType.RGBA, depth = 8 } = opts; 23 | const ArrType = depth === 16 ? Uint16Array : Uint8ClampedArray; 24 | const maxValue = depth === 16 ? 0xffff : 0xff; 25 | 26 | const channels = colorTypeToChannels(colorType); 27 | let data = new ArrType(width * height * channels).fill(maxValue); 28 | 29 | const A = [1, 0, 0]; 30 | const B = [0, 0, 1]; 31 | 32 | for (let x = 0; x < width; x++) { 33 | const u = width <= 1 ? 
1 : x / (width - 1); 34 | const [r, g, b] = lerpArray(A, B, u).map((n) => toByte(n)); 35 | data[x * channels + 0] = r; 36 | data[x * channels + 1] = g; 37 | data[x * channels + 2] = b; 38 | } 39 | 40 | // quickly generate an image of expected size 41 | for (let y = 1; y < height; y++) { 42 | const x = 0; 43 | const idx = x + y * width; 44 | data.copyWithin(idx * channels, 0, width * channels); 45 | } 46 | 47 | return { ...opts, colorType, depth, data }; 48 | 49 | function lerp(min, max, t) { 50 | return min * (1 - t) + max * t; 51 | } 52 | 53 | function lerpArray(min, max, t, out = []) { 54 | for (var i = 0; i < min.length; i++) { 55 | out[i] = lerp(min[i], max[i], t); 56 | } 57 | return out; 58 | } 59 | 60 | function toByte(v) { 61 | return Math.max(0, Math.min(maxValue, Math.round(v * maxValue))); 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /test/benchmark/generate.js: -------------------------------------------------------------------------------- 1 | self.onmessage = async (msg) => { 2 | const { width, height, depth, channels } = msg.data; 3 | const ArrType = depth === 16 ? Uint16Array : Uint8ClampedArray; 4 | const maxValue = depth === 16 ? 0xffff : 0xff; 5 | 6 | let data = new ArrType(width * height * channels).fill(maxValue); 7 | 8 | const A = [1, 0, 0]; 9 | const B = [0, 0, 1]; 10 | 11 | for (let x = 0; x < width; x++) { 12 | const u = width <= 1 ? 
1 : x / (width - 1); 13 | const [r, g, b] = lerpArray(A, B, u).map((n) => toByte(n)); 14 | data[x * channels + 0] = r; 15 | data[x * channels + 1] = g; 16 | data[x * channels + 2] = b; 17 | } 18 | 19 | // quickly generate an image of expected size 20 | for (let y = 1; y < height; y++) { 21 | const x = 0; 22 | const idx = x + y * width; 23 | data.copyWithin(idx * channels, 0, width * channels); 24 | } 25 | 26 | // A much slower gradient but more pretty, using more colours 27 | // for (let y = 0, i = 0; y < height; y++) { 28 | // for (let x = 0; x < width; x++, i++) { 29 | // const u = (x + 1) / width; 30 | // const v = (y + 1) / height; 31 | // const R = u; 32 | // const G = 0.5; 33 | // const B = v; 34 | // data[i * channels + 0] = toByte(R); 35 | // data[i * channels + 1] = toByte(G); 36 | // data[i * channels + 2] = toByte(B); 37 | // if (channels === 4) data[i * channels + 3] = maxValue; 38 | // } 39 | // } 40 | self.postMessage(data); 41 | 42 | function lerp(min, max, t) { 43 | return min * (1 - t) + max * t; 44 | } 45 | 46 | function lerpArray(min, max, t, out = []) { 47 | for (var i = 0; i < min.length; i++) { 48 | out[i] = lerp(min[i], max[i], t); 49 | } 50 | return out; 51 | } 52 | 53 | function toByte(v) { 54 | return Math.max(0, Math.min(maxValue, Math.round(v * maxValue))); 55 | } 56 | }; 57 | -------------------------------------------------------------------------------- /test/benchmark/main.js: -------------------------------------------------------------------------------- 1 | import getDimensions from "canvas-dimensions"; 2 | import * as pako from "pako"; 3 | import * as FastPNG from "fast-png"; 4 | 5 | import { 6 | encode, 7 | ColorType, 8 | FilterMethod, 9 | ChunkType, 10 | writeChunks, 11 | readChunks, 12 | withoutChunks, 13 | } from "../../src/png-io.js"; 14 | import { 15 | colorTypeToChannels, 16 | encode_pHYs_PPI, 17 | flattenBuffers, 18 | } from "../../src/util.js"; 19 | import { canvasToBuffer, downloadBlob } from 
"../../examples/util/save.js"; 20 | import prettyBytes from "pretty-bytes"; 21 | 22 | const params = { 23 | dimensions: "A0", 24 | pixelsPerInch: 150, 25 | units: "cm", 26 | depth: 16, 27 | colorType: ColorType.RGBA, 28 | filter: FilterMethod.Paeth, 29 | }; 30 | 31 | const { canvasWidth: width, canvasHeight: height } = getDimensions({ 32 | ...params, 33 | }); 34 | 35 | const colorTypeToString = (n) => { 36 | const entries = Object.entries(ColorType); 37 | return entries.find((e) => e[1] === n)[0]; 38 | }; 39 | 40 | const { depth, colorType, pixelsPerInch, filter } = params; 41 | const channels = colorTypeToChannels(colorType); 42 | 43 | const status = document.querySelector(".status"); 44 | const cancel = document.querySelector(".cancel"); 45 | const download = document.querySelector(".download"); 46 | const img = document.querySelector(".image"); 47 | let curBlob; 48 | 49 | download.onclick = (ev) => { 50 | ev.preventDefault(); 51 | if (curBlob) downloadBlob(curBlob, { filename: "download.png" }); 52 | }; 53 | 54 | // let canvas = document.createElement("canvas"); 55 | // const container = document.querySelector(".canvas-container"); 56 | // container.appendChild(canvas); 57 | const updateStatus = (n) => { 58 | let str; 59 | if (n === "worker") str = "Use a WebWorker to encode off the main thread."; 60 | else if (n === "file") 61 | str = 62 | "Use WebWorker + File System API to stream encode directly into a file on disk (Chrome only)."; 63 | else if (n === "canvas") { 64 | str = 65 | "Use Canvas2D toBlob() to encode a PNG, which only supports 8 bits per pixel."; 66 | } else if (n === "cpu") { 67 | str = 68 | "Use the main thread (no worker) to encode, which is simpler but halts the UI, does not report progress, and cannot be cancelled."; 69 | } else if (n === "fast-png") { 70 | str = "Use the fast-png module to encode, for benchmark comparison."; 71 | } 72 | status.textContent = str; 73 | }; 74 | 75 | const typeSelect = document.querySelector("select"); 76 | 
typeSelect.oninput = (ev) => { 77 | updateStatus(ev.currentTarget.value); 78 | curBlob = null; 79 | img.src = ""; 80 | download.setAttribute("disabled", true); 81 | }; 82 | updateStatus(typeSelect.value); 83 | 84 | document.querySelector(".info").textContent = JSON.stringify( 85 | { 86 | ...params, 87 | colorType: colorTypeToString(colorType), 88 | depth, 89 | width, 90 | height, 91 | }, 92 | null, 93 | 2 94 | ); 95 | 96 | async function encodeWorkerBuffered(data, signal) { 97 | let buffers = []; 98 | await encodeWorker(data, (d) => buffers.push(d), signal); 99 | return flattenBuffers(buffers); 100 | } 101 | 102 | async function encodeWorker(data, write, signal) { 103 | const options = { 104 | width, 105 | height, 106 | depth, 107 | colorType, 108 | data, 109 | pixelsPerInch, 110 | filter, 111 | }; 112 | return new Promise((resolve) => { 113 | const worker = new Worker(new URL("./worker.js", import.meta.url), { 114 | type: "module", 115 | }); 116 | const close = () => { 117 | worker.terminate(); 118 | resolve(); 119 | }; 120 | if (signal) signal.addEventListener("abort", close); 121 | worker.postMessage(options, [data.buffer]); 122 | const handler = async (ev) => { 123 | const data = ev.data; 124 | status.textContent = `Progress: ${Math.round(data.progress * 100)}%`; 125 | write(data.chunk); 126 | if (data.finished) { 127 | worker.removeEventListener("message", handler); 128 | if (signal) signal.removeEventListener("abort", close); 129 | close(); 130 | } 131 | }; 132 | worker.addEventListener("message", handler); 133 | }); 134 | } 135 | 136 | function encodeCPU(data) { 137 | const ancillary = []; 138 | 139 | // optionally embed resolution 140 | if (pixelsPerInch) { 141 | ancillary.push({ 142 | type: ChunkType.pHYs, 143 | data: encode_pHYs_PPI(pixelsPerInch), 144 | }); 145 | } 146 | 147 | const options = { 148 | width, 149 | height, 150 | data, 151 | depth, 152 | colorType, 153 | ancillary, 154 | filter, 155 | }; 156 | return encode(options, pako.deflate, { level: 3 
}); 157 | } 158 | 159 | async function encodeCanvas(data) { 160 | const canvas = create8BitCanvas(data); 161 | let buffer = await canvasToBuffer(canvas); 162 | // if we have additional metadata, we can re-encode without having to re-compress 163 | if (pixelsPerInch) { 164 | let chunks = readChunks(buffer); 165 | // strip out an existing pHYs chunk if it exists 166 | chunks = chunks.filter((c) => c.type !== ChunkType.pHYs); 167 | // include the new chunk 168 | chunks.splice(1, 0, { 169 | type: ChunkType.pHYs, 170 | data: encode_pHYs_PPI(pixelsPerInch), 171 | }); 172 | // re-encode the chunks (does not re-compress the data stream) 173 | buffer = writeChunks(chunks); 174 | } 175 | return buffer; 176 | } 177 | 178 | async function getFileStream(opts = {}) { 179 | if (!window.showSaveFilePicker) { 180 | throw new Error("Not supported on this browser"); 181 | } 182 | const { filename = "download.png" } = opts; 183 | 184 | // create a new handle 185 | const newHandle = await window.showSaveFilePicker({ 186 | excludeAcceptAllOption: true, 187 | id: "benchmark", 188 | startIn: "downloads", 189 | suggestedName: filename, 190 | types: [ 191 | { 192 | description: "PNG Image", 193 | accept: { "image/png": [".png"] }, 194 | }, 195 | ], 196 | }); 197 | 198 | return newHandle.createWritable(); 199 | } 200 | 201 | async function encodeFileWriter(data, writer, signal) { 202 | let chain = Promise.resolve(); 203 | 204 | // create a chain of promises that write one after another 205 | const write = (chunk) => { 206 | chain = chain.then(() => writer.write(chunk)); 207 | }; 208 | 209 | // wait for encoder to finish 210 | await encodeWorker(data, write, signal); 211 | 212 | // make sure all writes are finished too 213 | await chain; 214 | 215 | // close writer 216 | await writer.close(); 217 | } 218 | 219 | function create8BitCanvas(data) { 220 | const rgba = new Uint8ClampedArray(width * height * 4); 221 | if (depth === 8 && channels === 4) { 222 | rgba.set(data); 223 | } else { 224 | 
const maxOutSample = 0xff; 225 | const maxInSample = depth === 16 ? 0xffff : 0xff; 226 | for (let i = 0; i < width * height; i++) { 227 | for (let c = 0; c < 4; c++) { 228 | if (c < channels) { 229 | let v = data[i * channels + c]; 230 | v = Math.round((v / maxInSample) * maxOutSample); 231 | v = Math.max(0, Math.min(maxOutSample, v)); 232 | rgba[i * 4 + c] = v; 233 | } else rgba[i * 4 + c] = 0xff; 234 | } 235 | } 236 | } 237 | 238 | const canvas = document.createElement("canvas"); 239 | const context = canvas.getContext("2d", { 240 | willReadFrequently: true, 241 | }); 242 | canvas.width = width; 243 | canvas.height = height; 244 | const imgData = context.createImageData(width, height); 245 | imgData.data.set(rgba); 246 | context.putImageData(imgData, 0, 0); 247 | return canvas; 248 | } 249 | 250 | async function doEncode(data) { 251 | const type = document.querySelector("select").value; 252 | let enc; 253 | 254 | const ac = new AbortController(); 255 | const signal = ac.signal; 256 | 257 | let didCancel = false; 258 | const onAbort = () => { 259 | didCancel = true; 260 | }; 261 | const doAbort = () => { 262 | ac.abort(); 263 | }; 264 | signal.addEventListener("abort", onAbort, { once: true }); 265 | cancel.addEventListener("click", doAbort, { once: true }); 266 | 267 | let fileWriter; 268 | if (type === "file") { 269 | fileWriter = await getFileStream(); 270 | } 271 | 272 | if (type === "file" || type === "worker") cancel.removeAttribute("disabled"); 273 | else cancel.setAttribute("disabled", true); 274 | 275 | const then = performance.now(); 276 | // console.profile("encode"); 277 | if (type === "cpu") enc = encodeCPU(data); 278 | else if (type === "canvas") enc = await encodeCanvas(data); 279 | else if (type === "worker") enc = await encodeWorkerBuffered(data, signal); 280 | else if (type === "file") await encodeFileWriter(data, fileWriter, signal); 281 | else if (type === "fast-png") { 282 | enc = FastPNG.encode({ data, width, height, channels, depth }); 283 | 
} 284 | // console.profileEnd("encode"); 285 | const now = performance.now(); 286 | 287 | signal.removeEventListener("abort", onAbort); 288 | cancel.removeEventListener("click", doAbort); 289 | cancel.setAttribute("disabled", true); 290 | 291 | if (didCancel) { 292 | console.log("Cancelled"); 293 | status.textContent = "Cancelled"; 294 | } else { 295 | const ms = Math.round(now - then); 296 | const bytesSuffix = enc ? ` (Bytes: ${prettyBytes(enc.byteLength)})` : ""; 297 | const timeStr = `Time: ${ms} ms` + bytesSuffix; 298 | console.log(timeStr); 299 | status.textContent = timeStr; 300 | 301 | if (enc) { 302 | curBlob = new Blob([enc], { type: "image/png" }); 303 | download.removeAttribute("disabled"); 304 | img.src = URL.createObjectURL(curBlob); 305 | } 306 | } 307 | } 308 | 309 | const worker = new Worker(new URL("./generate.js", import.meta.url), { 310 | type: "module", 311 | }); 312 | worker.postMessage({ width, height, channels, depth }); 313 | worker.addEventListener("message", (ev) => { 314 | const btn = document.querySelector(".encode"); 315 | btn.textContent = "Encode PNG"; 316 | 317 | btn.removeAttribute("disabled"); 318 | btn.onclick = async () => { 319 | btn.setAttribute("disabled", true); 320 | await new Promise((r) => setTimeout(r, 10)); 321 | try { 322 | console.log("encoding"); 323 | await doEncode(ev.data.slice()); 324 | } catch (err) { 325 | if (err.name != "AbortError") { 326 | console.error(err); 327 | alert(err.message); 328 | } 329 | } 330 | btn.removeAttribute("disabled"); 331 | }; 332 | }); 333 | -------------------------------------------------------------------------------- /test/benchmark/worker.js: -------------------------------------------------------------------------------- 1 | import { Deflate } from "pako"; 2 | import { 3 | ChunkType, 4 | encodeChunk, 5 | matchesChunkType, 6 | encodeHeader, 7 | } from "../../src/png-io"; 8 | import { encode_IDAT_raw, encode_IHDR, encode_pHYs_PPI } from "../../src/util"; 9 | 10 | self.onmessage = 
async (msg) => { 11 | const options = msg.data; 12 | const deflateOptions = { level: 3 }; 13 | 14 | const postChunk = (progress, chunk) => { 15 | const finished = matchesChunkType(chunk.type, ChunkType.IEND); 16 | self.postMessage({ chunk: encodeChunk(chunk), progress, finished }); 17 | }; 18 | 19 | // 1. First post the raw header 20 | self.postMessage({ chunk: encodeHeader(), progress: 0, finished: false }); 21 | 22 | // 2. Now post the metadata chunk 23 | postChunk(0, { type: ChunkType.IHDR, data: encode_IHDR(options) }); 24 | 25 | // 2a (optional) Include any ancillary chunks like pixelsPerInch, text... 26 | if (options.pixelsPerInch) { 27 | postChunk(0, { 28 | type: ChunkType.pHYs, 29 | data: encode_pHYs_PPI(options.pixelsPerInch), 30 | }); 31 | } 32 | 33 | // 3. Now do deflate, and each time the deflator gets compressed data, 34 | // send it to the main thread as well for writing 35 | const deflator = new Deflate(deflateOptions); 36 | const idat = encode_IDAT_raw(options.data, options); 37 | const totalSize = idat.byteLength; 38 | 39 | // Overload the function to extract each individual compressed chunk 40 | deflator.onData = function (chunk) { 41 | // ensure the Deflator has its chunks 42 | this.chunks.push(chunk); 43 | 44 | // Also push to the PNG stream while we are at it 45 | const progress = (totalSize - this.strm.avail_in) / totalSize; 46 | postChunk(progress, { type: ChunkType.IDAT, data: chunk }); 47 | }; 48 | 49 | // Push with 'finish' parameter as true 50 | deflator.push(idat, true); 51 | 52 | if (deflator.err) { 53 | throw deflator.msg || msg[deflator.err]; 54 | } 55 | 56 | // 4. 
Finally, send the ending chunk as well 57 | postChunk(1, { type: ChunkType.IEND }); 58 | }; 59 | -------------------------------------------------------------------------------- /test/deprecated/demo copy.js: -------------------------------------------------------------------------------- 1 | import canvasSketch from "canvas-sketch"; 2 | import { createSaveSetup } from "./export-png.js"; 3 | import { canvasToBuffer, downloadBuffer } from "../save.js"; 4 | import { deflate, inflate } from "pako"; 5 | import ColorEngine from "jscolorengine"; 6 | import { 7 | extractChunks, 8 | encodeChunks, 9 | } from "../src/png-metadata-writer/index.js"; 10 | import { decode, encode } from "fast-png"; 11 | import { parse as parseICC } from "icc"; 12 | import { 13 | decodeProfileData, 14 | decode_iCCP, 15 | encode_iCCP, 16 | encode_iTXt, 17 | encode_pHYs_PPI, 18 | withoutChunks, 19 | } from "../../src/util.js"; 20 | import { Buffer } from "buffer"; 21 | import paperSizes from "../src/paper-sizes.js"; 22 | import convert from "convert-length"; 23 | 24 | const render = ({ context, width, height }) => { 25 | const grad = context.createLinearGradient(width * 0.1, 0, width * 0.9, 0); 26 | grad.addColorStop(0, "color(display-p3 0 1 0)"); 27 | grad.addColorStop(1, "color(display-p3 1 0 0)"); 28 | 29 | context.fillStyle = grad; 30 | // context.fillStyle = "color(display-p3 0 1 0)"; 31 | context.fillRect(0, 0, width, height); 32 | }; 33 | 34 | async function setup() { 35 | const canvas = document.createElement("canvas"); 36 | const context = canvas.getContext("2d", { 37 | colorSpace: "display-p3", 38 | }); 39 | document.body.appendChild(canvas); 40 | 41 | // A6 standard paper size 42 | // const { width, height } = paperSizes. 
43 | const width = 256; 44 | const height = 256; 45 | canvas.width = width; 46 | canvas.height = height; 47 | canvas.style.width = `256px`; 48 | canvas.style.height = `256px`; 49 | render({ context, width, height }); 50 | 51 | const data = context.getImageData(0, 0, width, height); 52 | console.log("raw data", data.data); 53 | 54 | const png = await canvasToBuffer(canvas, {}); 55 | 56 | let chunks = extractChunks(png); 57 | 58 | const IHDR = chunks.find((c) => c.name === "IHDR"); 59 | { 60 | const dv = new DataView(IHDR.data.buffer); 61 | let off = 0; 62 | const width = dv.getUint32(off); 63 | off += 4; 64 | const height = dv.getUint32(off); 65 | off += 4; 66 | const bitDepth = dv.getUint8(off++); 67 | const colorType = dv.getUint8(off++); 68 | const compressionMethod = dv.getUint8(off++); 69 | const filterMethod = dv.getUint8(off++); 70 | const interlaceMethod = dv.getUint8(off++); 71 | console.log({ 72 | width, 73 | height, 74 | bitDepth, 75 | colorType, 76 | compressionMethod, 77 | filterMethod, 78 | interlaceMethod, 79 | }); 80 | } 81 | 82 | const decoded = decode(png); 83 | 84 | const reEncodeWithAdobe1998 = true; 85 | 86 | const iCCP = chunks.find((c) => c.name === "iCCP"); 87 | let iCCP_dst; 88 | let srcProfile; 89 | if (iCCP) { 90 | const profile = decode_iCCP(iCCP.data); 91 | const profileBuf = inflate(profile.data); 92 | console.log(`Src Profile:`, profile.name); 93 | 94 | // Parse information 95 | const parsed = parseICC(Buffer.from(profileBuf)); 96 | console.log(parsed); 97 | 98 | iCCP_dst = encode_iCCP({ 99 | name: parsed.description, 100 | data: deflate(profileBuf), 101 | }); 102 | 103 | srcProfile = new ColorEngine.Profile(); 104 | await new Promise((r) => srcProfile.loadBinary(profileBuf, r)); 105 | } else { 106 | srcProfile = "*sRGB"; 107 | } 108 | 109 | if (reEncodeWithAdobe1998) { 110 | const colorTransform = new ColorEngine.Transform({ 111 | buildLUT: true, 112 | dataFormat: "int8", 113 | BPC: true, 114 | }); 115 | 116 | const dstProfileBuf = 
new Uint8Array( 117 | await (await fetch("/profiles/Adobe/AdobeRGB1998.icc")).arrayBuffer() 118 | ); 119 | const dstProfile = new ColorEngine.Profile(); 120 | await new Promise((r) => dstProfile.loadBinary(dstProfileBuf, r)); 121 | const parsedDst = parseICC(Buffer.from(dstProfileBuf)); 122 | console.log("Destination Profile:", parsedDst); 123 | 124 | const w = decoded.width; 125 | const h = decoded.height; 126 | const rgbData = new Uint8ClampedArray(w * h * 3); 127 | for (let i = 0; i < rgbData.length; i++) { 128 | rgbData[i * 3 + 0] = decoded.data[i * 4 + 0]; 129 | rgbData[i * 3 + 1] = decoded.data[i * 4 + 1]; 130 | rgbData[i * 3 + 2] = decoded.data[i * 4 + 2]; 131 | } 132 | 133 | colorTransform.create( 134 | srcProfile, 135 | "*AdobeRGB", 136 | ColorEngine.eIntent.perceptual 137 | ); 138 | 139 | const rgbOut = colorTransform.transformArray( 140 | rgbData, 141 | false, 142 | false, 143 | false, 144 | w * h, 145 | "int8" 146 | ); 147 | 148 | console.log("RGB", rgbData); 149 | 150 | const rgba = new Uint8ClampedArray(w * h * 4); 151 | for (let i = 0; i < rgbData.length; i++) { 152 | rgba[i * 4 + 0] = rgbOut[i * 3 + 0]; 153 | rgba[i * 4 + 1] = rgbOut[i * 3 + 1]; 154 | rgba[i * 4 + 2] = rgbOut[i * 3 + 2]; 155 | rgba[i * 4 + 3] = decoded.data[i * 4 + 3]; 156 | } 157 | 158 | // re-encode and re-extract chunks 159 | chunks = extractChunks(encode({ ...decoded, data: rgba })); 160 | 161 | iCCP_dst = encode_iCCP({ 162 | name: parsedDst.description, 163 | data: deflate(dstProfileBuf), 164 | }); 165 | } 166 | 167 | createSaveSetup(async () => { 168 | const filename = `canvas-${Date.now()}.png`; 169 | // const png = await canvasToBuffer(canvas); 170 | 171 | const pixelsPerInch = 300; 172 | const pHYs = encode_pHYs_PPI(pixelsPerInch); 173 | const newChunks = withoutChunks(chunks, "pHYs"); 174 | newChunks.splice( 175 | 1, 176 | 0, 177 | { name: "pHYs", data: pHYs }, 178 | { name: "iCCP", data: iCCP_dst }, 179 | { 180 | name: "iTXt", 181 | data: encode_iTXt({ 182 | text: "hello 
world", 183 | }), 184 | } 185 | ); 186 | 187 | const newPng = encodeChunks(newChunks); 188 | downloadBuffer(newPng, { filename }); 189 | // exporter.write(png); 190 | }); 191 | } 192 | 193 | setup(); 194 | -------------------------------------------------------------------------------- /test/deprecated/demo-16bpp-display-p3.js: -------------------------------------------------------------------------------- 1 | import { deflate } from "pako"; 2 | import { encode } from "fast-png"; 3 | import { parse as parseICC } from "icc"; 4 | import { encode_iCCP, encode_pHYs_PPI, withoutChunks } from "../../src/util.js"; 5 | import { 6 | extractChunks, 7 | encodeChunks, 8 | } from "../src/png-metadata-writer/index.js"; 9 | import iccTransform from "../util/icc-transform.js"; 10 | import getDocument from "canvas-dimensions"; 11 | import fs from "node:fs/promises"; 12 | 13 | async function setup() { 14 | const { width, height, canvasWidth, canvasHeight, pixelsPerInch, units } = 15 | getDocument({ 16 | dimensions: "a4", 17 | units: "cm", 18 | pixelsPerInch: 300, 19 | }); 20 | 21 | console.log("Size: %s x %s %s (%s PPI)", width, height, units, pixelsPerInch); 22 | console.log("Canvas Size: %s x %s px", canvasWidth, canvasHeight); 23 | 24 | const depth = 16; 25 | 26 | // This is our desired *output* space that is transformed and embedded 27 | // We can leave it as display-p3 for screen, or if the file is going to print, 28 | // we may want to use Adobe RGB (1998), Pro Photo, eciRGB_v2_profile 29 | const srcColorSpace = "display-p3"; 30 | const dstColorSpace = "AdobeRGB1998"; 31 | 32 | const dtypeArray = depth === 8 ? Uint8ClampedArray : Uint16Array; 33 | const maxValue = depth === 8 ? 
0xff : 0xffff; 34 | 35 | const channels = 3; 36 | let data = new dtypeArray(canvasWidth * canvasHeight * channels); 37 | for (let y = 0, i = 0; y < canvasHeight; y++) { 38 | for (let x = 0; x < canvasWidth; x++, i++) { 39 | const u = (x + 1) / canvasWidth; 40 | const v = (y + 1) / canvasHeight; 41 | const R = u; 42 | const G = 0.5; 43 | const B = v; 44 | data[i * channels + 0] = toByte(R); 45 | data[i * channels + 1] = toByte(G); 46 | data[i * channels + 2] = toByte(B); 47 | } 48 | } 49 | 50 | const profiles = { 51 | AdobeRGB1998: "AdobeRGB1998", 52 | sRGB: "sRGB IEC61966-2.1", 53 | "display-p3": "Display P3", 54 | }; 55 | 56 | const srcProfileFname = profiles[srcColorSpace]; 57 | if (!srcProfileFname) throw new Error(`no profile ${srcColorSpace}`); 58 | const dstProfileFname = profiles[dstColorSpace]; 59 | if (!dstProfileFname) throw new Error(`no profile ${dstColorSpace}`); 60 | 61 | const srcProfile = await fs.readFile(`profiles/raw/${srcProfileFname}.icc`); 62 | let dstProfile = srcProfile; 63 | 64 | if (srcColorSpace !== dstColorSpace) { 65 | dstProfile = await fs.readFile(`profiles/raw/${dstProfileFname}.icc`); 66 | 67 | let inData; 68 | if (depth === 8) inData = data; 69 | else { 70 | inData = new Float32Array(canvasWidth * canvasHeight * channels); 71 | for (let i = 0; i < data.length; i++) { 72 | inData[i] = fromByte(data[i]); 73 | } 74 | } 75 | 76 | console.log(`Transforming ${srcColorSpace} to ${dstColorSpace}`); 77 | const result = await iccTransform({ 78 | srcProfile: toArrayBuffer(srcProfile), 79 | dstProfile: toArrayBuffer(dstProfile), 80 | channels, 81 | width: canvasWidth, 82 | height: canvasHeight, 83 | data: inData, 84 | }); 85 | 86 | // bring the result from float back to 16 bit 87 | if (depth === 8) { 88 | data = result; 89 | } else { 90 | if (data.length !== result.length) 91 | throw new Error("data and transformed result size mismatch"); 92 | for (let i = 0; i < data.length; i++) { 93 | data[i] = toByte(result[i]); 94 | } 95 | } 96 | } 97 | 
98 | const profileCompressed = deflate(dstProfile); 99 | const { description: profileName } = parseICC(dstProfile); 100 | 101 | let png = encode({ 102 | data, 103 | channels, 104 | width: canvasWidth, 105 | height: canvasHeight, 106 | depth, 107 | }); 108 | 109 | let chunks = extractChunks(png); 110 | 111 | // strip color profile and physical units 112 | chunks = withoutChunks(chunks, ["iCCP", "pHYs"]); 113 | 114 | // splice the new chunks back in 115 | chunks.splice( 116 | 1, 117 | 0, 118 | { 119 | name: "iCCP", 120 | data: encode_iCCP({ name: profileName, data: profileCompressed }), 121 | }, 122 | { name: "pHYs", data: encode_pHYs_PPI(pixelsPerInch) } 123 | ); 124 | 125 | console.log("Embedding ICC Profile:", profileName); 126 | 127 | // re-encode the PNG 128 | png = encodeChunks(chunks); 129 | 130 | const suffix = 131 | srcColorSpace === dstColorSpace 132 | ? srcColorSpace 133 | : `${srcColorSpace}-to-${dstColorSpace}`; 134 | await fs.writeFile(`test/fixtures/demo-${depth}-bit-${suffix}.png`, png); 135 | 136 | function toByte(v) { 137 | return Math.max(0, Math.min(maxValue, Math.round(v * maxValue))); 138 | } 139 | 140 | function fromByte(b) { 141 | return b / maxValue; 142 | } 143 | 144 | function toArrayBuffer(buffer) { 145 | const arrayBuffer = new ArrayBuffer(buffer.length); 146 | const view = new Uint8Array(arrayBuffer); 147 | for (let i = 0; i < buffer.length; ++i) { 148 | view[i] = buffer[i]; 149 | } 150 | return arrayBuffer; 151 | } 152 | } 153 | 154 | setup(); 155 | -------------------------------------------------------------------------------- /test/deprecated/demo.js: -------------------------------------------------------------------------------- 1 | // to test: 2 | // https://www.shadertoy.com/view/WlcBRn 3 | // https://www.jakelow.com/blog/hobby-curves 4 | 5 | import canvasSketch from "canvas-sketch"; 6 | import { createSaveSetup } from "./export-png.js"; 7 | import { canvasToBuffer, downloadBuffer } from "../save.js"; 8 | import { deflate, 
inflate } from "pako"; 9 | import ColorEngine from "jscolorengine"; 10 | import getDocument from "canvas-dimensions"; 11 | import { 12 | extractChunks, 13 | encodeChunks, 14 | } from "../src/png-metadata-writer/index.js"; 15 | import { decode, encode } from "fast-png"; 16 | import { parse as parseICC } from "icc"; 17 | import { 18 | decodeProfileData, 19 | decode_iCCP, 20 | encode_iCCP, 21 | encode_iTXt, 22 | encode_pHYs_PPI, 23 | withoutChunks, 24 | } from "../../src/util.js"; 25 | import { Buffer } from "buffer"; 26 | import paperSizes from "../src/paper-sizes.js"; 27 | import convert from "convert-length"; 28 | import pngForPrint from "./png-for-print.js"; 29 | 30 | const render = ({ context, width, height }) => { 31 | const grad = context.createLinearGradient(width * 0.1, 0, width * 0.9, 0); 32 | grad.addColorStop(0, "color(display-p3 0 1 0)"); 33 | grad.addColorStop(1, "color(display-p3 1 0 0)"); 34 | 35 | context.fillStyle = grad; 36 | // context.fillStyle = "color(display-p3 0 1 0)"; 37 | context.fillRect(0, 0, width, height); 38 | }; 39 | 40 | async function setup() { 41 | const { width, height, canvasWidth, canvasHeight } = getDocument({ 42 | dimensions: "a4", 43 | units: "cm", 44 | pixelsPerInch: 300, 45 | }); 46 | 47 | canvas.width = doc.canvasWidth; 48 | canvas.height = doc.canvasHeight; 49 | 50 | context.scale(canvasWidth / width, canvasHeight / height); 51 | 52 | const canvas = document.createElement("canvas"); 53 | const context = canvas.getContext("2d", { 54 | colorSpace: "display-p3", 55 | }); 56 | document.body.appendChild(canvas); 57 | 58 | canvas.width = canvasWidth; 59 | canvas.height = canvasWidth; 60 | canvas.style.width = `${canvasWidth}px`; 61 | canvas.style.height = `${canvasHeight}px`; 62 | render({ context, width, height }); 63 | 64 | // const data = context.getImageData(0, 0, width, height); 65 | // console.log("raw data", data.data); 66 | 67 | const png = await canvasToBuffer(canvas, {}); 68 | // const newPng = pngForPrint(png, { 69 | 
// pixelsPerInch: 300, 70 | // }); 71 | 72 | const newPng = png; 73 | console.log("Data:", newPng); 74 | 75 | createSaveSetup(async () => { 76 | const filename = `canvas-${Date.now()}.png`; 77 | downloadBuffer(newPng, { filename }); 78 | // exporter.write(png); 79 | }); 80 | } 81 | 82 | setup(); 83 | -------------------------------------------------------------------------------- /test/deprecated/export-png.js: -------------------------------------------------------------------------------- 1 | export function createSaveSetup(cb) { 2 | const keyDown = (ev) => { 3 | if (ev.key === "s" && (ev.metaKey || ev.ctrlKey)) { 4 | ev.preventDefault(); 5 | save(); 6 | } 7 | }; 8 | window.addEventListener("keydown", keyDown, { passive: false }); 9 | 10 | const button = document.createElement("button"); 11 | button.textContent = "Save PNG"; 12 | button.style.cssText = `position: absolute; top: 20px; left: 20px; z-index: 1000;`; 13 | document.body.appendChild(button); 14 | button.onclick = (ev) => { 15 | ev.preventDefault(); 16 | save(); 17 | }; 18 | 19 | const unload = () => { 20 | if (button.parentElement) button.parentElement.removeChild(button); 21 | window.removeEventListener("keydown", keyDown); 22 | }; 23 | unload.button = button; 24 | return unload; 25 | 26 | async function save() { 27 | cb(); 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /test/deprecated/inspect.js: -------------------------------------------------------------------------------- 1 | // A CLI tool to inspect color profiles in PNGs 2 | // node test/inspect.js test/fixtures/Red-Display-P3.png 3 | 4 | import { decode } from "fast-png"; 5 | import { extractChunks } from "../src/png-metadata-writer/index.js"; 6 | import { decode_iCCP, decode_iTXt } from "../../src/util.js"; 7 | import fs from "fs/promises"; 8 | import { parse as parseICC } from "icc"; 9 | import { inflate } from "pako"; 10 | 11 | async function inspect(file, iccOut) { 12 | const buf = await 
fs.readFile(file); 13 | if (file.endsWith(".icc")) { 14 | const profileParsed = parseICC(buf); 15 | console.log("Color Profile:", profileParsed); 16 | } else { 17 | const chunks = extractChunks(buf); 18 | const iCCP = chunks.find((c) => c.name === "iCCP"); 19 | if (iCCP) { 20 | const { name, compression, data } = decode_iCCP(iCCP.data); 21 | console.log("Embedded Profile:", name); 22 | 23 | const profileDecompressed = inflate(data); 24 | console.log(compression, data); 25 | const profileParsed = parseICC(Buffer.from(profileDecompressed)); 26 | console.log("Color Profile:", profileParsed); 27 | if (iccOut) { 28 | await fs.writeFile(iccOut, profileDecompressed); 29 | } 30 | } else { 31 | console.log("No color profile data"); 32 | } 33 | 34 | const iTXts = chunks.filter((c) => c.name === "iTXt"); 35 | for (let iTXt of iTXts) { 36 | // const data = decode_iTXt(iTXt.data); 37 | // console.log("iTXt Data:", data); 38 | } 39 | 40 | const { data, width, height } = decode(buf); 41 | console.log("Size: %d x %d px", width, height); 42 | console.log("Data:", data.slice(0, 4), " ..."); 43 | } 44 | } 45 | 46 | if (!process.argv[2]) throw new Error("no file specified"); 47 | inspect(process.argv[2], process.argv[3]); 48 | -------------------------------------------------------------------------------- /test/deprecated/png-for-print.js: -------------------------------------------------------------------------------- 1 | import canvasSketch from "canvas-sketch"; 2 | import { createSaveSetup } from "./export-png.js"; 3 | import { canvasToBuffer, downloadBuffer } from "../save.js"; 4 | import { deflate, inflate } from "pako"; 5 | import ColorEngine from "jscolorengine"; 6 | import { 7 | extractChunks, 8 | encodeChunks, 9 | } from "../src/png-metadata-writer/index.js"; 10 | import { decode, encode } from "fast-png"; 11 | import { parse as parseICC } from "icc"; 12 | import { 13 | Intent, 14 | decodeProfileData, 15 | decode_iCCP, 16 | encode_iCCP, 17 | encode_iTXt, 18 | 
encode_pHYs_PPI, 19 | encode_sRGB, 20 | encode_standardChromatics, 21 | encode_standardGamma, 22 | withoutChunks, 23 | } from "../../src/util.js"; 24 | import { Buffer } from "buffer"; 25 | import paperSizes from "../src/paper-sizes.js"; 26 | import convert from "convert-length"; 27 | 28 | export default function pngForPrint(pngBuffer, opts = {}) { 29 | const { pixelsPerInch, colorSpace, intent = Intent.Perceptual } = opts; 30 | const { width, height, data, depth } = decode(pngBuffer); 31 | if (depth !== 8) throw new Error("Currently only 8-bit depth is supported"); 32 | let chunks = extractChunks(pngBuffer); 33 | 34 | // If pixelsPerInch is unspecified, do not modify pHYs 35 | // If pixelsPerInch is specified, replace existing chunk 36 | if (pixelsPerInch) { 37 | chunks = withoutChunks(chunks, "pHYs"); 38 | const pHYs = encode_pHYs_PPI(pixelsPerInch); 39 | chunks.splice(1, 0, { name: "pHYs", data: pHYs }); 40 | } 41 | 42 | // User has specified the output as 'srgb' 43 | if (colorSpace === "srgb") { 44 | // Strip existing color chunks 45 | chunks = withoutChunks(chunks, ["gAMA", "cHRM", "sRGB", "iCCP"]); 46 | // Replace with new standardized sRGB chunks 47 | chunks.splice( 48 | 1, 49 | 0, 50 | { name: "sRGB", data: encode_sRGB(intent) }, 51 | { name: "gAMA", data: encode_standardGamma() }, 52 | { name: "cHRM", data: encode_standardChromatics() } 53 | ); 54 | } else if (colorSpace === "display-p3") { 55 | // Nothing to do 56 | } 57 | 58 | return encodeChunks(chunks); 59 | } 60 | -------------------------------------------------------------------------------- /test/deprecated/save-png.js: -------------------------------------------------------------------------------- 1 | // import * as png from "@stevebel/png"; 2 | // import { COLOR_TYPES } from "@stevebel/png/lib/helpers/color-types"; 3 | import { extractChunks, encodeChunks } from "./png-metadata-writer"; 4 | import { deflate } from "pako"; 5 | // import { encode as pngEncode } from "fast-png"; 6 | import convert 
from "convert-length"; 7 | 8 | let link; 9 | const noop = () => {}; 10 | const unitByteLookup = { 11 | m: 1, 12 | in: 2, 13 | }; 14 | 15 | function saveBlob(blob, opts = {}) { 16 | return new Promise((resolve) => { 17 | const filename = opts.filename || "download.png"; 18 | if (!link) { 19 | link = document.createElement("a"); 20 | link.style.visibility = "hidden"; 21 | link.target = "_blank"; 22 | } 23 | link.download = filename; 24 | link.href = window.URL.createObjectURL(blob); 25 | document.body.appendChild(link); 26 | link.onclick = () => { 27 | link.onclick = noop; 28 | setTimeout(() => { 29 | window.URL.revokeObjectURL(blob); 30 | if (link.parentElement) link.parentElement.removeChild(link); 31 | link.removeAttribute("href"); 32 | resolve({ filename }); 33 | }); 34 | }; 35 | link.click(); 36 | }); 37 | } 38 | 39 | export async function saveCanvas(canvas, opts = {}) { 40 | const { id, metadata, filename } = opts; 41 | let fileWriter; 42 | if (typeof window.showSaveFilePicker === "function") { 43 | const handle = await window.showSaveFilePicker({ 44 | startIn: "downloads", 45 | suggestedName: filename, 46 | excludeAcceptAllOption: true, 47 | id, 48 | types: [ 49 | { 50 | description: "PNG Image", 51 | accept: { 52 | "image/png": [".png"], 53 | }, 54 | }, 55 | ], 56 | }); 57 | 58 | fileWriter = await handle.createWritable(); 59 | } 60 | 61 | const type = "image/png"; 62 | const blob = await (canvas.convertToBlob 63 | ? 
canvas.convertToBlob(type) 64 | : new Promise((resolve) => canvas.toBlob(resolve, type))); 65 | const buf = new Uint8Array(await blob.arrayBuffer()); 66 | const encoded = addMetadata(buf, metadata); 67 | if (fileWriter) { 68 | fileWriter.write(encoded); 69 | fileWriter.close(); 70 | } else { 71 | const blob = new Blob([encoded], { type: "image/png" }); 72 | return saveBlob(blob, { filename }); 73 | } 74 | } 75 | 76 | export async function loadProfile(uri, name) { 77 | const resp = await fetch(uri); 78 | const buf = await resp.arrayBuffer(); 79 | const data = deflate(buf); 80 | return { data, name }; 81 | } 82 | 83 | function writeUInt32(uint8array, num, offset) { 84 | uint8array[offset] = (num & 0xff000000) >> 24; 85 | uint8array[offset + 1] = (num & 0x00ff0000) >> 16; 86 | uint8array[offset + 2] = (num & 0x0000ff00) >> 8; 87 | uint8array[offset + 3] = num & 0x000000ff; 88 | } 89 | 90 | function getProfileByteData(profileName, compressedData) { 91 | const nameBytes = convertStringToBytes(profileName).slice(0, 79); 92 | const buf = new Uint8Array(nameBytes.length + 2 + compressedData.length); 93 | buf.set(nameBytes, 0); 94 | buf.set(compressedData, nameBytes.length + 2); 95 | return buf; 96 | } 97 | 98 | export function addMetadata(encoded, metadata = {}) { 99 | const { profile, pixelsPerInch } = metadata; 100 | if (profile || pixelsPerInch) { 101 | const oldChunks = extractChunks(encoded).filter((c) => { 102 | const name = c.name; 103 | // discard existing profile/dimensions 104 | if (name === "iCCP" && profile) return false; 105 | if (name === "sRGB" && profile) return false; 106 | if (name === "pHYs" && pixelsPerInch) return false; 107 | return true; 108 | }); 109 | 110 | let iCCP; 111 | if (profile) { 112 | iCCP = { 113 | name: "iCCP", 114 | data: getProfileByteData(profile.name, deflate(profile.data)), 115 | }; 116 | } 117 | 118 | let pHYs; 119 | if (pixelsPerInch) { 120 | const units = "m"; 121 | const ppu = convert(1, units, "px", { pixelsPerInch, roundPixel: 
true }); 122 | 123 | const data = new Uint8Array(9); 124 | writeUInt32(data, ppu, 0); 125 | writeUInt32(data, ppu, 4); 126 | data[8] = 1; // meter unit 127 | 128 | pHYs = { name: "pHYs", data }; 129 | } 130 | 131 | const extraChunks = [iCCP, pHYs].filter(Boolean); 132 | const newChunks = oldChunks.slice(); 133 | newChunks.splice(1, 0, ...extraChunks); 134 | return encodeChunks(newChunks); 135 | } else { 136 | return encoded; 137 | } 138 | } 139 | 140 | function convertStringToBytes(val) { 141 | const data = new Uint8Array(val.length); 142 | for (let i = 0; i < val.length; i++) { 143 | data[i] = val.charCodeAt(i); 144 | } 145 | return data; 146 | } 147 | -------------------------------------------------------------------------------- /test/deprecated/splice.js: -------------------------------------------------------------------------------- 1 | import { test } from "brittle"; 2 | 3 | import { 4 | encodeChunks, 5 | extractChunks, 6 | } from "../src/png-metadata-writer/index.js"; 7 | import { 8 | decode_iTXt, 9 | encode_iTXt, 10 | encode_iCCP, 11 | encode_standardChromatics, 12 | encode_standardGamma, 13 | encode_pHYs_PPI, 14 | } from "../../src/util.js"; 15 | import fs from "fs/promises"; 16 | import { parse as parseICC } from "icc"; 17 | import { inflate, deflate } from "pako"; 18 | import { encode } from "fast-png"; 19 | import paperSizes from "../src/paper-sizes.js"; 20 | import convertLength from "convert-length"; 21 | 22 | async function start() { 23 | const { dimensions, units } = paperSizes.a6; 24 | const pixelsPerInch = 300; 25 | const [width, height] = dimensions.map((d) => 26 | convertLength(d, units, "px", { pixelsPerInch, roundPixel: true }) 27 | ); 28 | 29 | console.log("Dimensions:", dimensions.join(" x "), units); 30 | 31 | const data = new Uint8Array(4 * width * height); 32 | data.fill(0xff); 33 | 34 | const RGB = [233, 227, 213]; 35 | for (let i = 0; i < width * height; i++) { 36 | data[i * 4 + 0] = RGB[0]; 37 | data[i * 4 + 1] = RGB[1]; 38 | data[i 
* 4 + 2] = RGB[2]; 39 | } 40 | 41 | let png = encode({ 42 | data, 43 | width, 44 | height, 45 | }); 46 | 47 | const keyword = "metadata"; 48 | 49 | let chunks = extractChunks(png); 50 | chunks = withoutChunks(chunks, ["iCCP", "gAMA", "cHRM", "sRGB", "pHYs"]); 51 | chunks = chunks.filter((c) => { 52 | if (c.name === "iTXt") { 53 | const d = decode_iTXt(c.data); 54 | if (d.keyword === keyword) { 55 | return false; 56 | } 57 | } 58 | return true; 59 | }); 60 | 61 | const sRGB = await fs.readFile("test/fixtures/sRGB IEC61966-2.1.icc"); 62 | const sRGBData = parseICC(sRGB); 63 | console.log("Color Profile:", sRGBData.description); 64 | const profile = encode_iCCP({ 65 | name: sRGBData.description, 66 | data: deflate(sRGB), 67 | }); 68 | 69 | const pHYs = encode_pHYs_PPI(pixelsPerInch); 70 | chunks.splice( 71 | 1, 72 | 0, 73 | { name: "gAMA", data: encode_standardGamma() }, 74 | { name: "iCCP", data: profile }, 75 | { name: "cHRM", data: encode_standardChromatics() }, 76 | { name: "pHYs", data: pHYs }, 77 | { 78 | name: "iTXt", 79 | data: encode_iTXt({ 80 | keyword, 81 | text: "hello world", 82 | }), 83 | } 84 | ); 85 | 86 | png = encodeChunks(chunks); 87 | await fs.writeFile("test/fixtures/test.png", png); 88 | } 89 | 90 | start(); 91 | 92 | function withoutChunks(chunks, nameFilter) { 93 | if (typeof nameFilter === "string") nameFilter = [nameFilter]; 94 | return chunks.filter((c) => !nameFilter.includes(c.name)); 95 | } 96 | -------------------------------------------------------------------------------- /test/encoded/generated-0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mattdesl/png-tools/88c409f4edb749812e37f8d4c6e76407934e0494/test/encoded/generated-0.png -------------------------------------------------------------------------------- /test/encoded/generated-1.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/mattdesl/png-tools/88c409f4edb749812e37f8d4c6e76407934e0494/test/encoded/generated-1.png -------------------------------------------------------------------------------- /test/encoded/generated-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mattdesl/png-tools/88c409f4edb749812e37f8d4c6e76407934e0494/test/encoded/generated-2.png -------------------------------------------------------------------------------- /test/encoded/generated-3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mattdesl/png-tools/88c409f4edb749812e37f8d4c6e76407934e0494/test/encoded/generated-3.png -------------------------------------------------------------------------------- /test/encoded/generated-4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mattdesl/png-tools/88c409f4edb749812e37f8d4c6e76407934e0494/test/encoded/generated-4.png -------------------------------------------------------------------------------- /test/generate-png-fixtures.js: -------------------------------------------------------------------------------- 1 | import { pngs } from "./png-fixtures.js"; 2 | import { encode } from "fast-png"; // use a known encoder 3 | import fs from "fs/promises"; 4 | 5 | for (let i = 0; i < pngs.length; i++) { 6 | try { 7 | const buf = encode(pngs[i]); 8 | await fs.writeFile(`test/encoded/generated-${i}.png`, buf); 9 | } catch (err) { 10 | console.error(`Error on PNG index ${i}`); 11 | console.error(err); 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /test/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | benchmark 7 | 20 | 21 | 22 |
23 |

canvas params:

24 |

25 |     
26 | 33 | 34 | 35 | 36 | 37 |
38 | 39 |
40 | 41 |
42 | 43 | 44 | 45 | -------------------------------------------------------------------------------- /test/png-fixtures.js: -------------------------------------------------------------------------------- 1 | export const pngs = [ 2 | { 3 | width: 2, 4 | height: 2, 5 | depth: 8, 6 | channels: 3, 7 | data: new Uint8Array([ 8 | 0xff,0xff,0xff, 9 | 0xff,0x00,0x00, 10 | 0x00,0xff,0x00, 11 | 0x00,0x00,0xff 12 | ]) 13 | }, 14 | { 15 | width: 1, 16 | height: 3, 17 | depth: 8, 18 | channels: 3, 19 | data: new Uint8Array([ 20 | 0xff,0x00,0x00, 21 | 0x00,0xff,0x00, 22 | 0x00,0x00,0xff 23 | ]) 24 | }, 25 | { 26 | width: 3, 27 | height: 1, 28 | depth: 8, 29 | channels: 4, 30 | data: new Uint8Array([ 31 | 0xff,0x00,0x00,0xff, 32 | 0x00,0xff,0x00,0xff, 33 | 0x00,0x00,0xff,0xff 34 | ]) 35 | }, 36 | { 37 | width: 1, 38 | height: 3, 39 | depth: 16, 40 | channels: 3, 41 | data: new Uint16Array([ 42 | ...floatsToUint16([ 1, 0, 0 ]), 43 | ...floatsToUint16([ 0, 1, 0 ]), 44 | ...floatsToUint16([ 0, 0, 1 ]) 45 | ]) 46 | }, 47 | { 48 | width: 2, 49 | height: 2, 50 | depth: 16, 51 | channels: 4, 52 | data: new Uint16Array([ 53 | ...floatsToUint16([ 1, 0, 0, 1 ]), 54 | ...floatsToUint16([ 0, 1, 0, 0.5 ]), 55 | ...floatsToUint16([ 0, 0, 1, 0.25 ]), 56 | ...floatsToUint16([ 0, 0, 0, 0 ]) 57 | ]) 58 | } 59 | ] 60 | 61 | function floatsToUint16 (rgb) { 62 | return rgb.map(r => Math.max(0, Math.min(0xffff, Math.round(r * 0xffff)))) 63 | } 64 | -------------------------------------------------------------------------------- /test/test.js: -------------------------------------------------------------------------------- 1 | import test from "tape"; 2 | import fs from "fs/promises"; 3 | import { inflate, deflate } from "pako"; 4 | import { pngs } from "./png-fixtures.js"; 5 | import * as FastPNG from "fast-png"; 6 | 7 | import { 8 | // Utils 9 | crc32, 10 | flattenBuffers, 11 | colorTypeToChannels, 12 | 13 | // Constants 14 | ChunkType, 15 | ColorType, 16 | FilterMethod, 17 | Intent, 18 | 19 | // 
Encoding 20 | encode, 21 | encodeHeader, 22 | encodeChunk, 23 | writeChunks, 24 | 25 | // Decoding 26 | readChunks, 27 | readIHDR, 28 | reader, 29 | 30 | // Chunk utils 31 | encode_IDAT_raw, 32 | encode_pHYs, 33 | encode_pHYs_PPI, 34 | encode_sRGB, 35 | encode_standardChromatics, 36 | encode_standardGamma, 37 | encode_iTXt, 38 | encode_IHDR, 39 | encode_iCCP, 40 | decode_iCCP, 41 | decode_iTXt, 42 | decode_IHDR, 43 | chunkNameToType, 44 | chunkTypeToName, 45 | } from "../index.js"; 46 | 47 | test("crc32", async (t) => { 48 | const buf = new Uint8Array(32); 49 | for (let i = 0; i < buf.length; i++) { 50 | buf[i] = i ** 2 % 256; 51 | } 52 | const r = crc32(buf); 53 | t.equal(r, 1934867379); 54 | }); 55 | 56 | test("profile data", async (t) => { 57 | const enc = encode_iCCP({ 58 | name: "Some Profile", 59 | data: new Uint8Array([4, 3, 1, 2]), 60 | }); 61 | const data = decode_iCCP(enc); 62 | t.equals(data.name, "Some Profile"); 63 | t.equals(data.compression, 0); 64 | t.deepEquals(data.data, new Uint8Array([4, 3, 1, 2])); 65 | 66 | const long = 67 | "Some Profile With a Really Long Name This is Long Again Some Profile With a Really Long Name This is Long"; 68 | const enc2 = encode_iCCP({ name: long, data: new Uint8Array([4, 3, 1, 2]) }); 69 | const data2 = decode_iCCP(enc2); 70 | t.equals(data2.name.length, 79); 71 | t.deepEquals(data2.compression, 0); 72 | t.deepEquals(data2.data, new Uint8Array([4, 3, 1, 2])); 73 | }); 74 | 75 | test("iTXt data", async (t) => { 76 | const enc = encode_iTXt({ 77 | keyword: "metadata", 78 | compressionFlag: 0, 79 | compressionMethod: 0, 80 | languageTag: "en", 81 | translatedKeyword: "test", 82 | text: "hello world", 83 | }); 84 | const data = decode_iTXt(enc); 85 | t.deepEquals(data, { 86 | keyword: "metadata", 87 | compressionFlag: 0, 88 | compressionMethod: 0, 89 | languageTag: "en", 90 | translatedKeyword: "test", 91 | text: "hello world", 92 | }); 93 | }); 94 | 95 | test("iTXt data", async (t) => { 96 | const enc = encode_IHDR({ 
97 | width: 256, 98 | height: 121, 99 | depth: 16, 100 | colorType: ColorType.GRAYSCALE, 101 | interlace: 1, 102 | }); 103 | const data = decode_IHDR(enc); 104 | t.deepEquals(data, { 105 | width: 256, 106 | height: 121, 107 | depth: 16, 108 | colorType: ColorType.GRAYSCALE, 109 | compression: 0, 110 | filter: 0, 111 | interlace: 1, 112 | }); 113 | }); 114 | 115 | test("encoder matches", async (t) => { 116 | for (let i = 0; i < pngs.length; i++) { 117 | const colorType = pngs[i].channels === 4 ? ColorType.RGBA : ColorType.RGB; 118 | const input = pngs[i]; 119 | const enc0 = FastPNG.encode(input); 120 | const enc1 = encode( 121 | { 122 | ...input, 123 | filter: FilterMethod.None, 124 | colorType, 125 | }, 126 | deflate, 127 | { level: 3 } 128 | ); 129 | const c0 = readChunks(enc0).find((f) => f.type === ChunkType.IDAT); 130 | const c1 = readChunks(enc1).find((f) => f.type === ChunkType.IDAT); 131 | 132 | const eq = Buffer.from(inflate(c0.data)).equals( 133 | Buffer.from(inflate(c1.data)) 134 | ); 135 | t.ok(eq, "buffer equals idx " + i); 136 | } 137 | }); 138 | 139 | test("test png encoder filtering", async (t) => { 140 | const arr = pngs; 141 | for (let i = 0; i < arr.length; i++) { 142 | const colorType = arr[i].channels === 4 ? 
ColorType.RGBA : ColorType.RGB; 143 | const input = arr[i]; 144 | const filters = Object.values(FilterMethod); 145 | for (let f of filters) { 146 | const enc = encode({ ...input, colorType, filter: f }, deflate, { 147 | level: 3, 148 | }); 149 | const { data } = FastPNG.decode(enc); 150 | t.deepEqual(input.data, data, `img ${i} filter ${f}`); 151 | } 152 | } 153 | }); 154 | 155 | test("comparison png decoder works", async (t) => { 156 | for (let i = 0; i < pngs.length; i++) { 157 | const buf = await fs.readFile(`test/encoded/generated-${i}.png`); 158 | const data = FastPNG.decode(buf); 159 | t.deepEqual(data, { ...pngs[i], text: {} }); 160 | } 161 | }); 162 | 163 | test("our png decoder works", async (t) => { 164 | for (let i = 0; i < pngs.length; i++) { 165 | const buf = await fs.readFile(`test/encoded/generated-${i}.png`); 166 | const chunks = readChunks(buf); 167 | const data = writeChunks(chunks); 168 | t.ok(Buffer.from(buf).equals(Buffer.from(data)), "buffers equal"); 169 | } 170 | 171 | for (let i = 0; i < pngs.length; i++) { 172 | const png = pngs[i]; 173 | const buf = await fs.readFile(`test/encoded/generated-${i}.png`); 174 | const meta = readIHDR(buf); 175 | t.equals(meta.width, png.width); 176 | t.equals(meta.height, png.height); 177 | t.equals(meta.depth, png.depth); 178 | t.equals(meta.colorType, png.channels === 3 ? 
2 : 6); 179 | } 180 | 181 | const inputBuf = await fs.readFile(`test/encoded/generated-0.png`); 182 | const inputLargerBuf = new Uint8Array(inputBuf.length + 8); 183 | inputLargerBuf.set(inputBuf, 4); 184 | const subBuf = inputLargerBuf.subarray(4, 4 + inputBuf.length); 185 | t.deepEqual( 186 | readIHDR(subBuf), 187 | { 188 | width: 2, 189 | height: 2, 190 | depth: 8, 191 | colorType: 2, 192 | compression: 0, 193 | filter: 0, 194 | interlace: 0, 195 | }, 196 | "subarray should work" 197 | ); 198 | 199 | t.equals(0x49484452, ChunkType.IHDR); 200 | t.equals(chunkNameToType("IHDR"), ChunkType.IHDR); 201 | t.equals(chunkTypeToName(ChunkType.IHDR), "IHDR"); 202 | }); 203 | 204 | test("encode and decode fields", async (t) => { 205 | t.deepEqual( 206 | decode_IHDR( 207 | encode_IHDR({ 208 | width: 256, 209 | height: 256, 210 | }) 211 | ), 212 | { 213 | width: 256, 214 | height: 256, 215 | depth: 8, 216 | colorType: 6, 217 | compression: 0, 218 | filter: 0, 219 | interlace: 0, 220 | } 221 | ); 222 | 223 | t.deepEqual( 224 | decode_IHDR( 225 | encode_IHDR({ 226 | width: 128, 227 | height: 256, 228 | depth: 16, 229 | colorType: 2, 230 | interlace: 1, 231 | }) 232 | ), 233 | { 234 | width: 128, 235 | height: 256, 236 | depth: 16, 237 | colorType: 2, 238 | compression: 0, 239 | filter: 0, 240 | interlace: 1, 241 | } 242 | ); 243 | }); 244 | 245 | // writes the chunk types in hex 246 | function writeChunkTable() { 247 | const ChunkTypeNames = [ 248 | // Critical 249 | "IHDR", 250 | "PLTE", 251 | "IDAT", 252 | "IEND", 253 | // Ancillary 254 | "cHRM", 255 | "gAMA", 256 | "iCCP", 257 | "sBIT", 258 | "sRGB", 259 | "bKGD", 260 | "hIST", 261 | "tRNS", 262 | "pHYs", 263 | "sPLT", 264 | "tIME", 265 | "iTXt", 266 | "tEXt", 267 | "zTXt", 268 | ]; 269 | 270 | for (let name of ChunkTypeNames) { 271 | console.log( 272 | ` ${name}: 0x${chunkNameToType(name).toString(16).padStart(2, "0")},` 273 | ); 274 | } 275 | } 276 | 
--------------------------------------------------------------------------------