├── .gitattributes ├── tsconfig.types.json ├── .gitignore ├── eslint.config.mjs ├── .editorconfig ├── tsconfig.types.test.json ├── tsconfig.json ├── jest.config.js ├── .prettierrc.js ├── lib ├── helpers │ ├── getName.js │ ├── getSource.js │ ├── splitIntoLines.js │ ├── getGeneratedSourceInfo.js │ ├── splitIntoPotentialTokens.js │ ├── streamChunksOfRawSource.js │ ├── streamChunks.js │ ├── readMappings.js │ ├── streamAndGetSourceAndMap.js │ ├── stringBufferUtils.js │ ├── getFromStreamChunks.js │ ├── createMappingsSerializer.js │ ├── streamChunksOfCombinedSourceMap.js │ └── streamChunksOfSourceMap.js ├── SizeOnlySource.js ├── Source.js ├── CompatSource.js ├── index.js ├── RawSource.js ├── PrefixSource.js ├── OriginalSource.js ├── SourceMapSource.js ├── ConcatSource.js └── CachedSource.js ├── test ├── SizeOnlySource.js ├── package-entry.js ├── CompatSource.js ├── __mocks__ │ └── createMappingsSerializer.js ├── PrefixSource.js ├── helpers.js ├── RawSource.js ├── OriginalSource.js ├── ConcatSource.js ├── Fuzzy.js ├── ReplaceSource.js ├── CachedSource.js └── SourceMapSource.js ├── LICENSE ├── .github └── workflows │ └── test.yml ├── package.json ├── README.md └── types.d.ts /.gitattributes: -------------------------------------------------------------------------------- 1 | * text=auto eol=lf 2 | -------------------------------------------------------------------------------- /tsconfig.types.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "./tsconfig" 3 | } 4 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /node_modules 2 | /coverage 3 | .eslintcache 4 | yarn.lock 5 | -------------------------------------------------------------------------------- /eslint.config.mjs: -------------------------------------------------------------------------------- 1 | import { defineConfig } from 
"eslint/config"; 2 | import config from "eslint-config-webpack"; 3 | 4 | export default defineConfig([ 5 | { 6 | extends: [config], 7 | rules: { 8 | "n/prefer-node-protocol": "off", 9 | }, 10 | }, 11 | ]); 12 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | indent_style = tab 5 | indent_size = 2 6 | charset = utf-8 7 | trim_trailing_whitespace = true 8 | insert_final_newline = true 9 | max_line_length = 80 10 | 11 | [*.{yml,yaml,json}] 12 | indent_style = space 13 | indent_size = 2 -------------------------------------------------------------------------------- /tsconfig.types.test.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "./tsconfig", 3 | "compilerOptions": { 4 | "strict": false, 5 | "noImplicitThis": true, 6 | "alwaysStrict": true, 7 | "strictNullChecks": true, 8 | "types": ["node", "jest"], 9 | }, 10 | "include": ["test/*.js"] 11 | } 12 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "es2018", 4 | "module": "commonjs", 5 | "lib": ["es2018"], 6 | "allowJs": true, 7 | "checkJs": true, 8 | "noEmit": true, 9 | "strict": true, 10 | "alwaysStrict": true, 11 | "types": ["node"], 12 | "esModuleInterop": true 13 | }, 14 | "include": ["lib/**/*.js"] 15 | } 16 | -------------------------------------------------------------------------------- /jest.config.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | /** @type {import('jest').Config} */ 4 | const config = { 5 | prettierPath: require.resolve("prettier-2"), 6 | forceExit: true, 7 | testMatch: ["/test/*.js"], 8 | testPathIgnorePatterns: ["/test/helpers.js"], 9 | 
/** @typedef {import("../Source").RawSourceMap} RawSourceMap */

/**
 * Resolves the name string stored at a mapping's name index.
 * @param {RawSourceMap} sourceMap the source map whose `names` array is consulted
 * @param {number} index name index from a decoded mapping (negative means "no name")
 * @returns {string | undefined | null} the name, `null` for a negative index,
 * or `undefined` when the index is out of the `names` array's bounds
 */
const getName = (sourceMap, index) =>
	index < 0 ? null : sourceMap.names[index];
/** @typedef {import("../Source").RawSourceMap} RawSourceMap */

/**
 * Resolves the source path stored at a mapping's source index, prefixing it
 * with the map's `sourceRoot` (joined with exactly one "/") when present.
 * @param {RawSourceMap} sourceMap source map
 * @param {number} index source index from a decoded mapping (negative means "no source")
 * @returns {string | null} the resolved source path, or `null` for a negative index
 */
const getSource = (sourceMap, index) => {
	if (index < 0) return null;
	const entry = sourceMap.sources[index];
	const root = sourceMap.sourceRoot;
	// Falsy covers both a missing sourceRoot and the empty string.
	if (!root) return entry;
	return root.endsWith("/") ? root + entry : `${root}/${entry}`;
};
/**
 * Splits a string into lines, each line keeping its trailing "\n"
 * (the final line may lack one). The empty string yields an empty array.
 * @param {string} str string
 * @returns {string[]} lines including their newline terminators
 */
const splitIntoLines = (str) =>
	// Either a (possibly empty) line ending in "\n", or trailing text without one.
	str.match(/[^\n]*\n|[^\n]+/g) || [];
/**
 * @typedef {object} GeneratedSourceInfo
 * @property {number=} generatedLine generated line
 * @property {number=} generatedColumn generated column
 * @property {string=} source source
 */

/**
 * Computes the end position (line/column) of a chunk of generated text.
 * @param {string | undefined} source source text, or undefined for "no content"
 * @returns {GeneratedSourceInfo} position info ({} when source is undefined)
 */
const getGeneratedSourceInfo = (source) => {
	if (source === undefined) return {};
	const lastLineStart = source.lastIndexOf("\n");
	if (lastLineStart < 0) {
		// Single line: the column is simply the string length.
		return { generatedLine: 1, generatedColumn: source.length, source };
	}
	// Count lines by hopping from newline to newline rather than scanning
	// every character; start at 2 to account for the final newline itself.
	let generatedLine = 2;
	let nl = source.indexOf("\n");
	while (nl !== lastLineStart) {
		generatedLine++;
		nl = source.indexOf("\n", nl + 1);
	}
	return {
		generatedLine,
		generatedColumn: source.length - lastLineStart - 1,
		source,
	};
};
// Character codes the tokenizer cares about.
const CC_NEWLINE = 10; // "\n"
const CC_SEMICOLON = 59; // ";"
const CC_BRACE_OPEN = 123; // "{"
const CC_BRACE_CLOSE = 125; // "}"
const CC_SPACE = 32; // " "
const CC_CR = 13; // "\r"
const CC_TAB = 9; // "\t"

/**
 * Splits code into "potential tokens": runs of text ending at statement-ish
 * boundaries (";", "{", "}", newline), with any following run of separators
 * and whitespace — and one terminating newline — attached to the same token.
 * @param {string} str string
 * @returns {string[] | null} tokens, or null for the empty string
 */
const splitIntoPotentialTokens = (str) => {
	const length = str.length;
	if (length === 0) return null;
	const tokens = [];
	let pos = 0;
	while (pos < length) {
		const tokenStart = pos;
		let code = str.charCodeAt(pos);
		// Consume ordinary characters up to the next boundary character.
		while (
			code !== CC_NEWLINE &&
			code !== CC_SEMICOLON &&
			code !== CC_BRACE_OPEN &&
			code !== CC_BRACE_CLOSE
		) {
			pos++;
			if (pos >= length) break;
			code = str.charCodeAt(pos);
		}
		if (pos < length) {
			// Attach the run of separators/whitespace to this token.
			while (
				code === CC_SEMICOLON ||
				code === CC_SPACE ||
				code === CC_BRACE_OPEN ||
				code === CC_BRACE_CLOSE ||
				code === CC_CR ||
				code === CC_TAB
			) {
				pos++;
				if (pos >= length) break;
				code = str.charCodeAt(pos);
			}
			// A newline terminates the token and is included in it.
			if (pos < length && code === CC_NEWLINE) pos++;
		}
		tokens.push(str.slice(tokenStart, pos));
	}
	return tokens;
};
class SizeOnlySource extends Source {
	/**
	 * A Source placeholder that only remembers its byte size; every content
	 * or map accessor throws, so callers can report sizes without retaining
	 * the actual content in memory.
	 * @param {number} size reported size in bytes
	 */
	constructor(size) {
		super();
		this._size = size;
	}

	/**
	 * @returns {Error} the error thrown by all content accessors
	 */
	_error() {
		return new Error(
			"Content and Map of this Source is not available (only size() is supported)",
		);
	}

	/**
	 * @returns {number} the size given at construction
	 */
	size() {
		return this._size;
	}

	/**
	 * @returns {SourceValue} never returns — content is unavailable
	 */
	source() {
		throw this._error();
	}

	/**
	 * @returns {Buffer} never returns — content is unavailable
	 */
	buffer() {
		throw this._error();
	}

	/**
	 * @param {MapOptions=} options map options
	 * @returns {RawSourceMap | null} never returns — map is unavailable
	 */
	// eslint-disable-next-line no-unused-vars
	map(options) {
		throw this._error();
	}

	/**
	 * @param {HashLike} hash hash
	 * @returns {void} never returns — content is unavailable
	 */
	// eslint-disable-next-line no-unused-vars
	updateHash(hash) {
		throw this._error();
	}
}
module.exports = SizeOnlySource; 68 | -------------------------------------------------------------------------------- /test/__mocks__/createMappingsSerializer.js: -------------------------------------------------------------------------------- 1 | /* 2 | MIT License http://www.opensource.org/licenses/mit-license.php 3 | Author Tobias Koppers @sokra 4 | */ 5 | 6 | "use strict"; 7 | 8 | const createMappingsSerializer = jest.requireActual( 9 | "../createMappingsSerializer", 10 | ); 11 | 12 | module.exports = (options) => { 13 | const fn = createMappingsSerializer(options); 14 | let lastLine = 1; 15 | let lastColumn = -1; 16 | return ( 17 | generatedLine, 18 | generatedColumn, 19 | sourceIndex, 20 | originalLine, 21 | originalColumn, 22 | nameIndex, 23 | ) => { 24 | if ( 25 | generatedLine >= lastLine && 26 | generatedColumn > (generatedLine === lastLine ? lastColumn : -1) && 27 | (sourceIndex === -1 28 | ? originalLine === -1 && originalColumn === -1 && nameIndex === -1 29 | : sourceIndex >= 0 && 30 | originalLine >= 1 && 31 | originalColumn >= 0 && 32 | nameIndex >= -1) 33 | ) { 34 | lastLine = generatedLine; 35 | lastColumn = generatedColumn; 36 | return fn( 37 | generatedLine, 38 | generatedColumn, 39 | sourceIndex, 40 | originalLine, 41 | originalColumn, 42 | nameIndex, 43 | ); 44 | } 45 | throw new Error(`Invalid mapping passed to mapping serializer: 46 | generatedLine = ${generatedLine} (lastLine = ${lastLine}), 47 | generatedColumn = ${generatedColumn} (lastColumn = ${lastColumn}), 48 | sourceIndex = ${sourceIndex}, 49 | originalLine = ${originalLine}, 50 | originalColumn = ${originalColumn}, 51 | nameIndex = ${nameIndex}`); 52 | }; 53 | }; 54 | -------------------------------------------------------------------------------- /lib/helpers/streamChunksOfRawSource.js: -------------------------------------------------------------------------------- 1 | /* 2 | MIT License http://www.opensource.org/licenses/mit-license.php 3 | Author Tobias Koppers @sokra 4 | */ 5 | 
/**
 * Streams a plain string as generated-code chunks, one line per chunk.
 * Mirrors the mapping-aware streamers but emits no mappings (all mapping
 * arguments are -1). The line-splitting is done inline; each chunk keeps
 * its trailing "\n" except possibly the last.
 * @param {string} source source text
 * @param {OnChunk} onChunk called once per line with (chunk, line, 0, -1, -1, -1, -1)
 * @param {OnSource} _onSource unused — a raw source has no original sources
 * @param {OnName} _onName unused — a raw source has no names
 * @returns {GeneratedSourceInfo} the position just past the emitted content
 */
const streamChunksOfRawSource = (source, onChunk, _onSource, _onName) => {
	const length = source.length;
	let generatedLine = 0;
	/** @type {undefined | string} */
	let lastChunk;
	let pos = 0;
	while (pos < length) {
		const nl = source.indexOf("\n", pos);
		const end = nl === -1 ? length : nl + 1;
		lastChunk = source.slice(pos, end);
		generatedLine++;
		onChunk(lastChunk, generatedLine, 0, -1, -1, -1, -1);
		pos = end;
	}
	// A trailing newline (or empty input) means the cursor sits at the start
	// of the next, not-yet-emitted line.
	if (generatedLine === 0 || /** @type {string} */ (lastChunk).endsWith("\n")) {
		return { generatedLine: generatedLine + 1, generatedColumn: 0 };
	}
	return {
		generatedLine,
		generatedColumn: /** @type {string} */ (lastChunk).length,
	};
};
getGeneratedSourceInfo(source) 54 | : streamChunksOfRawSource(source, onChunk, onSource, onName); 55 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Test 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | branches: 9 | - main 10 | 11 | jobs: 12 | lint: 13 | runs-on: ubuntu-latest 14 | steps: 15 | - uses: actions/checkout@v4 16 | - name: Use Node.js 17 | uses: actions/setup-node@v4 18 | with: 19 | node-version: lts/* 20 | cache: "npm" 21 | - run: npm ci 22 | - name: Cache eslint result 23 | uses: actions/cache@v4 24 | with: 25 | path: .eslintcache 26 | key: lint-eslint-${{ runner.os }}-node-${{ hashFiles('**/package-lock.json', '**/eslint.config.mjs') }} 27 | restore-keys: lint-eslint- 28 | - run: npm run lint 29 | test: 30 | strategy: 31 | fail-fast: false 32 | matrix: 33 | os: [ubuntu-latest, windows-latest, macos-latest] 34 | node-version: [10.x, 12.x, 14.x, 16.x, 18.x, 20.x, 22.x, 24.x] 35 | runs-on: ${{ matrix.os }} 36 | steps: 37 | - uses: actions/checkout@v4 38 | - uses: actions/github-script@v7 39 | id: calculate_architecture 40 | with: 41 | result-encoding: string 42 | script: | 43 | if ('${{ matrix.os }}' === 'macos-latest' && ('${{ matrix['node-version'] }}' === '10.x' || '${{ matrix['node-version'] }}' === '12.x' || '${{ matrix['node-version'] }}' === '14.x')) { 44 | return "x64" 45 | } else { 46 | return '' 47 | } 48 | - name: Use Node.js ${{ matrix.node-version }} 49 | uses: actions/setup-node@v4 50 | with: 51 | node-version: ${{ matrix.node-version }} 52 | architecture: ${{ steps.calculate_architecture.outputs.result }} 53 | cache: "npm" 54 | - run: npm install 55 | if: matrix.node-version == '10.x' || matrix.node-version == '12.x' || matrix.node-version == '14.x' 56 | - run: npm ci 57 | if: matrix.node-version != '10.x' && matrix.node-version != '12.x' && matrix.node-version != 
class Source {
	/**
	 * Base class of all sources. Subclasses must implement source() and
	 * updateHash(); buffer(), size(), map() and sourceAndMap() have defaults
	 * derived from source().
	 * @returns {SourceValue} source content (string or Buffer)
	 */
	source() {
		throw new Error("Abstract");
	}

	/**
	 * @returns {Buffer} content as a Buffer (utf8-encoded when source() returns a string)
	 */
	buffer() {
		const content = this.source();
		return Buffer.isBuffer(content) ? content : Buffer.from(content, "utf8");
	}

	/**
	 * @returns {number} content size in bytes
	 */
	size() {
		return this.buffer().length;
	}

	/**
	 * @param {MapOptions=} options map options
	 * @returns {RawSourceMap | null} source map; null when this source has none
	 */
	// eslint-disable-next-line no-unused-vars
	map(options) {
		return null;
	}

	/**
	 * @param {MapOptions=} options map options
	 * @returns {SourceAndMap} source and map fetched together
	 */
	sourceAndMap(options) {
		// Keep the source()-then-map() call order of the base contract.
		const source = this.source();
		const map = this.map(options);
		return { source, map };
	}

	/**
	 * @param {HashLike} hash hash to feed this source's content into
	 * @returns {void}
	 */
	// eslint-disable-next-line no-unused-vars
	updateHash(hash) {
		throw new Error("Abstract");
	}
}
SourceMaybeWithStreamChunksFunction */ 28 | 29 | /** 30 | * @param {SourceMaybeWithStreamChunksFunction} source source 31 | * @param {Options} options options 32 | * @param {OnChunk} onChunk on chunk 33 | * @param {OnSource} onSource on source 34 | * @param {OnName} onName on name 35 | * @returns {GeneratedSourceInfo} generated source info 36 | */ 37 | module.exports = (source, options, onChunk, onSource, onName) => { 38 | if (typeof source.streamChunks === "function") { 39 | return source.streamChunks(options, onChunk, onSource, onName); 40 | } 41 | const sourceAndMap = source.sourceAndMap(options); 42 | if (sourceAndMap.map) { 43 | return streamChunksOfSourceMap( 44 | /** @type {string} */ 45 | (sourceAndMap.source), 46 | sourceAndMap.map, 47 | onChunk, 48 | onSource, 49 | onName, 50 | Boolean(options && options.finalSource), 51 | Boolean(options && options.columns !== false), 52 | ); 53 | } 54 | return streamChunksOfRawSource( 55 | /** @type {string} */ 56 | (sourceAndMap.source), 57 | onChunk, 58 | onSource, 59 | onName, 60 | Boolean(options && options.finalSource), 61 | ); 62 | }; 63 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "webpack-sources", 3 | "version": "3.3.3", 4 | "description": "Source code handling classes for webpack", 5 | "keywords": ["webpack", "source-map"], 6 | "homepage": "https://github.com/webpack/webpack-sources#readme", 7 | "bugs": { 8 | "url": "https://github.com/webpack/webpack-sources/issues" 9 | }, 10 | "repository": { 11 | "type": "git", 12 | "url": "git+https://github.com/webpack/webpack-sources.git" 13 | }, 14 | "license": "MIT", 15 | "author": "Tobias Koppers @sokra", 16 | "main": "lib/index.js", 17 | "types": "types.d.ts", 18 | "files": ["lib/", "types.d.ts"], 19 | "scripts": { 20 | "lint": "npm run lint:code && npm run lint:types && npm run lint:types-test && npm run 
lint:special", 21 | "lint:code": "eslint --cache .", 22 | "lint:special": "node node_modules/tooling/inherit-types && node node_modules/tooling/format-file-header && node node_modules/tooling/generate-types", 23 | "lint:types": "tsc", 24 | "lint:types-test": "tsc -p tsconfig.types.test.json", 25 | "fmt": "npm run fmt:base -- --loglevel warn --write", 26 | "fmt:check": "npm run fmt:base -- --check", 27 | "fmt:base": "prettier --cache --ignore-unknown .", 28 | "fix": "npm run fix:code && npm run fix:special", 29 | "fix:code": "npm run lint:code -- --fix", 30 | "fix:special": "node node_modules/tooling/inherit-types --write && node node_modules/tooling/format-file-header --write && node node_modules/tooling/generate-types --write", 31 | "pretest": "npm run lint", 32 | "test": "jest", 33 | "cover": "jest --coverage" 34 | }, 35 | "devDependencies": { 36 | "@eslint/js": "^9.28.0", 37 | "@eslint/markdown": "^7.2.0", 38 | "@stylistic/eslint-plugin": "^5.6.1", 39 | "@types/jest": "^27.5.2", 40 | "globals": "^16.2.0", 41 | "eslint": "^9.28.0", 42 | "eslint-config-webpack": "^4.0.8", 43 | "eslint-config-prettier": "^10.1.5", 44 | "eslint-plugin-import": "^2.31.0", 45 | "eslint-plugin-jest": "^29.5.0", 46 | "eslint-plugin-jsdoc": "^61.5.0", 47 | "eslint-plugin-n": "^17.19.0", 48 | "eslint-plugin-prettier": "^5.4.1", 49 | "eslint-plugin-unicorn": "^62.0.0", 50 | "jest": "^27.5.1", 51 | "prettier": "^3.5.3", 52 | "prettier-2": "npm:prettier@^2", 53 | "source-map": "^0.7.3", 54 | "sourcemap-validator": "^2.1.0", 55 | "tooling": "webpack/tooling#v1.24.4", 56 | "typescript": "^5.3.3", 57 | "webpack": "^5.99.9" 58 | }, 59 | "engines": { 60 | "node": ">=10.13.0" 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /lib/CompatSource.js: -------------------------------------------------------------------------------- 1 | /* 2 | MIT License http://www.opensource.org/licenses/mit-license.php 3 | Author Tobias Koppers @sokra 4 | */ 5 | 6 | 
class CompatSource extends Source {
	/**
	 * Wraps a Source-like object unless it already is a real Source.
	 * @param {SourceLike} sourceLike source like
	 * @returns {Source} source
	 */
	static from(sourceLike) {
		if (sourceLike instanceof Source) return sourceLike;
		return new CompatSource(sourceLike);
	}

	/**
	 * @param {SourceLike} sourceLike object implementing at least source()
	 */
	constructor(sourceLike) {
		super();
		this._sourceLike = sourceLike;
	}

	/**
	 * @returns {SourceValue} source
	 */
	source() {
		return this._sourceLike.source();
	}

	buffer() {
		return typeof this._sourceLike.buffer === "function"
			? this._sourceLike.buffer()
			: super.buffer();
	}

	size() {
		return typeof this._sourceLike.size === "function"
			? this._sourceLike.size()
			: super.size();
	}

	/**
	 * @param {MapOptions=} options map options
	 * @returns {RawSourceMap | null} map
	 */
	map(options) {
		return typeof this._sourceLike.map === "function"
			? this._sourceLike.map(options)
			: super.map(options);
	}

	/**
	 * @param {MapOptions=} options map options
	 * @returns {SourceAndMap} source and map
	 */
	sourceAndMap(options) {
		return typeof this._sourceLike.sourceAndMap === "function"
			? this._sourceLike.sourceAndMap(options)
			: super.sourceAndMap(options);
	}

	/**
	 * @param {HashLike} hash hash
	 * @returns {void}
	 */
	updateHash(hash) {
		const delegate = this._sourceLike;
		if (typeof delegate.updateHash === "function") {
			return delegate.updateHash(hash);
		}
		// A custom map() without updateHash() would let cached hashes miss map
		// changes, so it is treated as a contract violation.
		if (typeof delegate.map === "function") {
			throw new Error(
				"A Source-like object with a 'map' method must also provide an 'updateHash' method",
			);
		}
		hash.update(this.buffer());
	}
}
jest.mock("./__mocks__/createMappingsSerializer");

const { PrefixSource } = require("../");
const { OriginalSource } = require("../");
const { ConcatSource } = require("../");
const { withReadableMappings } = require("./helpers");

describe("prefixSource", () => {
	it("should prefix a source", () => {
		const source = new PrefixSource(
			"\t",
			new OriginalSource(
				"console.log('test');console.log('test2');\nconsole.log('test22');\n",
				"console.js",
			),
		);
		// Expected map when column mappings are disabled (lines-only mode).
		const expectedMap1 = {
			version: 3,
			file: "x",
			mappings: "AAAA;AACA",
			names: [],
			sources: ["console.js"],
			sourcesContent: [
				"console.log('test');console.log('test2');\nconsole.log('test22');\n",
			],
		};
		const expectedSource = [
			"\tconsole.log('test');console.log('test2');",
			"\tconsole.log('test22');",
			"",
		].join("\n");
		expect(source.size()).toBe(67);
		expect(source.source()).toEqual(expectedSource);
		expect(
			source.map({
				columns: false,
			}),
		).toEqual(expectedMap1);
		expect(
			source.sourceAndMap({
				columns: false,
			}),
		).toEqual({
			source: expectedSource,
			map: expectedMap1,
		});
		// Expected map with full column mappings (the default behavior).
		const expectedMap2 = {
			version: 3,
			file: "x",
			mappings: "CAAA,oBAAoB;CACpB",
			names: [],
			sources: ["console.js"],
			sourcesContent: [
				"console.log('test');console.log('test2');\nconsole.log('test22');\n",
			],
		};
		const result = source.sourceAndMap();
		expect(result.source).toEqual(expectedSource);
		expect(withReadableMappings(result.map)).toEqual(
			withReadableMappings(expectedMap2),
		);
		expect(withReadableMappings(source.map())).toEqual(
			withReadableMappings(expectedMap2),
		);
	});

	it("should have consistent source/sourceAndMap behavior", () => {
		// Mix of sources with leading/trailing/empty newlines to exercise the
		// prefix insertion rules at every line-boundary combination.
		const source = new PrefixSource(
			"\t",
			new ConcatSource(
				new OriginalSource("console.log('test');\n", "consoleA.js"),
				new OriginalSource("\nconsole.log('test1');\n\n", "consoleB.js"),
				new OriginalSource("\nconsole.log('test2');\n", "consoleC.js"),
				new OriginalSource("console.log('test3');", "consoleD.js"),
				new OriginalSource("\n", "empty.js"),
				new OriginalSource("console.log('test4');", "consoleE.js"),
			),
		);

		const actualSource = source.source();
		const expectedSource = [
			"\tconsole.log('test');\n",
			"\t\n\tconsole.log('test1');\n\t\n",
			"\t\n\tconsole.log('test2');\n",
			"\tconsole.log('test3');",
			"\n\t",
			"console.log('test4');",
		].join("");

		expect(actualSource).toEqual(expectedSource);
		// source() and sourceAndMap().source must agree byte-for-byte.
		expect(actualSource).toEqual(source.sourceAndMap().source);
	});

	it("should handle newlines correctly", () => {
		const source = new PrefixSource(
			"*",
			new ConcatSource(
				"Line",
				" and more\n",
				"double nl\n\n",
				"nl\nline\nin\nline\n",
				"\nstart with nl",
				"\n\n\nempty lines",
			),
		);

		expect(source.sourceAndMap().source).toEqual(source.source());
	});
});

"use strict";

const readMappings = require("../lib/helpers/readMappings");

// Renders a source-map `mappings` string in a human-readable form. When
// generatedCode is given, each generated line is echoed with a `.`/`^`/`_`
// annotation line marking mapped and unmapped column ranges.
module.exports.readableMappings = (mappings, sources, names, generatedCode) => {
	let str = "";
	let bufferedGeneratedAnnotation = "";
	let currentLine = 1;
	let currentColumn = 0;
	let currentColumnMapped = false;
	let first = true;
	const lines = generatedCode ?
generatedCode.split("\n") : [];
	readMappings(
		mappings,
		(
			generatedLine,
			generatedColumn,
			sourceIndex,
			originalLine,
			originalColumn,
			nameIndex,
		) => {
			if (first) {
				first = false;
				str += `${generatedLine}`;
			} else if (currentLine === generatedLine) {
				str += ", ";
			} else {
				str += "\n";
				// Leaving a line: flush its code + annotation before starting
				// the next generated line's entry.
				if (currentLine - 1 < lines.length) {
					const line = lines[currentLine - 1];
					if (line.length > currentColumn) {
						// `^___` marks a mapped range, `....` an unmapped one.
						bufferedGeneratedAnnotation += currentColumnMapped
							? `^${"_".repeat(line.length - currentColumn - 1)}`
							: ".".repeat(line.length - currentColumn);
					}
					if (bufferedGeneratedAnnotation) {
						str += `${line}\n${bufferedGeneratedAnnotation}\n`;
						bufferedGeneratedAnnotation = "";
					}
				}
				str += `${generatedLine}`;
				currentColumn = 0;
				currentColumnMapped = false;
			}
			currentLine = generatedLine;
			str += `:${generatedColumn}`;
			if (sourceIndex >= 0) {
				str += ` -> [${
					sources ? sources[sourceIndex] : sourceIndex
				}] ${originalLine}:${originalColumn}`;
			}
			if (nameIndex >= 0) {
				str += ` (${names ? names[nameIndex] : nameIndex})`;
			}
			if (generatedLine - 1 < lines.length && generatedColumn > currentColumn) {
				const line = lines[generatedLine - 1];
				if (generatedColumn > line.length) {
					// Mapping points past the end of the generated line.
					bufferedGeneratedAnnotation += "^... OUT OF LINE";
				} else {
					bufferedGeneratedAnnotation += currentColumnMapped
						? `^${"_".repeat(generatedColumn - currentColumn - 1)}`
						: ".".repeat(generatedColumn - currentColumn);
				}
			}
			currentColumn = generatedColumn;
			currentColumnMapped = sourceIndex >= 0;
		},
	);
	// Flush the annotation buffered for the final generated line.
	if (currentLine - 1 < lines.length) {
		const line = lines[currentLine - 1];
		if (line.length > currentColumn) {
			bufferedGeneratedAnnotation += currentColumnMapped
				? `^${"_".repeat(line.length - currentColumn - 1)}`
				: ".".repeat(line.length - currentColumn);
		}
		if (bufferedGeneratedAnnotation) {
			str += `\n${line}\n${bufferedGeneratedAnnotation}\n`;
			bufferedGeneratedAnnotation = "";
		}
	}
	return str;
};

// Returns a copy of a map (or sourceAndMap result) with an extra `_mappings`
// property holding the readable rendering, for diff-friendly test output.
module.exports.withReadableMappings = (sourceMap, generatedCode) => {
	if (!sourceMap) return sourceMap;
	if (sourceMap.map) {
		// sourceAndMap-style result: { source, map }.
		return {
			...sourceMap,
			_mappings: module.exports.readableMappings(
				sourceMap.map.mappings,
				sourceMap.map.sources,
				sourceMap.map.names,
				sourceMap.source,
			),
		};
	}
	// Plain raw source map object.
	return {
		...sourceMap,
		_mappings: module.exports.readableMappings(
			sourceMap.mappings,
			sourceMap.sources,
			sourceMap.names,
			generatedCode,
		),
	};
};

/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Tobias Koppers @sokra
*/

"use strict";

// Base64 alphabet used by VLQ-encoded source map mappings.
const ALPHABET =
	"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";

// Bit layout of decoded sextets / control markers (see readMappings).
const CONTINUATION_BIT = 0x20;
const END_SEGMENT_BIT = 0x40;
const NEXT_LINE = END_SEGMENT_BIT | 0x01;
const INVALID = END_SEGMENT_BIT | 0x02;
const DATA_MASK = 0x1f;

// Lookup table: char code -> sextet value, or a control marker for
// `,` (segment separator), `;` (line separator) and invalid characters.
const ccToValue = new Uint8Array("z".charCodeAt(0) + 1);

ccToValue.fill(INVALID);

for (let i = 0; i < ALPHABET.length; i++) {
	ccToValue[ALPHABET.charCodeAt(i)] = i;
}

ccToValue[",".charCodeAt(0)] = END_SEGMENT_BIT;
ccToValue[";".charCodeAt(0)] = NEXT_LINE;

const ccMax = ccToValue.length - 1;

/** @typedef {(generatedLine: number, generatedColumn: number, sourceIndex: number, originalLine: number, originalColumn: number, nameIndex: number) => void}
OnMapping */

/**
 * Decodes a base64-VLQ `mappings` string and invokes `onMapping` once per
 * segment with absolute (already delta-accumulated) values. Fields that a
 * segment does not carry are reported as -1.
 * @param {string} mappings the mappings string
 * @param {OnMapping} onMapping called for each mapping
 * @returns {void}
 */
const readMappings = (mappings, onMapping) => {
	// generatedColumn, [sourceIndex, originalLine, orignalColumn, [nameIndex]]
	const currentData = new Uint32Array([0, 0, 1, 0, 0]);
	let currentDataPos = 0;
	// currentValue will include a sign bit at bit 0
	let currentValue = 0;
	let currentValuePos = 0;
	let generatedLine = 1;
	let generatedColumn = -1;
	for (let i = 0; i < mappings.length; i++) {
		const cc = mappings.charCodeAt(i);
		// Characters beyond the table are ignored entirely.
		if (cc > ccMax) continue;
		const value = ccToValue[cc];
		if ((value & END_SEGMENT_BIT) !== 0) {
			// End current segment. Only emit when the column advanced,
			// which also drops duplicate/empty segments.
			if (currentData[0] > generatedColumn) {
				if (currentDataPos === 1) {
					onMapping(generatedLine, currentData[0], -1, -1, -1, -1);
				} else if (currentDataPos === 4) {
					onMapping(
						generatedLine,
						currentData[0],
						currentData[1],
						currentData[2],
						currentData[3],
						-1,
					);
				} else if (currentDataPos === 5) {
					onMapping(
						generatedLine,
						currentData[0],
						currentData[1],
						currentData[2],
						currentData[3],
						currentData[4],
					);
				}
				[generatedColumn] = currentData;
			}
			currentDataPos = 0;
			if (value === NEXT_LINE) {
				// Start new line; the generated column delta resets per line.
				generatedLine++;
				currentData[0] = 0;
				generatedColumn = -1;
			}
		} else if ((value & CONTINUATION_BIT) === 0) {
			// last sextet: finish the VLQ value and apply it as a delta.
			currentValue |= value << currentValuePos;
			const finalValue =
				currentValue & 1 ? -(currentValue >> 1) : currentValue >> 1;
			currentData[currentDataPos++] += finalValue;
			currentValuePos = 0;
			currentValue = 0;
		} else {
			// Intermediate sextet: accumulate 5 data bits.
			currentValue |= (value & DATA_MASK) << currentValuePos;
			currentValuePos += 5;
		}
	}
	// End current segment (the string need not end with `,` or `;`).
	if (currentDataPos === 1) {
		onMapping(generatedLine, currentData[0], -1, -1, -1, -1);
	} else if (currentDataPos === 4) {
		onMapping(
			generatedLine,
			currentData[0],
			currentData[1],
			currentData[2],
			currentData[3],
			-1,
		);
	} else if (currentDataPos === 5) {
		onMapping(
			generatedLine,
			currentData[0],
			currentData[1],
			currentData[2],
			currentData[3],
			currentData[4],
		);
	}
};

module.exports = readMappings;

/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Tobias Koppers @sokra
*/

"use strict";

/** @typedef {import("./CachedSource").CachedData} CachedData */
/** @typedef {import("./CompatSource").SourceLike} SourceLike */
/** @typedef {import("./ConcatSource").Child} ConcatSourceChild */
/** @typedef {import("./ReplaceSource").Replacement} Replacement */
/** @typedef {import("./Source").HashLike} HashLike */
/** @typedef {import("./Source").MapOptions} MapOptions */
/** @typedef {import("./Source").RawSourceMap} RawSourceMap */
/** @typedef {import("./Source").SourceAndMap} SourceAndMap */
/** @typedef {import("./Source").SourceValue} SourceValue */
/** @typedef {import("./helpers/getGeneratedSourceInfo").GeneratedSourceInfo} GeneratedSourceInfo */
/** @typedef {import("./helpers/streamChunks").OnChunk} OnChunk */
/** @typedef {import("./helpers/streamChunks").OnName}
OnName */ 20 | /** @typedef {import("./helpers/streamChunks").OnSource} OnSource */ 21 | /** @typedef {import("./helpers/streamChunks").Options} StreamChunksOptions */ 22 | 23 | /** 24 | * @template T 25 | * @param {() => T} fn memorized function 26 | * @returns {() => T} new function 27 | */ 28 | const memoize = (fn) => { 29 | let cache = false; 30 | /** @type {T | undefined} */ 31 | let result; 32 | return () => { 33 | if (cache) { 34 | return /** @type {T} */ (result); 35 | } 36 | 37 | result = fn(); 38 | cache = true; 39 | // Allow to clean up memory for fn 40 | // and all dependent resources 41 | /** @type {(() => T) | undefined} */ 42 | (fn) = undefined; 43 | return /** @type {T} */ (result); 44 | }; 45 | }; 46 | 47 | /** 48 | * @template A 49 | * @template B 50 | * @param {A} obj input a 51 | * @param {B} exports input b 52 | * @returns {A & B} merged 53 | */ 54 | const mergeExports = (obj, exports) => { 55 | const descriptors = Object.getOwnPropertyDescriptors(exports); 56 | for (const name of Object.keys(descriptors)) { 57 | const descriptor = descriptors[name]; 58 | if (descriptor.get) { 59 | const fn = descriptor.get; 60 | Object.defineProperty(obj, name, { 61 | configurable: false, 62 | enumerable: true, 63 | get: memoize(fn), 64 | }); 65 | } else if (typeof descriptor.value === "object") { 66 | Object.defineProperty(obj, name, { 67 | configurable: false, 68 | enumerable: true, 69 | writable: false, 70 | value: mergeExports({}, descriptor.value), 71 | }); 72 | } else { 73 | throw new Error( 74 | "Exposed values must be either a getter or an nested object", 75 | ); 76 | } 77 | } 78 | return /** @type {A & B} */ (Object.freeze(obj)); 79 | }; 80 | 81 | module.exports = mergeExports( 82 | {}, 83 | { 84 | get Source() { 85 | return require("./Source"); 86 | }, 87 | get RawSource() { 88 | return require("./RawSource"); 89 | }, 90 | get OriginalSource() { 91 | return require("./OriginalSource"); 92 | }, 93 | get SourceMapSource() { 94 | return 
require("./SourceMapSource"); 95 | }, 96 | get CachedSource() { 97 | return require("./CachedSource"); 98 | }, 99 | get ConcatSource() { 100 | return require("./ConcatSource"); 101 | }, 102 | get ReplaceSource() { 103 | return require("./ReplaceSource"); 104 | }, 105 | get PrefixSource() { 106 | return require("./PrefixSource"); 107 | }, 108 | get SizeOnlySource() { 109 | return require("./SizeOnlySource"); 110 | }, 111 | get CompatSource() { 112 | return require("./CompatSource"); 113 | }, 114 | util: { 115 | get stringBufferUtils() { 116 | return require("./helpers/stringBufferUtils"); 117 | }, 118 | }, 119 | }, 120 | ); 121 | -------------------------------------------------------------------------------- /lib/helpers/streamAndGetSourceAndMap.js: -------------------------------------------------------------------------------- 1 | /* 2 | MIT License http://www.opensource.org/licenses/mit-license.php 3 | Author Tobias Koppers @sokra 4 | */ 5 | 6 | "use strict"; 7 | 8 | const createMappingsSerializer = require("./createMappingsSerializer"); 9 | const streamChunks = require("./streamChunks"); 10 | 11 | /** @typedef {import("../Source").RawSourceMap} RawSourceMap */ 12 | /** @typedef {import("./streamChunks").GeneratedSourceInfo} GeneratedSourceInfo */ 13 | /** @typedef {import("./streamChunks").OnChunk} OnChunk */ 14 | /** @typedef {import("./streamChunks").OnName} OnName */ 15 | /** @typedef {import("./streamChunks").OnSource} OnSource */ 16 | /** @typedef {import("./streamChunks").Options} Options */ 17 | /** @typedef {import("./streamChunks").SourceMaybeWithStreamChunksFunction} SourceMaybeWithStreamChunksFunction */ 18 | 19 | /** 20 | * @param {SourceMaybeWithStreamChunksFunction} inputSource input source 21 | * @param {Options} options options 22 | * @param {OnChunk} onChunk on chunk 23 | * @param {OnSource} onSource on source 24 | * @param {OnName} onName on name 25 | * @returns {{ result: GeneratedSourceInfo, source: string, map: RawSourceMap | null }} result 
 */
const streamAndGetSourceAndMap = (
	inputSource,
	options,
	onChunk,
	onSource,
	onName,
) => {
	let code = "";
	let mappings = "";
	/** @type {(string | null)[]} */
	const potentialSources = [];
	/** @type {(string | null)[]} */
	const potentialSourcesContent = [];
	/** @type {(string | null)[]} */
	const potentialNames = [];
	// The collected map is always serialized with columns, regardless of
	// what the caller requested for the streamed chunks.
	const addMapping = createMappingsSerializer({ ...options, columns: true });
	const finalSource = Boolean(options && options.finalSource);
	const { generatedLine, generatedColumn, source } = streamChunks(
		inputSource,
		options,
		(
			chunk,
			generatedLine,
			generatedColumn,
			sourceIndex,
			originalLine,
			originalColumn,
			nameIndex,
		) => {
			if (chunk !== undefined) code += chunk;
			mappings += addMapping(
				generatedLine,
				generatedColumn,
				sourceIndex,
				originalLine,
				originalColumn,
				nameIndex,
			);
			// In finalSource mode downstream consumers get the whole source
			// at the end, so individual chunks are withheld here.
			return onChunk(
				finalSource ? undefined : chunk,
				generatedLine,
				generatedColumn,
				sourceIndex,
				originalLine,
				originalColumn,
				nameIndex,
			);
		},
		(sourceIndex, source, sourceContent) => {
			// Pad with null so entries land at their index even when sources
			// are reported out of order.
			while (potentialSources.length < sourceIndex) {
				potentialSources.push(null);
			}
			potentialSources[sourceIndex] = source;
			if (sourceContent !== undefined) {
				while (potentialSourcesContent.length < sourceIndex) {
					potentialSourcesContent.push(null);
				}
				potentialSourcesContent[sourceIndex] = sourceContent;
			}
			return onSource(sourceIndex, source, sourceContent);
		},
		(nameIndex, name) => {
			while (potentialNames.length < nameIndex) {
				potentialNames.push(null);
			}
			potentialNames[nameIndex] = name;
			return onName(nameIndex, name);
		},
	);
	// Prefer the pre-assembled source if streamChunks produced one.
	const resultSource = source !== undefined ? source : code;

	return {
		result: {
			generatedLine,
			generatedColumn,
			source: finalSource ? resultSource : undefined,
		},
		source: resultSource,
		map:
			mappings.length > 0
				? {
						version: 3,
						file: "x",
						mappings,
						// We handle broken sources as `null`, in spec this field should be string, but no information what we should do in such cases if we change type it will be breaking change
						sources: /** @type {string[]} */ (potentialSources),
						sourcesContent:
							potentialSourcesContent.length > 0
								? /** @type {string[]} */ (potentialSourcesContent)
								: undefined,
						names: /** @type {string[]} */ (potentialNames),
					}
				: null,
	};
};

module.exports = streamAndGetSourceAndMap;

/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Mark Knichel @mknichel
*/

"use strict";

// Global switch for caching both the string and Buffer form of a value.
let dualStringBufferCaching = true;

/**
 * @returns {boolean} Whether the optimization to cache copies of both the
 * string and buffer version of source content is enabled. This is enabled by
 * default to improve performance but can consume more memory since values are
 * stored twice.
 */
function isDualStringBufferCachingEnabled() {
	return dualStringBufferCaching;
}

/**
 * Enables an optimization to save both string and buffer in memory to avoid
 * repeat conversions between the two formats when they are requested. This
 * is enabled by default. This option can improve performance but can consume
 * additional memory since values are stored twice.
25 | * @returns {void} 26 | */ 27 | function enableDualStringBufferCaching() { 28 | dualStringBufferCaching = true; 29 | } 30 | 31 | /** 32 | * Disables the optimization to save both string and buffer in memory. This 33 | * may increase performance but should reduce memory usage in the Webpack 34 | * compiler. 35 | * @returns {void} 36 | */ 37 | function disableDualStringBufferCaching() { 38 | dualStringBufferCaching = false; 39 | } 40 | 41 | const interningStringMap = new Map(); 42 | 43 | let enableStringInterningRefCount = 0; 44 | 45 | /** 46 | * @returns {boolean} value 47 | */ 48 | function isStringInterningEnabled() { 49 | return enableStringInterningRefCount > 0; 50 | } 51 | 52 | /** 53 | * Starts a memory optimization to avoid repeat copies of the same string in 54 | * memory by caching a single reference to the string. This can reduce memory 55 | * usage if the same string is repeated many times in the compiler, such as 56 | * when Webpack layers are used with the same files. 57 | * 58 | * {@link exitStringInterningRange} should be called when string interning is 59 | * no longer necessary to free up the memory used by the interned strings. If 60 | * {@link enterStringInterningRange} has been called multiple times, then 61 | * this method may not immediately free all the memory until 62 | * {@link exitStringInterningRange} has been called to end all string 63 | * interning ranges. 64 | * @returns {void} 65 | */ 66 | function enterStringInterningRange() { 67 | enableStringInterningRefCount++; 68 | } 69 | 70 | /** 71 | * Stops the current string interning range. Once all string interning ranges 72 | * have been exited, this method will free all the memory used by the interned 73 | * strings. This method should be called once for each time that 74 | * {@link enterStringInterningRange} was called. 
75 | * @returns {void} 76 | */ 77 | function exitStringInterningRange() { 78 | if (--enableStringInterningRefCount <= 0) { 79 | interningStringMap.clear(); 80 | enableStringInterningRefCount = 0; 81 | } 82 | } 83 | 84 | /** 85 | * Saves the string in a map to ensure that only one copy of the string exists 86 | * in memory at a given time. This is controlled by {@link enableStringInterning} 87 | * and {@link disableStringInterning}. Callers are expect to manage the memory 88 | * of the interned strings by calling {@link disableStringInterning} after the 89 | * compiler no longer needs to save the interned memory. 90 | * @param {string} str A string to be interned. 91 | * @returns {string} The original string or a reference to an existing string of the same value if it has already been interned. 92 | */ 93 | function internString(str) { 94 | if ( 95 | !isStringInterningEnabled() || 96 | !str || 97 | str.length < 128 || 98 | typeof str !== "string" 99 | ) { 100 | return str; 101 | } 102 | let internedString = interningStringMap.get(str); 103 | if (internedString === undefined) { 104 | internedString = str; 105 | interningStringMap.set(str, internedString); 106 | } 107 | return internedString; 108 | } 109 | 110 | module.exports = { 111 | disableDualStringBufferCaching, 112 | enableDualStringBufferCaching, 113 | enterStringInterningRange, 114 | exitStringInterningRange, 115 | internString, 116 | isDualStringBufferCachingEnabled, 117 | }; 118 | -------------------------------------------------------------------------------- /lib/RawSource.js: -------------------------------------------------------------------------------- 1 | /* 2 | MIT License http://www.opensource.org/licenses/mit-license.php 3 | Author Tobias Koppers @sokra 4 | */ 5 | 6 | "use strict"; 7 | 8 | const Source = require("./Source"); 9 | const streamChunksOfRawSource = require("./helpers/streamChunksOfRawSource"); 10 | const { 11 | internString, 12 | isDualStringBufferCachingEnabled, 13 | } = 
require("./helpers/stringBufferUtils");

/** @typedef {import("./Source").HashLike} HashLike */
/** @typedef {import("./Source").MapOptions} MapOptions */
/** @typedef {import("./Source").RawSourceMap} RawSourceMap */
/** @typedef {import("./Source").SourceValue} SourceValue */
/** @typedef {import("./helpers/getGeneratedSourceInfo").GeneratedSourceInfo} GeneratedSourceInfo */
/** @typedef {import("./helpers/streamChunks").OnChunk} OnChunk */
/** @typedef {import("./helpers/streamChunks").OnName} OnName */
/** @typedef {import("./helpers/streamChunks").OnSource} OnSource */
/** @typedef {import("./helpers/streamChunks").Options} Options */

/**
 * A source holding a raw string or Buffer value with no source map.
 * Conversions between the two forms are lazy and, when dual caching is
 * enabled (see stringBufferUtils), cached on the instance.
 */
class RawSource extends Source {
	/**
	 * @param {string | Buffer} value value
	 * @param {boolean=} convertToString convert to string
	 */
	constructor(value, convertToString = false) {
		super();
		const isBuffer = Buffer.isBuffer(value);
		if (!isBuffer && typeof value !== "string") {
			throw new TypeError("argument 'value' must be either string or Buffer");
		}
		// A Buffer passed with convertToString=true reports as a string source.
		this._valueIsBuffer = !convertToString && isBuffer;
		const internedString =
			typeof value === "string" ? internString(value) : undefined;
		/**
		 * Canonical value; undefined while a Buffer still awaits conversion.
		 * @private
		 * @type {undefined | string | Buffer}
		 */
		this._value =
			convertToString && isBuffer
				? undefined
				: typeof value === "string"
					? internedString
					: value;
		/**
		 * @private
		 * @type {undefined | Buffer}
		 */
		this._valueAsBuffer = isBuffer ? value : undefined;
		/**
		 * @private
		 * @type {undefined | string}
		 */
		this._valueAsString = isBuffer ? undefined : internedString;
	}

	isBuffer() {
		return this._valueIsBuffer;
	}

	/**
	 * @returns {SourceValue} source
	 */
	source() {
		if (this._value === undefined) {
			// Lazily decode the Buffer; store the result only when dual
			// string/buffer caching is enabled.
			const value =
				/** @type {Buffer} */
				(this._valueAsBuffer).toString("utf8");
			if (isDualStringBufferCachingEnabled()) {
				this._value = internString(value);
			}
			return value;
		}
		return this._value;
	}

	buffer() {
		if (this._valueAsBuffer === undefined) {
			// Lazily encode the string; store the result only when dual
			// string/buffer caching is enabled.
			const value = Buffer.from(/** @type {string} */ (this._value), "utf8");
			if (isDualStringBufferCachingEnabled()) {
				this._valueAsBuffer = value;
			}
			return value;
		}
		return this._valueAsBuffer;
	}

	/**
	 * Raw sources never carry a source map.
	 * @param {MapOptions=} options map options
	 * @returns {RawSourceMap | null} map
	 */
	// eslint-disable-next-line no-unused-vars
	map(options) {
		return null;
	}

	/**
	 * @param {Options} options options
	 * @param {OnChunk} onChunk called for each chunk of code
	 * @param {OnSource} onSource called for each source
	 * @param {OnName} onName called for each name
	 * @returns {GeneratedSourceInfo} generated source info
	 */
	streamChunks(options, onChunk, onSource, onName) {
		let strValue = this._valueAsString;
		if (strValue === undefined) {
			const value = this.source();
			strValue = typeof value === "string" ? value : value.toString("utf8");
			if (isDualStringBufferCachingEnabled()) {
				this._valueAsString = internString(strValue);
			}
		}
		return streamChunksOfRawSource(
			strValue,
			onChunk,
			onSource,
			onName,
			Boolean(options && options.finalSource),
		);
	}

	/**
	 * Hashes a type tag plus the buffered content, so string and Buffer
	 * instances with equal content hash identically.
	 * @param {HashLike} hash hash
	 * @returns {void}
	 */
	updateHash(hash) {
		hash.update("RawSource");
		hash.update(this.buffer());
	}
}

module.exports = RawSource;

"use strict";

const crypto = require("crypto");
const BatchedHash = require("webpack/lib/util/hash/BatchedHash");
const createMd4 = require("webpack/lib/util/hash/md4");
const createXXHash64 = require("webpack/lib/util/hash/xxhash64");
const { RawSource } = require("../");
const {
	disableDualStringBufferCaching,
	enableDualStringBufferCaching,
	enterStringInterningRange,
	exitStringInterningRange,
} = require("../lib/helpers/stringBufferUtils");

const CODE_STRING =
	"console.log('test');\nconsole.log('test2');\nconsole.log('test22');\n";

describe("rawSource", () => {
	it("converts to buffer correctly", () => {
		const source = new RawSource(Buffer.from(CODE_STRING), true);
		expect(source.isBuffer()).toBe(false);
		expect(source.buffer().toString("utf8")).toEqual(CODE_STRING);
		// The buffer conversion should be cached.
24 | expect(source.buffer()).toStrictEqual(source.buffer()); 25 | }); 26 | 27 | it("stream chunks works correctly", () => { 28 | const source = new RawSource(CODE_STRING, true); 29 | // @ts-expect-error for tests 30 | source.streamChunks(null, (line, lineNum) => { 31 | expect(line).toBe(`console.log('test${"2".repeat(lineNum - 1)}');\n`); 32 | }); 33 | expect.assertions(3); 34 | }); 35 | 36 | for (const hash of [ 37 | ["md5", [crypto.createHash("md5"), crypto.createHash("md5")]], 38 | ["md4", [new BatchedHash(createMd4()), new BatchedHash(createMd4())]], 39 | [ 40 | "xxhash64", 41 | [new BatchedHash(createXXHash64()), new BatchedHash(createXXHash64())], 42 | ], 43 | ]) { 44 | it(`should have the same hash (${hash[0]}) for string and Buffer`, () => { 45 | const sourceString = new RawSource("Text"); 46 | const sourceBuffer = new RawSource(Buffer.from("Text")); 47 | 48 | expect(sourceString.source()).toBe("Text"); 49 | expect(sourceString.buffer()).toEqual(sourceBuffer.buffer()); 50 | 51 | sourceString.updateHash(hash[1][0]); 52 | sourceBuffer.updateHash(hash[1][1]); 53 | 54 | expect(hash[1][0].digest("hex")).toBe(hash[1][1].digest("hex")); 55 | }); 56 | } 57 | 58 | for (const hash of [ 59 | ["md5", [crypto.createHash("md5"), crypto.createHash("md5")]], 60 | ["md4", [new BatchedHash(createMd4()), new BatchedHash(createMd4())]], 61 | [ 62 | "xxhash64", 63 | [new BatchedHash(createXXHash64()), new BatchedHash(createXXHash64())], 64 | ], 65 | ]) { 66 | it(`should have the same hash (${hash[0]}) for string and Buffer (convert to string)`, () => { 67 | const sourceString = new RawSource("Text", true); 68 | const sourceBuffer = new RawSource(Buffer.from("Text"), true); 69 | 70 | expect(sourceString.source()).toBe("Text"); 71 | expect(sourceString.buffer()).toEqual(sourceBuffer.buffer()); 72 | 73 | sourceString.updateHash(hash[1][0]); 74 | sourceBuffer.updateHash(hash[1][1]); 75 | 76 | expect(hash[1][0].digest("hex")).toBe(hash[1][1].digest("hex")); 77 | }); 78 | } 79 | 80 | 
describe("memory optimizations are enabled", () => { 81 | beforeEach(() => { 82 | disableDualStringBufferCaching(); 83 | enterStringInterningRange(); 84 | }); 85 | 86 | afterEach(() => { 87 | enableDualStringBufferCaching(); 88 | exitStringInterningRange(); 89 | }); 90 | 91 | it("should create new buffers when caching is not enabled", () => { 92 | const source = new RawSource(CODE_STRING, true); 93 | expect(source.buffer().toString("utf8")).toEqual(CODE_STRING); 94 | // The buffer conversion should not be cached. 95 | expect(source.buffer()).toStrictEqual(source.buffer()); 96 | }); 97 | 98 | it("should not create new buffers when original value is a buffer", () => { 99 | const originalValue = Buffer.from(CODE_STRING); 100 | const source = new RawSource(originalValue, true); 101 | expect(source.buffer().toString("utf8")).toEqual(CODE_STRING); 102 | // The same buffer as the original value should always be returned. 103 | expect(originalValue).toStrictEqual(source.buffer()); 104 | expect(source.buffer()).toStrictEqual(source.buffer()); 105 | }); 106 | 107 | it("stream chunks works correctly", () => { 108 | const source = new RawSource(CODE_STRING, true); 109 | // @ts-expect-error for tests 110 | source.streamChunks(null, (line, lineNum) => { 111 | expect(line).toBe(`console.log('test${"2".repeat(lineNum - 1)}');\n`); 112 | }); 113 | expect.assertions(3); 114 | }); 115 | }); 116 | }); 117 | -------------------------------------------------------------------------------- /lib/PrefixSource.js: -------------------------------------------------------------------------------- 1 | /* 2 | MIT License http://www.opensource.org/licenses/mit-license.php 3 | Author Tobias Koppers @sokra 4 | */ 5 | 6 | "use strict"; 7 | 8 | const RawSource = require("./RawSource"); 9 | const Source = require("./Source"); 10 | const { getMap, getSourceAndMap } = require("./helpers/getFromStreamChunks"); 11 | const streamChunks = require("./helpers/streamChunks"); 12 | 13 | /** @typedef 
{import("./Source").HashLike} HashLike */
/** @typedef {import("./Source").MapOptions} MapOptions */
/** @typedef {import("./Source").RawSourceMap} RawSourceMap */
/** @typedef {import("./Source").SourceAndMap} SourceAndMap */
/** @typedef {import("./Source").SourceValue} SourceValue */
/** @typedef {import("./helpers/getGeneratedSourceInfo").GeneratedSourceInfo} GeneratedSourceInfo */
/** @typedef {import("./helpers/streamChunks").OnChunk} OnChunk */
/** @typedef {import("./helpers/streamChunks").OnName} OnName */
/** @typedef {import("./helpers/streamChunks").OnSource} OnSource */
/** @typedef {import("./helpers/streamChunks").Options} Options */

// Matches every "\n" that is followed by at least one more character
// (the lookahead requires a following char), i. e. all newlines except a
// final trailing one — so no dangling prefix is appended after the last
// line break.
const REPLACE_REGEX = /\n(?=.|\s)/g;

/**
 * A source that prepends a fixed prefix to every line of the wrapped source
 * while keeping the inner source's mappings aligned (columns are shifted by
 * the prefix length).
 */
class PrefixSource extends Source {
	/**
	 * @param {string} prefix prefix
	 * @param {string | Buffer | Source} source source
	 */
	constructor(prefix, source) {
		super();
		/**
		 * @private
		 * @type {Source}
		 */
		this._source =
			typeof source === "string" || Buffer.isBuffer(source)
				? new RawSource(source, true)
				: source;
		this._prefix = prefix;
	}

	getPrefix() {
		return this._prefix;
	}

	original() {
		return this._source;
	}

	/**
	 * Returns the wrapped source with the prefix inserted at the start and
	 * after every non-trailing newline.
	 * @returns {SourceValue} source
	 */
	source() {
		const node = /** @type {string} */ (this._source.source());
		const prefix = this._prefix;
		return prefix + node.replace(REPLACE_REGEX, `\n${prefix}`);
	}

	// TODO efficient buffer() implementation

	/**
	 * @param {MapOptions=} options map options
	 * @returns {RawSourceMap | null} map
	 */
	map(options) {
		return getMap(this, options);
	}

	/**
	 * @param {MapOptions=} options map options
	 * @returns {SourceAndMap} source and map
	 */
	sourceAndMap(options) {
		return getSourceAndMap(this, options);
	}

	/**
	 * Streams the inner source's chunks, shifting generated columns by the
	 * prefix length and emitting the prefix itself as an unmapped chunk at
	 * line starts (except in lines-only mode, where it is merged into the
	 * chunk for performance).
	 * @param {Options} options options
	 * @param {OnChunk} onChunk called for each chunk of code
	 * @param {OnSource} onSource called for each source
	 * @param {OnName} onName called for each name
	 * @returns {GeneratedSourceInfo} generated source info
	 */
	streamChunks(options, onChunk, onSource, onName) {
		const prefix = this._prefix;
		const prefixOffset = prefix.length;
		const linesOnly = Boolean(options && options.columns === false);
		const { generatedLine, generatedColumn, source } = streamChunks(
			this._source,
			options,
			(
				chunk,
				generatedLine,
				generatedColumn,
				sourceIndex,
				originalLine,
				originalColumn,
				nameIndex,
			) => {
				if (generatedColumn !== 0) {
					// In the middle of the line, we just adjust the column
					generatedColumn += prefixOffset;
				} else if (chunk !== undefined) {
					// At the start of the line, when we have source content
					// add the prefix as generated mapping
					// (in lines only mode we just add it to the original mapping
					// for performance reasons)
					if (linesOnly || sourceIndex < 0) {
						chunk = prefix + chunk;
					} else if (prefixOffset > 0) {
						onChunk(prefix, generatedLine, generatedColumn, -1, -1, -1, -1);
						generatedColumn += prefixOffset;
					}
				} else if (!linesOnly) {
					// Without source content, we only need to adjust the column info
					// except in lines only mode where prefix is added to original mapping
					generatedColumn += prefixOffset;
				}
				onChunk(
					chunk,
					generatedLine,
					generatedColumn,
					sourceIndex,
					originalLine,
					originalColumn,
					nameIndex,
				);
			},
			onSource,
			onName,
		);
		return {
			generatedLine,
			// A non-zero final column means the last line got a prefix too.
			generatedColumn:
				generatedColumn === 0
					? 0
					: prefixOffset + /** @type {number} */ (generatedColumn),
			source:
				source !== undefined
					? prefix + source.replace(REPLACE_REGEX, `\n${prefix}`)
					: undefined,
		};
	}

	/**
	 * @param {HashLike} hash hash
	 * @returns {void}
	 */
	updateHash(hash) {
		hash.update("PrefixSource");
		this._source.updateHash(hash);
		hash.update(this._prefix);
	}
}

module.exports = PrefixSource;
-------------------------------------------------------------------------------- /lib/helpers/getFromStreamChunks.js: --------------------------------------------------------------------------------
/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Tobias Koppers @sokra
*/

"use strict";

const createMappingsSerializer = require("./createMappingsSerializer");

/** @typedef {import("../Source").RawSourceMap} RawSourceMap */
/** @typedef {import("../Source").SourceAndMap} SourceAndMap */
/** @typedef {import("./streamChunks").Options} Options */
/** @typedef {import("./streamChunks").StreamChunksFunction} StreamChunksFunction */

/** @typedef {{ streamChunks:
StreamChunksFunction }} SourceLikeWithStreamChunks */ 16 | 17 | /** 18 | * @param {SourceLikeWithStreamChunks} source source 19 | * @param {Options=} options options 20 | * @returns {RawSourceMap | null} map 21 | */ 22 | module.exports.getMap = (source, options) => { 23 | let mappings = ""; 24 | /** @type {(string | null)[]} */ 25 | const potentialSources = []; 26 | /** @type {(string | null)[]} */ 27 | const potentialSourcesContent = []; 28 | /** @type {(string | null)[]} */ 29 | const potentialNames = []; 30 | const addMapping = createMappingsSerializer(options); 31 | source.streamChunks( 32 | { ...options, source: false, finalSource: true }, 33 | ( 34 | chunk, 35 | generatedLine, 36 | generatedColumn, 37 | sourceIndex, 38 | originalLine, 39 | originalColumn, 40 | nameIndex, 41 | ) => { 42 | mappings += addMapping( 43 | generatedLine, 44 | generatedColumn, 45 | sourceIndex, 46 | originalLine, 47 | originalColumn, 48 | nameIndex, 49 | ); 50 | }, 51 | (sourceIndex, source, sourceContent) => { 52 | while (potentialSources.length < sourceIndex) { 53 | potentialSources.push(null); 54 | } 55 | potentialSources[sourceIndex] = source; 56 | if (sourceContent !== undefined) { 57 | while (potentialSourcesContent.length < sourceIndex) { 58 | potentialSourcesContent.push(null); 59 | } 60 | potentialSourcesContent[sourceIndex] = sourceContent; 61 | } 62 | }, 63 | (nameIndex, name) => { 64 | while (potentialNames.length < nameIndex) { 65 | potentialNames.push(null); 66 | } 67 | potentialNames[nameIndex] = name; 68 | }, 69 | ); 70 | return mappings.length > 0 71 | ? { 72 | version: 3, 73 | file: "x", 74 | mappings, 75 | // We handle broken sources as `null`, in spec this field should be string, but no information what we should do in such cases if we change type it will be breaking change 76 | sources: /** @type {string[]} */ (potentialSources), 77 | sourcesContent: 78 | potentialSourcesContent.length > 0 79 | ? 
/** @type {string[]} */ (potentialSourcesContent) 80 | : undefined, 81 | names: /** @type {string[]} */ (potentialNames), 82 | } 83 | : null; 84 | }; 85 | 86 | /** 87 | * @param {SourceLikeWithStreamChunks} inputSource input source 88 | * @param {Options=} options options 89 | * @returns {SourceAndMap} map 90 | */ 91 | module.exports.getSourceAndMap = (inputSource, options) => { 92 | let code = ""; 93 | let mappings = ""; 94 | /** @type {(string | null)[]} */ 95 | const potentialSources = []; 96 | /** @type {(string | null)[]} */ 97 | const potentialSourcesContent = []; 98 | /** @type {(string | null)[]} */ 99 | const potentialNames = []; 100 | const addMapping = createMappingsSerializer(options); 101 | const { source } = inputSource.streamChunks( 102 | { ...options, finalSource: true }, 103 | ( 104 | chunk, 105 | generatedLine, 106 | generatedColumn, 107 | sourceIndex, 108 | originalLine, 109 | originalColumn, 110 | nameIndex, 111 | ) => { 112 | if (chunk !== undefined) code += chunk; 113 | mappings += addMapping( 114 | generatedLine, 115 | generatedColumn, 116 | sourceIndex, 117 | originalLine, 118 | originalColumn, 119 | nameIndex, 120 | ); 121 | }, 122 | (sourceIndex, source, sourceContent) => { 123 | while (potentialSources.length < sourceIndex) { 124 | potentialSources.push(null); 125 | } 126 | potentialSources[sourceIndex] = source; 127 | if (sourceContent !== undefined) { 128 | while (potentialSourcesContent.length < sourceIndex) { 129 | potentialSourcesContent.push(null); 130 | } 131 | potentialSourcesContent[sourceIndex] = sourceContent; 132 | } 133 | }, 134 | (nameIndex, name) => { 135 | while (potentialNames.length < nameIndex) { 136 | potentialNames.push(null); 137 | } 138 | potentialNames[nameIndex] = name; 139 | }, 140 | ); 141 | return { 142 | source: source !== undefined ? source : code, 143 | map: 144 | mappings.length > 0 145 | ? 
{ 146 | version: 3, 147 | file: "x", 148 | mappings, 149 | // We handle broken sources as `null`, in spec this field should be string, but no information what we should do in such cases if we change type it will be breaking change 150 | sources: /** @type {string[]} */ (potentialSources), 151 | sourcesContent: 152 | potentialSourcesContent.length > 0 153 | ? /** @type {string[]} */ (potentialSourcesContent) 154 | : undefined, 155 | names: /** @type {string[]} */ (potentialNames), 156 | } 157 | : null, 158 | }; 159 | }; 160 | -------------------------------------------------------------------------------- /test/OriginalSource.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | const crypto = require("crypto"); 4 | const BatchedHash = require("webpack/lib/util/hash/BatchedHash"); 5 | const createMd4 = require("webpack/lib/util/hash/md4"); 6 | const createXXHash64 = require("webpack/lib/util/hash/xxhash64"); 7 | 8 | /** @typedef {import("../lib/Source").RawSourceMap} RawSourceMap */ 9 | 10 | jest.mock("./__mocks__/createMappingsSerializer"); 11 | 12 | const { OriginalSource } = require("../"); 13 | const { 14 | disableDualStringBufferCaching, 15 | enableDualStringBufferCaching, 16 | enterStringInterningRange, 17 | exitStringInterningRange, 18 | } = require("../lib/helpers/stringBufferUtils"); 19 | 20 | describe.each([ 21 | { 22 | enableMemoryOptimizations: false, 23 | }, 24 | { 25 | enableMemoryOptimizations: true, 26 | }, 27 | ])("originalSource %s", ({ enableMemoryOptimizations }) => { 28 | beforeEach(() => { 29 | if (enableMemoryOptimizations) { 30 | disableDualStringBufferCaching(); 31 | enterStringInterningRange(); 32 | } 33 | }); 34 | 35 | afterEach(() => { 36 | if (enableMemoryOptimizations) { 37 | enableDualStringBufferCaching(); 38 | exitStringInterningRange(); 39 | } 40 | }); 41 | 42 | it("should handle multiline string", () => { 43 | const source = new OriginalSource("Line1\n\nLine3\n", "file.js"); 
		const resultText = source.source();
		const result = source.sourceAndMap({
			columns: true,
		});
		const resultList = source.sourceAndMap({
			columns: false,
		});

		// Source text must be identical regardless of how the map was built.
		expect(resultText).toBe("Line1\n\nLine3\n");
		expect(result.source).toEqual(resultText);
		expect(resultList.source).toEqual(resultText);
		const listMap = /** @type {RawSourceMap} */ (resultList.map);
		const resultMap = /** @type {RawSourceMap} */ (result.map);
		expect(listMap.file).toEqual(resultMap.file);
		expect(listMap.version).toEqual(resultMap.version);
		expect(resultMap.sources).toEqual(["file.js"]);
		expect(listMap.sources).toEqual(resultMap.sources);
		expect(resultMap.sourcesContent).toEqual(["Line1\n\nLine3\n"]);
		expect(listMap.sourcesContent).toEqual(resultMap.sourcesContent);
		// Column mode skips the empty line; lines-only mode maps every line.
		expect(resultMap.mappings).toBe("AAAA;;AAEA");
		expect(listMap.mappings).toBe("AAAA;AACA;AACA");
	});

	// An empty source must produce no map at all (null), not an empty map.
	it("should handle empty string", () => {
		const source = new OriginalSource("", "file.js");
		const resultText = source.source();
		const resultMap = source.sourceAndMap({
			columns: true,
		});
		const resultListMap = source.sourceAndMap({
			columns: false,
		});

		expect(resultText).toBe("");
		expect(resultMap.source).toEqual(resultText);
		expect(resultListMap.source).toEqual(resultText);
		expect(resultListMap.map).toBeNull();
		expect(resultMap.map).toBeNull();
	});

	it("should omit mappings for columns with node", () => {
		const source = new OriginalSource("Line1\n\nLine3\n", "file.js");
		const resultMap =
			/** @type {RawSourceMap} */
			(
				source.map({
					columns: false,
				})
			);

		expect(resultMap.mappings).toBe("AAAA;AACA;AACA");
	});

	// size() must count bytes, not characters.
	it("should return the correct size for binary files", () => {
		const source = new OriginalSource(
			Buffer.from(Array.from({ length: 256 })),
			"file.wasm",
		);
		expect(source.size()).toBe(256);
	});

	// "😋" is one code point but four UTF-8 bytes.
	it("should return the correct size for unicode files", () => {
		const source = new OriginalSource("😋", "file.js");
		expect(source.size()).toBe(4);
	});

	// Column mode splits each line at statement borders (;, {, }).
	it("should split code into statements", () => {
		const input = [
			"if (hello()) { world(); hi(); there(); } done();",
			"if (hello()) { world(); hi(); there(); } done();",
		].join("\n");
		const expected = "AAAA,eAAe,SAAS,MAAM,WAAW;AACzC,eAAe,SAAS,MAAM,WAAW";
		const expected2 = "AAAA;AACA";
		const source = new OriginalSource(input, "file.js");
		expect(source.sourceAndMap().source).toBe(input);
		expect(source.sourceAndMap({ columns: false }).source).toBe(input);
		expect(/** @type {RawSourceMap} */ (source.map()).mappings).toBe(expected);
		expect(
			/** @type {RawSourceMap} */
			(source.sourceAndMap().map).mappings,
		).toBe(expected);
		expect(
			/** @type {RawSourceMap} */
			(source.map({ columns: false })).mappings,
		).toBe(expected2);
		expect(
			/** @type {RawSourceMap} */
			(source.sourceAndMap({ columns: false }).map).mappings,
		).toBe(expected2);
	});

	// The hash must only depend on content, not on whether the value was
	// constructed from a string or a Buffer.
	for (const hash of [
		["md5", [crypto.createHash("md5"), crypto.createHash("md5")]],
		["md4", [new BatchedHash(createMd4()), new BatchedHash(createMd4())]],
		[
			"xxhash64",
			[new BatchedHash(createXXHash64()), new BatchedHash(createXXHash64())],
		],
	]) {
		it(`should have the same hash (${hash[0]}) for string and Buffer`, () => {
			const sourceString = new OriginalSource("Text", "file.js");
			const sourceBuffer = new OriginalSource(Buffer.from("Text"), "file.js");

			expect(sourceString.source()).toBe("Text");
			expect(sourceString.source()).toBe(sourceBuffer.source());

			sourceString.updateHash(hash[1][0]);
			sourceBuffer.updateHash(hash[1][1]);

			expect(hash[1][0].digest("hex")).toBe(hash[1][1].digest("hex"));
		});
	}
});
-------------------------------------------------------------------------------- /lib/OriginalSource.js: --------------------------------------------------------------------------------
/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Tobias Koppers @sokra
*/

"use strict";

const Source = require("./Source");
const { getMap, getSourceAndMap } = require("./helpers/getFromStreamChunks");
const getGeneratedSourceInfo = require("./helpers/getGeneratedSourceInfo");
const splitIntoLines = require("./helpers/splitIntoLines");
const splitIntoPotentialTokens = require("./helpers/splitIntoPotentialTokens");
const {
	isDualStringBufferCachingEnabled,
} = require("./helpers/stringBufferUtils");

/** @typedef {import("./Source").HashLike} HashLike */
/** @typedef {import("./Source").MapOptions} MapOptions */
/** @typedef {import("./Source").RawSourceMap} RawSourceMap */
/** @typedef {import("./Source").SourceAndMap} SourceAndMap */
/** @typedef {import("./Source").SourceValue} SourceValue */
/** @typedef {import("./helpers/getGeneratedSourceInfo").GeneratedSourceInfo} GeneratedSourceInfo */
/** @typedef {import("./helpers/streamChunks").OnChunk} OnChunk */
/** @typedef {import("./helpers/streamChunks").OnName} OnName */
/** @typedef {import("./helpers/streamChunks").OnSource} OnSource */
/** @typedef {import("./helpers/streamChunks").Options} Options */

/**
 * A source that represents an unmodified copy of an original file; keeps the
 * value in string and/or Buffer form and converts lazily between the two.
 */
class OriginalSource extends Source {
	/**
	 * @param {string | Buffer} value value
	 * @param {string} name name
	 */
	constructor(value, name) {
		super();

		const isBuffer = Buffer.isBuffer(value);

		/**
		 * @private
		 * @type {undefined | string}
		 */
		this._value = isBuffer ?
undefined : value;
		/**
		 * @private
		 * @type {undefined | Buffer}
		 */
		this._valueAsBuffer = isBuffer ? value : undefined;
		this._name = name;
	}

	getName() {
		return this._name;
	}

	/**
	 * Returns the value as string, converting from the Buffer form on demand.
	 * The conversion is memoized only while dual string/buffer caching is
	 * enabled.
	 * @returns {SourceValue} source
	 */
	source() {
		if (this._value === undefined) {
			const value =
				/** @type {Buffer} */
				(this._valueAsBuffer).toString("utf8");
			if (isDualStringBufferCachingEnabled()) {
				this._value = value;
			}
			return value;
		}
		return this._value;
	}

	/**
	 * Returns the value as Buffer, converting from the string form on demand
	 * (memoized under the same caching switch as `source()`).
	 * @returns {Buffer} buffer
	 */
	buffer() {
		if (this._valueAsBuffer === undefined) {
			const value = Buffer.from(/** @type {string} */ (this._value), "utf8");
			if (isDualStringBufferCachingEnabled()) {
				this._valueAsBuffer = value;
			}
			return value;
		}
		return this._valueAsBuffer;
	}

	/**
	 * @param {MapOptions=} options map options
	 * @returns {RawSourceMap | null} map
	 */
	map(options) {
		return getMap(this, options);
	}

	/**
	 * @param {MapOptions=} options map options
	 * @returns {SourceAndMap} source and map
	 */
	sourceAndMap(options) {
		return getSourceAndMap(this, options);
	}

	/**
	 * Streams the value as mapping chunks. Three strategies:
	 * with columns, lines are split at potential statement borders;
	 * without columns but with `finalSource`, only per-line meta mappings are
	 * emitted; otherwise the value is split into whole lines.
	 * @param {Options} options options
	 * @param {OnChunk} onChunk called for each chunk of code
	 * @param {OnSource} onSource called for each source
	 * @param {OnName} _onName called for each name
	 * @returns {GeneratedSourceInfo} generated source info
	 */
	streamChunks(options, onChunk, onSource, _onName) {
		if (this._value === undefined) {
			this._value =
				/** @type {Buffer} */
				(this._valueAsBuffer).toString("utf8");
		}
		onSource(0, this._name, this._value);
		const finalSource = Boolean(options && options.finalSource);
		if (!options || options.columns !== false) {
			// With column info we need to read all lines and split them
			const matches = splitIntoPotentialTokens(this._value);
			let line = 1;
			let column = 0;
			if (matches !== null) {
				for (const match of matches) {
					const isEndOfLine = match.endsWith("\n");
					if (isEndOfLine && match.length === 1) {
						// A bare "\n" token carries no original mapping.
						if (!finalSource) onChunk(match, line, column, -1, -1, -1, -1);
					} else {
						const chunk = finalSource ? undefined : match;
						onChunk(chunk, line, column, 0, line, column, -1);
					}
					if (isEndOfLine) {
						line++;
						column = 0;
					} else {
						column += match.length;
					}
				}
			}
			return {
				generatedLine: line,
				generatedColumn: column,
				source: finalSource ? this._value : undefined,
			};
		} else if (finalSource) {
			// Without column info and with final source we only
			// need meta info to generate mapping
			const result = getGeneratedSourceInfo(this._value);
			const { generatedLine, generatedColumn } = result;
			if (generatedColumn === 0) {
				// Value ends with "\n": the last line is empty, skip it.
				for (
					let line = 1;
					line < /** @type {number} */ (generatedLine);
					line++
				) {
					onChunk(undefined, line, 0, 0, line, 0, -1);
				}
			} else {
				for (
					let line = 1;
					line <= /** @type {number} */ (generatedLine);
					line++
				) {
					onChunk(undefined, line, 0, 0, line, 0, -1);
				}
			}
			return result;
		}
		// Without column info, but also without final source
		// we need to split source by lines
		let line = 1;
		const matches = splitIntoLines(this._value);
		/** @type {string | undefined} */
		let match;
		for (match of matches) {
			onChunk(finalSource ? undefined : match, line, 0, 0, line, 0, -1);
			line++;
		}
		return matches.length === 0 || /** @type {string} */ (match).endsWith("\n")
			? {
					generatedLine: matches.length + 1,
					generatedColumn: 0,
					source: finalSource ? this._value : undefined,
				}
			: {
					generatedLine: matches.length,
					generatedColumn: /** @type {string} */ (match).length,
					source: finalSource ? this._value : undefined,
				};
	}

	/**
	 * @param {HashLike} hash hash
	 * @returns {void}
	 */
	updateHash(hash) {
		hash.update("OriginalSource");
		// Hash the Buffer form so string- and Buffer-constructed values agree.
		hash.update(this.buffer());
		hash.update(this._name || "");
	}
}

module.exports = OriginalSource;
-------------------------------------------------------------------------------- /lib/helpers/createMappingsSerializer.js: --------------------------------------------------------------------------------
/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Tobias Koppers @sokra
*/

"use strict";

/**
 * @callback MappingsSerializer
 * @param {number} generatedLine generated line
 * @param {number} generatedColumn generated column
 * @param {number} sourceIndex source index
 * @param {number} originalLine original line
 * @param {number} originalColumn original column
 * @param {number} nameIndex name index
 * @returns {string} result
 */

// Base64 alphabet used by the source-map VLQ encoding.
const ALPHABET = [
	..."ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",
];

// Sixth bit of a base64 VLQ digit: set when more digits follow.
const CONTINUATION_BIT = 0x20;

/**
 * Creates a stateful serializer that turns absolute mapping positions into
 * the delta-encoded base64-VLQ `mappings` string, including column info.
 */
const createFullMappingsSerializer = () => {
	let currentLine = 1;
	let currentColumn = 0;
	let currentSourceIndex = 0;
	let currentOriginalLine = 1;
	let currentOriginalColumn = 0;
	let currentNameIndex = 0;
	let activeMapping = false;
	let activeName = false;
	let initial = true;
	/** @type {MappingsSerializer} */
	return (
		generatedLine,
		generatedColumn,
		sourceIndex,
		originalLine,
		originalColumn,
		nameIndex,
	) => {
		if (activeMapping && currentLine === generatedLine) {
			// A mapping is still active
			if (
				sourceIndex === currentSourceIndex &&
				originalLine === currentOriginalLine &&
				originalColumn === currentOriginalColumn &&
				!activeName &&
				nameIndex < 0
			) {
				// avoid repeating the same original mapping
				return "";
			}
		}
		// No mapping is active
		else if (sourceIndex < 0) {
			// avoid writing unnecessary generated mappings
			return "";
		}

		/** @type {undefined | string} */
		let str;
		if (currentLine < generatedLine) {
			// One ";" per newly started generated line.
			str = ";".repeat(generatedLine - currentLine);
			currentLine = generatedLine;
			currentColumn = 0;
			initial = false;
		} else if (initial) {
			str = "";
			initial = false;
		} else {
			str = ",";
		}

		/**
		 * Appends one signed value as base64 VLQ digits to `str`.
		 * The lowest bit carries the sign; the remaining bits carry |value|.
		 * @param {number} value value
		 * @returns {void}
		 */
		const writeValue = (value) => {
			const sign = (value >>> 31) & 1;
			const mask = value >> 31;
			const absValue = (value + mask) ^ mask;
			let data = (absValue << 1) | sign;
			for (;;) {
				const sextet = data & 0x1f;
				data >>= 5;
				if (data === 0) {
					str += ALPHABET[sextet];
					break;
				} else {
					str += ALPHABET[sextet | CONTINUATION_BIT];
				}
			}
		};
		writeValue(generatedColumn - currentColumn);
		currentColumn = generatedColumn;
		if (sourceIndex >= 0) {
			activeMapping = true;
			if (sourceIndex === currentSourceIndex) {
				// "A" encodes a zero delta.
				str += "A";
			} else {
				writeValue(sourceIndex - currentSourceIndex);
				currentSourceIndex = sourceIndex;
			}
			writeValue(originalLine - currentOriginalLine);
			currentOriginalLine = originalLine;
			if (originalColumn === currentOriginalColumn) {
				str += "A";
			} else {
				writeValue(originalColumn - currentOriginalColumn);
				currentOriginalColumn = originalColumn;
			}
			if (nameIndex >= 0) {
				writeValue(nameIndex - currentNameIndex);
				currentNameIndex = nameIndex;
				activeName = true;
			} else {
				activeName = false;
			}
		} else {
			activeMapping = false;
		}
		return str;
	};
};

/**
 * Creates a serializer for `columns: false` mode: at most one mapping per
 * generated line, with common delta patterns emitted as precomputed strings.
 */
const createLinesOnlyMappingsSerializer = () => {
	let lastWrittenLine = 0;
	let currentLine = 1;
	let currentSourceIndex = 0;
	let currentOriginalLine = 1;
	/** @type {MappingsSerializer} */
	return (
		generatedLine,
		_generatedColumn,
		sourceIndex,
		originalLine,
		_originalColumn,
		_nameIndex,
	) => {
		if (sourceIndex < 0) {
			// avoid writing generated mappings at all
			return "";
		}
		if (lastWrittenLine === generatedLine) {
			// avoid writing multiple original mappings per line
			return "";
		}
		/** @type {undefined | string} */
		let str;
		/**
		 * Appends one signed value as base64 VLQ digits to `str`.
		 * @param {number} value value
		 * @returns {void}
		 */
		const writeValue = (value) => {
			const sign = (value >>> 31) & 1;
			const mask = value >> 31;
			const absValue = (value + mask) ^ mask;
			let data = (absValue << 1) | sign;
			for (;;) {
				const sextet = data & 0x1f;
				data >>= 5;
				if (data === 0) {
					str += ALPHABET[sextet];
					break;
				} else {
					str += ALPHABET[sextet | CONTINUATION_BIT];
				}
			}
		};
		lastWrittenLine = generatedLine;
		if (generatedLine === currentLine + 1) {
			currentLine = generatedLine;
			if (sourceIndex === currentSourceIndex) {
				if (originalLine === currentOriginalLine + 1) {
					currentOriginalLine = originalLine;
					// Most common case: next line maps to next original line.
					return ";AACA";
				}
				str = ";AA";
				writeValue(originalLine - currentOriginalLine);
				currentOriginalLine = originalLine;
				return `${str}A`;
			}
			str = ";A";
			writeValue(sourceIndex - currentSourceIndex);
			currentSourceIndex = sourceIndex;
			writeValue(originalLine - currentOriginalLine);
			currentOriginalLine = originalLine;
			return `${str}A`;
		}
		str = ";".repeat(generatedLine - currentLine);
		currentLine = generatedLine;
		if (sourceIndex === currentSourceIndex) {
			if (originalLine === currentOriginalLine + 1) {
				currentOriginalLine = originalLine;
				return `${str}AACA`;
			}
			str += "AA";
			writeValue(originalLine - currentOriginalLine);
			currentOriginalLine = originalLine;
			return `${str}A`;
		}
		str += "A";
		writeValue(sourceIndex - currentSourceIndex);
		currentSourceIndex = sourceIndex;
		writeValue(originalLine - currentOriginalLine);
		currentOriginalLine = originalLine;
		return `${str}A`;
	};
};

/**
 * @param {{ columns?: boolean }=} options options
 * @returns {MappingsSerializer} mappings serializer
 */
const createMappingsSerializer = (options) => {
	const linesOnly = options && options.columns === false;
	return linesOnly
		? createLinesOnlyMappingsSerializer()
		: createFullMappingsSerializer();
};

module.exports = createMappingsSerializer;
-------------------------------------------------------------------------------- /test/ConcatSource.js: --------------------------------------------------------------------------------
"use strict";

jest.mock("./__mocks__/createMappingsSerializer");

const { ConcatSource } = require("../");
const { RawSource } = require("../");
const { OriginalSource } = require("../");
const { SourceMapSource } = require("../");
const { withReadableMappings } = require("./helpers");

describe("concatSource", () => {
	it("should concat two sources", () => {
		const source = new ConcatSource(
			new RawSource("Hello World\n"),
			new OriginalSource(
				"console.log('test');\nconsole.log('test2');\n",
				"console.js",
			),
		);
		source.add(new OriginalSource("Hello2\n", "hello.md"));
		const expectedMap1 = {
			version: 3,
			file: "x",
			mappings: ";AAAA;AACA;ACDA",
			names: [],
			sources: ["console.js", "hello.md"],
			sourcesContent: [
				"console.log('test');\nconsole.log('test2');\n",
				"Hello2\n",
			],
		};
		const expectedSource = [
			"Hello World",
			"console.log('test');",
			"console.log('test2');",
			"Hello2",
			"",
		].join("\n");
		expect(source.size()).toBe(62);
		expect(source.source()).toEqual(expectedSource);
		expect(
			source.map({
				columns: false,
			}),
		).toEqual(expectedMap1);
		expect(
			source.sourceAndMap({
				columns: false,
			}),
		).toEqual({
			source: expectedSource,
			map: expectedMap1,
		});

		// With columns enabled the result is identical for this input.
		const expectedMap2 = {
			version: 3,
			file: "x",
			mappings: ";AAAA;AACA;ACDA",
			names: [],
			sources: ["console.js", "hello.md"],
			sourcesContent: [
				"console.log('test');\nconsole.log('test2');\n",
				"Hello2\n",
			],
		};
		expect(source.map()).toEqual(expectedMap2);
		expect(source.sourceAndMap()).toEqual({
			source: expectedSource,
			map: expectedMap2,
		});
	});

	// Plain strings, Sources and pre-optimized inner ConcatSources must all
	// concatenate the same way, and hashing must be insensitive to how the
	// children were assembled.
	it("should be able to handle strings for all methods", () => {
		const source = new ConcatSource(
			new RawSource("Hello World\n"),
			new OriginalSource(
				"console.log('test');\nconsole.log('test2');\n",
				"console.js",
			),
		);
		const innerSource = new ConcatSource("(", "'string'", ")");
		innerSource.buffer(); // force optimization
		source.add("console");
		source.add(".");
		source.add("log");
		source.add(innerSource);
		const expectedSource = [
			"Hello World",
			"console.log('test');",
			"console.log('test2');",
			"console.log('string')",
		].join("\n");
		const expectedMap1 = {
			version: 3,
			file: "x",
			mappings: ";AAAA;AACA",
			names: [],
			sources: ["console.js"],
			sourcesContent: ["console.log('test');\nconsole.log('test2');\n"],
		};
		expect(source.size()).toBe(76);
		expect(source.source()).toEqual(expectedSource);
		expect(source.buffer()).toEqual(Buffer.from(expectedSource, "utf8"));
		expect(
			source.map({
				columns: false,
			}),
		).toEqual(expectedMap1);
		expect(
			source.sourceAndMap({
				columns: false,
			}),
		).toEqual({
			source: expectedSource,
			map: expectedMap1,
		});

		const hash = require("crypto").createHash("sha256");

		source.updateHash(hash);
		const digest = hash.digest("hex");
		expect(digest).toBe(
			"183e6e9393eddb8480334aebeebb3366d6cce0124bc429c6e9246cc216167cb2",
		);

		const hash2 = require("crypto").createHash("sha256");

		// Equivalent content built from plain strings must hash identically.
		const source2 = new ConcatSource(
			"Hello World\n",
			new OriginalSource(
				"console.log('test');\nconsole.log('test2');\n",
				"console.js",
			),
			"console.log('string')",
		);
		source2.updateHash(hash2);
		expect(hash2.digest("hex")).toEqual(digest);

		const clone = new ConcatSource();
		clone.addAllSkipOptimizing(source.getChildren());

		expect(clone.source()).toEqual(source.source());

		const hash3 = require("crypto").createHash("sha256");

		clone.updateHash(hash3);
		expect(hash3.digest("hex")).toEqual(digest);
	});

	it("should return null as map when only generated code is concatenated", () => {
		const source = new ConcatSource(
			"Hello World\n",
			new RawSource("Hello World\n"),
			"",
		);

		const resultText = source.source();
		const resultMap = source.sourceAndMap({
			columns: true,
		});
		const resultListMap = source.sourceAndMap({
			columns: false,
		});

		expect(resultText).toBe("Hello World\nHello World\n");
		expect(resultMap.source).toEqual(resultText);
		expect(resultListMap.source).toEqual(resultText);
		expect(resultListMap.map).toBeNull();
		expect(resultMap.map).toBeNull();
	});

	it("should allow to concatenate in a single line", () => {
		const source = new ConcatSource(
			new OriginalSource("Hello", "hello.txt"),
			" ",
			new OriginalSource("World ", "world.txt"),
			"is here\n",
			new OriginalSource("Hello\n", "hello.txt"),
			" \n",
			new OriginalSource("World\n", "world.txt"),
			"is here",
		);

		expect(withReadableMappings(source.map())).toMatchInlineSnapshot(`
		Object {
		  "_mappings": "1:0 -> [hello.txt] 1:0, :5, :6 -> [world.txt] 1:0, :12
		2:0 -> [hello.txt] 1:0
		4:0 -> [world.txt] 1:0",
		  "file": "x",
		  "mappings": "AAAA,K,CCAA,M;ADAA;;ACAA",
		  "names": Array [],
		  "sources": Array [
		    "hello.txt",
		    "world.txt",
		  ],
		  "sourcesContent": Array [
		    "Hello",
		    "World ",
		  ],
		  "version": 3,
		}
	`);
	});

	it("should allow to concat buffer sources", () => {
		const source = new ConcatSource("a", new RawSource(Buffer.from("b")), "c");
		expect(source.sourceAndMap()).toMatchInlineSnapshot(`
		Object {
		  "map": null,
		  "source": "abc",
		}
	`);
	});

	it("should handle column mapping correctly with missing sources", () => {
		const source = new ConcatSource(
			"/*! For license information please see main.js.LICENSE.txt */",
		);
		const innerSource = "ab\nc";
		const innerMap = {
			names: [],
			file: "x",
			version: 3,
			sources: ["main.js"],
			sourcesContent: ["a\nc"],
			mappings: "AAAA,CCAA;ADCA",
			// ______________↑ The column mapping (CCAA) references one missing source
		};
		source.add(new SourceMapSource(innerSource, "main.js", innerMap));
		const expected = {
			source:
				"/*!
For license information please see main.js.LICENSE.txt */ab\nc", 233 | map: { 234 | version: 3, 235 | file: "x", 236 | mappings: "6DAAA,C;AACA", 237 | sources: ["main.js"], 238 | sourcesContent: ["a\nc"], 239 | names: [], 240 | }, 241 | }; 242 | expect( 243 | source.sourceAndMap({ 244 | columns: true, 245 | }), 246 | ).toEqual({ 247 | source: expected.source, 248 | map: expected.map, 249 | }); 250 | }); 251 | }); 252 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # webpack-sources 2 | 3 | Contains multiple classes which represent a `Source`. A `Source` can be asked for source code, size, source map and hash. 4 | 5 | ## `Source` 6 | 7 | Base class for all sources. 8 | 9 | ### Public methods 10 | 11 | All methods should be considered as expensive as they may need to do computations. 12 | 13 | #### `source` 14 | 15 | 16 | ```typescript 17 | Source.prototype.source() -> String | Buffer 18 | ``` 19 | 20 | Returns the represented source code as string or Buffer (for binary Sources). 21 | 22 | #### `buffer` 23 | 24 | 25 | ```typescript 26 | Source.prototype.buffer() -> Buffer 27 | ``` 28 | 29 | Returns the represented source code as Buffer. Strings are converted to utf-8. 30 | 31 | #### `size` 32 | 33 | 34 | ```typescript 35 | Source.prototype.size() -> Number 36 | ``` 37 | 38 | Returns the size in bytes of the represented source code. 39 | 40 | #### `map` 41 | 42 | 43 | ```typescript 44 | Source.prototype.map(options?: Object) -> Object | null 45 | ``` 46 | 47 | Returns the SourceMap of the represented source code as JSON. May return `null` if no SourceMap is available. 48 | 49 | The `options` object can contain the following keys: 50 | 51 | - `columns: Boolean` (default `true`): If set to false the implementation may omit mappings for columns. 
52 | 53 | #### `sourceAndMap` 54 | 55 | 56 | ```typescript 57 | Source.prototype.sourceAndMap(options?: Object) -> { 58 | source: String | Buffer, 59 | map: Object | null 60 | } 61 | ``` 62 | 63 | Returns both, source code (like `Source.prototype.source()`) and SourceMap (like `Source.prototype.map()`). This method could have better performance than calling `source()` and `map()` separately. 64 | 65 | See `map()` for `options`. 66 | 67 | #### `updateHash` 68 | 69 | 70 | ```typescript 71 | Source.prototype.updateHash(hash: Hash) -> void 72 | ``` 73 | 74 | Updates the provided `Hash` object with the content of the represented source code. (`Hash` is an object with an `update` method, which is called with string values) 75 | 76 | ## `RawSource` 77 | 78 | Represents source code without SourceMap. 79 | 80 | 81 | ```typescript 82 | new RawSource(sourceCode: String | Buffer) 83 | ``` 84 | 85 | ## `OriginalSource` 86 | 87 | Represents source code, which is a copy of the original file. 88 | 89 | 90 | ```typescript 91 | new OriginalSource( 92 | sourceCode: String | Buffer, 93 | name: String 94 | ) 95 | ``` 96 | 97 | - `sourceCode`: The source code. 98 | - `name`: The filename of the original source code. 99 | 100 | OriginalSource tries to create column mappings if requested, by splitting the source code at typical statement borders (`;`, `{`, `}`). 101 | 102 | ## `SourceMapSource` 103 | 104 | Represents source code with SourceMap, optionally having an additional SourceMap for the original source. 105 | 106 | 107 | ```typescript 108 | new SourceMapSource( 109 | sourceCode: String | Buffer, 110 | name: String, 111 | sourceMap: Object | String | Buffer, 112 | originalSource?: String | Buffer, 113 | innerSourceMap?: Object | String | Buffer, 114 | removeOriginalSource?: boolean 115 | ) 116 | ``` 117 | 118 | - `sourceCode`: The source code. 119 | - `name`: The filename of the original source code. 120 | - `sourceMap`: The SourceMap for the source code. 
121 | - `originalSource`: The source code of the original file. Can be omitted if the `sourceMap` already contains the original source code. 122 | - `innerSourceMap`: The SourceMap for the `originalSource`/`name`. 123 | - `removeOriginalSource`: Removes the source code for `name` from the final map, keeping only the deeper mappings for that file. 124 | 125 | The `SourceMapSource` supports "identity" mappings for the `innerSourceMap`. 126 | When original source matches generated source for a mapping it's assumed to be mapped char by char allowing to keep finer mappings from `sourceMap`. 127 | 128 | ## `CachedSource` 129 | 130 | Decorates a `Source` and caches returned results of `map`, `source`, `buffer`, `size` and `sourceAndMap` in memory. `updateHash` is not cached. 131 | It tries to reuse cached results from other methods to avoid calculations, i.e. when `source` is already cached, calling `size` will get the size from the cached source, calling `sourceAndMap` will only call `map` on the wrapped Source. 132 | 133 | 134 | ```typescript 135 | new CachedSource(source: Source) 136 | new CachedSource(source: Source | () => Source, cachedData?: CachedData) 137 | ``` 138 | 139 | Instead of passing a `Source` object directly one can pass a function that returns a `Source` object. The function is only called when needed, and only once. 140 | 141 | ### Public methods 142 | 143 | #### `getCachedData()` 144 | 145 | Returns the cached data for passing to the constructor. All cached entries are converted to Buffers and strings are avoided. 146 | 147 | #### `original()` 148 | 149 | Returns the original `Source` object. 150 | 151 | #### `originalLazy()` 152 | 153 | Returns the original `Source` object or a function returning it. 154 | 155 | ## `PrefixSource` 156 | 157 | Prefix every line of the decorated `Source` with a provided string. 
158 | 159 | 160 | ```typescript 161 | new PrefixSource( 162 | prefix: String, 163 | source: Source | String | Buffer 164 | ) 165 | ``` 166 | 167 | ## `ConcatSource` 168 | 169 | Concatenate multiple `Source`s or strings to a single source. 170 | 171 | 172 | ```typescript 173 | new ConcatSource( 174 | ...items?: Source | String 175 | ) 176 | ``` 177 | 178 | ### Public methods 179 | 180 | #### `add` 181 | 182 | 183 | ```typescript 184 | ConcatSource.prototype.add(item: Source | String) 185 | ``` 186 | 187 | Adds an item to the source. 188 | 189 | ## `ReplaceSource` 190 | 191 | Decorates a `Source` with replacements and insertions of source code. 192 | 193 | The `ReplaceSource` supports "identity" mappings for child source. 194 | When original source matches generated source for a mapping it's assumed to be mapped char by char allowing to split mappings at replacements/insertions. 195 | 196 | ### Public methods 197 | 198 | #### `replace` 199 | 200 | 201 | ```typescript 202 | ReplaceSource.prototype.replace( 203 | start: Number, 204 | end: Number, 205 | replacement: String 206 | ) 207 | ``` 208 | 209 | Replaces chars from `start` (0-indexed, inclusive) to `end` (0-indexed, inclusive) with `replacement`. 210 | 211 | Locations represent locations in the original source and are not influenced by other replacements or insertions. 212 | 213 | #### `insert` 214 | 215 | 216 | ```typescript 217 | ReplaceSource.prototype.insert( 218 | pos: Number, 219 | insertion: String 220 | ) 221 | ``` 222 | 223 | Inserts the `insertion` before char `pos` (0-indexed). 224 | 225 | Location represents the location in the original source and is not influenced by other replacements or insertions. 226 | 227 | #### `original` 228 | 229 | Get decorated `Source`. 230 | 231 | ## `CompatSource` 232 | 233 | Converts a Source-like object into a real Source object. 
234 | 235 | ### Public methods 236 | 237 | #### static `from` 238 | 239 | 240 | ```typescript 241 | CompatSource.from(sourceLike: any | Source) 242 | ``` 243 | 244 | If `sourceLike` is a real Source it returns it unmodified. Otherwise it returns it wrapped in a CompatSource. 245 | -------------------------------------------------------------------------------- /test/Fuzzy.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | jest.mock("./__mocks__/createMappingsSerializer"); 4 | 5 | const { SourceMapConsumer } = require("source-map"); 6 | const validate = require("sourcemap-validator"); 7 | const CachedSource = require("../lib/CachedSource"); 8 | const CompatSource = require("../lib/CompatSource"); 9 | const ConcatSource = require("../lib/ConcatSource"); 10 | const OriginalSource = require("../lib/OriginalSource"); 11 | const PrefixSource = require("../lib/PrefixSource"); 12 | const RawSource = require("../lib/RawSource"); 13 | const ReplaceSource = require("../lib/ReplaceSource"); 14 | const SourceMapSource = require("../lib/SourceMapSource"); 15 | const { withReadableMappings } = require("./helpers"); 16 | 17 | /** @typedef {import("../lib/Source").RawSourceMap} RawSourceMap */ 18 | 19 | const LOREM = 20 | "Lorem { ipsum dolor sit; } amet; { consetetur sadipscing elitr }; { sed { diam; nonumy; } eirmod { tempor invidunt ut labore et } dolore magna aliquyam erat; {{{ sed } diam } voluptua}; At vero eos et accusam et justo duo dolores et ea rebum; Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. { Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore } et dolore magna aliquyam erat, { sed diam voluptua }. { At } { vero } { eos } { et } accusam { et } justo duo dolores et ea rebum. 
Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet."; 21 | 22 | const LOREM_LINES = LOREM.replace(/(.{20,}?)\s/g, "$1\n"); 23 | 24 | const makeReplacements = (replaceSource, input) => { 25 | const regexp = /\w{6,}(\n\w{6,})?/g; 26 | let match = regexp.exec(input); 27 | while (match !== null) { 28 | replaceSource.replace( 29 | match.index, 30 | match.index + match[0].length - 1, 31 | match[0].length % 4 === 0 ? "XXX\n" : "XXX", 32 | match[0].replace(/\n[^]*$/, "").trim(), 33 | ); 34 | match = regexp.exec(input); 35 | } 36 | }; 37 | 38 | const getReplacementNames = (input) => input.match(/\w{6,}/g); 39 | 40 | describe("fuzzy", () => { 41 | const variants = { 42 | CompatSource: (source) => new CompatSource(source), 43 | PrefixSource: (source) => new PrefixSource("lorem: ", source), 44 | ReplaceSource: (source) => { 45 | const replaceSource = new ReplaceSource(source, "replaced.txt"); 46 | const input = source.source(); 47 | makeReplacements(replaceSource, input); 48 | return replaceSource; 49 | }, 50 | ConcatSource: (source) => new ConcatSource(source, source, source), 51 | SourceMapSource: (source) => { 52 | const map = source.map(); 53 | return map 54 | ? new SourceMapSource(source.source(), "source-map.txt", source.map()) 55 | : new OriginalSource(source.source(), "lorem.txt"); 56 | }, 57 | SourceMapSourceInner: (source) => { 58 | const code = source.source(); 59 | const replaceSource = new ReplaceSource( 60 | new OriginalSource(code, "lorem.txt"), 61 | "replaced.txt", 62 | ); 63 | const input = source.source(); 64 | makeReplacements(replaceSource, input); 65 | const sourceAndMap = replaceSource.sourceAndMap(); 66 | 67 | const map = source.map(); 68 | return map 69 | ? 
new SourceMapSource( 70 | sourceAndMap.source, 71 | "lorem.txt", 72 | /** @type {RawSourceMap} */ 73 | (sourceAndMap.map), 74 | code, 75 | map, 76 | true, 77 | ) 78 | : new SourceMapSource( 79 | sourceAndMap.source, 80 | "lorem.txt", 81 | /** @type {RawSourceMap} */ 82 | (sourceAndMap.map), 83 | ); 84 | }, 85 | CachedSource: (source) => new CachedSource(source), 86 | }; 87 | 88 | const createTests = (remaining, snapshot, list, offset) => { 89 | if (remaining === 0) { 90 | for (const [inputName, input] of [ 91 | ["lorem", LOREM], 92 | ["lorem lines", LOREM_LINES], 93 | ]) { 94 | const validNames = getReplacementNames(input); 95 | const validateSourceMap = async (sourceMap, code) => { 96 | try { 97 | expect(sourceMap.mappings).toMatch( 98 | /^[A-Za-z0-9+/]{1,10}((,|;+)[A-Za-z0-9+/]{1,10})*$/, 99 | ); 100 | expect(sourceMap.sources).toContain("lorem.txt"); 101 | for (const name of sourceMap.names) { 102 | expect(validNames).toContain(name); 103 | } 104 | validate(code, JSON.stringify(sourceMap)); 105 | await SourceMapConsumer.with(sourceMap, null, (consumer) => { 106 | if (offset === 0) { 107 | // TODO test for other offset too 108 | expect( 109 | consumer.originalPositionFor({ line: 1, column: 0 }), 110 | ).toEqual({ 111 | source: "lorem.txt", 112 | line: 1, 113 | column: 0, 114 | name: null, 115 | }); 116 | } 117 | }); 118 | } catch (err) { 119 | err.message += `\n${JSON.stringify(sourceMap, undefined, 2)}\n${ 120 | withReadableMappings(sourceMap, code)._mappings 121 | }`; 122 | throw err; 123 | } 124 | }; 125 | const rawSourceFn = list.reduceRight( 126 | (result, fn) => () => fn(result()), 127 | () => new RawSource(input), 128 | ); 129 | const originalSourceFn = list.reduceRight( 130 | (result, fn) => () => fn(result()), 131 | () => new OriginalSource(input, "lorem.txt"), 132 | ); 133 | for (const options of [undefined, { columns: false }]) { 134 | const o = JSON.stringify(options); 135 | for (const [inputSourceName, sourceFn] of [ 136 | ["raw", rawSourceFn], 137 | 
["original", originalSourceFn], 138 | ]) { 139 | if (options === undefined) { 140 | it(`${inputSourceName} ${inputName} should return correct .source()`, () => { 141 | const source = sourceFn(); 142 | const result = source.source(); 143 | expect(source.source()).toEqual(result); 144 | if (snapshot) { 145 | expect(result).toMatchSnapshot(); 146 | } 147 | }); 148 | 149 | it(`${inputSourceName} ${inputName} should return correct .size()`, () => { 150 | const source = sourceFn(); 151 | const result = source.size(); 152 | expect(source.size()).toEqual(result); 153 | if (snapshot) { 154 | expect(result).toMatchSnapshot(); 155 | } 156 | }); 157 | } 158 | 159 | it(`${inputSourceName} ${inputName} should return correct .map(${o})`, async () => { 160 | const source = sourceFn(); 161 | const result = withReadableMappings(source.map(options)); 162 | expect(withReadableMappings(source.map(options))).toEqual(result); 163 | if (inputSourceName === "original") { 164 | expect(result).toBeTruthy(); 165 | } 166 | if (result) { 167 | const code = source.source(); 168 | await validateSourceMap(result, code); 169 | } 170 | if (snapshot) { 171 | expect(result).toMatchSnapshot(); 172 | } 173 | }); 174 | 175 | it(`${inputSourceName} ${inputName} should return correct .sourceAndMap(${o})`, async () => { 176 | const source = sourceFn(); 177 | const result = source.sourceAndMap(options); 178 | result.map = withReadableMappings(result.map); 179 | if (result.map) { 180 | expect(result.map.mappings).toMatch( 181 | /^[A-Za-z0-9+/]{1,10}((,|;+)[A-Za-z0-9+/]{1,10})*$/, 182 | ); 183 | await validateSourceMap(result.map, result.source); 184 | } 185 | const result2 = source.sourceAndMap(options); 186 | result2.map = withReadableMappings(result.map); 187 | expect(result).toEqual(result2); 188 | expect(result.map).toEqual( 189 | withReadableMappings(sourceFn().map(options)), 190 | ); 191 | if (snapshot) { 192 | expect(result).toMatchSnapshot(); 193 | } 194 | }); 195 | } 196 | 197 | it(`${inputName} 
RawSource and OriginalSource should return equal .source(${o})`, () => { 198 | expect(originalSourceFn().source()).toEqual(rawSourceFn().source()); 199 | }); 200 | 201 | it(`${inputName} RawSource and OriginalSource should return equal .sourceAndMap(${o}).source`, () => { 202 | expect(originalSourceFn().sourceAndMap(options).source).toEqual( 203 | rawSourceFn().sourceAndMap(options).source, 204 | ); 205 | }); 206 | } 207 | } 208 | } else { 209 | for (const key of Object.keys(variants)) { 210 | const fn = variants[key]; 211 | 212 | describe(key, () => { 213 | createTests( 214 | remaining - 1, 215 | snapshot, 216 | [...list, fn], 217 | offset + (key === "PrefixSource" ? 7 : 0), 218 | ); 219 | }); 220 | } 221 | } 222 | }; 223 | 224 | describe("single source", () => { 225 | createTests(1, true, [], 0); 226 | }); 227 | 228 | describe("2 sources", () => { 229 | createTests(2, true, [], 0); 230 | }); 231 | 232 | describe("3 sources", () => { 233 | createTests(3, false, [], 0); 234 | }); 235 | 236 | describe("4 sources", () => { 237 | createTests(4, false, [], 0); 238 | }); 239 | }); 240 | -------------------------------------------------------------------------------- /types.d.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * This file was automatically generated. 3 | * DO NOT MODIFY BY HAND. 
4 | * Run `yarn fix:special` to update 5 | */ 6 | 7 | import { Buffer } from "buffer"; 8 | 9 | declare interface BufferEntry { 10 | map?: null | RawSourceMap; 11 | bufferedMap?: null | BufferedMap; 12 | } 13 | declare interface BufferedMap { 14 | /** 15 | * version 16 | */ 17 | version: number; 18 | 19 | /** 20 | * sources 21 | */ 22 | sources: string[]; 23 | 24 | /** 25 | * name 26 | */ 27 | names: string[]; 28 | 29 | /** 30 | * source root 31 | */ 32 | sourceRoot?: string; 33 | 34 | /** 35 | * sources content 36 | */ 37 | sourcesContent?: ("" | Buffer)[]; 38 | 39 | /** 40 | * mappings 41 | */ 42 | mappings?: Buffer; 43 | 44 | /** 45 | * file 46 | */ 47 | file: string; 48 | } 49 | declare interface CachedData { 50 | /** 51 | * source 52 | */ 53 | source?: boolean; 54 | 55 | /** 56 | * buffer 57 | */ 58 | buffer: Buffer; 59 | 60 | /** 61 | * size 62 | */ 63 | size?: number; 64 | 65 | /** 66 | * maps 67 | */ 68 | maps: Map; 69 | 70 | /** 71 | * hash 72 | */ 73 | hash?: (string | Buffer)[]; 74 | } 75 | declare class CachedSource extends Source { 76 | constructor(source: Source | (() => Source), cachedData?: CachedData); 77 | getCachedData(): CachedData; 78 | originalLazy(): Source | (() => Source); 79 | original(): Source; 80 | streamChunks( 81 | options: StreamChunksOptions, 82 | onChunk: ( 83 | chunk: undefined | string, 84 | generatedLine: number, 85 | generatedColumn: number, 86 | sourceIndex: number, 87 | originalLine: number, 88 | originalColumn: number, 89 | nameIndex: number, 90 | ) => void, 91 | onSource: ( 92 | sourceIndex: number, 93 | source: null | string, 94 | sourceContent?: string, 95 | ) => void, 96 | onName: (nameIndex: number, name: string) => void, 97 | ): GeneratedSourceInfo; 98 | } 99 | declare class CompatSource extends Source { 100 | constructor(sourceLike: SourceLike); 101 | static from(sourceLike: SourceLike): Source; 102 | } 103 | declare class ConcatSource extends Source { 104 | constructor(...args: ConcatSourceChild[]); 105 | 
getChildren(): Source[]; 106 | add(item: ConcatSourceChild): void; 107 | addAllSkipOptimizing(items: ConcatSourceChild[]): void; 108 | streamChunks( 109 | options: StreamChunksOptions, 110 | onChunk: ( 111 | chunk: undefined | string, 112 | generatedLine: number, 113 | generatedColumn: number, 114 | sourceIndex: number, 115 | originalLine: number, 116 | originalColumn: number, 117 | nameIndex: number, 118 | ) => void, 119 | onSource: ( 120 | sourceIndex: number, 121 | source: null | string, 122 | sourceContent?: string, 123 | ) => void, 124 | onName: (nameIndex: number, name: string) => void, 125 | ): GeneratedSourceInfo; 126 | } 127 | type ConcatSourceChild = string | Source | SourceLike; 128 | declare interface GeneratedSourceInfo { 129 | /** 130 | * generated line 131 | */ 132 | generatedLine?: number; 133 | 134 | /** 135 | * generated column 136 | */ 137 | generatedColumn?: number; 138 | 139 | /** 140 | * source 141 | */ 142 | source?: string; 143 | } 144 | declare interface HashLike { 145 | /** 146 | * make hash update 147 | */ 148 | update: (data: string | Buffer, inputEncoding?: string) => HashLike; 149 | 150 | /** 151 | * get hash digest 152 | */ 153 | digest: (encoding?: string) => string | Buffer; 154 | } 155 | declare interface MapOptions { 156 | /** 157 | * need columns? 
158 | */ 159 | columns?: boolean; 160 | 161 | /** 162 | * is module 163 | */ 164 | module?: boolean; 165 | } 166 | declare class OriginalSource extends Source { 167 | constructor(value: string | Buffer, name: string); 168 | getName(): string; 169 | streamChunks( 170 | options: StreamChunksOptions, 171 | onChunk: ( 172 | chunk: undefined | string, 173 | generatedLine: number, 174 | generatedColumn: number, 175 | sourceIndex: number, 176 | originalLine: number, 177 | originalColumn: number, 178 | nameIndex: number, 179 | ) => void, 180 | onSource: ( 181 | sourceIndex: number, 182 | source: null | string, 183 | sourceContent?: string, 184 | ) => void, 185 | _onName: (nameIndex: number, name: string) => void, 186 | ): GeneratedSourceInfo; 187 | } 188 | declare class PrefixSource extends Source { 189 | constructor(prefix: string, source: string | Source | Buffer); 190 | getPrefix(): string; 191 | original(): Source; 192 | streamChunks( 193 | options: StreamChunksOptions, 194 | onChunk: ( 195 | chunk: undefined | string, 196 | generatedLine: number, 197 | generatedColumn: number, 198 | sourceIndex: number, 199 | originalLine: number, 200 | originalColumn: number, 201 | nameIndex: number, 202 | ) => void, 203 | onSource: ( 204 | sourceIndex: number, 205 | source: null | string, 206 | sourceContent?: string, 207 | ) => void, 208 | onName: (nameIndex: number, name: string) => void, 209 | ): GeneratedSourceInfo; 210 | } 211 | declare class RawSource extends Source { 212 | constructor(value: string | Buffer, convertToString?: boolean); 213 | isBuffer(): boolean; 214 | streamChunks( 215 | options: StreamChunksOptions, 216 | onChunk: ( 217 | chunk: undefined | string, 218 | generatedLine: number, 219 | generatedColumn: number, 220 | sourceIndex: number, 221 | originalLine: number, 222 | originalColumn: number, 223 | nameIndex: number, 224 | ) => void, 225 | onSource: ( 226 | sourceIndex: number, 227 | source: null | string, 228 | sourceContent?: string, 229 | ) => void, 230 | 
onName: (nameIndex: number, name: string) => void, 231 | ): GeneratedSourceInfo; 232 | } 233 | declare interface RawSourceMap { 234 | /** 235 | * version 236 | */ 237 | version: number; 238 | 239 | /** 240 | * sources 241 | */ 242 | sources: string[]; 243 | 244 | /** 245 | * names 246 | */ 247 | names: string[]; 248 | 249 | /** 250 | * source root 251 | */ 252 | sourceRoot?: string; 253 | 254 | /** 255 | * sources content 256 | */ 257 | sourcesContent?: string[]; 258 | 259 | /** 260 | * mappings 261 | */ 262 | mappings: string; 263 | 264 | /** 265 | * file 266 | */ 267 | file: string; 268 | 269 | /** 270 | * debug id 271 | */ 272 | debugId?: string; 273 | 274 | /** 275 | * ignore list 276 | */ 277 | ignoreList?: number[]; 278 | } 279 | declare class ReplaceSource extends Source { 280 | constructor(source: Source, name?: string); 281 | getName(): undefined | string; 282 | getReplacements(): Replacement[]; 283 | replace(start: number, end: number, newValue: string, name?: string): void; 284 | insert(pos: number, newValue: string, name?: string): void; 285 | original(): Source; 286 | streamChunks( 287 | options: StreamChunksOptions, 288 | onChunk: ( 289 | chunk: undefined | string, 290 | generatedLine: number, 291 | generatedColumn: number, 292 | sourceIndex: number, 293 | originalLine: number, 294 | originalColumn: number, 295 | nameIndex: number, 296 | ) => void, 297 | onSource: ( 298 | sourceIndex: number, 299 | source: null | string, 300 | sourceContent?: string, 301 | ) => void, 302 | onName: (nameIndex: number, name: string) => void, 303 | ): GeneratedSourceInfo; 304 | static Replacement: typeof Replacement; 305 | } 306 | declare class Replacement { 307 | constructor(start: number, end: number, content: string, name?: string); 308 | start: number; 309 | end: number; 310 | content: string; 311 | name?: string; 312 | index?: number; 313 | } 314 | declare class SizeOnlySource extends Source { 315 | constructor(size: number); 316 | } 317 | declare class Source { 318 
| constructor(); 319 | source(): SourceValue; 320 | buffer(): Buffer; 321 | size(): number; 322 | map(options?: MapOptions): null | RawSourceMap; 323 | sourceAndMap(options?: MapOptions): SourceAndMap; 324 | updateHash(hash: HashLike): void; 325 | } 326 | declare interface SourceAndMap { 327 | /** 328 | * source 329 | */ 330 | source: SourceValue; 331 | 332 | /** 333 | * map 334 | */ 335 | map: null | RawSourceMap; 336 | } 337 | declare interface SourceLike { 338 | /** 339 | * source 340 | */ 341 | source: () => SourceValue; 342 | 343 | /** 344 | * buffer 345 | */ 346 | buffer?: () => Buffer; 347 | 348 | /** 349 | * size 350 | */ 351 | size?: () => number; 352 | 353 | /** 354 | * map 355 | */ 356 | map?: (options?: MapOptions) => null | RawSourceMap; 357 | 358 | /** 359 | * source and map 360 | */ 361 | sourceAndMap?: (options?: MapOptions) => SourceAndMap; 362 | 363 | /** 364 | * hash updater 365 | */ 366 | updateHash?: (hash: HashLike) => void; 367 | } 368 | declare class SourceMapSource extends Source { 369 | constructor( 370 | value: string | Buffer, 371 | name: string, 372 | sourceMap?: string | RawSourceMap | Buffer, 373 | originalSource?: string | Buffer, 374 | innerSourceMap?: string | RawSourceMap | Buffer, 375 | removeOriginalSource?: boolean, 376 | ); 377 | getArgsAsBuffers(): [ 378 | Buffer, 379 | string, 380 | Buffer, 381 | undefined | Buffer, 382 | undefined | Buffer, 383 | undefined | boolean, 384 | ]; 385 | streamChunks( 386 | options: StreamChunksOptions, 387 | onChunk: ( 388 | chunk: undefined | string, 389 | generatedLine: number, 390 | generatedColumn: number, 391 | sourceIndex: number, 392 | originalLine: number, 393 | originalColumn: number, 394 | nameIndex: number, 395 | ) => void, 396 | onSource: ( 397 | sourceIndex: number, 398 | source: null | string, 399 | sourceContent?: string, 400 | ) => void, 401 | onName: (nameIndex: number, name: string) => void, 402 | ): GeneratedSourceInfo; 403 | } 404 | type SourceValue = string | Buffer; 405 | 
declare interface StreamChunksOptions { 406 | source?: boolean; 407 | finalSource?: boolean; 408 | columns?: boolean; 409 | } 410 | declare namespace exports { 411 | export namespace util { 412 | export namespace stringBufferUtils { 413 | export let disableDualStringBufferCaching: () => void; 414 | export let enableDualStringBufferCaching: () => void; 415 | export let enterStringInterningRange: () => void; 416 | export let exitStringInterningRange: () => void; 417 | export let internString: (str: string) => string; 418 | export let isDualStringBufferCachingEnabled: () => boolean; 419 | } 420 | } 421 | export type OnChunk = ( 422 | chunk: undefined | string, 423 | generatedLine: number, 424 | generatedColumn: number, 425 | sourceIndex: number, 426 | originalLine: number, 427 | originalColumn: number, 428 | nameIndex: number, 429 | ) => void; 430 | export type OnName = (nameIndex: number, name: string) => void; 431 | export type OnSource = ( 432 | sourceIndex: number, 433 | source: null | string, 434 | sourceContent?: string, 435 | ) => void; 436 | export { 437 | Source, 438 | RawSource, 439 | OriginalSource, 440 | SourceMapSource, 441 | CachedSource, 442 | ConcatSource, 443 | ReplaceSource, 444 | PrefixSource, 445 | SizeOnlySource, 446 | CompatSource, 447 | CachedData, 448 | SourceLike, 449 | ConcatSourceChild, 450 | Replacement, 451 | HashLike, 452 | MapOptions, 453 | RawSourceMap, 454 | SourceAndMap, 455 | SourceValue, 456 | GeneratedSourceInfo, 457 | StreamChunksOptions, 458 | }; 459 | } 460 | 461 | export = exports; 462 | -------------------------------------------------------------------------------- /lib/SourceMapSource.js: -------------------------------------------------------------------------------- 1 | /* 2 | MIT License http://www.opensource.org/licenses/mit-license.php 3 | Author Tobias Koppers @sokra 4 | */ 5 | 6 | "use strict"; 7 | 8 | const Source = require("./Source"); 9 | const { getMap, getSourceAndMap } = require("./helpers/getFromStreamChunks"); 
10 | const streamChunksOfCombinedSourceMap = require("./helpers/streamChunksOfCombinedSourceMap"); 11 | const streamChunksOfSourceMap = require("./helpers/streamChunksOfSourceMap"); 12 | const { 13 | isDualStringBufferCachingEnabled, 14 | } = require("./helpers/stringBufferUtils"); 15 | 16 | /** @typedef {import("./Source").HashLike} HashLike */ 17 | /** @typedef {import("./Source").MapOptions} MapOptions */ 18 | /** @typedef {import("./Source").RawSourceMap} RawSourceMap */ 19 | /** @typedef {import("./Source").SourceAndMap} SourceAndMap */ 20 | /** @typedef {import("./Source").SourceValue} SourceValue */ 21 | /** @typedef {import("./helpers/getGeneratedSourceInfo").GeneratedSourceInfo} GeneratedSourceInfo */ 22 | /** @typedef {import("./helpers/streamChunks").OnChunk} OnChunk */ 23 | /** @typedef {import("./helpers/streamChunks").OnName} OnName */ 24 | /** @typedef {import("./helpers/streamChunks").OnSource} OnSource */ 25 | /** @typedef {import("./helpers/streamChunks").Options} Options */ 26 | 27 | class SourceMapSource extends Source { 28 | /** 29 | * @param {string | Buffer} value value 30 | * @param {string} name name 31 | * @param {string | Buffer | RawSourceMap=} sourceMap source map 32 | * @param {SourceValue=} originalSource original source 33 | * @param {(string | Buffer | RawSourceMap)=} innerSourceMap inner source map 34 | * @param {boolean=} removeOriginalSource do remove original source 35 | */ 36 | constructor( 37 | value, 38 | name, 39 | sourceMap, 40 | originalSource, 41 | innerSourceMap, 42 | removeOriginalSource, 43 | ) { 44 | super(); 45 | const valueIsBuffer = Buffer.isBuffer(value); 46 | /** 47 | * @private 48 | * @type {undefined | string} 49 | */ 50 | this._valueAsString = valueIsBuffer ? undefined : value; 51 | /** 52 | * @private 53 | * @type {undefined | Buffer} 54 | */ 55 | this._valueAsBuffer = valueIsBuffer ? 
value : undefined; 56 | 57 | this._name = name; 58 | 59 | this._hasSourceMap = Boolean(sourceMap); 60 | const sourceMapIsBuffer = Buffer.isBuffer(sourceMap); 61 | const sourceMapIsString = typeof sourceMap === "string"; 62 | /** 63 | * @private 64 | * @type {undefined | RawSourceMap} 65 | */ 66 | this._sourceMapAsObject = 67 | sourceMapIsBuffer || sourceMapIsString ? undefined : sourceMap; 68 | /** 69 | * @private 70 | * @type {undefined | string} 71 | */ 72 | this._sourceMapAsString = sourceMapIsString ? sourceMap : undefined; 73 | /** 74 | * @private 75 | * @type {undefined | Buffer} 76 | */ 77 | this._sourceMapAsBuffer = sourceMapIsBuffer ? sourceMap : undefined; 78 | 79 | this._hasOriginalSource = Boolean(originalSource); 80 | const originalSourceIsBuffer = Buffer.isBuffer(originalSource); 81 | this._originalSourceAsString = originalSourceIsBuffer 82 | ? undefined 83 | : originalSource; 84 | this._originalSourceAsBuffer = originalSourceIsBuffer 85 | ? originalSource 86 | : undefined; 87 | 88 | this._hasInnerSourceMap = Boolean(innerSourceMap); 89 | const innerSourceMapIsBuffer = Buffer.isBuffer(innerSourceMap); 90 | const innerSourceMapIsString = typeof innerSourceMap === "string"; 91 | /** 92 | * @private 93 | * @type {undefined | RawSourceMap} 94 | */ 95 | this._innerSourceMapAsObject = 96 | innerSourceMapIsBuffer || innerSourceMapIsString 97 | ? undefined 98 | : innerSourceMap; 99 | /** 100 | * @private 101 | * @type {undefined | string} 102 | */ 103 | this._innerSourceMapAsString = innerSourceMapIsString 104 | ? innerSourceMap 105 | : undefined; 106 | /** 107 | * @private 108 | * @type {undefined | Buffer} 109 | */ 110 | this._innerSourceMapAsBuffer = innerSourceMapIsBuffer 111 | ? 
innerSourceMap 112 | : undefined; 113 | 114 | this._removeOriginalSource = removeOriginalSource; 115 | } 116 | 117 | /** 118 | * @returns {[Buffer, string, Buffer, Buffer | undefined, Buffer | undefined, boolean | undefined]} args 119 | */ 120 | getArgsAsBuffers() { 121 | return [ 122 | this.buffer(), 123 | this._name, 124 | this._sourceMapBuffer(), 125 | this._originalSourceBuffer(), 126 | this._innerSourceMapBuffer(), 127 | this._removeOriginalSource, 128 | ]; 129 | } 130 | 131 | buffer() { 132 | if (this._valueAsBuffer === undefined) { 133 | const value = Buffer.from( 134 | /** @type {string} */ (this._valueAsString), 135 | "utf8", 136 | ); 137 | if (isDualStringBufferCachingEnabled()) { 138 | this._valueAsBuffer = value; 139 | } 140 | return value; 141 | } 142 | return this._valueAsBuffer; 143 | } 144 | 145 | /** 146 | * @returns {SourceValue} source 147 | */ 148 | source() { 149 | if (this._valueAsString === undefined) { 150 | const value = 151 | /** @type {Buffer} */ 152 | (this._valueAsBuffer).toString("utf8"); 153 | if (isDualStringBufferCachingEnabled()) { 154 | this._valueAsString = value; 155 | } 156 | return value; 157 | } 158 | return this._valueAsString; 159 | } 160 | 161 | /** 162 | * @private 163 | * @returns {undefined | Buffer} buffer 164 | */ 165 | _originalSourceBuffer() { 166 | if (this._originalSourceAsBuffer === undefined && this._hasOriginalSource) { 167 | const value = Buffer.from( 168 | /** @type {string} */ 169 | (this._originalSourceAsString), 170 | "utf8", 171 | ); 172 | if (isDualStringBufferCachingEnabled()) { 173 | this._originalSourceAsBuffer = value; 174 | } 175 | return value; 176 | } 177 | return this._originalSourceAsBuffer; 178 | } 179 | 180 | _originalSourceString() { 181 | if (this._originalSourceAsString === undefined && this._hasOriginalSource) { 182 | const value = 183 | /** @type {Buffer} */ 184 | (this._originalSourceAsBuffer).toString("utf8"); 185 | if (isDualStringBufferCachingEnabled()) { 186 | 
this._originalSourceAsString = value; 187 | } 188 | return value; 189 | } 190 | return this._originalSourceAsString; 191 | } 192 | 193 | _innerSourceMapObject() { 194 | if (this._innerSourceMapAsObject === undefined && this._hasInnerSourceMap) { 195 | const value = JSON.parse(this._innerSourceMapString()); 196 | if (isDualStringBufferCachingEnabled()) { 197 | this._innerSourceMapAsObject = value; 198 | } 199 | return value; 200 | } 201 | return this._innerSourceMapAsObject; 202 | } 203 | 204 | _innerSourceMapBuffer() { 205 | if (this._innerSourceMapAsBuffer === undefined && this._hasInnerSourceMap) { 206 | const value = Buffer.from(this._innerSourceMapString(), "utf8"); 207 | if (isDualStringBufferCachingEnabled()) { 208 | this._innerSourceMapAsBuffer = value; 209 | } 210 | return value; 211 | } 212 | return this._innerSourceMapAsBuffer; 213 | } 214 | 215 | /** 216 | * @private 217 | * @returns {string} result 218 | */ 219 | _innerSourceMapString() { 220 | if (this._innerSourceMapAsString === undefined && this._hasInnerSourceMap) { 221 | if (this._innerSourceMapAsBuffer !== undefined) { 222 | const value = this._innerSourceMapAsBuffer.toString("utf8"); 223 | if (isDualStringBufferCachingEnabled()) { 224 | this._innerSourceMapAsString = value; 225 | } 226 | return value; 227 | } 228 | const value = JSON.stringify(this._innerSourceMapAsObject); 229 | if (isDualStringBufferCachingEnabled()) { 230 | this._innerSourceMapAsString = value; 231 | } 232 | return value; 233 | } 234 | return /** @type {string} */ (this._innerSourceMapAsString); 235 | } 236 | 237 | _sourceMapObject() { 238 | if (this._sourceMapAsObject === undefined) { 239 | const value = JSON.parse(this._sourceMapString()); 240 | if (isDualStringBufferCachingEnabled()) { 241 | this._sourceMapAsObject = value; 242 | } 243 | return value; 244 | } 245 | return this._sourceMapAsObject; 246 | } 247 | 248 | _sourceMapBuffer() { 249 | if (this._sourceMapAsBuffer === undefined) { 250 | const value = 
Buffer.from(this._sourceMapString(), "utf8"); 251 | if (isDualStringBufferCachingEnabled()) { 252 | this._sourceMapAsBuffer = value; 253 | } 254 | return value; 255 | } 256 | return this._sourceMapAsBuffer; 257 | } 258 | 259 | _sourceMapString() { 260 | if (this._sourceMapAsString === undefined) { 261 | if (this._sourceMapAsBuffer !== undefined) { 262 | const value = this._sourceMapAsBuffer.toString("utf8"); 263 | if (isDualStringBufferCachingEnabled()) { 264 | this._sourceMapAsString = value; 265 | } 266 | return value; 267 | } 268 | const value = JSON.stringify(this._sourceMapAsObject); 269 | if (isDualStringBufferCachingEnabled()) { 270 | this._sourceMapAsString = value; 271 | } 272 | return value; 273 | } 274 | return this._sourceMapAsString; 275 | } 276 | 277 | /** 278 | * @param {MapOptions=} options map options 279 | * @returns {RawSourceMap | null} map 280 | */ 281 | map(options) { 282 | if (!this._hasInnerSourceMap) { 283 | return this._sourceMapObject(); 284 | } 285 | return getMap(this, options); 286 | } 287 | 288 | /** 289 | * @param {MapOptions=} options map options 290 | * @returns {SourceAndMap} source and map 291 | */ 292 | sourceAndMap(options) { 293 | if (!this._hasInnerSourceMap) { 294 | return { 295 | source: this.source(), 296 | map: this._sourceMapObject(), 297 | }; 298 | } 299 | return getSourceAndMap(this, options); 300 | } 301 | 302 | /** 303 | * @param {Options} options options 304 | * @param {OnChunk} onChunk called for each chunk of code 305 | * @param {OnSource} onSource called for each source 306 | * @param {OnName} onName called for each name 307 | * @returns {GeneratedSourceInfo} generated source info 308 | */ 309 | streamChunks(options, onChunk, onSource, onName) { 310 | if (this._hasInnerSourceMap) { 311 | return streamChunksOfCombinedSourceMap( 312 | /** @type {string} */ 313 | (this.source()), 314 | this._sourceMapObject(), 315 | this._name, 316 | /** @type {string} */ 317 | (this._originalSourceString()), 318 | 
this._innerSourceMapObject(), 319 | this._removeOriginalSource, 320 | onChunk, 321 | onSource, 322 | onName, 323 | Boolean(options && options.finalSource), 324 | Boolean(options && options.columns !== false), 325 | ); 326 | } 327 | return streamChunksOfSourceMap( 328 | /** @type {string} */ 329 | (this.source()), 330 | this._sourceMapObject(), 331 | onChunk, 332 | onSource, 333 | onName, 334 | Boolean(options && options.finalSource), 335 | Boolean(options && options.columns !== false), 336 | ); 337 | } 338 | 339 | /** 340 | * @param {HashLike} hash hash 341 | * @returns {void} 342 | */ 343 | updateHash(hash) { 344 | hash.update("SourceMapSource"); 345 | hash.update(this.buffer()); 346 | hash.update(this._sourceMapBuffer()); 347 | 348 | if (this._hasOriginalSource) { 349 | hash.update( 350 | /** @type {Buffer} */ 351 | (this._originalSourceBuffer()), 352 | ); 353 | } 354 | 355 | if (this._hasInnerSourceMap) { 356 | hash.update( 357 | /** @type {Buffer} */ 358 | (this._innerSourceMapBuffer()), 359 | ); 360 | } 361 | 362 | hash.update(this._removeOriginalSource ? 
"true" : "false"); 363 | } 364 | } 365 | 366 | module.exports = SourceMapSource; 367 | -------------------------------------------------------------------------------- /lib/ConcatSource.js: -------------------------------------------------------------------------------- 1 | /* 2 | MIT License http://www.opensource.org/licenses/mit-license.php 3 | Author Tobias Koppers @sokra 4 | */ 5 | 6 | "use strict"; 7 | 8 | const RawSource = require("./RawSource"); 9 | const Source = require("./Source"); 10 | const { getMap, getSourceAndMap } = require("./helpers/getFromStreamChunks"); 11 | const streamChunks = require("./helpers/streamChunks"); 12 | 13 | /** @typedef {import("./CompatSource").SourceLike} SourceLike */ 14 | /** @typedef {import("./Source").HashLike} HashLike */ 15 | /** @typedef {import("./Source").MapOptions} MapOptions */ 16 | /** @typedef {import("./Source").RawSourceMap} RawSourceMap */ 17 | /** @typedef {import("./Source").SourceAndMap} SourceAndMap */ 18 | /** @typedef {import("./Source").SourceValue} SourceValue */ 19 | /** @typedef {import("./helpers/getGeneratedSourceInfo").GeneratedSourceInfo} GeneratedSourceInfo */ 20 | /** @typedef {import("./helpers/streamChunks").OnChunk} OnChunk */ 21 | /** @typedef {import("./helpers/streamChunks").OnName} OnName */ 22 | /** @typedef {import("./helpers/streamChunks").OnSource} OnSource */ 23 | /** @typedef {import("./helpers/streamChunks").Options} Options */ 24 | 25 | /** @typedef {string | Source | SourceLike} Child */ 26 | 27 | const stringsAsRawSources = new WeakSet(); 28 | 29 | class ConcatSource extends Source { 30 | /** 31 | * @param {Child[]} args children 32 | */ 33 | constructor(...args) { 34 | super(); 35 | /** 36 | * @private 37 | * @type {Child[]} 38 | */ 39 | this._children = []; 40 | 41 | for (const item of args) { 42 | if (item instanceof ConcatSource) { 43 | for (const child of item._children) { 44 | this._children.push(child); 45 | } 46 | } else { 47 | this._children.push(item); 48 | } 49 | } 
50 | 51 | this._isOptimized = args.length === 0; 52 | } 53 | 54 | /** 55 | * @returns {Source[]} children 56 | */ 57 | getChildren() { 58 | if (!this._isOptimized) this._optimize(); 59 | return /** @type {Source[]} */ (this._children); 60 | } 61 | 62 | /** 63 | * @param {Child} item item 64 | * @returns {void} 65 | */ 66 | add(item) { 67 | if (item instanceof ConcatSource) { 68 | for (const child of item._children) { 69 | this._children.push(child); 70 | } 71 | } else { 72 | this._children.push(item); 73 | } 74 | this._isOptimized = false; 75 | } 76 | 77 | /** 78 | * @param {Child[]} items items 79 | * @returns {void} 80 | */ 81 | addAllSkipOptimizing(items) { 82 | for (const item of items) { 83 | this._children.push(item); 84 | } 85 | } 86 | 87 | buffer() { 88 | if (!this._isOptimized) this._optimize(); 89 | /** @type {Buffer[]} */ 90 | const buffers = []; 91 | for (const child of /** @type {SourceLike[]} */ (this._children)) { 92 | if (typeof child.buffer === "function") { 93 | buffers.push(child.buffer()); 94 | } else { 95 | const bufferOrString = child.source(); 96 | if (Buffer.isBuffer(bufferOrString)) { 97 | buffers.push(bufferOrString); 98 | } else { 99 | // This will not happen 100 | buffers.push(Buffer.from(bufferOrString, "utf8")); 101 | } 102 | } 103 | } 104 | return Buffer.concat(buffers); 105 | } 106 | 107 | /** 108 | * @returns {SourceValue} source 109 | */ 110 | source() { 111 | if (!this._isOptimized) this._optimize(); 112 | let source = ""; 113 | for (const child of this._children) { 114 | source += /** @type {Source} */ (child).source(); 115 | } 116 | return source; 117 | } 118 | 119 | size() { 120 | if (!this._isOptimized) this._optimize(); 121 | let size = 0; 122 | for (const child of this._children) { 123 | size += /** @type {Source} */ (child).size(); 124 | } 125 | return size; 126 | } 127 | 128 | /** 129 | * @param {MapOptions=} options map options 130 | * @returns {RawSourceMap | null} map 131 | */ 132 | map(options) { 133 | return 
getMap(this, options); 134 | } 135 | 136 | /** 137 | * @param {MapOptions=} options map options 138 | * @returns {SourceAndMap} source and map 139 | */ 140 | sourceAndMap(options) { 141 | return getSourceAndMap(this, options); 142 | } 143 | 144 | /** 145 | * @param {Options} options options 146 | * @param {OnChunk} onChunk called for each chunk of code 147 | * @param {OnSource} onSource called for each source 148 | * @param {OnName} onName called for each name 149 | * @returns {GeneratedSourceInfo} generated source info 150 | */ 151 | streamChunks(options, onChunk, onSource, onName) { 152 | if (!this._isOptimized) this._optimize(); 153 | if (this._children.length === 1) { 154 | return /** @type {ConcatSource[]} */ (this._children)[0].streamChunks( 155 | options, 156 | onChunk, 157 | onSource, 158 | onName, 159 | ); 160 | } 161 | let currentLineOffset = 0; 162 | let currentColumnOffset = 0; 163 | const sourceMapping = new Map(); 164 | const nameMapping = new Map(); 165 | const finalSource = Boolean(options && options.finalSource); 166 | let code = ""; 167 | let needToCloseMapping = false; 168 | for (const item of /** @type {Source[]} */ (this._children)) { 169 | /** @type {number[]} */ 170 | const sourceIndexMapping = []; 171 | /** @type {number[]} */ 172 | const nameIndexMapping = []; 173 | let lastMappingLine = 0; 174 | const { generatedLine, generatedColumn, source } = streamChunks( 175 | item, 176 | options, 177 | // eslint-disable-next-line no-loop-func 178 | ( 179 | chunk, 180 | generatedLine, 181 | generatedColumn, 182 | sourceIndex, 183 | originalLine, 184 | originalColumn, 185 | nameIndex, 186 | ) => { 187 | const line = generatedLine + currentLineOffset; 188 | const column = 189 | generatedLine === 1 190 | ? 
generatedColumn + currentColumnOffset 191 | : generatedColumn; 192 | if (needToCloseMapping) { 193 | if (generatedLine !== 1 || generatedColumn !== 0) { 194 | onChunk( 195 | undefined, 196 | currentLineOffset + 1, 197 | currentColumnOffset, 198 | -1, 199 | -1, 200 | -1, 201 | -1, 202 | ); 203 | } 204 | needToCloseMapping = false; 205 | } 206 | const resultSourceIndex = 207 | sourceIndex < 0 || sourceIndex >= sourceIndexMapping.length 208 | ? -1 209 | : sourceIndexMapping[sourceIndex]; 210 | const resultNameIndex = 211 | nameIndex < 0 || nameIndex >= nameIndexMapping.length 212 | ? -1 213 | : nameIndexMapping[nameIndex]; 214 | lastMappingLine = resultSourceIndex < 0 ? 0 : generatedLine; 215 | let _chunk; 216 | // When using finalSource, we process the entire source code at once at the end, rather than chunk by chunk 217 | if (finalSource) { 218 | if (chunk !== undefined) code += chunk; 219 | } else { 220 | _chunk = chunk; 221 | } 222 | if (resultSourceIndex < 0) { 223 | onChunk(_chunk, line, column, -1, -1, -1, -1); 224 | } else { 225 | onChunk( 226 | _chunk, 227 | line, 228 | column, 229 | resultSourceIndex, 230 | originalLine, 231 | originalColumn, 232 | resultNameIndex, 233 | ); 234 | } 235 | }, 236 | (i, source, sourceContent) => { 237 | let globalIndex = sourceMapping.get(source); 238 | if (globalIndex === undefined) { 239 | sourceMapping.set(source, (globalIndex = sourceMapping.size)); 240 | onSource(globalIndex, source, sourceContent); 241 | } 242 | sourceIndexMapping[i] = globalIndex; 243 | }, 244 | (i, name) => { 245 | let globalIndex = nameMapping.get(name); 246 | if (globalIndex === undefined) { 247 | nameMapping.set(name, (globalIndex = nameMapping.size)); 248 | onName(globalIndex, name); 249 | } 250 | nameIndexMapping[i] = globalIndex; 251 | }, 252 | ); 253 | if (source !== undefined) code += source; 254 | if ( 255 | needToCloseMapping && 256 | (generatedLine !== 1 || generatedColumn !== 0) 257 | ) { 258 | onChunk( 259 | undefined, 260 | 
currentLineOffset + 1, 261 | currentColumnOffset, 262 | -1, 263 | -1, 264 | -1, 265 | -1, 266 | ); 267 | needToCloseMapping = false; 268 | } 269 | if (/** @type {number} */ (generatedLine) > 1) { 270 | currentColumnOffset = /** @type {number} */ (generatedColumn); 271 | } else { 272 | currentColumnOffset += /** @type {number} */ (generatedColumn); 273 | } 274 | needToCloseMapping = 275 | needToCloseMapping || 276 | (finalSource && lastMappingLine === generatedLine); 277 | currentLineOffset += /** @type {number} */ (generatedLine) - 1; 278 | } 279 | return { 280 | generatedLine: currentLineOffset + 1, 281 | generatedColumn: currentColumnOffset, 282 | source: finalSource ? code : undefined, 283 | }; 284 | } 285 | 286 | /** 287 | * @param {HashLike} hash hash 288 | * @returns {void} 289 | */ 290 | updateHash(hash) { 291 | if (!this._isOptimized) this._optimize(); 292 | hash.update("ConcatSource"); 293 | for (const item of this._children) { 294 | /** @type {Source} */ 295 | (item).updateHash(hash); 296 | } 297 | } 298 | 299 | _optimize() { 300 | const newChildren = []; 301 | let currentString; 302 | /** @type {undefined | string | [string, string] | SourceLike} */ 303 | let currentRawSources; 304 | /** 305 | * @param {string} string string 306 | * @returns {void} 307 | */ 308 | const addStringToRawSources = (string) => { 309 | if (currentRawSources === undefined) { 310 | currentRawSources = string; 311 | } else if (Array.isArray(currentRawSources)) { 312 | currentRawSources.push(string); 313 | } else { 314 | currentRawSources = [ 315 | typeof currentRawSources === "string" 316 | ? 
currentRawSources 317 | : /** @type {string} */ (currentRawSources.source()), 318 | string, 319 | ]; 320 | } 321 | }; 322 | /** 323 | * @param {SourceLike} source source 324 | * @returns {void} 325 | */ 326 | const addSourceToRawSources = (source) => { 327 | if (currentRawSources === undefined) { 328 | currentRawSources = source; 329 | } else if (Array.isArray(currentRawSources)) { 330 | currentRawSources.push( 331 | /** @type {string} */ 332 | (source.source()), 333 | ); 334 | } else { 335 | currentRawSources = [ 336 | typeof currentRawSources === "string" 337 | ? currentRawSources 338 | : /** @type {string} */ (currentRawSources.source()), 339 | /** @type {string} */ 340 | (source.source()), 341 | ]; 342 | } 343 | }; 344 | const mergeRawSources = () => { 345 | if (Array.isArray(currentRawSources)) { 346 | const rawSource = new RawSource(currentRawSources.join("")); 347 | stringsAsRawSources.add(rawSource); 348 | newChildren.push(rawSource); 349 | } else if (typeof currentRawSources === "string") { 350 | const rawSource = new RawSource(currentRawSources); 351 | stringsAsRawSources.add(rawSource); 352 | newChildren.push(rawSource); 353 | } else { 354 | newChildren.push(currentRawSources); 355 | } 356 | }; 357 | for (const child of this._children) { 358 | if (typeof child === "string") { 359 | if (currentString === undefined) { 360 | currentString = child; 361 | } else { 362 | currentString += child; 363 | } 364 | } else { 365 | if (currentString !== undefined) { 366 | addStringToRawSources(currentString); 367 | currentString = undefined; 368 | } 369 | if (stringsAsRawSources.has(child)) { 370 | addSourceToRawSources( 371 | /** @type {SourceLike} */ 372 | (child), 373 | ); 374 | } else { 375 | if (currentRawSources !== undefined) { 376 | mergeRawSources(); 377 | currentRawSources = undefined; 378 | } 379 | newChildren.push(child); 380 | } 381 | } 382 | } 383 | if (currentString !== undefined) { 384 | addStringToRawSources(currentString); 385 | } 386 | if 
(currentRawSources !== undefined) { 387 | mergeRawSources(); 388 | } 389 | this._children = newChildren; 390 | this._isOptimized = true; 391 | } 392 | } 393 | 394 | module.exports = ConcatSource; 395 | -------------------------------------------------------------------------------- /test/ReplaceSource.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | /** @typedef {import("../lib/Source").RawSourceMap} RawSourceMap */ 4 | 5 | jest.mock("./__mocks__/createMappingsSerializer"); 6 | 7 | const validate = require("sourcemap-validator"); 8 | const { ReplaceSource } = require("../"); 9 | const { OriginalSource } = require("../"); 10 | const { SourceMapSource } = require("../"); 11 | const { withReadableMappings } = require("./helpers"); 12 | 13 | describe("replaceSource", () => { 14 | it("should replace correctly", () => { 15 | let line1; 16 | let line2; 17 | let line3; 18 | let line4; 19 | let line5; 20 | const source = new ReplaceSource( 21 | new OriginalSource( 22 | [ 23 | (line1 = "Hello World!"), 24 | (line2 = "{}"), 25 | (line3 = "Line 3"), 26 | (line4 = "Line 4"), 27 | (line5 = "Line 5"), 28 | "Last", 29 | "Line", 30 | ].join("\n"), 31 | "file.txt", 32 | ), 33 | ); 34 | const startLine3 = line1.length + line2.length + 2; 35 | const startLine6 = 36 | startLine3 + line3.length + line4.length + line5.length + 3; 37 | source.replace( 38 | startLine3, 39 | startLine3 + line3.length + line4.length + line5.length + 2, 40 | "", 41 | ); 42 | source.replace(1, 4, "i "); 43 | source.replace(1, 4, "bye"); 44 | source.replace(7, 7, "0000"); 45 | source.insert(line1.length + 2, "\n Multi Line\n"); 46 | source.replace(startLine6 + 4, startLine6 + 4, " "); 47 | const originalSource = source.original(); 48 | const originalText = originalSource.source(); 49 | const resultText = source.source(); 50 | const result = source.sourceAndMap({ 51 | columns: true, 52 | }); 53 | const resultListMap = source.sourceAndMap({ 54 | 
columns: false, 55 | }); 56 | 57 | // @ts-expect-error for tests 58 | expect(originalSource).toEqual(source._source); 59 | expect(originalText).toBe( 60 | "Hello World!\n{}\nLine 3\nLine 4\nLine 5\nLast\nLine", 61 | ); 62 | // const resultText = "Hi bye W0000rld!\n{\n Multi Line\n}\nLast Line"; 63 | expect(resultText).toBe("Hi bye W0000rld!\n{\n Multi Line\n}\nLast Line"); 64 | expect(result.source).toEqual(resultText); 65 | expect(resultListMap.source).toEqual(resultText); 66 | const listMap = /** @type {RawSourceMap} */ (resultListMap.map); 67 | const resultMap = /** @type {RawSourceMap} */ (result.map); 68 | expect(listMap.file).toEqual(resultMap.file); 69 | expect(listMap.version).toEqual(resultMap.version); 70 | expect(listMap.sources).toEqual(resultMap.sources); 71 | expect(listMap.sourcesContent).toEqual(resultMap.sourcesContent); 72 | expect(withReadableMappings(resultMap)._mappings).toMatchInlineSnapshot(` 73 | "1:0 -> [file.txt] 1:0, :1 -> [file.txt] 1:1, :3 -> [file.txt] 1:5, :8 -> [file.txt] 1:7, :12 -> [file.txt] 1:8 74 | 2:0 -> [file.txt] 2:0, :1 -> [file.txt] 2:1 75 | 3:0 -> [file.txt] 2:1 76 | 4:0 -> [file.txt] 2:1 77 | 5:0 -> [file.txt] 6:0, :4 -> [file.txt] 6:4, :5 -> [file.txt] 7:0" 78 | `); 79 | expect(withReadableMappings(resultListMap.map)._mappings) 80 | .toMatchInlineSnapshot(` 81 | "1:0 -> [file.txt] 1:0 82 | 2:0 -> [file.txt] 2:0 83 | 3:0 -> [file.txt] 2:0 84 | 4:0 -> [file.txt] 2:0 85 | 5:0 -> [file.txt] 6:0" 86 | `); 87 | }); 88 | 89 | it("should replace multiple items correctly", () => { 90 | let line1; 91 | const source = new ReplaceSource( 92 | new OriginalSource([(line1 = "Hello"), "World!"].join("\n"), "file.txt"), 93 | ); 94 | source.insert(0, "Message: "); 95 | source.replace(2, line1.length + 4, "y A"); 96 | const resultText = source.source(); 97 | const result = source.sourceAndMap({ 98 | columns: true, 99 | }); 100 | const resultListMap = source.sourceAndMap({ 101 | columns: false, 102 | }); 103 | 104 | 
expect(resultText).toBe("Message: Hey Ad!"); 105 | expect(result.source).toEqual(resultText); 106 | expect(resultListMap.source).toEqual(resultText); 107 | const listMap = /** @type {RawSourceMap} */ (resultListMap.map); 108 | const resultMap = /** @type {RawSourceMap} */ (result.map); 109 | expect(listMap.file).toEqual(resultMap.file); 110 | expect(listMap.version).toEqual(resultMap.version); 111 | expect(listMap.sources).toEqual(resultMap.sources); 112 | expect(listMap.sourcesContent).toEqual(resultMap.sourcesContent); 113 | expect(resultMap.mappings).toBe("AAAA,WAAE,GACE"); 114 | expect(listMap.mappings).toBe("AAAA"); 115 | }); 116 | 117 | it("should prepend items correctly", () => { 118 | const source = new ReplaceSource(new OriginalSource("Line 1", "file.txt")); 119 | source.insert(-1, "Line -1\n"); 120 | source.insert(-1, "Line 0\n"); 121 | const resultText = source.source(); 122 | const result = source.sourceAndMap({ 123 | columns: true, 124 | }); 125 | const resultListMap = source.sourceAndMap({ 126 | columns: false, 127 | }); 128 | 129 | expect(resultText).toBe("Line -1\nLine 0\nLine 1"); 130 | expect(result.source).toEqual(resultText); 131 | expect(resultListMap.source).toEqual(resultText); 132 | const listMap = /** @type {RawSourceMap} */ (resultListMap.map); 133 | const resultMap = /** @type {RawSourceMap} */ (result.map); 134 | expect(listMap.file).toEqual(resultMap.file); 135 | expect(listMap.version).toEqual(resultMap.version); 136 | expect(listMap.sources).toEqual(resultMap.sources); 137 | expect(listMap.sourcesContent).toEqual(resultMap.sourcesContent); 138 | expect(resultMap.mappings).toBe("AAAA;AAAA;AAAA"); 139 | expect(listMap.mappings).toBe("AAAA;AAAA;AAAA"); 140 | }); 141 | 142 | it("should prepend items with replace at start correctly", () => { 143 | const source = new ReplaceSource( 144 | new OriginalSource(["Line 1", "Line 2"].join("\n"), "file.txt"), 145 | ); 146 | source.insert(-1, "Line 0\n"); 147 | source.replace(0, 5, "Hello"); 148 | 
const resultText = source.source(); 149 | const result = source.sourceAndMap({ 150 | columns: true, 151 | }); 152 | const resultListMap = source.sourceAndMap({ 153 | columns: false, 154 | }); 155 | 156 | expect(resultText).toBe("Line 0\nHello\nLine 2"); 157 | expect(result.source).toEqual(resultText); 158 | expect(resultListMap.source).toEqual(resultText); 159 | const listMap = /** @type {RawSourceMap} */ (resultListMap.map); 160 | const resultMap = /** @type {RawSourceMap} */ (result.map); 161 | expect(listMap.file).toEqual(resultMap.file); 162 | expect(listMap.version).toEqual(resultMap.version); 163 | expect(listMap.sources).toEqual(resultMap.sources); 164 | expect(listMap.sourcesContent).toEqual(resultMap.sourcesContent); 165 | expect(resultMap.mappings).toBe("AAAA;AAAA,KAAM;AACN"); 166 | expect(listMap.mappings).toBe("AAAA;AAAA;AACA"); 167 | }); 168 | 169 | it("should append items correctly", () => { 170 | let line1; 171 | const source = new ReplaceSource( 172 | new OriginalSource((line1 = "Line 1\n"), "file.txt"), 173 | ); 174 | source.insert(line1.length + 1, "Line 2\n"); 175 | const resultText = source.source(); 176 | const result = source.sourceAndMap({ 177 | columns: true, 178 | }); 179 | const resultListMap = source.sourceAndMap({ 180 | columns: false, 181 | }); 182 | 183 | expect(resultText).toBe("Line 1\nLine 2\n"); 184 | expect(result.source).toEqual(resultText); 185 | expect(resultListMap.source).toEqual(resultText); 186 | const listMap = /** @type {RawSourceMap} */ (resultListMap.map); 187 | const resultMap = /** @type {RawSourceMap} */ (result.map); 188 | expect(listMap.file).toEqual(resultMap.file); 189 | expect(listMap.version).toEqual(resultMap.version); 190 | expect(listMap.sources).toEqual(resultMap.sources); 191 | expect(listMap.sourcesContent).toEqual(resultMap.sourcesContent); 192 | expect(resultMap.mappings).toBe("AAAA"); 193 | expect(listMap.mappings).toBe("AAAA"); 194 | }); 195 | 196 | it("should produce correct source map", () => { 197 
| const bootstrapCode = " var hello\n var world\n"; 198 | 199 | expect(() => { 200 | const source = new ReplaceSource( 201 | new OriginalSource(bootstrapCode, "file.js"), 202 | ); 203 | source.replace(7, 11, "h", "incorrect"); 204 | source.replace(20, 24, "w", "identifiers"); 205 | const resultMap = source.sourceAndMap(); 206 | validate(resultMap.source, JSON.stringify(resultMap.map)); 207 | }).toThrow(/mismatched names/); 208 | 209 | const source = new ReplaceSource( 210 | new OriginalSource(bootstrapCode, "file.js"), 211 | ); 212 | source.replace(7, 11, "h", "hello"); 213 | source.replace(20, 24, "w", "world"); 214 | const resultMap = source.sourceAndMap(); 215 | validate(resultMap.source, JSON.stringify(resultMap.map)); 216 | }); 217 | 218 | it("should allow replacements at the start", () => { 219 | const map = { 220 | version: 3, 221 | sources: ["abc"], 222 | names: ["StaticPage", "data", "foo"], 223 | mappings: 224 | ";;AAAA,eAAe,SAASA,UAAT,OAA8B;AAAA,MAARC,IAAQ,QAARA,IAAQ;AAC3C,sBAAO;AAAA,cAAMA,IAAI,CAACC;AAAX,IAAP;AACD", 225 | /* 226 | 3:0 -> [abc] 1:0, :15 -> [abc] 1:15, :24 -> [abc] 1:24 (StaticPage), :34 -> [abc] 1:15, :41 -> [abc] 1:45 227 | 4:0 -> [abc] 1:45, :6 -> [abc] 1:37 (data), :10 -> [abc] 1:45, :18 -> [abc] 1:37 (data), :22 -> [abc] 1:45 228 | 5:0 -> [abc] 2:2, :22 -> [abc] 2:9 229 | 6:0 -> [abc] 2:9, :14 -> [abc] 2:15 (data), :18 -> [abc] 2:19, :19 -> [abc] 2:20 (foo) 230 | 7:0 -> [abc] 2:9, :4 -> [abc] 2:2 231 | 8:0 -> [abc] 3:1 232 | */ 233 | sourcesContent: [ 234 | `export default function StaticPage({ data }) { 235 | return
{data.foo}
236 | } 237 | `, 238 | ], 239 | file: "x", 240 | }; 241 | const code = `import { jsx as _jsx } from "react/jsx-runtime"; 242 | export var __N_SSG = true; 243 | export default function StaticPage(_ref) { 244 | var data = _ref.data; 245 | return /*#__PURE__*/_jsx("div", { 246 | children: data.foo 247 | }); 248 | }`; 249 | const source = new ReplaceSource( 250 | new SourceMapSource(code, "source.js", map), 251 | ); 252 | source.replace(0, 47, ""); 253 | source.replace(49, 55, ""); 254 | source.replace(76, 90, ""); 255 | source.replace( 256 | 165, 257 | 168, 258 | "(0,react_jsx_runtime__WEBPACK_IMPORTED_MODULE_0__.jsx)", 259 | ); 260 | expect(withReadableMappings(source.map())).toMatchInlineSnapshot(` 261 | Object { 262 | "_mappings": "3:0 -> [abc] 1:15, :9 -> [abc] 1:24 (StaticPage), :19 -> [abc] 1:15, :26 -> [abc] 1:45 263 | 4:0 -> [abc] 1:45, :6 -> [abc] 1:37 (data), :10 -> [abc] 1:45, :18 -> [abc] 1:37 (data), :22 -> [abc] 1:45 264 | 5:0 -> [abc] 2:2, :22 -> [abc] 2:9 265 | 6:0 -> [abc] 2:9, :14 -> [abc] 2:15 (data), :18 -> [abc] 2:19, :19 -> [abc] 2:20 (foo) 266 | 7:0 -> [abc] 2:9, :4 -> [abc] 2:2 267 | 8:0 -> [abc] 3:1", 268 | "file": "x", 269 | "mappings": ";;AAAe,SAASA,UAAT,OAA8B;AAAA,MAARC,IAAQ,QAARA,IAAQ;AAC3C,sBAAO;AAAA,cAAMA,IAAI,CAACC;AAAX,IAAP;AACD", 270 | "names": Array [ 271 | "StaticPage", 272 | "data", 273 | "foo", 274 | ], 275 | "sources": Array [ 276 | "abc", 277 | ], 278 | "sourcesContent": Array [ 279 | "export default function StaticPage({ data }) { 280 | return
{data.foo}
281 | } 282 | ", 283 | ], 284 | "version": 3, 285 | } 286 | `); 287 | }); 288 | 289 | it("should not generate invalid mappings when replacing multiple lines of code", () => { 290 | const source = new ReplaceSource( 291 | new OriginalSource( 292 | ["if (a;b;c) {", " a; b; c;", "}"].join("\n"), 293 | "document.js", 294 | ), 295 | "_document.js", 296 | ); 297 | source.replace(4, 8, "false"); 298 | source.replace(12, 23, ""); 299 | expect(source.source()).toMatchInlineSnapshot('"if (false) {}"'); 300 | expect(withReadableMappings(source.map(), source.source())) 301 | .toMatchInlineSnapshot(` 302 | Object { 303 | "_mappings": "1:0 -> [document.js] 1:0, :4 -> [document.js] 1:4, :9 -> [document.js] 1:9, :12 -> [document.js] 3:0 304 | if (false) {} 305 | ^___^____^__^ 306 | ", 307 | "file": "x", 308 | "mappings": "AAAA,IAAI,KAAK,GAET", 309 | "names": Array [], 310 | "sources": Array [ 311 | "document.js", 312 | ], 313 | "sourcesContent": Array [ 314 | "if (a;b;c) { 315 | a; b; c; 316 | }", 317 | ], 318 | "version": 3, 319 | } 320 | `); 321 | }); 322 | }); 323 | -------------------------------------------------------------------------------- /lib/CachedSource.js: -------------------------------------------------------------------------------- 1 | /* 2 | MIT License http://www.opensource.org/licenses/mit-license.php 3 | Author Tobias Koppers @sokra 4 | */ 5 | 6 | "use strict"; 7 | 8 | const Source = require("./Source"); 9 | const streamAndGetSourceAndMap = require("./helpers/streamAndGetSourceAndMap"); 10 | const streamChunksOfRawSource = require("./helpers/streamChunksOfRawSource"); 11 | const streamChunksOfSourceMap = require("./helpers/streamChunksOfSourceMap"); 12 | const { 13 | isDualStringBufferCachingEnabled, 14 | } = require("./helpers/stringBufferUtils"); 15 | 16 | /** @typedef {import("./Source").HashLike} HashLike */ 17 | /** @typedef {import("./Source").MapOptions} MapOptions */ 18 | /** @typedef {import("./Source").RawSourceMap} RawSourceMap */ 19 | /** @typedef 
{import("./Source").SourceAndMap} SourceAndMap */ 20 | /** @typedef {import("./Source").SourceValue} SourceValue */ 21 | /** @typedef {import("./helpers/getGeneratedSourceInfo").GeneratedSourceInfo} GeneratedSourceInfo */ 22 | /** @typedef {import("./helpers/streamChunks").OnChunk} OnChunk */ 23 | /** @typedef {import("./helpers/streamChunks").OnName} OnName */ 24 | /** @typedef {import("./helpers/streamChunks").OnSource} OnSource */ 25 | /** @typedef {import("./helpers/streamChunks").Options} Options */ 26 | 27 | /** 28 | * @typedef {object} BufferedMap 29 | * @property {number} version version 30 | * @property {string[]} sources sources 31 | * @property {string[]} names name 32 | * @property {string=} sourceRoot source root 33 | * @property {(Buffer | "")[]=} sourcesContent sources content 34 | * @property {Buffer=} mappings mappings 35 | * @property {string} file file 36 | */ 37 | 38 | /** 39 | * @param {null | RawSourceMap} map map 40 | * @returns {null | BufferedMap} buffered map 41 | */ 42 | const mapToBufferedMap = (map) => { 43 | if (typeof map !== "object" || !map) return map; 44 | const bufferedMap = 45 | /** @type {BufferedMap} */ 46 | (/** @type {unknown} */ ({ ...map })); 47 | if (map.mappings) { 48 | bufferedMap.mappings = Buffer.from(map.mappings, "utf8"); 49 | } 50 | if (map.sourcesContent) { 51 | bufferedMap.sourcesContent = map.sourcesContent.map( 52 | (str) => str && Buffer.from(str, "utf8"), 53 | ); 54 | } 55 | return bufferedMap; 56 | }; 57 | 58 | /** 59 | * @param {null | BufferedMap} bufferedMap buffered map 60 | * @returns {null | RawSourceMap} map 61 | */ 62 | const bufferedMapToMap = (bufferedMap) => { 63 | if (typeof bufferedMap !== "object" || !bufferedMap) return bufferedMap; 64 | const map = 65 | /** @type {RawSourceMap} */ 66 | (/** @type {unknown} */ ({ ...bufferedMap })); 67 | if (bufferedMap.mappings) { 68 | map.mappings = bufferedMap.mappings.toString("utf8"); 69 | } 70 | if (bufferedMap.sourcesContent) { 71 | map.sourcesContent = 
bufferedMap.sourcesContent.map( 72 | (buffer) => buffer && buffer.toString("utf8"), 73 | ); 74 | } 75 | return map; 76 | }; 77 | 78 | /** @typedef {{ map?: null | RawSourceMap, bufferedMap?: null | BufferedMap }} BufferEntry */ 79 | /** @typedef {Map} BufferedMaps */ 80 | 81 | /** 82 | * @typedef {object} CachedData 83 | * @property {boolean=} source source 84 | * @property {Buffer} buffer buffer 85 | * @property {number=} size size 86 | * @property {BufferedMaps} maps maps 87 | * @property {(string | Buffer)[]=} hash hash 88 | */ 89 | 90 | class CachedSource extends Source { 91 | /** 92 | * @param {Source | (() => Source)} source source 93 | * @param {CachedData=} cachedData cached data 94 | */ 95 | constructor(source, cachedData) { 96 | super(); 97 | this._source = source; 98 | this._cachedSourceType = cachedData ? cachedData.source : undefined; 99 | /** 100 | * @private 101 | * @type {undefined | string} 102 | */ 103 | this._cachedSource = undefined; 104 | this._cachedBuffer = cachedData ? cachedData.buffer : undefined; 105 | this._cachedSize = cachedData ? cachedData.size : undefined; 106 | /** 107 | * @private 108 | * @type {BufferedMaps} 109 | */ 110 | this._cachedMaps = cachedData ? cachedData.maps : new Map(); 111 | this._cachedHashUpdate = cachedData ? 
cachedData.hash : undefined;
	}

	/**
	 * Creates a serializable snapshot of all currently cached values.
	 * @returns {CachedData} cached data
	 */
	getCachedData() {
		/** @type {BufferedMaps} */
		const bufferedMaps = new Map();
		for (const pair of this._cachedMaps) {
			const [, cacheEntry] = pair;
			if (cacheEntry.bufferedMap === undefined) {
				// Lazily compute (and memoize on the live entry) the buffered form.
				cacheEntry.bufferedMap = mapToBufferedMap(
					this._getMapFromCacheEntry(cacheEntry),
				);
			}
			bufferedMaps.set(pair[0], {
				map: undefined,
				bufferedMap: cacheEntry.bufferedMap,
			});
		}
		return {
			// We don't want to cache strings.
			// So if we have a cached source,
			// create a buffer from it and only store
			// whether it was a Buffer or a string.
			buffer: this._cachedSource
				? this.buffer()
				: /** @type {Buffer} */ (this._cachedBuffer),
			source:
				this._cachedSourceType !== undefined
					? this._cachedSourceType
					: typeof this._cachedSource === "string"
						? true
						: Buffer.isBuffer(this._cachedSource)
							?
false
							: undefined,
			size: this._cachedSize,
			maps: bufferedMaps,
			hash: this._cachedHashUpdate,
		};
	}

	// Returns the wrapped source without forcing a lazy factory function.
	originalLazy() {
		return this._source;
	}

	// Returns the wrapped source, invoking (and replacing) a lazy factory.
	original() {
		if (typeof this._source === "function") this._source = this._source();
		return this._source;
	}

	/**
	 * @returns {SourceValue} source
	 */
	source() {
		const source = this._getCachedSource();
		if (source !== undefined) return source;
		return (this._cachedSource =
			/** @type {string} */
			(this.original().source()));
	}

	/**
	 * Returns the entry's map, converting from the buffered form on demand.
	 * @private
	 * @param {BufferEntry} cacheEntry cache entry
	 * @returns {null | RawSourceMap} raw source map
	 */
	_getMapFromCacheEntry(cacheEntry) {
		if (cacheEntry.map !== undefined) {
			return cacheEntry.map;
		} else if (cacheEntry.bufferedMap !== undefined) {
			return (cacheEntry.map = bufferedMapToMap(cacheEntry.bufferedMap));
		}

		return null;
	}

	/**
	 * Returns the source as a string when it can be derived from the caches
	 * (implicitly `undefined` otherwise).
	 * @private
	 * @returns {undefined | string} cached source
	 */
	_getCachedSource() {
		if (this._cachedSource !== undefined) return this._cachedSource;
		if (this._cachedBuffer && this._cachedSourceType !== undefined) {
			const value = this._cachedSourceType
				? this._cachedBuffer.toString("utf8")
				: this._cachedBuffer;
			// Only keep both string and buffer representations when dual
			// caching is enabled (memory/speed trade-off).
			if (isDualStringBufferCachingEnabled()) {
				this._cachedSource = /** @type {string} */ (value);
			}
			return /** @type {string} */ (value);
		}
	}

	/**
	 * @returns {Buffer} buffer
	 */
	buffer() {
		if (this._cachedBuffer !== undefined) return this._cachedBuffer;
		if (this._cachedSource !== undefined) {
			const value = Buffer.isBuffer(this._cachedSource)
				?
this._cachedSource
				: Buffer.from(this._cachedSource, "utf8");
			if (isDualStringBufferCachingEnabled()) {
				this._cachedBuffer = value;
			}
			return value;
		}
		if (typeof this.original().buffer === "function") {
			return (this._cachedBuffer = this.original().buffer());
		}
		const bufferOrString = this.source();
		if (Buffer.isBuffer(bufferOrString)) {
			return (this._cachedBuffer = bufferOrString);
		}
		const value = Buffer.from(bufferOrString, "utf8");
		if (isDualStringBufferCachingEnabled()) {
			this._cachedBuffer = value;
		}
		return value;
	}

	/**
	 * Byte size of the content, cached and derived from the cheapest
	 * representation already available.
	 * @returns {number} size
	 */
	size() {
		if (this._cachedSize !== undefined) return this._cachedSize;
		if (this._cachedBuffer !== undefined) {
			return (this._cachedSize = this._cachedBuffer.length);
		}
		const source = this._getCachedSource();
		if (source !== undefined) {
			return (this._cachedSize = Buffer.byteLength(source));
		}
		return (this._cachedSize = this.original().size());
	}

	/**
	 * @param {MapOptions=} options map options
	 * @returns {SourceAndMap} source and map
	 */
	sourceAndMap(options) {
		// Maps are cached per serialized options object.
		const key = options ? JSON.stringify(options) : "{}";
		const cacheEntry = this._cachedMaps.get(key);
		// Look for a cached map
		if (cacheEntry !== undefined) {
			// We have a cached map in some representation
			const map = this._getMapFromCacheEntry(cacheEntry);

			// Either get the cached source or compute it
			return { source: this.source(), map };
		}
		// Look for a cached source
		let source = this._getCachedSource();
		// Compute the map
		let map;
		if (source !== undefined) {
			map = this.original().map(options);
		} else {
			// Compute the source and map together.
			const sourceAndMap = this.original().sourceAndMap(options);
			source = /** @type {string} */ (sourceAndMap.source);
			map = sourceAndMap.map;
			this._cachedSource = source;
		}
		this._cachedMaps.set(key, {
			map,
			bufferedMap: undefined,
		});
		return { source, map };
	}

	/**
	 * Streams chunks, replaying from the cache when both the map (for these
	 * options) and the content are already cached; otherwise streams from
	 * the original source and fills the caches as a side effect.
	 * @param {Options} options options
	 * @param {OnChunk} onChunk called for each chunk of code
	 * @param {OnSource} onSource called for each source
	 * @param {OnName} onName called for each name
	 * @returns {GeneratedSourceInfo} generated source info
	 */
	streamChunks(options, onChunk, onSource, onName) {
		const key = options ? JSON.stringify(options) : "{}";
		if (
			this._cachedMaps.has(key) &&
			(this._cachedBuffer !== undefined || this._cachedSource !== undefined)
		) {
			const { source, map } = this.sourceAndMap(options);
			if (map) {
				return streamChunksOfSourceMap(
					/** @type {string} */
					(source),
					map,
					onChunk,
					onSource,
					onName,
					Boolean(options && options.finalSource),
					true,
				);
			}
			return streamChunksOfRawSource(
				/** @type {string} */
				(source),
				onChunk,
				onSource,
				onName,
				Boolean(options && options.finalSource),
			);
		}
		const sourceAndMap = streamAndGetSourceAndMap(
			this.original(),
			options,
			onChunk,
			onSource,
			onName,
		);
		this._cachedSource = sourceAndMap.source;
		this._cachedMaps.set(key, {
			map: /** @type {RawSourceMap} */ (sourceAndMap.map),
			bufferedMap: undefined,
		});
		return sourceAndMap.result;
	}

	/**
	 * @param {MapOptions=} options map options
	 * @returns {RawSourceMap | null} map
	 */
	map(options) {
		const key = options ?
JSON.stringify(options) : "{}";
		const cacheEntry = this._cachedMaps.get(key);
		if (cacheEntry !== undefined) {
			return this._getMapFromCacheEntry(cacheEntry);
		}
		const map = this.original().map(options);
		this._cachedMaps.set(key, {
			map,
			bufferedMap: undefined,
		});
		return map;
	}

	/**
	 * Updates the hash, recording the stream of update() calls the first
	 * time so later calls can be replayed without touching the original.
	 * @param {HashLike} hash hash
	 * @returns {void}
	 */
	updateHash(hash) {
		if (this._cachedHashUpdate !== undefined) {
			for (const item of this._cachedHashUpdate) hash.update(item);
			return;
		}
		/** @type {(string | Buffer)[]} */
		const update = [];
		/** @type {string | undefined} */
		let currentString;
		// Proxy hash that batches small string updates (< 10240 chars each)
		// into larger Buffers (flushed past 102400 chars) to keep the
		// recorded list short.
		const tracker = {
			/**
			 * @param {string | Buffer} item item
			 * @returns {void}
			 */
			update: (item) => {
				if (typeof item === "string" && item.length < 10240) {
					if (currentString === undefined) {
						currentString = item;
					} else {
						currentString += item;
						if (currentString.length > 102400) {
							update.push(Buffer.from(currentString));
							currentString = undefined;
						}
					}
				} else {
					if (currentString !== undefined) {
						update.push(Buffer.from(currentString));
						currentString = undefined;
					}
					update.push(item);
				}
			},
		};
		this.original().updateHash(/** @type {HashLike} */ (tracker));
		if (currentString !== undefined) {
			update.push(Buffer.from(currentString));
		}
		for (const item of update) hash.update(item);
		this._cachedHashUpdate = update;
	}
}

module.exports = CachedSource;

-------------------------------------------------------------------------------- /lib/helpers/streamChunksOfCombinedSourceMap.js: --------------------------------------------------------------------------------
/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Tobias Koppers
@sokra
*/

"use strict";

const splitIntoLines = require("./splitIntoLines");
const streamChunksOfSourceMap = require("./streamChunksOfSourceMap");

/** @typedef {import("../Source").RawSourceMap} RawSourceMap */
/** @typedef {import("./getGeneratedSourceInfo").GeneratedSourceInfo} GeneratedSourceInfo */
/** @typedef {import("./streamChunks").OnChunk} onChunk */
/** @typedef {import("./streamChunks").OnName} OnName */
/** @typedef {import("./streamChunks").OnSource} OnSource */

/**
 * Streams `source` with `sourceMap` while "looking through" one of its
 * sources: whenever a mapping points into `innerSourceName`, the mapping is
 * resolved through `innerSourceMap` so chunks map to the inner map's
 * original sources. Sources and names from both maps are re-indexed into a
 * single global index space before being emitted via onSource/onName.
 * @param {string} source source
 * @param {RawSourceMap} sourceMap source map
 * @param {string} innerSourceName inner source name
 * @param {string} innerSource inner source
 * @param {RawSourceMap} innerSourceMap inner source map
 * @param {boolean | undefined} removeInnerSource do remove inner source
 * @param {onChunk} onChunk on chunk
 * @param {OnSource} onSource on source
 * @param {OnName} onName on name
 * @param {boolean} finalSource finalSource
 * @param {boolean} columns columns
 * @returns {GeneratedSourceInfo} generated source info
 */
const streamChunksOfCombinedSourceMap = (
	source,
	sourceMap,
	innerSourceName,
	innerSource,
	innerSourceMap,
	removeInnerSource,
	onChunk,
	onSource,
	onName,
	finalSource,
	columns,
) => {
	// Global (output) index spaces; -2 below means "not yet resolved".
	/** @type {Map<string | null, number>} */
	const sourceMapping = new Map();
	/** @type {Map<string, number>} */
	const nameMapping = new Map();
	/** @type {number[]} */
	const sourceIndexMapping = [];
	/** @type {number[]} */
	const nameIndexMapping = [];
	/** @type {string[]} */
	const nameIndexValueMapping = [];
	let outerSourceIndex = -2;
	/** @type {number[]} */
	const innerSourceIndexMapping = [];
	/** @type {[string | null, string | undefined][]} */
	const innerSourceIndexValueMapping = [];
	/** @type {(string | undefined)[]} */
	const innerSourceContents = [];
	/** @type {(null | undefined | string[])[]} */
	const innerSourceContentLines = [];
	/** @type {number[]} */
	const innerNameIndexMapping = [];
	/** @type {string[]} */
	const innerNameIndexValueMapping = [];
	// Per generated line of the inner source: chunks plus a flat array of
	// 5-tuples (generatedColumn, sourceIndex, originalLine, originalColumn,
	// nameIndex), one tuple per chunk.
	/** @typedef {[number, number, number, number, number] | number[]} MappingsData */
	/** @type {{ chunks: string[], mappingsData: MappingsData }[]} */
	const innerSourceMapLineData = [];
	/**
	 * Binary-searches the inner line's 5-tuples for the last mapping whose
	 * generated column is <= `column`; returns its tuple index or -1.
	 * @param {number} line line
	 * @param {number} column column
	 * @returns {number} result
	 */
	const findInnerMapping = (line, column) => {
		if (line > innerSourceMapLineData.length) return -1;
		const { mappingsData } = innerSourceMapLineData[line - 1];
		let l = 0;
		let r = mappingsData.length / 5;
		while (l < r) {
			const m = (l + r) >> 1;
			if (mappingsData[m * 5] <= column) {
				l = m + 1;
			} else {
				r = m;
			}
		}
		if (l === 0) return -1;
		return l - 1;
	};
	return streamChunksOfSourceMap(
		source,
		sourceMap,
		(
			chunk,
			generatedLine,
			generatedColumn,
			sourceIndex,
			originalLine,
			originalColumn,
			nameIndex,
		) => {
			// Check if this is a mapping to the inner source
			if (sourceIndex === outerSourceIndex) {
				// Check if there is a mapping in the inner source
				const idx = findInnerMapping(originalLine, originalColumn);
				if (idx !== -1) {
					const { chunks, mappingsData } =
						innerSourceMapLineData[originalLine - 1];
					const mi = idx * 5;
					const innerSourceIndex = mappingsData[mi + 1];
					const innerOriginalLine = mappingsData[mi + 2];
					let innerOriginalColumn = mappingsData[mi + 3];
					let innerNameIndex = mappingsData[mi + 4];
					if (innerSourceIndex >= 0) {
						// Check for an identity mapping
						// where we are allowed to adjust the original column
						const innerChunk = chunks[idx];
						const innerGeneratedColumn = mappingsData[mi];
						const locationInChunk = originalColumn - innerGeneratedColumn;
						if (locationInChunk > 0) {
							let originalSourceLines =
								innerSourceIndex < innerSourceContentLines.length
									? innerSourceContentLines[innerSourceIndex]
									: null;
							if (originalSourceLines === undefined) {
								const originalSource = innerSourceContents[innerSourceIndex];
								originalSourceLines = originalSource
									? splitIntoLines(originalSource)
									: null;
								innerSourceContentLines[innerSourceIndex] = originalSourceLines;
							}
							if (originalSourceLines !== null) {
								const originalChunk =
									innerOriginalLine <= originalSourceLines.length
										? originalSourceLines[innerOriginalLine - 1].slice(
												innerOriginalColumn,
												innerOriginalColumn + locationInChunk,
											)
										: "";
								if (innerChunk.slice(0, locationInChunk) === originalChunk) {
									innerOriginalColumn += locationInChunk;
									innerNameIndex = -1;
								}
							}
						}

						// We have a inner mapping to original source

						// emit source when needed and compute global source index
						let sourceIndex =
							innerSourceIndex < innerSourceIndexMapping.length
								? innerSourceIndexMapping[innerSourceIndex]
								: -2;
						if (sourceIndex === -2) {
							const [source, sourceContent] =
								innerSourceIndex < innerSourceIndexValueMapping.length
									? innerSourceIndexValueMapping[innerSourceIndex]
									: [null, undefined];
							let globalIndex = sourceMapping.get(source);
							if (globalIndex === undefined) {
								sourceMapping.set(source, (globalIndex = sourceMapping.size));
								onSource(globalIndex, source, sourceContent);
							}
							sourceIndex = globalIndex;
							innerSourceIndexMapping[innerSourceIndex] = sourceIndex;
						}

						// emit name when needed and compute global name index
						let finalNameIndex = -1;
						if (innerNameIndex >= 0) {
							// when we have a inner name
							finalNameIndex =
								innerNameIndex < innerNameIndexMapping.length
									? innerNameIndexMapping[innerNameIndex]
									: -2;
							if (finalNameIndex === -2) {
								const name =
									innerNameIndex < innerNameIndexValueMapping.length
										? innerNameIndexValueMapping[innerNameIndex]
										: undefined;
								if (name) {
									let globalIndex = nameMapping.get(name);
									if (globalIndex === undefined) {
										nameMapping.set(name, (globalIndex = nameMapping.size));
										onName(globalIndex, name);
									}
									finalNameIndex = globalIndex;
								} else {
									finalNameIndex = -1;
								}
								innerNameIndexMapping[innerNameIndex] = finalNameIndex;
							}
						} else if (nameIndex >= 0) {
							// when we don't have an inner name,
							// but we have an outer name
							// it can be used when inner original code equals to the name
							let originalSourceLines =
								innerSourceContentLines[innerSourceIndex];
							if (originalSourceLines === undefined) {
								const originalSource = innerSourceContents[innerSourceIndex];
								originalSourceLines = originalSource
									? splitIntoLines(originalSource)
									: null;
								innerSourceContentLines[innerSourceIndex] = originalSourceLines;
							}
							if (originalSourceLines !== null) {
								const name = nameIndexValueMapping[nameIndex];
								const originalName =
									innerOriginalLine <= originalSourceLines.length
										? originalSourceLines[innerOriginalLine - 1].slice(
												innerOriginalColumn,
												innerOriginalColumn + name.length,
											)
										: "";
								if (name === originalName) {
									finalNameIndex =
										nameIndex < nameIndexMapping.length
											? nameIndexMapping[nameIndex]
											: -2;
									if (finalNameIndex === -2) {
										const name = nameIndexValueMapping[nameIndex];
										if (name) {
											let globalIndex = nameMapping.get(name);
											if (globalIndex === undefined) {
												nameMapping.set(name, (globalIndex = nameMapping.size));
												onName(globalIndex, name);
											}
											finalNameIndex = globalIndex;
										} else {
											finalNameIndex = -1;
										}
										nameIndexMapping[nameIndex] = finalNameIndex;
									}
								}
							}
						}
						onChunk(
							chunk,
							generatedLine,
							generatedColumn,
							sourceIndex,
							innerOriginalLine,
							innerOriginalColumn,
							finalNameIndex,
						);
						return;
					}
				}

				// We have a mapping to the inner source, but no inner mapping
				if (removeInnerSource) {
					onChunk(chunk, generatedLine, generatedColumn, -1, -1, -1, -1);
					return;
				}
				if (sourceIndexMapping[sourceIndex] === -2) {
					let globalIndex = sourceMapping.get(innerSourceName);
					if (globalIndex === undefined) {
						// NOTE(review): the lookup above keys by `innerSourceName` but
						// this insert keys by `source` (the full outer source text) —
						// verify against upstream whether this asymmetry is intended;
						// it can re-emit the inner source under a second global index.
						sourceMapping.set(source, (globalIndex = sourceMapping.size));
						onSource(globalIndex, innerSourceName, innerSource);
					}
					sourceIndexMapping[sourceIndex] = globalIndex;
				}
			}

			const finalSourceIndex =
				sourceIndex < 0 || sourceIndex >= sourceIndexMapping.length
					? -1
					: sourceIndexMapping[sourceIndex];
			if (finalSourceIndex < 0) {
				// no source, so we make it a generated chunk
				onChunk(chunk, generatedLine, generatedColumn, -1, -1, -1, -1);
			} else {
				// Pass through the chunk with mapping
				let finalNameIndex = -1;
				if (nameIndex >= 0 && nameIndex < nameIndexMapping.length) {
					finalNameIndex = nameIndexMapping[nameIndex];
					if (finalNameIndex === -2) {
						const name = nameIndexValueMapping[nameIndex];
						let globalIndex = nameMapping.get(name);
						if (globalIndex === undefined) {
							nameMapping.set(name, (globalIndex = nameMapping.size));
							onName(globalIndex, name);
						}
						finalNameIndex = globalIndex;
						nameIndexMapping[nameIndex] = finalNameIndex;
					}
				}
				onChunk(
					chunk,
					generatedLine,
					generatedColumn,
					finalSourceIndex,
					originalLine,
					originalColumn,
					finalNameIndex,
				);
			}
		},
		(i, source, sourceContent) => {
			if (source === innerSourceName) {
				outerSourceIndex = i;
				// Prefer the explicitly passed inner source; fall back to the
				// sourcesContent embedded in the outer map.
				if (innerSource !== undefined) sourceContent = innerSource;
				else innerSource = /** @type {string} */ (sourceContent);
				sourceIndexMapping[i] = -2;
				// Eagerly index the inner map per generated line so the chunk
				// callback above can binary-search it.
				streamChunksOfSourceMap(
					/** @type {string} */
					(sourceContent),
					innerSourceMap,
					(
						chunk,
						generatedLine,
						generatedColumn,
						sourceIndex,
						originalLine,
						originalColumn,
						nameIndex,
					) => {
						while (innerSourceMapLineData.length < generatedLine) {
							innerSourceMapLineData.push({
								mappingsData: [],
								chunks: [],
							});
						}
						const data = innerSourceMapLineData[generatedLine - 1];
						data.mappingsData.push(
							generatedColumn,
							sourceIndex,
							originalLine,
							originalColumn,
							nameIndex,
						);
						data.chunks.push(/** @type {string} */ (chunk));
					},
					(i, source, sourceContent) => {
						innerSourceContents[i] = sourceContent;
						innerSourceContentLines[i] = undefined;
						innerSourceIndexMapping[i] = -2;
						innerSourceIndexValueMapping[i] = [source, sourceContent];
					},
					(i, name) => {
						innerNameIndexMapping[i] = -2;
						innerNameIndexValueMapping[i] = name;
					},
					false,
					columns,
				);
			} else {
				let globalIndex = sourceMapping.get(source);
				if (globalIndex === undefined) {
					sourceMapping.set(source, (globalIndex = sourceMapping.size));
					onSource(globalIndex, source, sourceContent);
				}
				sourceIndexMapping[i] = globalIndex;
			}
		},
		(i, name) => {
			nameIndexMapping[i] = -2;
			nameIndexValueMapping[i] = name;
		},
		finalSource,
		columns,
	);
};

module.exports = streamChunksOfCombinedSourceMap;

-------------------------------------------------------------------------------- /lib/helpers/streamChunksOfSourceMap.js: --------------------------------------------------------------------------------
/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Tobias Koppers @sokra
*/

"use strict";

const getGeneratedSourceInfo = require("./getGeneratedSourceInfo");
const getSource = require("./getSource");
const readMappings = require("./readMappings");
const splitIntoLines = require("./splitIntoLines");

/** @typedef {import("../Source").RawSourceMap} RawSourceMap */
/** @typedef {import("./getGeneratedSourceInfo").GeneratedSourceInfo} GeneratedSourceInfo */
/** @typedef {import("./streamChunks").OnChunk} OnChunk */
/** @typedef {import("./streamChunks").OnName} OnName */
/** @typedef {import("./streamChunks").OnSource} OnSource */

/**
 * Streams the source split into text chunks, one chunk per mapping segment
 * (column-precise variant).
 * @param {string} source source
 * @param {RawSourceMap} sourceMap source map
 * @param {OnChunk} onChunk on chunk
 * @param {OnSource} onSource on source
 * @param {OnName} onName on name
 * @returns {GeneratedSourceInfo} generated source info
 */
const streamChunksOfSourceMapFull = (
	source,
	sourceMap,
	onChunk,
	onSource,
	onName,
) => {
	const lines = splitIntoLines(source);
	if (lines.length === 0) {
		return {
			generatedLine: 1,
			generatedColumn: 0,
		};
	}
	const { sources, sourcesContent, names, mappings } = sourceMap;
	for (let i = 0; i < sources.length; i++) {
		onSource(
			i,
			getSource(sourceMap, i),
			(sourcesContent && sourcesContent[i]) || undefined,
		);
	}
	if (names) {
		for (let i = 0; i < names.length; i++) {
			onName(i, names[i]);
		}
	}

	const lastLine = lines[lines.length - 1];
	const lastNewLine = lastLine.endsWith("\n");
	const finalLine = lastNewLine ? lines.length + 1 : lines.length;
	const finalColumn = lastNewLine ? 0 : lastLine.length;

	// Cursor over the generated text; advanced as mappings are consumed.
	let currentGeneratedLine = 1;
	let currentGeneratedColumn = 0;

	// The mapping currently "covering" the text between the last mapping
	// position and the next one; flushed as a chunk when the next arrives.
	let mappingActive = false;
	let activeMappingSourceIndex = -1;
	let activeMappingOriginalLine = -1;
	let activeMappingOriginalColumn = -1;
	let activeMappingNameIndex = -1;

	/**
	 * @param {number} generatedLine generated line
	 * @param {number} generatedColumn generated column
	 * @param {number} sourceIndex source index
	 * @param {number} originalLine original line
	 * @param {number} originalColumn original column
	 * @param {number} nameIndex name index
	 * @returns {void}
	 */
	const onMapping = (
		generatedLine,
		generatedColumn,
		sourceIndex,
		originalLine,
		originalColumn,
		nameIndex,
	) => {
		// Flush the text covered by the previously active mapping.
		if (mappingActive && currentGeneratedLine <= lines.length) {
			let chunk;
			const mappingLine = currentGeneratedLine;
			const mappingColumn = currentGeneratedColumn;
			const line = lines[currentGeneratedLine - 1];
			if (generatedLine !== currentGeneratedLine) {
				chunk = line.slice(currentGeneratedColumn);
				currentGeneratedLine++;
				currentGeneratedColumn = 0;
			} else {
				chunk = line.slice(currentGeneratedColumn, generatedColumn);
				currentGeneratedColumn = generatedColumn;
			}
			if (chunk) {
				onChunk(
					chunk,
					mappingLine,
					mappingColumn,
					activeMappingSourceIndex,
					activeMappingOriginalLine,
					activeMappingOriginalColumn,
					activeMappingNameIndex,
				);
			}
			mappingActive = false;
		}
		// Emit unmapped text between the cursor and the new mapping position.
		if (generatedLine > currentGeneratedLine && currentGeneratedColumn > 0) {
			if (currentGeneratedLine <= lines.length) {
				const chunk = lines[currentGeneratedLine - 1].slice(
					currentGeneratedColumn,
				);
				onChunk(
					chunk,
					currentGeneratedLine,
					currentGeneratedColumn,
					-1,
					-1,
					-1,
					-1,
				);
			}
			currentGeneratedLine++;
			currentGeneratedColumn = 0;
		}
		while (generatedLine > currentGeneratedLine) {
			if (currentGeneratedLine <= lines.length) {
				onChunk(
					lines[currentGeneratedLine - 1],
					currentGeneratedLine,
					0,
					-1,
					-1,
					-1,
					-1,
				);
			}
			currentGeneratedLine++;
		}
		if (generatedColumn > currentGeneratedColumn) {
			if (currentGeneratedLine <= lines.length) {
				const chunk = lines[currentGeneratedLine - 1].slice(
					currentGeneratedColumn,
					generatedColumn,
				);
				onChunk(
					chunk,
					currentGeneratedLine,
					currentGeneratedColumn,
					-1,
					-1,
					-1,
					-1,
				);
			}
			currentGeneratedColumn = generatedColumn;
		}
		// Only mappings with a source and strictly before the end of the
		// text can become active (there is no text left to cover otherwise).
		if (
			sourceIndex >= 0 &&
			(generatedLine < finalLine ||
				(generatedLine === finalLine && generatedColumn < finalColumn))
		) {
			mappingActive = true;
			activeMappingSourceIndex = sourceIndex;
			activeMappingOriginalLine = originalLine;
			activeMappingOriginalColumn = originalColumn;
			activeMappingNameIndex = nameIndex;
		}
	};
	readMappings(mappings, onMapping);
	// Synthetic final mapping flushes any remaining text.
	onMapping(finalLine, finalColumn, -1, -1, -1, -1);
	return {
		generatedLine: finalLine,
		generatedColumn: finalColumn,
	};
};

/**
 * Line-granular variant: emits whole lines, using only the first mapping of
 * each generated line (names are dropped).
 * @param {string} source source
 * @param {RawSourceMap} sourceMap source map
 * @param {OnChunk} onChunk on chunk
 * @param {OnSource} onSource on source
 * @param {OnName} _onName on name
 * @returns {GeneratedSourceInfo} generated source info
 */
const streamChunksOfSourceMapLinesFull = (
	source,
	sourceMap,
	onChunk,
	onSource,
	_onName,
) => {
	const lines = splitIntoLines(source);
	if (lines.length === 0) {
		return {
			generatedLine: 1,
			generatedColumn: 0,
		};
	}
	const { sources, sourcesContent, mappings } = sourceMap;
	for (let i = 0; i < sources.length; i++) {
		onSource(
			i,
			getSource(sourceMap, i),
			(sourcesContent && sourcesContent[i]) || undefined,
		);
	}

	let currentGeneratedLine = 1;

	/**
	 * @param {number} generatedLine generated line
	 * @param {number} _generatedColumn generated column
	 * @param {number} sourceIndex source index
	 * @param {number} originalLine original line
	 * @param {number} originalColumn original column
	 * @param {number} _nameIndex name index
	 * @returns {void}
	 */
	const onMapping = (
		generatedLine,
		_generatedColumn,
		sourceIndex,
		originalLine,
		originalColumn,
		_nameIndex,
	) => {
		// Ignore unmapped segments, lines already emitted, and out-of-range lines.
		if (
			sourceIndex < 0 ||
			generatedLine < currentGeneratedLine ||
			generatedLine > lines.length
		) {
			return;
		}
		while (generatedLine > currentGeneratedLine) {
			if (currentGeneratedLine <= lines.length) {
				onChunk(
					lines[currentGeneratedLine - 1],
					currentGeneratedLine,
					0,
					-1,
					-1,
					-1,
					-1,
				);
			}
			currentGeneratedLine++;
		}
		if (generatedLine <= lines.length) {
			onChunk(
				lines[generatedLine - 1],
				generatedLine,
				0,
				sourceIndex,
				originalLine,
				originalColumn,
				-1,
			);
			currentGeneratedLine++;
		}
	};
	readMappings(mappings, onMapping);
	// Remaining lines after the last mapping are unmapped.
	for (; currentGeneratedLine <= lines.length; currentGeneratedLine++) {
		onChunk(
			lines[currentGeneratedLine - 1],
			currentGeneratedLine,
			0,
			-1,
			-1,
			-1,
			-1,
		);
	}

	const lastLine = lines[lines.length - 1];
	const lastNewLine = lastLine.endsWith("\n");

	const finalLine = lastNewLine ? lines.length + 1 : lines.length;
	const finalColumn = lastNewLine ? 0 : lastLine.length;

	return {
		generatedLine: finalLine,
		generatedColumn: finalColumn,
	};
};

/**
 * "Final source" variant: consumers only need mapping positions, so chunks
 * are emitted with `undefined` text.
 * @param {string} source source
 * @param {RawSourceMap} sourceMap source map
 * @param {OnChunk} onChunk on chunk
 * @param {OnSource} onSource on source
 * @param {OnName} onName on name
 * @returns {GeneratedSourceInfo} generated source info
 */
const streamChunksOfSourceMapFinal = (
	source,
	sourceMap,
	onChunk,
	onSource,
	onName,
) => {
	const result = getGeneratedSourceInfo(source);
	const { generatedLine: finalLine, generatedColumn: finalColumn } = result;

	if (finalLine === 1 && finalColumn === 0) return result;
	const { sources, sourcesContent, names, mappings } = sourceMap;
	for (let i = 0; i < sources.length; i++) {
		onSource(
			i,
			getSource(sourceMap, i),
			(sourcesContent && sourcesContent[i]) || undefined,
		);
	}
	if (names) {
		for (let i = 0; i < names.length; i++) {
			onName(i, names[i]);
		}
	}

	// Line of the most recent source mapping; used to emit a terminating
	// unmapped segment only when a mapping is active on the same line.
	let mappingActiveLine = 0;
	/**
	 * @param {number} generatedLine generated line
	 * @param {number} generatedColumn generated column
	 * @param {number} sourceIndex source index
	 * @param {number} originalLine original line
	 * @param {number} originalColumn original column
	 * @param {number} nameIndex name index
	 * @returns {void}
	 */
	const onMapping = (
		generatedLine,
		generatedColumn,
		sourceIndex,
		originalLine,
		originalColumn,
		nameIndex,
	) => {
		// Drop mappings at or past the end of the generated text.
		if (
			generatedLine >= /** @type {number} */ (finalLine) &&
			(generatedColumn >= /** @type {number} */ (finalColumn) ||
				generatedLine > /** @type {number} */ (finalLine))
		) {
			return;
		}
		if (sourceIndex >= 0) {
			onChunk(
				undefined,
				generatedLine,
				generatedColumn,
				sourceIndex,
				originalLine,
				originalColumn,
				nameIndex,
			);
			mappingActiveLine = generatedLine;
		} else if (mappingActiveLine === generatedLine) {
			onChunk(undefined, generatedLine, generatedColumn, -1, -1, -1, -1);
			mappingActiveLine = 0;
		}
	};
	readMappings(mappings, onMapping);
	return result;
};

/**
 * "Final source" + line-granular variant: positions only (undefined chunks),
 * at most one mapping per generated line, names dropped.
 * @param {string} source source
 * @param {RawSourceMap} sourceMap source map
 * @param {OnChunk} onChunk on chunk
 * @param {OnSource} onSource on source
 * @param {OnName} _onName on name
 * @returns {GeneratedSourceInfo} generated source info
 */
const streamChunksOfSourceMapLinesFinal = (
	source,
	sourceMap,
	onChunk,
	onSource,
	_onName,
) => {
	const result = getGeneratedSourceInfo(source);
	const { generatedLine, generatedColumn } = result;
	if (generatedLine === 1 && generatedColumn === 0) {
		return {
			generatedLine: 1,
			generatedColumn: 0,
		};
	}

	const { sources, sourcesContent, mappings } = sourceMap;
	for (let i = 0; i < sources.length; i++) {
		onSource(
			i,
			getSource(sourceMap, i),
			(sourcesContent && sourcesContent[i]) || undefined,
		);
	}

	// A trailing newline means the last "line" is empty and carries no mapping.
	const finalLine =
		generatedColumn === 0
			? /** @type {number} */ (generatedLine) - 1
			: /** @type {number} */ (generatedLine);

	let currentGeneratedLine = 1;

	/**
	 * @param {number} generatedLine generated line
	 * @param {number} _generatedColumn generated column
	 * @param {number} sourceIndex source index
	 * @param {number} originalLine original line
	 * @param {number} originalColumn original column
	 * @param {number} _nameIndex name index
	 * @returns {void}
	 */
	const onMapping = (
		generatedLine,
		_generatedColumn,
		sourceIndex,
		originalLine,
		originalColumn,
		_nameIndex,
	) => {
		if (
			sourceIndex >= 0 &&
			currentGeneratedLine <= generatedLine &&
			generatedLine <= finalLine
		) {
			onChunk(
				undefined,
				generatedLine,
				0,
				sourceIndex,
				originalLine,
				originalColumn,
				-1,
			);
			currentGeneratedLine = generatedLine + 1;
		}
	};
	readMappings(mappings, onMapping);
	return result;
};

/**
 * Dispatches to one of the four streaming variants along two axes:
 * `columns` (column-precise vs. line-granular) and `finalSource`
 * (positions only vs. text chunks).
 * @param {string} source source
 * @param {RawSourceMap} sourceMap source map
 * @param {OnChunk} onChunk on chunk
 * @param {OnSource} onSource on source
 * @param {OnName} onName on name
 * @param {boolean} finalSource final source
 * @param {boolean} columns columns
 * @returns {GeneratedSourceInfo} generated source info
 */
module.exports = (
	source,
	sourceMap,
	onChunk,
	onSource,
	onName,
	finalSource,
	columns,
) => {
	if (columns) {
		return finalSource
			? streamChunksOfSourceMapFinal(
					source,
					sourceMap,
					onChunk,
					onSource,
					onName,
				)
			: streamChunksOfSourceMapFull(
					source,
					sourceMap,
					onChunk,
					onSource,
					onName,
				);
	}
	return finalSource
		? streamChunksOfSourceMapLinesFinal(
				source,
				sourceMap,
				onChunk,
				onSource,
				onName,
			)
		: streamChunksOfSourceMapLinesFull(
				source,
				sourceMap,
				onChunk,
				onSource,
				onName,
			);
};

-------------------------------------------------------------------------------- /test/CachedSource.js: --------------------------------------------------------------------------------
"use strict";

jest.mock("./__mocks__/createMappingsSerializer");

const crypto = require("crypto");
const { CachedSource } = require("../");
const { OriginalSource } = require("../");
const { RawSource } = require("../");
const { Source } = require("../");
const streamChunks = require("../lib/helpers/streamChunks");
const {
	disableDualStringBufferCaching,
	enableDualStringBufferCaching,
	enterStringInterningRange,
	exitStringInterningRange,
} = require("../lib/helpers/stringBufferUtils");

// Delegating Source spy that counts how often each method is invoked, so
// tests can assert that CachedSource hits its cache instead of the original.
class TrackedSource extends Source {
	constructor(source) {
		super();
		this._innerSource = source;
		this.sizeCalled = 0;
		this.sourceCalled = 0;
		this.bufferCalled = 0;
		this.mapCalled = 0;
		this.sourceAndMapCalled = 0;
		this.updateHashCalled = 0;
	}

	getCalls() {
		return {
			size: this.sizeCalled,
			source: this.sourceCalled,
			buffer: this.bufferCalled,
			map: this.mapCalled,
			sourceAndMap: this.sourceAndMapCalled,
			hash: this.updateHashCalled,
		};
	}

	size() {
		this.sizeCalled++;
		return this._innerSource.size();
	}

	source() {
		this.sourceCalled++;
		return
this._innerSource.source(); 49 | } 50 | 51 | buffer() { 52 | this.bufferCalled++; 53 | return this._innerSource.buffer(); 54 | } 55 | 56 | map(options) { 57 | this.mapCalled++; 58 | return this._innerSource.map(options); 59 | } 60 | 61 | sourceAndMap(options) { 62 | this.sourceAndMapCalled++; 63 | return this._innerSource.sourceAndMap(options); 64 | } 65 | 66 | updateHash(hash) { 67 | this.updateHashCalled++; 68 | return this._innerSource.updateHash(hash); 69 | } 70 | } 71 | 72 | const getHash = (source) => { 73 | const hash = crypto.createHash("md5"); 74 | source.updateHash(hash); 75 | return hash.digest("hex"); 76 | }; 77 | 78 | describe.each([ 79 | { 80 | enableMemoryOptimizations: false, 81 | }, 82 | { 83 | enableMemoryOptimizations: true, 84 | }, 85 | ])("cachedSource %s", ({ enableMemoryOptimizations }) => { 86 | beforeEach(() => { 87 | if (enableMemoryOptimizations) { 88 | disableDualStringBufferCaching(); 89 | enterStringInterningRange(); 90 | } 91 | }); 92 | 93 | afterEach(() => { 94 | if (enableMemoryOptimizations) { 95 | enableDualStringBufferCaching(); 96 | exitStringInterningRange(); 97 | } 98 | }); 99 | 100 | it("should return the correct size for binary files", () => { 101 | const source = new OriginalSource( 102 | Buffer.from(Array.from({ length: 256 })), 103 | "file.wasm", 104 | ); 105 | const cachedSource = new CachedSource(source); 106 | 107 | expect(cachedSource.size()).toBe(256); 108 | expect(cachedSource.size()).toBe(256); 109 | }); 110 | 111 | it("should return the correct size for cached binary sources", () => { 112 | const source = new OriginalSource( 113 | Buffer.from(Array.from({ length: 256 })), 114 | "file.wasm", 115 | ); 116 | const cachedSource = new CachedSource(source); 117 | 118 | cachedSource.source(); 119 | expect(cachedSource.size()).toBe(256); 120 | expect(cachedSource.size()).toBe(256); 121 | }); 122 | 123 | it("should return the correct size for text files", () => { 124 | const source = new OriginalSource("TestTestTest", 
"file.js"); 125 | const cachedSource = new CachedSource(source); 126 | 127 | expect(cachedSource.size()).toBe(12); 128 | expect(cachedSource.size()).toBe(12); 129 | }); 130 | 131 | it("should return the correct size for cached text files", () => { 132 | const source = new OriginalSource("TestTestTest", "file.js"); 133 | const cachedSource = new CachedSource(source); 134 | 135 | cachedSource.source(); 136 | expect(cachedSource.size()).toBe(12); 137 | expect(cachedSource.size()).toBe(12); 138 | }); 139 | 140 | it("should return the correct size for unicode files", () => { 141 | const source = new OriginalSource("😋", "file.js"); 142 | const cachedSource = new CachedSource(source); 143 | 144 | expect(cachedSource.size()).toBe(4); 145 | expect(cachedSource.size()).toBe(4); 146 | }); 147 | 148 | it("should return the correct size for cached unicode files", () => { 149 | const source = new OriginalSource("😋", "file.js"); 150 | const cachedSource = new CachedSource(source); 151 | 152 | cachedSource.source(); 153 | expect(cachedSource.size()).toBe(4); 154 | expect(cachedSource.size()).toBe(4); 155 | }); 156 | 157 | it("should use the source cache for all other calls", () => { 158 | const original = new OriginalSource("TestTestTest", "file.js"); 159 | const source = new TrackedSource(original); 160 | const cachedSource = new CachedSource(source); 161 | 162 | expect(cachedSource.source()).toBe("TestTestTest"); 163 | expect(cachedSource.size()).toBe(12); 164 | expect(cachedSource.buffer().toString("utf8")).toBe("TestTestTest"); 165 | expect(getHash(cachedSource)).toBe(getHash(original)); 166 | expect(source.getCalls()).toEqual({ 167 | size: 0, 168 | source: 1, 169 | buffer: 0, 170 | map: 0, 171 | sourceAndMap: 0, 172 | hash: 1, 173 | }); 174 | }); 175 | 176 | it("should use the source cache for all other calls #2", () => { 177 | const original = new OriginalSource("TestTestTest", "file.js"); 178 | const source = new TrackedSource(original); 179 | const cachedSource = new 
CachedSource(source); 180 | 181 | expect(cachedSource.source()).toBe("TestTestTest"); 182 | expect(cachedSource.source()).toBe("TestTestTest"); 183 | expect(cachedSource.size()).toBe(12); 184 | expect(cachedSource.size()).toBe(12); 185 | expect(cachedSource.buffer().toString("utf8")).toBe("TestTestTest"); 186 | expect(cachedSource.buffer().toString("utf8")).toBe("TestTestTest"); 187 | expect(cachedSource.sourceAndMap().source).toBe("TestTestTest"); 188 | expect(typeof cachedSource.sourceAndMap().map).toBe("object"); 189 | expect(typeof cachedSource.map()).toBe("object"); 190 | expect(typeof cachedSource.map()).toBe("object"); 191 | expect(getHash(cachedSource)).toBe(getHash(original)); 192 | expect(source.getCalls()).toEqual({ 193 | size: 0, 194 | source: 1, 195 | buffer: 0, 196 | map: 1, 197 | sourceAndMap: 0, 198 | hash: 1, 199 | }); 200 | }); 201 | 202 | it("should not use buffer for source", () => { 203 | const source = new TrackedSource( 204 | new OriginalSource("TestTestTest", "file.js"), 205 | ); 206 | const cachedSource = new CachedSource(source); 207 | 208 | expect(cachedSource.size()).toBe(12); 209 | expect(cachedSource.size()).toBe(12); 210 | expect(cachedSource.buffer().toString("utf8")).toBe("TestTestTest"); 211 | expect(cachedSource.buffer().toString("utf8")).toBe("TestTestTest"); 212 | expect(cachedSource.source()).toBe("TestTestTest"); 213 | expect(cachedSource.source()).toBe("TestTestTest"); 214 | expect(source.getCalls()).toEqual({ 215 | size: 1, 216 | source: 1, 217 | buffer: 1, 218 | map: 0, 219 | sourceAndMap: 0, 220 | hash: 0, 221 | }); 222 | }); 223 | 224 | it("should use map for sourceAndMap", () => { 225 | const source = new TrackedSource( 226 | new OriginalSource("TestTestTest", "file.js"), 227 | ); 228 | const cachedSource = new CachedSource(source); 229 | 230 | expect(typeof cachedSource.map()).toBe("object"); 231 | expect(typeof cachedSource.map()).toBe("object"); 232 | expect(cachedSource.sourceAndMap().source).toBe("TestTestTest"); 
233 | expect(typeof cachedSource.sourceAndMap().map).toBe("object"); 234 | expect(cachedSource.size()).toBe(12); 235 | expect(cachedSource.size()).toBe(12); 236 | expect(cachedSource.buffer().toString("utf8")).toBe("TestTestTest"); 237 | expect(cachedSource.buffer().toString("utf8")).toBe("TestTestTest"); 238 | expect(cachedSource.source()).toBe("TestTestTest"); 239 | expect(cachedSource.source()).toBe("TestTestTest"); 240 | expect(source.getCalls()).toEqual({ 241 | size: 0, 242 | source: 1, 243 | buffer: 0, 244 | map: 1, 245 | sourceAndMap: 0, 246 | hash: 0, 247 | }); 248 | }); 249 | 250 | it("should use binary source for buffer", () => { 251 | const buffer = Buffer.from(Array.from({ length: 256 })); 252 | const source = new TrackedSource(new RawSource(buffer)); 253 | const cachedSource = new CachedSource(source); 254 | 255 | expect(cachedSource.sourceAndMap().source).toBe(buffer); 256 | expect(cachedSource.sourceAndMap().source).toBe(buffer); 257 | expect(cachedSource.sourceAndMap()).toHaveProperty("map", null); 258 | expect(cachedSource.buffer()).toBe(buffer); 259 | expect(cachedSource.buffer()).toBe(buffer); 260 | expect(cachedSource.source()).toBe(buffer); 261 | expect(cachedSource.source()).toBe(buffer); 262 | expect(source.getCalls()).toEqual({ 263 | size: 0, 264 | source: 0, 265 | buffer: 0, 266 | map: 0, 267 | sourceAndMap: 1, 268 | hash: 0, 269 | }); 270 | }); 271 | 272 | it("should use an old webpack-sources Source with Buffer", () => { 273 | const buffer = Buffer.from(Array.from({ length: 256 })); 274 | const source = new TrackedSource(new RawSource(buffer)); 275 | // @ts-expect-error for tests 276 | source.buffer = undefined; 277 | const cachedSource = new CachedSource(source); 278 | 279 | expect(cachedSource.buffer()).toBe(buffer); 280 | expect(cachedSource.buffer()).toBe(buffer); 281 | expect(cachedSource.source()).toBe(buffer); 282 | expect(cachedSource.source()).toBe(buffer); 283 | expect(source.getCalls()).toEqual({ 284 | size: 0, 285 | source: 1, 
286 | buffer: 0, 287 | map: 0, 288 | sourceAndMap: 0, 289 | hash: 0, 290 | }); 291 | }); 292 | 293 | it("should use an old webpack-sources Source with String", () => { 294 | const string = "Hello World"; 295 | const source = new TrackedSource(new RawSource(string)); 296 | // @ts-expect-error for tests 297 | source.buffer = undefined; 298 | const cachedSource = new CachedSource(source); 299 | 300 | const buffer = cachedSource.buffer(); 301 | 302 | expect(Buffer.isBuffer(buffer)).toBe(true); 303 | expect(buffer.toString("utf8")).toBe(string); 304 | expect( 305 | enableMemoryOptimizations 306 | ? cachedSource.buffer().equals(buffer) 307 | : cachedSource.buffer(), 308 | ).toBe(enableMemoryOptimizations ? true : buffer); 309 | expect(cachedSource.source()).toBe(string); 310 | expect(cachedSource.source()).toBe(string); 311 | expect(source.getCalls()).toEqual({ 312 | size: 0, 313 | source: 1, 314 | buffer: 0, 315 | map: 0, 316 | sourceAndMap: 0, 317 | hash: 0, 318 | }); 319 | }); 320 | 321 | it("should include map in the cache if only streamChunks was computed", () => { 322 | const original = new OriginalSource("Hello World", "test.txt"); 323 | const source = new TrackedSource(original); 324 | const cachedSource = new CachedSource(source); 325 | 326 | // @ts-expect-error for tests 327 | source.streamChunks = (...args) => streamChunks(original, ...args); 328 | 329 | // fill up cache 330 | cachedSource.streamChunks( 331 | {}, 332 | () => {}, 333 | () => {}, 334 | () => {}, 335 | ); 336 | 337 | const cachedData = cachedSource.getCachedData(); 338 | expect(cachedData.maps.size).toBe(1); 339 | }); 340 | 341 | it("should allow to store and restore cached data (with SourceMap)", () => { 342 | const original = new OriginalSource("Hello World", "test.txt"); 343 | const source = new CachedSource(original); 344 | 345 | // fill up cache 346 | source.source(); 347 | source.map({}); 348 | source.size(); 349 | getHash(source); 350 | 351 | // @ts-expect-error for tests 352 | const clone 
= new CachedSource(null, source.getCachedData()); 353 | 354 | expect(clone.source()).toEqual(source.source()); 355 | expect(clone.buffer()).toEqual(source.buffer()); 356 | expect(clone.size()).toEqual(source.size()); 357 | expect(clone.map({})).toEqual(source.map({})); 358 | expect(clone.sourceAndMap({})).toEqual(source.sourceAndMap({})); 359 | expect(getHash(clone)).toBe(getHash(original)); 360 | 361 | // @ts-expect-error for tests 362 | const clone2 = new CachedSource(null, clone.getCachedData()); 363 | 364 | expect(clone2.source()).toEqual(source.source()); 365 | expect(clone2.buffer()).toEqual(source.buffer()); 366 | expect(clone2.size()).toEqual(source.size()); 367 | expect(clone2.map({})).toEqual(source.map({})); 368 | expect(clone2.sourceAndMap({})).toEqual(source.sourceAndMap({})); 369 | expect(getHash(clone2)).toBe(getHash(original)); 370 | }); 371 | 372 | it("should allow to store and restore cached data (without SourceMap)", () => { 373 | const original = new RawSource("Hello World"); 374 | const source = new CachedSource(original); 375 | 376 | // fill up cache 377 | source.source(); 378 | source.map({}); 379 | source.size(); 380 | getHash(source); 381 | 382 | // @ts-expect-error for tests 383 | const clone = new CachedSource(null, source.getCachedData()); 384 | 385 | expect(clone.source()).toEqual(source.source()); 386 | expect(clone.buffer()).toEqual(source.buffer()); 387 | expect(clone.size()).toEqual(source.size()); 388 | expect(clone.map({})).toBeNull(); 389 | expect(clone.sourceAndMap({})).toEqual(source.sourceAndMap({})); 390 | expect(getHash(clone)).toBe(getHash(original)); 391 | }); 392 | 393 | it("should allow to store and restore cached data, but fallback to the original source when needed", () => { 394 | const original = new RawSource("Hello World"); 395 | const source = new CachedSource(original); 396 | 397 | // fill up cache 398 | source.source(); 399 | source.size(); 400 | 401 | let calls = 0; 402 | const clone = () => 403 | new 
CachedSource(() => { 404 | calls++; 405 | return original; 406 | }, source.getCachedData()); 407 | 408 | expect(clone().source()).toEqual(source.source()); 409 | expect(clone().buffer()).toEqual(source.buffer()); 410 | expect(clone().size()).toEqual(source.size()); 411 | expect(calls).toBe(0); 412 | const clone1 = clone(); 413 | expect(clone1.map({})).toBeNull(); 414 | expect(calls).toBe(1); 415 | expect(clone1.map({})).toBeNull(); 416 | expect(calls).toBe(1); 417 | expect(clone().sourceAndMap({})).toEqual(source.sourceAndMap({})); 418 | expect(calls).toBe(2); 419 | expect(getHash(clone())).toBe(getHash(original)); 420 | expect(calls).toBe(3); 421 | }); 422 | }); 423 | -------------------------------------------------------------------------------- /test/SourceMapSource.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | /** @typedef {import("../lib/Source").RawSourceMap} RawSourceMap */ 4 | 5 | jest.mock("./__mocks__/createMappingsSerializer"); 6 | 7 | const crypto = require("crypto"); 8 | const fs = require("fs"); 9 | const path = require("path"); 10 | const { SourceNode } = require("source-map"); 11 | const BatchedHash = require("webpack/lib/util/hash/BatchedHash"); 12 | const createMd4 = require("webpack/lib/util/hash/md4"); 13 | const createXXHash64 = require("webpack/lib/util/hash/xxhash64"); 14 | const { SourceMapSource } = require("../"); 15 | const { OriginalSource } = require("../"); 16 | const { ConcatSource } = require("../"); 17 | const { PrefixSource } = require("../"); 18 | const { ReplaceSource } = require("../"); 19 | const { CachedSource } = require("../"); 20 | const createMappingsSerializer = require("../lib/helpers/createMappingsSerializer"); 21 | const { 22 | disableDualStringBufferCaching, 23 | enableDualStringBufferCaching, 24 | enterStringInterningRange, 25 | exitStringInterningRange, 26 | } = require("../lib/helpers/stringBufferUtils"); 27 | const { withReadableMappings } = 
require("./helpers"); 28 | 29 | describe.each([ 30 | { 31 | enableMemoryOptimizations: false, 32 | }, 33 | { 34 | enableMemoryOptimizations: true, 35 | }, 36 | ])("sourceMapSource %s", ({ enableMemoryOptimizations }) => { 37 | beforeEach(() => { 38 | if (enableMemoryOptimizations) { 39 | disableDualStringBufferCaching(); 40 | enterStringInterningRange(); 41 | } 42 | }); 43 | 44 | afterEach(() => { 45 | if (enableMemoryOptimizations) { 46 | enableDualStringBufferCaching(); 47 | exitStringInterningRange(); 48 | } 49 | }); 50 | 51 | it("map correctly", () => { 52 | const innerSourceCode = `${["Hello World", "is a test string"].join( 53 | "\n", 54 | )}\n`; 55 | const innerSource = new ConcatSource( 56 | new OriginalSource(innerSourceCode, "hello-world.txt"), 57 | new OriginalSource("Translate: ", "header.txt"), 58 | "Other text", 59 | ); 60 | 61 | const source = new SourceNode(null, null, null, [ 62 | "Translated: ", 63 | new SourceNode(1, 0, "text", "Hallo", "Hello"), 64 | " ", 65 | new SourceNode(1, 6, "text", "Welt\n", "World"), 66 | new SourceNode(2, 0, "text", "ist ein", "nope"), 67 | " test ", 68 | new SourceNode(2, 10, "text", "Text\n"), 69 | new SourceNode(3, 11, "text", "Anderer"), 70 | " ", 71 | new SourceNode(3, 17, "text", "Text"), 72 | ]); 73 | source.setSourceContent("text", innerSourceCode); 74 | 75 | const sourceR = source.toStringWithSourceMap({ 76 | file: "translated.txt", 77 | }); 78 | 79 | const sourceMapSource1 = new SourceMapSource( 80 | sourceR.code, 81 | "text", 82 | sourceR.map.toJSON(), 83 | innerSource.source(), 84 | /** @type {RawSourceMap} */ 85 | (innerSource.map()), 86 | ); 87 | const sourceMapSource2 = new SourceMapSource( 88 | sourceR.code, 89 | "text", 90 | sourceR.map.toJSON(), 91 | innerSource.source(), 92 | /** @type {RawSourceMap} */ 93 | (innerSource.map()), 94 | true, 95 | ); 96 | 97 | const expectedContent = [ 98 | "Translated: Hallo Welt", 99 | "ist ein test Text", 100 | "Anderer Text", 101 | ].join("\n"); 102 | 
expect(sourceMapSource1.source()).toEqual(expectedContent); 103 | expect(sourceMapSource2.source()).toEqual(expectedContent); 104 | 105 | expect(withReadableMappings(sourceMapSource1.map())).toMatchInlineSnapshot(` 106 | Object { 107 | "_mappings": "1:12 -> [hello-world.txt] 1:0 (Hello), :17, :18 -> [hello-world.txt] 1:6 (World) 108 | 2:0 -> [hello-world.txt] 2:0, :7, :13 -> [hello-world.txt] 2:10 109 | 3:0 -> [text] 3:11, :7, :8 -> [text] 3:17", 110 | "file": "x", 111 | "mappings": "YAAAA,K,CAAMC;AACN,O,MAAU;ACCC,O,CAAM", 112 | "names": Array [ 113 | "Hello", 114 | "World", 115 | ], 116 | "sources": Array [ 117 | "hello-world.txt", 118 | "text", 119 | ], 120 | "sourcesContent": Array [ 121 | "Hello World 122 | is a test string 123 | ", 124 | "Hello World 125 | is a test string 126 | Translate: Other text", 127 | ], 128 | "version": 3, 129 | } 130 | `); 131 | 132 | expect(withReadableMappings(sourceMapSource2.map())).toMatchInlineSnapshot(` 133 | Object { 134 | "_mappings": "1:12 -> [hello-world.txt] 1:0 (Hello), :17, :18 -> [hello-world.txt] 1:6 (World) 135 | 2:0 -> [hello-world.txt] 2:0, :7, :13 -> [hello-world.txt] 2:10", 136 | "file": "x", 137 | "mappings": "YAAAA,K,CAAMC;AACN,O,MAAU", 138 | "names": Array [ 139 | "Hello", 140 | "World", 141 | ], 142 | "sources": Array [ 143 | "hello-world.txt", 144 | ], 145 | "sourcesContent": Array [ 146 | "Hello World 147 | is a test string 148 | ", 149 | ], 150 | "version": 3, 151 | } 152 | `); 153 | 154 | const hash = require("crypto").createHash("sha256"); 155 | 156 | sourceMapSource1.updateHash(hash); 157 | const digest = hash.digest("hex"); 158 | expect(digest).toMatchInlineSnapshot( 159 | '"a61a2da7f3d541e458b1af9c0ec25d853fb929339d7d8b22361468be67326a52"', 160 | ); 161 | 162 | const clone = new SourceMapSource(...sourceMapSource1.getArgsAsBuffers()); 163 | expect(clone.sourceAndMap()).toEqual(sourceMapSource1.sourceAndMap()); 164 | 165 | const hash2 = require("crypto").createHash("sha256"); 166 | 167 | 
clone.updateHash(hash2); 168 | const digest2 = hash2.digest("hex"); 169 | expect(digest2).toEqual(digest); 170 | }); 171 | 172 | it("should handle null sources and sourcesContent", () => { 173 | const a = new SourceMapSource("hello world\n", "hello.txt", { 174 | version: 3, 175 | // @ts-expect-error for tests 176 | sources: [null], 177 | // @ts-expect-error for tests 178 | sourcesContent: [null], 179 | mappings: "AAAA", 180 | }); 181 | const b = new SourceMapSource("hello world\n", "hello.txt", { 182 | version: 3, 183 | sources: [], 184 | sourcesContent: [], 185 | mappings: "AAAA", 186 | names: [], 187 | file: "", 188 | }); 189 | const c = new SourceMapSource("hello world\n", "hello.txt", { 190 | version: 3, 191 | sources: ["hello-source.txt"], 192 | sourcesContent: ["hello world\n"], 193 | mappings: "AAAA", 194 | names: [], 195 | file: "", 196 | }); 197 | const sources = [a, b, c].map((s) => { 198 | const r = new ReplaceSource(s); 199 | r.replace(1, 4, "i"); 200 | return r; 201 | }); 202 | const source = new ConcatSource(...sources); 203 | 204 | expect(source.source()).toMatchInlineSnapshot(` 205 | "hi world 206 | hi world 207 | hi world 208 | " 209 | `); 210 | expect(withReadableMappings(source.map(), source.source())) 211 | .toMatchInlineSnapshot(` 212 | Object { 213 | "_mappings": "1:0 -> [null] 1:0 214 | hi world 215 | ^_______ 216 | 3:0 -> [hello-source.txt] 1:0, :1 -> [hello-source.txt] 1:1, :2 -> [hello-source.txt] 1:5 217 | hi world 218 | ^^^_____ 219 | ", 220 | "file": "x", 221 | "mappings": "AAAA;;ACAA,CAAC,CAAI", 222 | "names": Array [], 223 | "sources": Array [ 224 | null, 225 | "hello-source.txt", 226 | ], 227 | "sourcesContent": Array [ 228 | null, 229 | "hello world 230 | ", 231 | ], 232 | "version": 3, 233 | } 234 | `); 235 | expect( 236 | withReadableMappings(source.map({ columns: false }), source.source()), 237 | ).toMatchInlineSnapshot(` 238 | Object { 239 | "_mappings": "1:0 -> [null] 1:0 240 | hi world 241 | ^_______ 242 | 3:0 -> 
[hello-source.txt] 1:0 243 | hi world 244 | ^_______ 245 | ", 246 | "file": "x", 247 | "mappings": "AAAA;;ACAA", 248 | "names": Array [], 249 | "sources": Array [ 250 | null, 251 | "hello-source.txt", 252 | ], 253 | "sourcesContent": Array [ 254 | null, 255 | "hello world 256 | ", 257 | ], 258 | "version": 3, 259 | } 260 | `); 261 | }); 262 | 263 | it("should handle es6-promise correctly", () => { 264 | const code = fs.readFileSync( 265 | path.resolve(__dirname, "fixtures", "es6-promise.js"), 266 | "utf8", 267 | ); 268 | const map = JSON.parse( 269 | fs.readFileSync( 270 | path.resolve(__dirname, "fixtures", "es6-promise.map"), 271 | "utf8", 272 | ), 273 | ); 274 | const inner = new SourceMapSource(code, "es6-promise.js", map); 275 | const source = new ConcatSource(inner, inner); 276 | expect(source.source()).toBe(code + code); 277 | expect(source.sourceAndMap().source).toBe(code + code); 278 | }); 279 | 280 | it("should not emit zero sizes mappings when ending with empty mapping", () => { 281 | const a = new SourceMapSource("hello\n", "a", { 282 | version: 3, 283 | mappings: "AAAA;AACA", 284 | sources: ["hello1"], 285 | names: [], 286 | file: "", 287 | }); 288 | const b = new SourceMapSource("hi", "b", { 289 | version: 3, 290 | mappings: "AAAA,EAAE", 291 | sources: ["hello2"], 292 | names: [], 293 | file: "", 294 | }); 295 | const b2 = new SourceMapSource("hi", "b", { 296 | version: 3, 297 | mappings: "AAAA,EAAE", 298 | sources: ["hello3"], 299 | names: [], 300 | file: "", 301 | }); 302 | const c = new SourceMapSource("", "c", { 303 | version: 3, 304 | mappings: "AAAA", 305 | sources: ["hello4"], 306 | names: [], 307 | file: "", 308 | }); 309 | const source = new ConcatSource( 310 | a, 311 | a, 312 | b, 313 | b, 314 | b2, 315 | b, 316 | c, 317 | c, 318 | b2, 319 | a, 320 | b2, 321 | c, 322 | a, 323 | b, 324 | ); 325 | source.sourceAndMap(); 326 | expect(withReadableMappings(source.map(), source.source())) 327 | .toMatchInlineSnapshot(` 328 | Object { 329 | 
"_mappings": "1:0 -> [hello1] 1:0 330 | hello 331 | ^____ 332 | 2:0 -> [hello1] 1:0 333 | hello 334 | ^____ 335 | 3:0 -> [hello2] 1:0, :4 -> [hello3] 1:0, :6 -> [hello2] 1:0, :8 -> [hello3] 1:0, :10 -> [hello1] 1:0 336 | hihihihihihello 337 | ^___^_^_^_^____ 338 | 4:0 -> [hello3] 1:0, :2 -> [hello1] 1:0 339 | hihello 340 | ^_^____ 341 | 5:0 -> [hello2] 1:0 342 | hi 343 | ^_ 344 | ", 345 | "file": "x", 346 | "mappings": "AAAA;AAAA;ACAA,ICAA,EDAA,ECAA,EFAA;AEAA,EFAA;ACAA", 347 | "names": Array [], 348 | "sources": Array [ 349 | "hello1", 350 | "hello2", 351 | "hello3", 352 | ], 353 | "sourcesContent": undefined, 354 | "version": 3, 355 | } 356 | `); 357 | source.sourceAndMap({ columns: true }); 358 | source.map({ columns: true }); 359 | const withReplacements = (s) => { 360 | const r = new ReplaceSource(s); 361 | r.insert(0, ""); 362 | return r; 363 | }; 364 | withReplacements(source).sourceAndMap(); 365 | withReplacements(source).map(); 366 | withReplacements(source).sourceAndMap({ columns: false }); 367 | withReplacements(source).map({ columns: false }); 368 | const withPrefix = (s) => new PrefixSource("test", s); 369 | withPrefix(source).sourceAndMap(); 370 | withPrefix(source).map(); 371 | withPrefix(source).sourceAndMap({ columns: false }); 372 | withPrefix(source).map({ columns: false }); 373 | const testCached = (s, fn) => { 374 | const c = new CachedSource(s); 375 | const o = fn(s); 376 | const a = fn(c); 377 | expect(a).toEqual(o); 378 | const b = fn(c); 379 | expect(b).toEqual(o); 380 | return b; 381 | }; 382 | testCached(source, (s) => s.sourceAndMap()); 383 | testCached(source, (s) => s.map()); 384 | testCached(source, (s) => s.sourceAndMap({ columns: false })); 385 | testCached(source, (s) => s.map({ columns: false })); 386 | testCached(withPrefix(source), (s) => s.sourceAndMap()); 387 | testCached(withPrefix(source), (s) => s.map()); 388 | testCached(withPrefix(source), (s) => s.sourceAndMap({ columns: false })); 389 | testCached(withPrefix(source), (s) 
=> s.map({ columns: false })); 390 | testCached(source, (s) => withPrefix(s).sourceAndMap()); 391 | testCached(source, (s) => withPrefix(s).map()); 392 | testCached(source, (s) => withPrefix(s).sourceAndMap({ columns: false })); 393 | testCached(source, (s) => withPrefix(s).map({ columns: false })); 394 | }); 395 | 396 | it("should not crash without original source when mapping names", () => { 397 | const source = new SourceMapSource( 398 | "h", 399 | "hello.txt", 400 | { 401 | version: 3, 402 | sources: ["hello.txt"], 403 | mappings: "AAAAA", 404 | names: ["hello"], 405 | file: "", 406 | }, 407 | "hello", 408 | { 409 | version: 3, 410 | sources: ["hello world.txt"], 411 | mappings: "AAAA", 412 | names: [], 413 | file: "", 414 | }, 415 | false, 416 | ); 417 | expect(withReadableMappings(source.map())).toMatchInlineSnapshot(` 418 | Object { 419 | "_mappings": "1:0 -> [hello world.txt] 1:0", 420 | "file": "x", 421 | "mappings": "AAAA", 422 | "names": Array [], 423 | "sources": Array [ 424 | "hello world.txt", 425 | ], 426 | "sourcesContent": undefined, 427 | "version": 3, 428 | } 429 | `); 430 | }); 431 | 432 | it("should map generated lines to the inner source", () => { 433 | const m = createMappingsSerializer(); 434 | const m2 = createMappingsSerializer(); 435 | const source = new SourceMapSource( 436 | "Message: H W!", 437 | "HELLO_WORLD.txt", 438 | { 439 | version: 3, 440 | sources: ["messages.txt", "HELLO_WORLD.txt"], 441 | mappings: [ 442 | m(1, 0, 0, 1, 0, 0), 443 | m(1, 9, 1, 1, 0, 1), 444 | m(1, 11, 1, 1, 6, 2), 445 | m(1, 12, -1, -1, -1, -1), 446 | ].join(""), 447 | names: ["Message", "hello", "world"], 448 | file: "", 449 | }, 450 | "HELLO WORLD", 451 | { 452 | version: 3, 453 | sources: ["hello world.txt"], 454 | mappings: [m2(1, 0, 0, 1, 0, 0), m2(1, 6, -1, -1, -1, -1)].join(""), 455 | sourcesContent: ["hello world"], 456 | names: [], 457 | file: "", 458 | }, 459 | false, 460 | ); 461 | 
expect(withReadableMappings(source.sourceAndMap())).toMatchInlineSnapshot(` 462 | Object { 463 | "_mappings": "1:0 -> [messages.txt] 1:0 (Message), :9 -> [hello world.txt] 1:0, :11 -> [HELLO_WORLD.txt] 1:6 (world), :12 464 | Message: H W! 465 | ^________^_^. 466 | ", 467 | "map": Object { 468 | "file": "x", 469 | "mappings": "AAAAA,SCAA,ECAMC,C", 470 | "names": Array [ 471 | "Message", 472 | "world", 473 | ], 474 | "sources": Array [ 475 | "messages.txt", 476 | "hello world.txt", 477 | "HELLO_WORLD.txt", 478 | ], 479 | "sourcesContent": Array [ 480 | null, 481 | "hello world", 482 | "HELLO WORLD", 483 | ], 484 | "version": 3, 485 | }, 486 | "source": "Message: H W!", 487 | } 488 | `); 489 | }); 490 | 491 | it("provides buffer when backed by string", () => { 492 | const sourceMapSource = new SourceMapSource("source", "name"); 493 | 494 | const buffer1 = sourceMapSource.buffer(); 495 | expect(buffer1).toHaveLength(6); 496 | 497 | const buffer2 = sourceMapSource.buffer(); 498 | 499 | // When memory optimizations are enabled, the buffer is not cached. 500 | expect(enableMemoryOptimizations ? buffer1.equals(buffer2) : buffer2).toBe( 501 | enableMemoryOptimizations ? 
true : buffer1, 502 | ); 503 | }); 504 | 505 | it("provides buffer when backed by buffer", () => { 506 | const sourceMapSource = new SourceMapSource( 507 | Buffer.from("source", "utf8"), 508 | "name", 509 | ); 510 | 511 | const buffer1 = sourceMapSource.buffer(); 512 | expect(buffer1).toHaveLength(6); 513 | 514 | const buffer2 = sourceMapSource.buffer(); 515 | expect(buffer2).toBe(buffer1); 516 | }); 517 | 518 | for (const hash of [ 519 | ["md5", [crypto.createHash("md5"), crypto.createHash("md5")]], 520 | ["md4", [new BatchedHash(createMd4()), new BatchedHash(createMd4())]], 521 | [ 522 | "xxhash64", 523 | [new BatchedHash(createXXHash64()), new BatchedHash(createXXHash64())], 524 | ], 525 | ]) { 526 | it(`should have the same hash (${hash[0]}) for string and Buffer`, () => { 527 | const sourceString = new SourceMapSource("hello world\n", "hello.txt", { 528 | version: 3, 529 | sources: ["hello-source.txt"], 530 | sourcesContent: ["hello world\n"], 531 | mappings: "AAAA", 532 | names: [], 533 | file: "", 534 | }); 535 | const sourceBuffer = new SourceMapSource( 536 | Buffer.from("hello world\n"), 537 | "hello.txt", 538 | Buffer.from( 539 | JSON.stringify({ 540 | version: 3, 541 | sources: ["hello-source.txt"], 542 | sourcesContent: ["hello world\n"], 543 | mappings: "AAAA", 544 | names: [], 545 | file: "", 546 | }), 547 | ), 548 | ); 549 | 550 | expect(sourceString.source()).toBe("hello world\n"); 551 | expect(sourceString.buffer()).toEqual(sourceBuffer.buffer()); 552 | 553 | sourceString.updateHash(hash[1][0]); 554 | sourceBuffer.updateHash(hash[1][1]); 555 | 556 | expect(hash[1][0].digest("hex")).toBe(hash[1][1].digest("hex")); 557 | }); 558 | } 559 | }); 560 | --------------------------------------------------------------------------------