├── .github └── workflows │ └── test.yml ├── .gitignore ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── bun.lock ├── deno.json ├── examples ├── boxes │ ├── index.html │ ├── index.js │ └── styles.css └── spa-like │ ├── README.md │ ├── code.js │ ├── index.ts │ └── style.css ├── index.d.ts ├── logo-light.svg ├── logo.svg ├── package.json ├── src ├── index.test.ts └── index.ts └── tsconfig.json /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Test 2 | 3 | on: 4 | push: 5 | branches: [main] 6 | pull_request: 7 | branches: [main] 8 | 9 | jobs: 10 | build: 11 | runs-on: ubuntu-latest 12 | 13 | steps: 14 | - uses: actions/checkout@v3 15 | - name: Setup bun 16 | uses: oven-sh/setup-bun@v1 17 | with: 18 | bun-version: 1.2.9 19 | - name: Install dependencies, build, and test 20 | run: bun install 21 | - run: bunx playwright@1.51.1 install 22 | - run: bunx playwright@1.51.1 install-deps 23 | - run: bun run build 24 | - run: bun run bundlewatch 25 | - run: bun run test 26 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Based on https://raw.githubusercontent.com/github/gitignore/main/Node.gitignore 2 | 3 | # Logs 4 | 5 | logs 6 | _.log 7 | npm-debug.log_ 8 | yarn-debug.log* 9 | yarn-error.log* 10 | lerna-debug.log* 11 | .pnpm-debug.log* 12 | 13 | # Caches 14 | 15 | .cache 16 | 17 | # Diagnostic reports (https://nodejs.org/api/report.html) 18 | 19 | report.[0-9]_.[0-9]_.[0-9]_.[0-9]_.json 20 | 21 | # Runtime data 22 | 23 | pids 24 | _.pid 25 | _.seed 26 | *.pid.lock 27 | 28 | # Directory for instrumented libs generated by jscoverage/JSCover 29 | 30 | lib-cov 31 | 32 | # Coverage directory used by tools like istanbul 33 | 34 | coverage 35 | *.lcov 36 | 37 | # nyc test coverage 38 | 39 | .nyc_output 40 | 41 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 42 | 43 | .grunt 44 | 45 | # Bower dependency directory (https://bower.io/) 46 | 47 | bower_components 48 | 49 | # node-waf configuration 50 | 51 | .lock-wscript 52 | 53 | # Build files 54 | build 55 | 56 | # Dependency directories 57 | 58 | node_modules/ 59 | jspm_packages/ 60 | 61 | # Snowpack dependency directory (https://snowpack.dev/) 62 | 63 | web_modules/ 64 | 65 | # TypeScript cache 66 | 67 | *.tsbuildinfo 68 | 69 | # Optional npm cache directory 70 | 71 | .npm 72 | 73 | # Optional eslint cache 74 | 75 | .eslintcache 76 | 77 | # Optional stylelint cache 78 | 79 | .stylelintcache 80 | 81 | # Microbundle cache 82 | 83 | .rpt2_cache/ 84 | .rts2_cache_cjs/ 85 | .rts2_cache_es/ 86 | .rts2_cache_umd/ 87 | 88 | # Optional REPL history 89 | 90 | .node_repl_history 91 | 92 | # Output of 'npm pack' 93 | 94 | *.tgz 95 | 96 | # Yarn Integrity file 97 | 98 | .yarn-integrity 99 | 100 | # dotenv environment variable files 101 | 102 | .env 103 | .env.development.local 104 | .env.test.local 105 | .env.production.local 106 | .env.local 107 | 108 | # parcel-bundler cache (https://parceljs.org/) 109 | 110 | .parcel-cache 111 | 112 | # Next.js build output 113 | 114 | .next 115 | out 116 | 117 | # Nuxt.js build / generate output 118 | 119 | .nuxt 120 | dist 121 | 122 | # Gatsby files 123 | 124 | # Comment in the public line in if your project uses Gatsby and not Next.js 125 | 126 | # https://nextjs.org/blog/next-9-1#public-directory-support 127 | 128 | # public 129 | 130 | # vuepress build output 131 | 
132 | .vuepress/dist 133 | 134 | # vuepress v2.x temp and cache directory 135 | 136 | .temp 137 | 138 | # Docusaurus cache and generated files 139 | 140 | .docusaurus 141 | 142 | # Serverless directories 143 | 144 | .serverless/ 145 | 146 | # FuseBox cache 147 | 148 | .fusebox/ 149 | 150 | # DynamoDB Local files 151 | 152 | .dynamodb/ 153 | 154 | # TernJS port file 155 | 156 | .tern-port 157 | 158 | # Stores VSCode versions used for testing VSCode extensions 159 | 160 | .vscode-test 161 | 162 | # yarn v2 163 | 164 | .yarn/cache 165 | .yarn/unplugged 166 | .yarn/build-state.yml 167 | .yarn/install-state.gz 168 | .pnp.* 169 | 170 | # IntelliJ based IDEs 171 | .idea 172 | 173 | # Finder (MacOS) folder config 174 | .DS_Store 175 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation. 6 | 7 | ## Our Standards 8 | 9 | Examples of behavior that contributes to creating a positive environment include: 10 | 11 | - Using welcoming and inclusive language 12 | - Being respectful of differing viewpoints and experiences 13 | - Gracefully accepting constructive criticism 14 | - Focusing on what is best for the community 15 | - Showing empathy towards other community members 16 | 17 | Examples of unacceptable behavior by participants include: 18 | 19 | - The use of sexualized language or imagery and unwelcome sexual attention or advances 20 | - Trolling, insulting/derogatory comments, and personal or political attacks 21 | - Public or private harassment 22 | - Publishing others' private information, such as a physical or electronic address, without explicit permission 23 | - Other conduct which could reasonably be considered inappropriate in a professional setting 24 | 25 | ## Our Responsibilities 26 | 27 | Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. 28 | 29 | Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. 30 | 31 | ## Scope 32 | 33 | This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. 34 | 35 | ## Enforcement 36 | 37 | Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at contact@aralroca.com. 
The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. 38 | 39 | Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. 40 | 41 | ## Attribution 42 | 43 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version] 44 | 45 | [homepage]: http://contributor-covenant.org 46 | [version]: http://contributor-covenant.org/version/1/4/ 47 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | When contributing to this repository, please first discuss the change you wish to make via issue, 4 | email, or any other method with the owners of this repository before making a change. 5 | 6 | Please note we have a code of conduct, please follow it in all your interactions with the project. 7 | 8 | ## Pull Request Process 9 | 10 | 1. Ensure you are doing the PR to the canary branch. 11 | 2. Write the failing tests about the issue / feature you are working on. 12 | 3. Update the README.md with details of changes to the interface. 13 | 4. You may merge the Pull Request in once you have the approval of at least one maintainer, or if you 14 | do not have permission to do that, you may request the maintainer to merge it for you. 15 | 16 | ## Code of Conduct 17 | 18 | ### Our Pledge 19 | 20 | In the interest of fostering an open and welcoming environment, we as 21 | contributors and maintainers pledge to making participation in our project and 22 | our community a harassment-free experience for everyone, regardless of age, body 23 | size, disability, ethnicity, gender identity and expression, level of experience, 24 | nationality, personal appearance, race, religion, or sexual identity and 25 | orientation. 26 | 27 | ### Our Standards 28 | 29 | Examples of behavior that contributes to creating a positive environment 30 | include: 31 | 32 | - Using welcoming and inclusive language 33 | - Being respectful of differing viewpoints and experiences 34 | - Gracefully accepting constructive criticism 35 | - Focusing on what is best for the community 36 | - Showing empathy towards other community members 37 | 38 | Examples of unacceptable behavior by participants include: 39 | 40 | - The use of sexualized language or imagery and unwelcome sexual attention or 41 | advances 42 | - Trolling, insulting/derogatory comments, and personal or political attacks 43 | - Public or private harassment 44 | - Publishing others' private information, such as a physical or electronic 45 | address, without explicit permission 46 | - Other conduct which could reasonably be considered inappropriate in a 47 | professional setting 48 | 49 | ### Our Responsibilities 50 | 51 | Project maintainers are responsible for clarifying the standards of acceptable 52 | behavior and are expected to take appropriate and fair corrective action in 53 | response to any instances of unacceptable behavior. 
54 | 55 | Project maintainers have the right and responsibility to remove, edit, or 56 | reject comments, commits, code, wiki edits, issues, and other contributions 57 | that are not aligned to this Code of Conduct, or to ban temporarily or 58 | permanently any contributor for other behaviors that they deem inappropriate, 59 | threatening, offensive, or harmful. 60 | 61 | ### Scope 62 | 63 | This Code of Conduct applies both within project spaces and in public spaces 64 | when an individual is representing the project or its community. Examples of 65 | representing a project or community include using an official project e-mail 66 | address, posting via an official social media account, or acting as an appointed 67 | representative at an online or offline event. Representation of a project may be 68 | further defined and clarified by project maintainers. 69 | 70 | ### Enforcement 71 | 72 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 73 | reported by contacting the project team at contact@aralroca.com. All 74 | complaints will be reviewed and investigated and will result in a response that 75 | is deemed necessary and appropriate to the circumstances. The project team is 76 | obligated to maintain confidentiality with regard to the reporter of an incident. 77 | Further details of specific enforcement policies may be posted separately. 78 | 79 | Project maintainers who do not follow or enforce the Code of Conduct in good 80 | faith may face temporary or permanent repercussions as determined by other 81 | members of the project's leadership. 82 | 83 | ### Attribution 84 | 85 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, 86 | available at [http://contributor-covenant.org/version/1/4][version] 87 | 88 | [homepage]: http://contributor-covenant.org 89 | [version]: http://contributor-covenant.org/version/1/4/ 90 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License 2 | 3 | MIT License 4 | 5 | Copyright (c) 2024 Aral Roca Gomez 6 | 7 | Permission is hereby granted, free of charge, to any person obtaining a copy 8 | of this software and associated documentation files (the "Software"), to deal 9 | in the Software without restriction, including without limitation the rights 10 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 11 | copies of the Software, and to permit persons to whom the Software is 12 | furnished to do so, subject to the following conditions: 13 | 14 | The above copyright notice and this permission notice shall be included in all 15 | copies or substantial portions of the Software. 16 | 17 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 18 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 19 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 20 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 21 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 22 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 23 | SOFTWARE. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |

2 |
3 |
4 |
5 |
6 | Diff DOM Streaming
7 |
8 |
9 | [![npm version](https://badge.fury.io/js/diff-dom-streaming.svg)](https://badge.fury.io/js/diff-dom-streaming)
10 | ![npm](https://img.shields.io/npm/dw/diff-dom-streaming)
11 | ![size](https://img.shields.io/bundlephobia/minzip/diff-dom-streaming)
12 | [![PRs Welcome][badge-prwelcome]][prwelcome]
13 |
14 |
15 |
16 | follow on Twitter
18 |
19 |
20 |
21 | [badge-prwelcome]: https://img.shields.io/badge/PRs-welcome-brightgreen.svg?style=flat-square
22 | [prwelcome]: http://makeapullrequest.com
23 |
24 | The Diff DOM (Document Object Model) algorithm is used to compare two versions of the DOM, such as before and after an update on a web page. It aims to efficiently identify the changes between both DOMs, minimizing the number of manipulations required to update the user interface.
25 |
26 | The Diff DOM Streaming library extends the traditional Diff DOM algorithm by introducing support for comparing a DOM node with a stream. This enables the library to process the changes incrementally as they occur during the diff process.
27 |
28 | For more info, read this:
29 |
30 | - [HTML Streaming Over the Wire | A Deep Dive](https://dev.to/aralroca/html-streaming-over-the-wire-a-deep-dive-2n20)
31 | - [SPA-like Navigation Preserving Web Component State](https://dev.to/aralroca/spa-like-navigation-preserving-web-component-state-lh3)
32 |
33 | ## Getting started
34 |
35 | ### NPM
36 |
37 | Install:
38 |
39 | ```sh
40 | bun install diff-dom-streaming
41 | ```
42 |
43 | Then import it:
44 |
45 | ```ts
46 | import diff from "diff-dom-streaming";
47 | ```
48 |
49 | ### JSR
50 |
51 | Install:
52 |
53 | ```sh
54 | bunx jsr add @aralroca/diff-dom-streaming
55 | ```
56 |
57 | Then import it:
58 |
59 | ```ts
60 | import diff from "@aralroca/diff-dom-streaming";
61 | ```
62 |
63 | ### UNPKG
64 |
65 | Just import it:
66 |
67 | ```tsx
68 | import diff from "https://unpkg.com/diff-dom-streaming@latest";
69 | ```
70 |
71 | ## Using it
72 |
73 | ```ts
74 | const res = await fetch(/* some url */);
75 |
76 | // Diff between the current document and the stream:
77 | await diff(document, res.body);
78 | ```
79 |
80 | ## API
81 |
82 | `diff(oldNode: Node, stream: ReadableStream, options?: Options): Promise<void>`
83 |
84 | This function performs a diffing operation between the `oldNode` and the DOM tree read from a stream. It applies the necessary changes to update the `oldNode` accordingly. An optional `options` object can be passed, which includes:
85 |
86 | ```ts
87 | type Options = {
88 |   // callback to handle each new document node during the streaming
89 |   // (default: undefined)
90 |   onNextNode?: NextNodeCallback;
91 |   // update the DOM using document.startViewTransition (default: false)
92 |   transition?: boolean;
93 |   // callback to ignore nodes (default: undefined)
94 |   shouldIgnoreNode?: (node: Node | null) => boolean;
95 | };
96 | ```
97 |
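For example, the streaming callback, the ignore callback, and the transition flag can be combined in a single call. This is a minimal sketch, not part of the library's docs: the `/next-page` URL and the `data-keep` attribute are illustrative assumptions, while the option names follow the `Options` type above.

```ts
import diff from "diff-dom-streaming";

// Hypothetical endpoint that returns a full HTML page
const res = await fetch("/next-page");

await diff(document, res.body!, {
  // Called for every node parsed from the incoming stream
  onNextNode(node) {
    console.log("streamed node:", node.nodeName);
  },
  // Skip nodes marked with a custom attribute (illustrative convention)
  shouldIgnoreNode(node) {
    return node instanceof HTMLElement && node.hasAttribute("data-keep");
  },
  // Wrap each DOM update in document.startViewTransition
  transition: true,
});
```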
98 | ## Lists and `key` attribute
99 |
100 | Keys help to identify which items have changed, are added, or are removed. Keys should be given to the elements inside the array to give the elements a stable identity:
101 |
102 | ```jsx 3
103 | const numbers = [1, 2, 3, 4, 5];
104 | const listItems = numbers.map((number) => (
105 |   <li key={number.toString()}>{number}</li>
106 | ));
107 | ```
108 |
109 | _(Example with JSX)_
110 |
111 | The `diff-dom-streaming` library takes the `key` attribute into account in these cases; if it does not exist, it falls back to the `id` attribute.
112 |
113 | ## Transitions between pages (View Transition API)
114 |
115 | You can activate the View Transition API when updating the DOM with this option:
116 |
117 | ```diff
118 | await diff(document, res.body, {
119 | + transition: true
120 | })
121 | ```
122 |
123 | > [!TIP]
124 | >
125 | > To access the transition from JavaScript/TypeScript, use the global property `window.lastDiffTransition`.
126 |
127 | ### Incremental vs full transition
128 |
129 | Often it makes more sense to use a single full transition instead of incremental ones, especially when suspense is not used and we want one transition for the whole update rather than several. In that case, instead of using the option, we can use the View Transition API directly:
130 |
131 | ```diff
132 | + document.startViewTransition(async () => {
133 | await diff(document, res.body, {
134 | - transition: true,
135 | });
136 | +});
137 | ```
138 |
139 | ## Strong Opinion on BODY Tag Attributes during Diffing
140 |
141 | Our library has a strong opinion regarding the handling of the BODY tag attributes during the HTML diffing process. This approach is designed to provide greater flexibility and control over runtime modifications, such as themes, fonts, and other display properties that are managed through BODY tag attributes.
142 |
143 | During the diffing process, all content within the HTML is typically updated to reflect the latest changes. However, we recognize that certain attributes of the BODY tag, like `class` and custom `data-attributes`, are often modified at runtime to control the presentation of the content. To avoid overwriting these runtime changes, our library's diffing algorithm specifically excludes these attributes from being updated.
144 |
145 | ### Key Points
146 |
147 | - **Preservation of Attributes**: Attributes of the BODY tag (e.g., `class`, `data-attributes`) are preserved and not overwritten during the diffing process.
148 | - **Consistent Display**: This ensures that runtime modifications, such as theme changes or other display-related adjustments, remain intact across navigations and updates.
149 | - **Enhanced Customization**: Users can rely on the BODY tag attributes to manage display properties without concern for them being reset during content updates.
150 |
151 | ### Example
152 |
153 | Consider the following scenario where the initial HTML and updated HTML are as follows:
154 |
155 | #### Initial HTML
156 |
157 | ```html
158 |
159 |
    Content A
    160 | 161 | ``` 162 | 163 | #### Updated HTML 164 | 165 | After a navigation or content update, the new HTML may look like this: 166 | 167 | ```html 168 | 169 |
    Content B
    170 | 171 | ``` 172 | 173 | ### Result After Diffing 174 | 175 | After the diffing process, the resulting HTML will be as follows: 176 | 177 | ```html 178 | 179 |
    Content B
    180 | 181 | ``` 182 | 183 | ## Examples 184 | 185 | In the repo we have examples for you to try. 186 | 187 | ### Locally 188 | 189 | There are some examples: 190 | 191 | - Run `bun run example:boxes` 192 | - Run `bun run examples:spa-like` 193 | 194 | ### Stackblitz 195 | 196 | You can run the boxes demo with Vanillajs [here](https://stackblitz.com/edit/diff-dom-streaming?file=index.js). 197 | 198 | ![ezgif-4-1ff18912f4](https://github.com/aralroca/diff-dom-streaming/assets/13313058/f18c01c0-4dfe-473f-8817-fb905adc20c1) 199 | 200 | ## Acknowledgments 201 | 202 | The Diff DOM Algorithm with HTML Streaming is inspired by the [set-dom](https://github.com/DylanPiercey/set-dom) library by [@dylan_piercey](https://twitter.com/dylan_piercey) and a technique for parsing streams pioneered by [@jaffathecake](https://twitter.com/jaffathecake). 203 | 204 | ## Contributing 205 | 206 | See [Contributing Guide](CONTRIBUTING.md) and please follow our [Code of Conduct](CODE_OF_CONDUCT.md). 207 | 208 | ## License 209 | 210 | [MIT](LICENSE) 211 | -------------------------------------------------------------------------------- /bun.lock: -------------------------------------------------------------------------------- 1 | { 2 | "lockfileVersion": 1, 3 | "workspaces": { 4 | "": { 5 | "name": "diff-dom-streaming", 6 | "devDependencies": { 7 | "@types/bun": "1.2.9", 8 | "bundlewatch": "0.4.1", 9 | "playwright": "1.51.1", 10 | "typescript": "5.8.3", 11 | }, 12 | }, 13 | }, 14 | "packages": { 15 | "@babel/code-frame": ["@babel/code-frame@7.26.2", "", { "dependencies": { "@babel/helper-validator-identifier": "^7.25.9", "js-tokens": "^4.0.0", "picocolors": "^1.0.0" } }, "sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ=="], 16 | 17 | "@babel/helper-validator-identifier": ["@babel/helper-validator-identifier@7.25.9", "", {}, "sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ=="], 18 | 19 | "@types/bun": ["@types/bun@1.2.9", "", { "dependencies": { "bun-types": "1.2.9" } }, "sha512-epShhLGQYc4Bv/aceHbmBhOz1XgUnuTZgcxjxk+WXwNyDXavv5QHD1QEFV0FwbTSQtNq6g4ZcV6y0vZakTjswg=="], 20 | 21 | "@types/node": ["@types/node@22.13.0", "", { "dependencies": { "undici-types": "~6.20.0" } }, "sha512-ClIbNe36lawluuvq3+YYhnIN2CELi+6q8NpnM7PYp4hBn/TatfboPgVSm2rwKRfnV2M+Ty9GWDFI64KEe+kysA=="], 22 | 23 | "@types/normalize-package-data": ["@types/normalize-package-data@2.4.4", "", {}, "sha512-37i+OaWTh9qeK4LSHPsyRC7NahnGotNuZvjLSgcPzblpHB3rrCJxAOgI5gCdKm7coonsaX1Of0ILiTcnZjbfxA=="], 24 | 25 | "@types/ws": ["@types/ws@8.5.14", "", { "dependencies": { "@types/node": "*" } }, "sha512-bd/YFLW+URhBzMXurx7lWByOu+xzU9+kb3RboOteXYDfW+tr+JZa99OyNmPINEGB/ahzKrEuc8rcv4gnpJmxTw=="], 26 | 27 | "ansi-styles": ["ansi-styles@4.3.0", "", { "dependencies": { "color-convert": "^2.0.1" } }, "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg=="], 28 | 29 | "asynckit": ["asynckit@0.4.0", "", {}, "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="], 30 | 31 | "axios": ["axios@0.30.0", "", { "dependencies": { "follow-redirects": "^1.15.4", "form-data": "^4.0.0", "proxy-from-env": "^1.1.0" } }, "sha512-Z4F3LjCgfjZz8BMYalWdMgAQUnEtKDmpwNHjh/C8pQZWde32TF64cqnSeyL3xD/aTIASRU30RHTNzRiV/NpGMg=="], 32 | 33 | "balanced-match": ["balanced-match@1.0.2", "", {}, "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="], 34 | 35 | "brace-expansion": 
["brace-expansion@1.1.11", "", { "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" } }, "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA=="], 36 | 37 | "bun-types": ["bun-types@1.2.9", "", { "dependencies": { "@types/node": "*", "@types/ws": "*" } }, "sha512-dk/kOEfQbajENN/D6FyiSgOKEuUi9PWfqKQJEgwKrCMWbjS/S6tEXp178mWvWAcUSYm9ArDlWHZKO3T/4cLXiw=="], 38 | 39 | "bundlewatch": ["bundlewatch@0.4.1", "", { "dependencies": { "axios": "^0.30.0", "bytes": "^3.1.1", "chalk": "^4.0.0", "ci-env": "^1.17.0", "commander": "^5.0.0", "glob": "^7.1.2", "gzip-size": "^6.0.0", "jsonpack": "^1.1.5", "lodash.merge": "^4.6.1", "read-pkg-up": "^7.0.1" }, "bin": { "bundlewatch": "lib/bin/index.js" } }, "sha512-SIobIhMX8DhoGiNlZ+IqicXRBkp7NQTKGEoZP82Tu8FQhoykbTdTQlL4zYT7n6swjH9wahJ9RTefz0Xwva5FXA=="], 40 | 41 | "bytes": ["bytes@3.1.2", "", {}, "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg=="], 42 | 43 | "chalk": ["chalk@4.1.2", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="], 44 | 45 | "ci-env": ["ci-env@1.17.0", "", {}, "sha512-NtTjhgSEqv4Aj90TUYHQLxHdnCPXnjdtuGG1X8lTfp/JqeXTdw0FTWl/vUAPuvbWZTF8QVpv6ASe/XacE+7R2A=="], 46 | 47 | "color-convert": ["color-convert@2.0.1", "", { "dependencies": { "color-name": "~1.1.4" } }, "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ=="], 48 | 49 | "color-name": ["color-name@1.1.4", "", {}, "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="], 50 | 51 | "combined-stream": ["combined-stream@1.0.8", "", { "dependencies": { "delayed-stream": "~1.0.0" } }, "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg=="], 52 | 53 | "commander": ["commander@5.1.0", "", {}, "sha512-P0CysNDQ7rtVw4QIQtm+MRxV66vKFSvlsQvGYXZWR3qFU0jlMKHZZZgw8e+8DSah4UDKMqnknRDQz+xuQXQ/Zg=="], 54 | 55 | "concat-map": ["concat-map@0.0.1", "", {}, "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg=="], 56 | 57 | "delayed-stream": ["delayed-stream@1.0.0", "", {}, "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ=="], 58 | 59 | "duplexer": ["duplexer@0.1.2", "", {}, "sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg=="], 60 | 61 | "error-ex": ["error-ex@1.3.2", "", { "dependencies": { "is-arrayish": "^0.2.1" } }, "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g=="], 62 | 63 | "find-up": ["find-up@4.1.0", "", { "dependencies": { "locate-path": "^5.0.0", "path-exists": "^4.0.0" } }, "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw=="], 64 | 65 | "follow-redirects": ["follow-redirects@1.15.9", "", {}, "sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ=="], 66 | 67 | "form-data": ["form-data@4.0.1", "", { "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", "mime-types": "^2.1.12" } }, "sha512-tzN8e4TX8+kkxGPK8D5u0FNmjPUjw3lwC9lSLxxoB/+GtsJG91CO8bSWy73APlgAZzZbXEYZJuxjkHH2w+Ezhw=="], 68 | 69 | "fs.realpath": ["fs.realpath@1.0.0", "", {}, "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw=="], 70 | 71 | "fsevents": 
["fsevents@2.3.2", "", { "os": "darwin" }, "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA=="], 72 | 73 | "function-bind": ["function-bind@1.1.2", "", {}, "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA=="], 74 | 75 | "glob": ["glob@7.2.3", "", { "dependencies": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", "inherits": "2", "minimatch": "^3.1.1", "once": "^1.3.0", "path-is-absolute": "^1.0.0" } }, "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q=="], 76 | 77 | "gzip-size": ["gzip-size@6.0.0", "", { "dependencies": { "duplexer": "^0.1.2" } }, "sha512-ax7ZYomf6jqPTQ4+XCpUGyXKHk5WweS+e05MBO4/y3WJ5RkmPXNKvX+bx1behVILVwr6JSQvZAku021CHPXG3Q=="], 78 | 79 | "has-flag": ["has-flag@4.0.0", "", {}, "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ=="], 80 | 81 | "hasown": ["hasown@2.0.2", "", { "dependencies": { "function-bind": "^1.1.2" } }, "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ=="], 82 | 83 | "hosted-git-info": ["hosted-git-info@2.8.9", "", {}, "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw=="], 84 | 85 | "inflight": ["inflight@1.0.6", "", { "dependencies": { "once": "^1.3.0", "wrappy": "1" } }, "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA=="], 86 | 87 | "inherits": ["inherits@2.0.4", "", {}, "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="], 88 | 89 | "is-arrayish": ["is-arrayish@0.2.1", "", {}, "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg=="], 90 | 91 | "is-core-module": ["is-core-module@2.16.1", "", { "dependencies": { "hasown": "^2.0.2" } }, "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w=="], 92 | 93 | "js-tokens": ["js-tokens@4.0.0", "", {}, "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ=="], 94 | 95 | "json-parse-even-better-errors": ["json-parse-even-better-errors@2.3.1", "", {}, "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w=="], 96 | 97 | "jsonpack": ["jsonpack@1.1.5", "", {}, "sha512-d2vwomK605ks7Q+uCpbwGyoIF5j+UZuJjlYcugISBt3CxM+eBo/W6y63yVPIyIvbYON+pvJYsYZjCYbzqJj/xQ=="], 98 | 99 | "lines-and-columns": ["lines-and-columns@1.2.4", "", {}, "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg=="], 100 | 101 | "locate-path": ["locate-path@5.0.0", "", { "dependencies": { "p-locate": "^4.1.0" } }, "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g=="], 102 | 103 | "lodash.merge": ["lodash.merge@4.6.2", "", {}, "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ=="], 104 | 105 | "mime-db": ["mime-db@1.52.0", "", {}, "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg=="], 106 | 107 | "mime-types": ["mime-types@2.1.35", "", { "dependencies": { "mime-db": "1.52.0" } }, "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw=="], 108 | 109 | "minimatch": ["minimatch@3.1.2", "", { "dependencies": { "brace-expansion": "^1.1.7" } }, 
"sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw=="], 110 | 111 | "normalize-package-data": ["normalize-package-data@2.5.0", "", { "dependencies": { "hosted-git-info": "^2.1.4", "resolve": "^1.10.0", "semver": "2 || 3 || 4 || 5", "validate-npm-package-license": "^3.0.1" } }, "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA=="], 112 | 113 | "once": ["once@1.4.0", "", { "dependencies": { "wrappy": "1" } }, "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w=="], 114 | 115 | "p-limit": ["p-limit@2.3.0", "", { "dependencies": { "p-try": "^2.0.0" } }, "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w=="], 116 | 117 | "p-locate": ["p-locate@4.1.0", "", { "dependencies": { "p-limit": "^2.2.0" } }, "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A=="], 118 | 119 | "p-try": ["p-try@2.2.0", "", {}, "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ=="], 120 | 121 | "parse-json": ["parse-json@5.2.0", "", { "dependencies": { "@babel/code-frame": "^7.0.0", "error-ex": "^1.3.1", "json-parse-even-better-errors": "^2.3.0", "lines-and-columns": "^1.1.6" } }, "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg=="], 122 | 123 | "path-exists": ["path-exists@4.0.0", "", {}, "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w=="], 124 | 125 | "path-is-absolute": ["path-is-absolute@1.0.1", "", {}, "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg=="], 126 | 127 | "path-parse": ["path-parse@1.0.7", "", {}, "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw=="], 128 | 129 | "picocolors": ["picocolors@1.1.1", "", {}, "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA=="], 130 | 131 | "playwright": ["playwright@1.51.1", "", { "dependencies": { "playwright-core": "1.51.1" }, "optionalDependencies": { "fsevents": "2.3.2" }, "bin": { "playwright": "cli.js" } }, "sha512-kkx+MB2KQRkyxjYPc3a0wLZZoDczmppyGJIvQ43l+aZihkaVvmu/21kiyaHeHjiFxjxNNFnUncKmcGIyOojsaw=="], 132 | 133 | "playwright-core": ["playwright-core@1.51.1", "", { "bin": { "playwright-core": "cli.js" } }, "sha512-/crRMj8+j/Nq5s8QcvegseuyeZPxpQCZb6HNk3Sos3BlZyAknRjoyJPFWkpNn8v0+P3WiwqFF8P+zQo4eqiNuw=="], 134 | 135 | "proxy-from-env": ["proxy-from-env@1.1.0", "", {}, "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="], 136 | 137 | "read-pkg": ["read-pkg@5.2.0", "", { "dependencies": { "@types/normalize-package-data": "^2.4.0", "normalize-package-data": "^2.5.0", "parse-json": "^5.0.0", "type-fest": "^0.6.0" } }, "sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg=="], 138 | 139 | "read-pkg-up": ["read-pkg-up@7.0.1", "", { "dependencies": { "find-up": "^4.1.0", "read-pkg": "^5.2.0", "type-fest": "^0.8.1" } }, "sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg=="], 140 | 141 | "resolve": ["resolve@1.22.10", "", { "dependencies": { "is-core-module": "^2.16.0", "path-parse": "^1.0.7", "supports-preserve-symlinks-flag": "^1.0.0" }, "bin": { "resolve": "bin/resolve" } }, 
"sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w=="], 142 | 143 | "semver": ["semver@5.7.2", "", { "bin": { "semver": "bin/semver" } }, "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g=="], 144 | 145 | "spdx-correct": ["spdx-correct@3.2.0", "", { "dependencies": { "spdx-expression-parse": "^3.0.0", "spdx-license-ids": "^3.0.0" } }, "sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA=="], 146 | 147 | "spdx-exceptions": ["spdx-exceptions@2.5.0", "", {}, "sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w=="], 148 | 149 | "spdx-expression-parse": ["spdx-expression-parse@3.0.1", "", { "dependencies": { "spdx-exceptions": "^2.1.0", "spdx-license-ids": "^3.0.0" } }, "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q=="], 150 | 151 | "spdx-license-ids": ["spdx-license-ids@3.0.21", "", {}, "sha512-Bvg/8F5XephndSK3JffaRqdT+gyhfqIPwDHpX80tJrF8QQRYMo8sNMeaZ2Dp5+jhwKnUmIOyFFQfHRkjJm5nXg=="], 152 | 153 | "supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="], 154 | 155 | "supports-preserve-symlinks-flag": ["supports-preserve-symlinks-flag@1.0.0", "", {}, "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w=="], 156 | 157 | "type-fest": ["type-fest@0.8.1", "", {}, "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA=="], 158 | 159 | "typescript": ["typescript@5.8.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ=="], 160 | 161 | "undici-types": ["undici-types@6.20.0", "", {}, "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg=="], 162 | 163 | "validate-npm-package-license": ["validate-npm-package-license@3.0.4", "", { "dependencies": { "spdx-correct": "^3.0.0", "spdx-expression-parse": "^3.0.0" } }, "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew=="], 164 | 165 | "wrappy": ["wrappy@1.0.2", "", {}, "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="], 166 | 167 | "read-pkg/type-fest": ["type-fest@0.6.0", "", {}, "sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg=="], 168 | } 169 | } 170 | -------------------------------------------------------------------------------- /deno.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@aralroca/diff-dom-streaming", 3 | "version": "0.6.6", 4 | "exports": { 5 | ".": "./src/index.ts", 6 | "./types": "./index.d.ts" 7 | }, 8 | "description": "Diff DOM algorithm with streaming. 
Gets all modifications, insertions and removals between a DOM fragment and a stream HTML reader.", 9 | "keywords": ["diff", "dom", "streaming", "html", "reader"], 10 | "publish": { 11 | "include": ["src", "deno.json", "index.d.ts", "LICENSE", "README.md"], 12 | "exclude": [".github", "examples"] 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /examples/boxes/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Example 1 5 | 6 | 7 | 8 | 9 |
    10 |
    11 | 12 | 18 | 19 | 20 | 21 | Reload 22 | Come back to examples 23 | GitHub 26 |
    27 |
    28 | 29 | 30 | -------------------------------------------------------------------------------- /examples/boxes/index.js: -------------------------------------------------------------------------------- 1 | import diff from "https://unpkg.com/diff-dom-streaming@0.6.1"; 2 | 3 | async function diffStreamReader(e) { 4 | e?.preventDefault(); 5 | 6 | // This is a simple example. Normally the stream comes from a fetch request. 7 | const encoder = new TextEncoder(); 8 | const ms = +document.querySelector("#ms").value ?? 0; 9 | const numBoxes = +document.querySelector("#box").value ?? 3; 10 | const wait = (ms) => new Promise((resolve) => setTimeout(resolve, ms)); 11 | const stream = new ReadableStream({ 12 | async start(controller) { 13 | let epochStart = Date.now(); 14 | controller.enqueue( 15 | encoder.encode(` 16 | 17 | 18 | 19 | Example 1 20 | 21 | 22 | 23 | 24 |
    25 |
    26 | 27 | 33 | 34 | 35 | 36 | Reload 37 | Come back to examples 38 | GitHub 39 |
    40 |
    41 |
    42 | `), 43 | ); 44 | 45 | // BOXES 46 | for (let i = 0; i < numBoxes; i++) { 47 | controller.enqueue( 48 | encoder.encode(` 49 |
    50 |

    Box ${i + 1}

    51 |

    ${Date.now() - epochStart} milliseconds

    52 |

    Random number: ${Math.random()}

    53 |
    54 | `), 55 | ); 56 | if (ms) await wait(ms); 57 | } 58 | 59 | controller.enqueue( 60 | encoder.encode(` 61 |
    62 | 63 | 64 | `), 65 | ); 66 | controller.close(); 67 | }, 68 | }); 69 | 70 | await diff(document, stream.getReader()); 71 | } 72 | 73 | document.querySelector("form").addEventListener("submit", diffStreamReader); 74 | -------------------------------------------------------------------------------- /examples/boxes/styles.css: -------------------------------------------------------------------------------- 1 | * { 2 | box-sizing: border-box; 3 | } 4 | 5 | header { 6 | position: fixed; 7 | padding: 10px; 8 | margin-top: -10px; 9 | text-align: center; 10 | background-color: light-dark(white, black); 11 | width: 100%; 12 | box-shadow: 2px 2px 10px 0 hsla(0, 0%, 40%, 0.5); 13 | } 14 | 15 | .container { 16 | padding-top: 60px; 17 | display: flex; 18 | justify-content: flex-start; 19 | flex-wrap: wrap; 20 | } 21 | 22 | .box { 23 | width: 30%; 24 | padding: 20px; 25 | border: 1px solid #000; 26 | margin: 10px; 27 | width: calc(33.33% - 20px); 28 | } 29 | 30 | input, 31 | button, 32 | a { 33 | margin-right: 10px; 34 | } 35 | -------------------------------------------------------------------------------- /examples/spa-like/README.md: -------------------------------------------------------------------------------- 1 | ## SPA-Like Navigation Preserving Web Component State 2 | 3 | Access to the folder and run: 4 | 5 | ```sh 6 | bun run index.ts 7 | ``` 8 | 9 | Then open [http://localhost:1234](http://localhost:1234). 10 | -------------------------------------------------------------------------------- /examples/spa-like/code.js: -------------------------------------------------------------------------------- 1 | const scripts = new Set(); 2 | 3 | function spaNavigation(event) { 4 | const url = new URL(event.destination.url); 5 | 6 | if (location.origin !== url.origin) return; 7 | 8 | event.intercept({ 9 | async handler() { 10 | const res = await fetch(url.pathname, { signal: event.signal }); 11 | 12 | if (res.ok) { 13 | const diffModule = await import( 14 | "https://unpkg.com/diff-dom-streaming@0.6.1" 15 | ); 16 | const diff = diffModule.default; 17 | registerCurrentScripts(); 18 | 19 | await diff(document, res.body.getReader(), { 20 | onNextNode: loadScripts, 21 | transition: true, 22 | }); 23 | } 24 | }, 25 | }); 26 | } 27 | 28 | if ("navigation" in window) { 29 | window.navigation.addEventListener("navigate", spaNavigation); 30 | } 31 | 32 | // Counter Web Component 33 | class CounterComponent extends HTMLElement { 34 | connectedCallback() { 35 | const shadowRoot = this.attachShadow({ mode: "open" }); 36 | let count = 0; 37 | 38 | shadowRoot.innerHTML = ` 39 | 40 | 41 |

    Count: ${count}

    42 | `; 43 | const countEl = shadowRoot.querySelector("#count"); 44 | shadowRoot.querySelector("#inc").addEventListener("click", () => { 45 | count++; 46 | countEl.textContent = `Count: ${count}`; 47 | }); 48 | shadowRoot.querySelector("#dec").addEventListener("click", () => { 49 | count--; 50 | countEl.textContent = `Count: ${count}`; 51 | }); 52 | } 53 | } 54 | 55 | // Register Counter Web Component 56 | if (!customElements.get("counter-component")) { 57 | customElements.define("counter-component", CounterComponent); 58 | } 59 | 60 | // Register current scripts 61 | function registerCurrentScripts() { 62 | for (let script of document.scripts) { 63 | if (script.id || script.hasAttribute("src")) { 64 | scripts.add(script.id || script.getAttribute("src")); 65 | } 66 | } 67 | } 68 | 69 | // Load new scripts 70 | function loadScripts(node) { 71 | if (node.nodeName !== "SCRIPT") return; 72 | 73 | const src = node.getAttribute("src"); 74 | 75 | if (scripts.has(src) || scripts.has(node.id)) return; 76 | 77 | const script = document.createElement("script"); 78 | 79 | if (src) script.src = src; 80 | 81 | script.innerHTML = node.innerHTML; 82 | 83 | // Remove after load the script 84 | script.onload = script.onerror = () => script.remove(); 85 | 86 | document.head.appendChild(script); 87 | 88 | // Remove after append + execute (only for inline script) 89 | if (!src) script.remove(); 90 | } 91 | -------------------------------------------------------------------------------- /examples/spa-like/index.ts: -------------------------------------------------------------------------------- 1 | import path from "node:path"; 2 | 3 | const encoder = new TextEncoder(); 4 | const names: Record = { 5 | "/foo": "Foo", 6 | "/bar": "Bar", 7 | }; 8 | 9 | const server = Bun.serve({ 10 | port: 1234, 11 | fetch(req: Request) { 12 | let suspensePromise; 13 | const url = new URL(req.url); 14 | const name = names[url.pathname] ?? "Hello, World"; 15 | 16 | if (url.pathname === "/code") { 17 | return new Response(Bun.file(path.join(import.meta.dir, "code.js"))); 18 | } 19 | 20 | if (url.pathname === "/style.css") { 21 | return new Response(Bun.file(path.join(import.meta.dir, "style.css"))); 22 | } 23 | 24 | return new Response( 25 | new ReadableStream({ 26 | async start(controller) { 27 | controller.enqueue( 28 | encoder.encode(` 29 | 30 | 31 | SPA-like navigation with Diff DOM Streaming 32 | 33 | 34 | 35 | 40 |

    ${name}!

    41 | `), 42 | ); 43 | 44 | // Add "Suspense" placeholder 45 | controller.enqueue( 46 | encoder.encode('
    Loading...
    '), 47 | ); 48 | 49 | // Expensive chunk: 50 | suspensePromise = Bun.sleep(2000).then(handleExpensiveChunk); 51 | 52 | // "Unsuspense" code 53 | function handleExpensiveChunk() { 54 | controller.enqueue( 55 | encoder.encode(` 56 | 57 | `), 58 | ); 59 | controller.enqueue( 60 | encoder.encode(` 61 | 76 | `), 77 | ); 78 | } 79 | controller.enqueue( 80 | encoder.encode(` 81 | 82 | 83 | 84 | `), 85 | ); 86 | 87 | await suspensePromise; 88 | controller.close(); 89 | }, 90 | }), 91 | ); 92 | }, 93 | }); 94 | 95 | console.log(`Done! http://${server.hostname}:${server.port}`); 96 | -------------------------------------------------------------------------------- /examples/spa-like/style.css: -------------------------------------------------------------------------------- 1 | * { 2 | box-sizing: border-box; 3 | } 4 | 5 | ::view-transition-old(root), 6 | ::view-transition-new(root) { 7 | animation-duration: 0.5s; 8 | } 9 | 10 | div { 11 | margin: 10px 0; 12 | } 13 | -------------------------------------------------------------------------------- /index.d.ts: -------------------------------------------------------------------------------- 1 | export default function diff( 2 | oldNode: Node, 3 | stream: ReadableStream, 4 | options?: Options, 5 | ): Promise; 6 | 7 | type NextNodeCallback = (node: Node) => void; 8 | 9 | type Options = { 10 | onNextNode?: NextNodeCallback; 11 | transition?: boolean; 12 | shouldIgnoreNode?: (node: Node | null) => boolean; 13 | }; 14 | -------------------------------------------------------------------------------- /logo-light.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /logo.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "diff-dom-streaming", 3 | "version": "0.6.6", 4 | "bugs": "https://github.com/aralroca/diff-dom-streaming/issues", 5 | "description": "Diff DOM algorithm with streaming. 
Gets all modifications, insertions and removals between a DOM fragment and a stream HTML reader.", 6 | "keywords": [ 7 | "brisa", 8 | "typescript", 9 | "streaming", 10 | "stream", 11 | "html", 12 | "diff", 13 | "dom", 14 | "javascript" 15 | ], 16 | "bundlewatch": { 17 | "files": [ 18 | { 19 | "path": "./build/index.js", 20 | "maxSize": "1.5 kB" 21 | } 22 | ] 23 | }, 24 | "module": "./build/index.js", 25 | "main": "./build/index.js", 26 | "types": "./index.d.ts", 27 | "type": "module", 28 | "license": "MIT", 29 | "author": { 30 | "name": "Aral Roca Gòmez", 31 | "email": "contact@aralroca.com" 32 | }, 33 | "files": [ 34 | "build", 35 | "index.d.ts" 36 | ], 37 | "exports": { 38 | ".": { 39 | "import": "./build/index.js", 40 | "require": "./build/index.js", 41 | "types": "./index.d.ts" 42 | } 43 | }, 44 | "repository": { 45 | "type": "git", 46 | "url": "https://github.com/aralroca/diff-dom-streaming.git" 47 | }, 48 | "scripts": { 49 | "build": "bun build --target=browser --minify --outdir=build src/index.ts", 50 | "test": "bun test --timeout=30000", 51 | "bundlewatch": "bundlewatch", 52 | "example:boxes": "bunx serve ./examples/boxes", 53 | "example:spa-like": "bun run ./examples/spa-like/index.ts" 54 | }, 55 | "devDependencies": { 56 | "typescript": "5.8.3", 57 | "@types/bun": "1.2.9", 58 | "bundlewatch": "0.4.1", 59 | "playwright": "1.51.1" 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /src/index.test.ts: -------------------------------------------------------------------------------- 1 | import { 2 | describe, 3 | beforeAll, 4 | beforeEach, 5 | afterEach, 6 | afterAll, 7 | it, 8 | expect, 9 | } from "bun:test"; 10 | import { chromium, firefox, webkit, type Browser, type Page } from "playwright"; 11 | import diff from "./index"; 12 | import { join } from "node:path"; 13 | 14 | const engine: Record = { 15 | chrome: chromium, 16 | firefox: firefox, 17 | safari: webkit, 18 | }; 19 | 20 | const transpiler = new Bun.Transpiler({ loader: "ts", target: "browser" }); 21 | const diffCode = await transpiler.transform( 22 | (await Bun.file(join(import.meta.dir, "index.ts")).text()).replace( 23 | "export default", 24 | "", 25 | ), 26 | ); 27 | const normalize = (t: string) => 28 | t.replace(/\s*\n\s*/g, "").replaceAll("'", '"'); 29 | 30 | describe("Diff test", () => { 31 | let browser: Browser; 32 | let page: Page; 33 | 34 | beforeEach(async () => { 35 | page = await browser.newPage(); 36 | }); 37 | 38 | afterEach(async () => { 39 | await page.close(); 40 | }); 41 | 42 | afterAll(async () => { 43 | await browser.close(); 44 | }); 45 | 46 | describe("Chrome View Transitions API", () => { 47 | it("should not call document.startViewTransition for each DOM update with transition=false", async () => { 48 | browser = await engine.chrome.launch(); 49 | const [newHTML, , , transitionApplied] = await testDiff({ 50 | oldHTMLString: ` 51 |
    52 |

    hello world

    53 |
    54 | `, 55 | newHTMLStringChunks: ["
    ", "

    hello world!

    ", "
    "], 56 | transition: false, 57 | }); 58 | expect(newHTML).toBe( 59 | normalize(` 60 | 61 | 62 | 63 |
    64 |

    hello world!

    65 |
    66 | 67 | 68 | `), 69 | ); 70 | expect(transitionApplied).toBeFalse(); 71 | }); 72 | it("should call document.startViewTransition for each DOM update with transition=true", async () => { 73 | browser = await engine.chrome.launch(); 74 | const [newHTML, , , transitionApplied] = await testDiff({ 75 | oldHTMLString: ` 76 |
    77 |

    hello world

    78 |
    79 | `, 80 | newHTMLStringChunks: ["
    ", "

    hello world!

    ", "
    "], 81 | transition: true, 82 | }); 83 | expect(newHTML).toBe( 84 | normalize(` 85 | 86 | 87 | 88 |
    89 |

    hello world!

    90 |
    91 | 92 | 93 | `), 94 | ); 95 | expect(transitionApplied).toBeTrue(); 96 | }); 97 | }); 98 | 99 | describe.each(["chrome", "firefox", "safari"])("%s", (browserName) => { 100 | beforeAll(async () => { 101 | browser = await engine[browserName].launch(); 102 | }); 103 | 104 | it("should error with invalid arguments", async () => { 105 | const res = new Response('
    hello world
    '); 106 | expect(() => diff("hello world" as any, res.body!)).toThrow(Error); 107 | }); 108 | 109 | it("should not do any DOM modification", async () => { 110 | const [newHTML, mutations] = await testDiff({ 111 | oldHTMLString: ` 112 |
    113 |

    hello world

    114 |
    115 | `, 116 | newHTMLStringChunks: ["
    ", "

    hello world

    ", "
    "], 117 | }); 118 | expect(newHTML).toBe( 119 | normalize(` 120 | 121 | 122 | 123 |
    124 |

    hello world

    125 |
    126 | 127 | 128 | `), 129 | ); 130 | expect(mutations).toBeEmpty(); 131 | }); 132 | 133 | it("should replace only the body content", async () => { 134 | const [newHTML, mutations] = await testDiff({ 135 | oldHTMLString: ` 136 | 137 | 138 | 139 |
    hello world
    140 | 141 | 142 | `, 143 | newHTMLStringChunks: ["something else"], 144 | }); 145 | expect(newHTML).toBe( 146 | normalize(` 147 | 148 | 149 | 150 | something else 151 | 152 | 153 | `), 154 | ); 155 | expect(mutations).toEqual([ 156 | { 157 | addedNodes: [ 158 | { 159 | nodeName: "#text", 160 | nodeValue: "something else", 161 | keepsExistingNodeReference: false, 162 | }, 163 | ], 164 | attributeName: null, 165 | oldValue: null, 166 | outerHTML: "something else", 167 | removedNodes: [ 168 | { 169 | nodeName: "DIV", 170 | nodeValue: null, 171 | }, 172 | ], 173 | tagName: "BODY", 174 | type: "childList", 175 | }, 176 | ]); 177 | }); 178 | 179 | it("should update only one element of the body", async () => { 180 | const [newHTML, mutations] = await testDiff({ 181 | oldHTMLString: ` 182 | 183 | 184 | 185 |

    TEST

    186 |
    Old Node Content
    187 | 188 | 189 | `, 190 | newHTMLStringChunks: [ 191 | "

    TEST

    ", 192 | '
    ', 193 | "New Node Content", 194 | "
    ", 195 | ], 196 | }); 197 | expect(newHTML).toBe( 198 | normalize(` 199 | 200 | 201 | 202 |

    TEST

    203 |
    New Node Content
    204 | 205 | 206 | `), 207 | ); 208 | expect(mutations).toEqual([ 209 | { 210 | addedNodes: [], 211 | attributeName: null, 212 | oldValue: "Old Node Content", 213 | outerHTML: undefined, 214 | removedNodes: [], 215 | tagName: undefined, 216 | type: "characterData", 217 | }, 218 | ]); 219 | }); 220 | 221 | it("should diff attributes", async () => { 222 | const [newHTML, mutations] = await testDiff({ 223 | oldHTMLString: `
    `, 224 | newHTMLStringChunks: ['
    ', "
    "], 225 | }); 226 | expect(newHTML).toBe( 227 | normalize(` 228 | 229 | 230 | 231 |
    232 | 233 | 234 | `), 235 | ); 236 | 237 | expect(mutations).toEqual([ 238 | { 239 | addedNodes: [], 240 | attributeName: "b", 241 | oldValue: null, 242 | outerHTML: '
    ', 243 | removedNodes: [], 244 | tagName: "DIV", 245 | type: "attributes", 246 | }, 247 | { 248 | addedNodes: [], 249 | attributeName: "a", 250 | oldValue: null, 251 | outerHTML: '
    ', 252 | removedNodes: [], 253 | tagName: "DIV", 254 | type: "attributes", 255 | }, 256 | ]); 257 | }); 258 | 259 | it("should diff nodeValue", async () => { 260 | const [newHTML, mutations] = await testDiff({ 261 | oldHTMLString: ` 262 |
    263 | text a 264 | text b 265 |
    266 | `, 267 | newHTMLStringChunks: ["
    ", "text a", "text c", "
    "], 268 | }); 269 | expect(newHTML).toBe( 270 | normalize(` 271 | 272 | 273 | 274 |
    275 | text a 276 | text c 277 |
    278 | 279 | 280 | `), 281 | ); 282 | 283 | expect(mutations).toEqual([ 284 | { 285 | addedNodes: [], 286 | attributeName: null, 287 | oldValue: "text atext b", 288 | outerHTML: undefined, 289 | removedNodes: [], 290 | tagName: undefined, 291 | type: "characterData", 292 | }, 293 | ]); 294 | }); 295 | 296 | it("should diff children", async () => { 297 | const [newHTML, mutations] = await testDiff({ 298 | oldHTMLString: ` 299 |
    300 | hello 301 | text 302 | text2 303 |
    304 | `, 305 | newHTMLStringChunks: [ 306 | "
    ", 307 | 'hello2', 308 | "text1", 309 | "
    ", 310 | ], 311 | }); 312 | expect(newHTML).toBe( 313 | normalize(` 314 | 315 | 316 | 317 |
    318 | hello2 319 | text1 320 |
    321 | 322 | 323 | `), 324 | ); 325 | expect(mutations).toEqual([ 326 | { 327 | addedNodes: [], 328 | attributeName: null, 329 | oldValue: "hello", 330 | outerHTML: undefined, 331 | removedNodes: [], 332 | tagName: undefined, 333 | type: "characterData", 334 | }, 335 | { 336 | addedNodes: [], 337 | attributeName: "href", 338 | oldValue: "link", 339 | outerHTML: 'hello2', 340 | removedNodes: [], 341 | tagName: "A", 342 | type: "attributes", 343 | }, 344 | { 345 | addedNodes: [], 346 | attributeName: null, 347 | oldValue: "text", 348 | outerHTML: undefined, 349 | removedNodes: [], 350 | tagName: undefined, 351 | type: "characterData", 352 | }, 353 | { 354 | addedNodes: [], 355 | attributeName: null, 356 | oldValue: null, 357 | outerHTML: "", 358 | removedNodes: [ 359 | { 360 | nodeName: "#text", 361 | nodeValue: "text1", 362 | }, 363 | ], 364 | tagName: "B", 365 | type: "childList", 366 | }, 367 | { 368 | addedNodes: [ 369 | { 370 | nodeName: "I", 371 | nodeValue: null, 372 | keepsExistingNodeReference: false, 373 | }, 374 | ], 375 | attributeName: null, 376 | oldValue: null, 377 | outerHTML: 378 | '
    hello2text1text2
    ', 379 | removedNodes: [ 380 | { 381 | nodeName: "B", 382 | nodeValue: null, 383 | }, 384 | ], 385 | tagName: "DIV", 386 | type: "childList", 387 | }, 388 | { 389 | addedNodes: [], 390 | attributeName: null, 391 | oldValue: null, 392 | outerHTML: '
    hello2text1
    ', 393 | removedNodes: [ 394 | { 395 | nodeName: "I", 396 | nodeValue: null, 397 | }, 398 | ], 399 | tagName: "DIV", 400 | type: "childList", 401 | }, 402 | ]); 403 | }); 404 | 405 | it("should diff children (id)", async () => { 406 | const [newHTML, mutations] = await testDiff({ 407 | oldHTMLString: ` 408 |
    409 | text 410 | text2 411 |
    412 | `, 413 | newHTMLStringChunks: ["
    ", 'text1', "
    "], 414 | }); 415 | expect(newHTML).toBe( 416 | normalize(` 417 | 418 | 419 | 420 |
    421 | text1 422 |
    423 | 424 | 425 | `), 426 | ); 427 | 428 | expect(mutations).toEqual([ 429 | { 430 | addedNodes: [], 431 | attributeName: null, 432 | oldValue: null, 433 | outerHTML: `
    text2text
    `, 434 | removedNodes: [ 435 | { 436 | nodeName: "I", 437 | nodeValue: null, 438 | }, 439 | ], 440 | tagName: "DIV", 441 | type: "childList", 442 | }, 443 | { 444 | addedNodes: [ 445 | { 446 | nodeName: "I", 447 | nodeValue: null, 448 | keepsExistingNodeReference: true, 449 | }, 450 | ], 451 | attributeName: null, 452 | oldValue: null, 453 | outerHTML: `
    text2text
    `, 454 | removedNodes: [], 455 | tagName: "DIV", 456 | type: "childList", 457 | }, 458 | { 459 | addedNodes: [], 460 | attributeName: null, 461 | oldValue: "text2", 462 | outerHTML: undefined, 463 | removedNodes: [], 464 | tagName: undefined, 465 | type: "characterData", 466 | }, 467 | { 468 | addedNodes: [], 469 | attributeName: null, 470 | oldValue: null, 471 | outerHTML: `
    text1
    `, 472 | removedNodes: [ 473 | { 474 | nodeName: "B", 475 | nodeValue: null, 476 | }, 477 | ], 478 | tagName: "DIV", 479 | type: "childList", 480 | }, 481 | ]); 482 | }); 483 | 484 | it("should diff children (key) move by deleting", async () => { 485 | const [newHTML, mutations] = await testDiff({ 486 | oldHTMLString: ` 487 |
    488 | hello 489 | text 490 | text2 491 |
    492 | `, 493 | newHTMLStringChunks: [ 494 | "
    ", 495 | 'hello2', 496 | 'text1', 497 | "
    ", 498 | ], 499 | }); 500 | expect(newHTML).toBe( 501 | normalize(` 502 | 503 | 504 | 505 |
    506 | hello2 507 | text1 508 |
    509 | 510 | 511 | `), 512 | ); 513 | 514 | expect(mutations).toEqual([ 515 | { 516 | addedNodes: [], 517 | attributeName: null, 518 | oldValue: "hello", 519 | outerHTML: undefined, 520 | removedNodes: [], 521 | tagName: undefined, 522 | type: "characterData", 523 | }, 524 | { 525 | addedNodes: [], 526 | attributeName: "href", 527 | oldValue: "link", 528 | outerHTML: 'hello2', 529 | removedNodes: [], 530 | tagName: "A", 531 | type: "attributes", 532 | }, 533 | { 534 | addedNodes: [], 535 | attributeName: null, 536 | oldValue: null, 537 | outerHTML: 538 | '
    hello2text2text
    ', 539 | removedNodes: [ 540 | { 541 | nodeName: "I", 542 | nodeValue: null, 543 | }, 544 | ], 545 | tagName: "DIV", 546 | type: "childList", 547 | }, 548 | { 549 | addedNodes: [ 550 | { 551 | nodeName: "I", 552 | nodeValue: null, 553 | keepsExistingNodeReference: true, 554 | }, 555 | ], 556 | attributeName: null, 557 | oldValue: null, 558 | outerHTML: 559 | '
    hello2text2text
    ', 560 | removedNodes: [], 561 | tagName: "DIV", 562 | type: "childList", 563 | }, 564 | { 565 | addedNodes: [], 566 | attributeName: null, 567 | oldValue: "text2", 568 | outerHTML: undefined, 569 | removedNodes: [], 570 | tagName: undefined, 571 | type: "characterData", 572 | }, 573 | { 574 | addedNodes: [], 575 | attributeName: null, 576 | oldValue: null, 577 | outerHTML: 578 | '
    hello2text1
    ', 579 | removedNodes: [ 580 | { 581 | nodeName: "B", 582 | nodeValue: null, 583 | }, 584 | ], 585 | tagName: "DIV", 586 | type: "childList", 587 | }, 588 | ]); 589 | }); 590 | 591 | it("should diff children (key) move by shuffling", async () => { 592 | const [newHTML, mutations] = await testDiff({ 593 | oldHTMLString: ` 594 |
    595 | hello 596 | text 597 | text2 598 |
    599 | `, 600 | newHTMLStringChunks: [ 601 | "
    ", 602 | 'hello', 603 | 'text2', 604 | 'text', 605 | "
    ", 606 | ], 607 | }); 608 | expect(newHTML).toBe( 609 | normalize(` 610 | 611 | 612 | 613 |
    614 | hello 615 | text2 616 | text 617 |
    618 | 619 | 620 | `), 621 | ); 622 | 623 | expect(mutations).toEqual([ 624 | { 625 | addedNodes: [], 626 | attributeName: null, 627 | oldValue: null, 628 | outerHTML: 629 | '
    hellotext2text
    ', 630 | removedNodes: [ 631 | { 632 | nodeName: "I", 633 | nodeValue: null, 634 | }, 635 | ], 636 | tagName: "DIV", 637 | type: "childList", 638 | }, 639 | { 640 | addedNodes: [ 641 | { 642 | nodeName: "I", 643 | nodeValue: null, 644 | keepsExistingNodeReference: true, 645 | }, 646 | ], 647 | attributeName: null, 648 | oldValue: null, 649 | outerHTML: 650 | '
    hellotext2text
    ', 651 | removedNodes: [], 652 | tagName: "DIV", 653 | type: "childList", 654 | }, 655 | ]); 656 | }); 657 | 658 | it("should diff children (key) remove", async () => { 659 | const [newHTML, mutations] = await testDiff({ 660 | oldHTMLString: ` 661 |
    662 | hello 663 | text 664 | text2 665 |
    666 | `, 667 | newHTMLStringChunks: ["
    ", 'hello2', "
    "], 668 | }); 669 | expect(newHTML).toBe( 670 | normalize(` 671 | 672 | 673 | 674 |
    675 | hello2 676 |
    677 | 678 | 679 | `), 680 | ); 681 | expect(mutations).toEqual([ 682 | { 683 | addedNodes: [], 684 | attributeName: null, 685 | oldValue: "hello", 686 | outerHTML: undefined, 687 | removedNodes: [], 688 | tagName: undefined, 689 | type: "characterData", 690 | }, 691 | { 692 | addedNodes: [], 693 | attributeName: "href", 694 | oldValue: "link", 695 | outerHTML: 'hello2', 696 | removedNodes: [], 697 | tagName: "A", 698 | type: "attributes", 699 | }, 700 | { 701 | addedNodes: [], 702 | attributeName: null, 703 | oldValue: null, 704 | outerHTML: '', 705 | removedNodes: [ 706 | { 707 | nodeName: "I", 708 | nodeValue: null, 709 | }, 710 | ], 711 | tagName: "DIV", 712 | type: "childList", 713 | }, 714 | { 715 | addedNodes: [], 716 | attributeName: null, 717 | oldValue: null, 718 | outerHTML: '', 719 | removedNodes: [ 720 | { 721 | nodeName: "B", 722 | nodeValue: null, 723 | }, 724 | ], 725 | tagName: "DIV", 726 | type: "childList", 727 | }, 728 | ]); 729 | }); 730 | 731 | it("should diff children (key) insert new node", async () => { 732 | const [newHTML, mutations] = await testDiff({ 733 | oldHTMLString: ` 734 |
    735 | hello 736 | text2 737 |
    738 | `, 739 | newHTMLStringChunks: [ 740 | "
    ", 741 | 'hello2', 742 | "test", 743 | 'text2', 744 | "
    ", 745 | ], 746 | }); 747 | expect(newHTML).toBe( 748 | normalize(` 749 | 750 | 751 | 752 |
    753 | hello2 754 | test 755 | text2 756 |
    757 | 758 | 759 | `), 760 | ); 761 | 762 | expect(mutations).toEqual([ 763 | { 764 | addedNodes: [], 765 | attributeName: null, 766 | oldValue: "hello", 767 | outerHTML: undefined, 768 | removedNodes: [], 769 | tagName: undefined, 770 | type: "characterData", 771 | }, 772 | { 773 | addedNodes: [], 774 | attributeName: "href", 775 | oldValue: "link", 776 | outerHTML: 'hello2', 777 | removedNodes: [], 778 | tagName: "A", 779 | type: "attributes", 780 | }, 781 | { 782 | addedNodes: [ 783 | { 784 | keepsExistingNodeReference: false, 785 | nodeName: "B", 786 | nodeValue: null, 787 | }, 788 | ], 789 | attributeName: null, 790 | oldValue: null, 791 | outerHTML: 792 | '
    hello2testtext2
    ', 793 | removedNodes: [], 794 | tagName: "DIV", 795 | type: "childList", 796 | }, 797 | { 798 | addedNodes: [], 799 | attributeName: null, 800 | oldValue: null, 801 | outerHTML: 802 | '
    hello2testtext2
    ', 803 | removedNodes: [ 804 | { 805 | nodeName: "I", 806 | nodeValue: null, 807 | }, 808 | ], 809 | tagName: "DIV", 810 | type: "childList", 811 | }, 812 | { 813 | addedNodes: [ 814 | { 815 | keepsExistingNodeReference: true, 816 | nodeName: "I", 817 | nodeValue: null, 818 | }, 819 | ], 820 | attributeName: null, 821 | oldValue: null, 822 | outerHTML: 823 | '
    hello2testtext2
    ', 824 | removedNodes: [], 825 | tagName: "DIV", 826 | type: "childList", 827 | }, 828 | ]); 829 | }); 830 | 831 | it("should diff children (key) with xhtml namespaceURI", async () => { 832 | const [newHTML, mutations] = await testDiff({ 833 | oldHTMLString: ` 834 |
    835 | hello 836 | text 837 | text2 838 |
    839 | `, 840 | newHTMLStringChunks: [ 841 | '
    ', 842 | 'hello2', 843 | 'text1', 844 | "
    ", 845 | ], 846 | }); 847 | expect(newHTML).toBe( 848 | normalize(` 849 | 850 | 851 | 852 |
    853 | hello2 854 | text1 855 |
    856 | 857 | 858 | `), 859 | ); 860 | 861 | expect(mutations).toEqual([ 862 | { 863 | addedNodes: [], 864 | attributeName: null, 865 | oldValue: "hello", 866 | outerHTML: undefined, 867 | removedNodes: [], 868 | tagName: undefined, 869 | type: "characterData", 870 | }, 871 | { 872 | addedNodes: [], 873 | attributeName: "href", 874 | oldValue: "link", 875 | outerHTML: 'hello2', 876 | removedNodes: [], 877 | tagName: "A", 878 | type: "attributes", 879 | }, 880 | { 881 | addedNodes: [], 882 | attributeName: null, 883 | oldValue: null, 884 | outerHTML: 885 | '
    hello2text2text
    ', 886 | removedNodes: [ 887 | { 888 | nodeName: "I", 889 | nodeValue: null, 890 | }, 891 | ], 892 | tagName: "DIV", 893 | type: "childList", 894 | }, 895 | { 896 | addedNodes: [ 897 | { 898 | keepsExistingNodeReference: true, 899 | nodeName: "I", 900 | nodeValue: null, 901 | }, 902 | ], 903 | attributeName: null, 904 | oldValue: null, 905 | outerHTML: 906 | '
    hello2text2text
    ', 907 | removedNodes: [], 908 | tagName: "DIV", 909 | type: "childList", 910 | }, 911 | { 912 | addedNodes: [], 913 | attributeName: null, 914 | oldValue: "text2", 915 | outerHTML: undefined, 916 | removedNodes: [], 917 | tagName: undefined, 918 | type: "characterData", 919 | }, 920 | { 921 | addedNodes: [], 922 | attributeName: null, 923 | oldValue: null, 924 | outerHTML: 925 | '
    hello2text1
    ', 926 | removedNodes: [ 927 | { 928 | nodeName: "B", 929 | nodeValue: null, 930 | }, 931 | ], 932 | tagName: "DIV", 933 | type: "childList", 934 | }, 935 | ]); 936 | }); 937 | 938 | it("should diff children (key) move (custom attribute)", async () => { 939 | const [newHTML, mutations] = await testDiff({ 940 | oldHTMLString: ` 941 |
    942 | hello 943 | text 944 | text2 945 |
    946 | `, 947 | newHTMLStringChunks: [ 948 | "
    ", 949 | 'hello', 950 | 'text2', 951 | 'text', 952 | "
    ", 953 | ], 954 | }); 955 | expect(newHTML).toBe( 956 | normalize(` 957 | 958 | 959 | 960 |
    961 | hello 962 | text2 963 | text 964 |
    965 | 966 | 967 | `), 968 | ); 969 | 970 | expect(mutations).toEqual([ 971 | { 972 | addedNodes: [], 973 | attributeName: null, 974 | oldValue: null, 975 | outerHTML: 976 | '
    hellotext2text
    ', 977 | removedNodes: [ 978 | { 979 | nodeName: "I", 980 | nodeValue: null, 981 | }, 982 | ], 983 | tagName: "DIV", 984 | type: "childList", 985 | }, 986 | { 987 | addedNodes: [ 988 | { 989 | nodeName: "I", 990 | nodeValue: null, 991 | keepsExistingNodeReference: true, 992 | }, 993 | ], 994 | attributeName: null, 995 | oldValue: null, 996 | outerHTML: 997 | '
    hellotext2text
    ', 998 | removedNodes: [], 999 | tagName: "DIV", 1000 | type: "childList", 1001 | }, 1002 | ]); 1003 | }); 1004 | 1005 | it("should only replace the lang attribute of the HTML tag", async () => { 1006 | const [newHTML, mutations] = await testDiff({ 1007 | oldHTMLString: ` 1008 | 1009 | 1010 | 1011 |
    hello world
    1012 | 1013 | 1014 | `, 1015 | newHTMLStringChunks: [ 1016 | '', 1017 | "", 1018 | "", 1019 | "
    hello world
    ", 1020 | "", 1021 | "", 1022 | ], 1023 | }); 1024 | expect(newHTML).toBe( 1025 | normalize(` 1026 | 1027 | 1028 | 1029 |
    hello world
    1030 | 1031 | 1032 | `), 1033 | ); 1034 | expect(mutations).toEqual([ 1035 | { 1036 | addedNodes: [], 1037 | attributeName: "lang", 1038 | oldValue: "en", 1039 | outerHTML: 1040 | '
    hello world
    ', 1041 | removedNodes: [], 1042 | tagName: "HTML", 1043 | type: "attributes", 1044 | }, 1045 | ]); 1046 | }); 1047 | 1048 | it("should only update the title content inside head", async () => { 1049 | const [newHTML, mutations] = await testDiff({ 1050 | oldHTMLString: ` 1051 | 1052 | 1053 | Old Title 1054 | 1055 | 1056 |
    hello world
    1057 | 1058 | 1059 | `, 1060 | newHTMLStringChunks: [ 1061 | "", 1062 | "", 1063 | "New Title", 1064 | "", 1065 | "", 1066 | "
    hello world
    ", 1067 | "", 1068 | "", 1069 | ], 1070 | }); 1071 | expect(newHTML).toBe( 1072 | normalize(` 1073 | 1074 | 1075 | New Title 1076 | 1077 | 1078 |
    hello world
    1079 | 1080 | 1081 | `), 1082 | ); 1083 | expect(mutations).toEqual([ 1084 | { 1085 | addedNodes: [], 1086 | attributeName: null, 1087 | oldValue: "Old Title", 1088 | outerHTML: undefined, 1089 | removedNodes: [], 1090 | tagName: undefined, 1091 | type: "characterData", 1092 | }, 1093 | ]); 1094 | }); 1095 | 1096 | it("should change data-attribute", async () => { 1097 | const [newHTML, mutations] = await testDiff({ 1098 | oldHTMLString: ` 1099 |
    foo
    1100 | `, 1101 | newHTMLStringChunks: ['
    ', "foo", "
    "], 1102 | }); 1103 | expect(newHTML).toBe( 1104 | normalize(` 1105 | 1106 | 1107 | 1108 |
    foo
    1109 | 1110 | 1111 | `), 1112 | ); 1113 | expect(mutations).toEqual([ 1114 | { 1115 | addedNodes: [], 1116 | attributeName: "data-attribute", 1117 | oldValue: "abc", 1118 | outerHTML: '
    foo
    ', 1119 | removedNodes: [], 1120 | tagName: "DIV", 1121 | type: "attributes", 1122 | }, 1123 | ]); 1124 | }); 1125 | 1126 | it("should update only the path of an SVG element", async () => { 1127 | const [newHTML, mutations] = await testDiff({ 1128 | oldHTMLString: ` 1129 | 1130 | 1131 | 1132 | `, 1133 | newHTMLStringChunks: [ 1134 | "", 1135 | '', 1136 | "", 1137 | ], 1138 | }); 1139 | expect(newHTML).toBe( 1140 | normalize(` 1141 | 1142 | 1143 | 1144 | 1145 | 1146 | 1147 | 1148 | 1149 | `), 1150 | ); 1151 | expect(mutations).toEqual([ 1152 | { 1153 | addedNodes: [], 1154 | attributeName: "d", 1155 | oldValue: "M 10 10 L 20 20", 1156 | outerHTML: '', 1157 | removedNodes: [], 1158 | tagName: "path", 1159 | type: "attributes", 1160 | }, 1161 | ]); 1162 | }); 1163 | 1164 | it("should diff children (data-checksum)", async () => { 1165 | const [newHTML, mutations] = await testDiff({ 1166 | oldHTMLString: ` 1167 |
    1168 |
    initial
    1169 |
    1170 | `, 1171 | newHTMLStringChunks: [ 1172 | "
    ", 1173 | '
    final
    ', 1174 | "
    ", 1175 | ], 1176 | }); 1177 | expect(newHTML).toBe( 1178 | normalize(` 1179 | 1180 | 1181 | 1182 |
    1183 |
    final
    1184 |
    1185 | 1186 | 1187 | `), 1188 | ); 1189 | expect(mutations).toEqual([ 1190 | { 1191 | addedNodes: [], 1192 | attributeName: null, 1193 | oldValue: "initial", 1194 | outerHTML: undefined, 1195 | removedNodes: [], 1196 | tagName: undefined, 1197 | type: "characterData", 1198 | }, 1199 | { 1200 | addedNodes: [], 1201 | attributeName: "data-checksum", 1202 | oldValue: "abc", 1203 | outerHTML: '
    final
    ', 1204 | removedNodes: [], 1205 | tagName: "DIV", 1206 | type: "attributes", 1207 | }, 1208 | { 1209 | addedNodes: [], 1210 | attributeName: "class", 1211 | oldValue: "a", 1212 | outerHTML: '
    final
', 1213 | removedNodes: [], 1214 | tagName: "DIV", 1215 | type: "attributes", 1216 | }, 1217 | ]); 1218 | }); 1219 | 1220 | it("should diff between an entire document and documentElement", async () => { 1221 | const [newHTML, mutations] = await testDiff({ 1222 | oldHTMLString: ` 1223 | 1224 | 1225 | 1226 | hello foo 1227 | 1228 | `, 1229 | newHTMLStringChunks: [ 1230 | "", 1231 | "", 1232 | "hello bar", 1233 | "", 1234 | ], 1235 | }); 1236 | expect(newHTML).toBe( 1237 | normalize(` 1238 | 1239 | 1240 | 1241 | 1242 | hello bar 1243 | 1244 | 1245 | `), 1246 | ); 1247 | expect(mutations).toEqual([ 1248 | { 1249 | type: "characterData", 1250 | addedNodes: [], 1251 | removedNodes: [], 1252 | attributeName: null, 1253 | tagName: undefined, 1254 | outerHTML: undefined, 1255 | oldValue: "hello foo", 1256 | }, 1257 | ]); 1258 | }); 1259 | 1260 | it("should diff between entire documents", async () => { 1261 | const [newHTML, mutations] = await testDiff({ 1262 | oldHTMLString: ` 1263 | 1264 | 1265 | 1266 | hello foo 1267 | 1268 | `, 1269 | newHTMLStringChunks: [ 1270 | "", 1271 | "", 1272 | "", 1273 | "hello bar", 1274 | "", 1275 | ], 1276 | }); 1277 | expect(newHTML).toBe( 1278 | normalize(` 1279 | 1280 | 1281 | 1282 | 1283 | hello bar 1284 | 1285 | 1286 | `), 1287 | ); 1288 | expect(mutations).toEqual([ 1289 | { 1290 | type: "characterData", 1291 | addedNodes: [], 1292 | removedNodes: [], 1293 | attributeName: null, 1294 | tagName: undefined, 1295 | outerHTML: undefined, 1296 | oldValue: "hello foo", 1297 | }, 1298 | ]); 1299 | }); 1300 | 1301 | it("should not modify if it is the same node with a different way to close the tag", async () => { 1302 | const [newHTML, mutations] = await testDiff({ 1303 | oldHTMLString: ` 1304 |
    1305 |
    1306 |
    1307 | `, 1308 | newHTMLStringChunks: ["
    ", "
    ", "
    "], 1309 | }); 1310 | expect(newHTML).toBe( 1311 | normalize(` 1312 | 1313 | 1314 | 1315 |
    1316 |
    1317 |
    1318 | 1319 | 1320 | `), 1321 | ); 1322 | expect(mutations).toEqual([]); 1323 | }); 1324 | 1325 | it("should diff and patch html strings with special chars", async () => { 1326 | const [newHTML, mutations] = await testDiff({ 1327 | oldHTMLString: ` 1328 |
    1329 |
    hello world
    1330 |
    1331 | `, 1332 | newHTMLStringChunks: ["
    ", "
    hello & world
    ", "
    "], 1333 | }); 1334 | expect(newHTML).toBe( 1335 | normalize(` 1336 | 1337 | 1338 | 1339 |
    1340 |
    hello & world
    1341 |
1342 | 1343 | 1344 | `), 1345 | ); 1346 | expect(mutations).toEqual([ 1347 | { 1348 | type: "characterData", 1349 | addedNodes: [], 1350 | removedNodes: [], 1351 | attributeName: null, 1352 | tagName: undefined, 1353 | outerHTML: undefined, 1354 | oldValue: "hello world", 1355 | }, 1356 | ]); 1357 | }); 1358 | 1359 | it("should analyze all stream nodes using forEachStreamNode", async () => { 1360 | const [, , streamNodes] = await testDiff({ 1361 | oldHTMLString: ` 1362 |
    1363 |
    hello world
    1364 |
    1365 | `, 1366 | newHTMLStringChunks: ["
    ", "
    hello & world
    ", "
    "], 1367 | useForEeachStreamNode: true, 1368 | }); 1369 | 1370 | // Analyze all stream nodes via forEachStreamNode 1371 | expect(streamNodes).toHaveLength(5); 1372 | expect(streamNodes[0].nodeName).toBe("HEAD"); 1373 | expect(streamNodes[1].nodeName).toBe("BODY"); 1374 | expect(streamNodes[2].nodeName).toBe("DIV"); 1375 | expect(streamNodes[3].nodeName).toBe("DIV"); 1376 | expect(streamNodes[4].nodeName).toBe("#text"); 1377 | expect(streamNodes[4].nodeValue).toBe("hello & world"); 1378 | }); 1379 | 1380 | it("should diff with slow chunks", async () => { 1381 | const [newHTML, mutations] = await testDiff({ 1382 | oldHTMLString: ` 1383 | 1384 | 1385 | 1386 |
    foo
    1387 |
    bar
    1388 |
    baz
    1389 | 1390 | 1391 | `, 1392 | newHTMLStringChunks: [ 1393 | "", 1394 | "", 1395 | "", 1396 | "
    baz
    ", 1397 | "
    foo
    ", 1398 | "
    bar
    ", 1399 | "", 1400 | "", 1401 | ], 1402 | slowChunks: true, 1403 | }); 1404 | expect(newHTML).toBe( 1405 | normalize(` 1406 | 1407 | 1408 | 1409 |
    baz
    1410 |
    foo
    1411 |
    bar
1412 | 1413 | 1414 | `), 1415 | ); 1416 | expect(mutations).toEqual([ 1417 | { 1418 | addedNodes: [], 1419 | attributeName: null, 1420 | oldValue: "foo", 1421 | outerHTML: undefined, 1422 | removedNodes: [], 1423 | tagName: undefined, 1424 | type: "characterData", 1425 | }, 1426 | { 1427 | addedNodes: [], 1428 | attributeName: null, 1429 | oldValue: "bar", 1430 | outerHTML: undefined, 1431 | removedNodes: [], 1432 | tagName: undefined, 1433 | type: "characterData", 1434 | }, 1435 | { 1436 | addedNodes: [], 1437 | attributeName: null, 1438 | oldValue: "baz", 1439 | outerHTML: undefined, 1440 | removedNodes: [], 1441 | tagName: undefined, 1442 | type: "characterData", 1443 | }, 1444 | ]); 1445 | }); 1446 | 1447 | it('should replace a div with a "template" tag containing the content', async () => { 1448 | const [newHTML, mutations] = await testDiff({ 1449 | oldHTMLString: ` 1450 | 1451 | 1452 | 1453 |
    foo
    1454 | 1455 | 1456 | `, 1457 | newHTMLStringChunks: [ 1458 | "", 1459 | "", 1460 | "", 1461 | '', 1462 | "", 1463 | "", 1464 | ], 1465 | }); 1466 | expect(newHTML).toBe( 1467 | normalize(` 1468 | 1469 | 1470 | 1471 | 1474 | 1475 | 1476 | `), 1477 | ); 1478 | expect(mutations).toEqual([ 1479 | { 1480 | addedNodes: [], 1481 | attributeName: null, 1482 | oldValue: null, 1483 | outerHTML: "
    ", 1484 | removedNodes: [ 1485 | { 1486 | nodeName: "#text", 1487 | nodeValue: "foo", 1488 | }, 1489 | ], 1490 | tagName: "DIV", 1491 | type: "childList", 1492 | }, 1493 | { 1494 | addedNodes: [ 1495 | { 1496 | nodeName: "TEMPLATE", 1497 | nodeValue: null, 1498 | keepsExistingNodeReference: false, 1499 | }, 1500 | ], 1501 | attributeName: null, 1502 | oldValue: null, 1503 | outerHTML: 1504 | '', 1505 | removedNodes: [ 1506 | { 1507 | nodeName: "DIV", 1508 | nodeValue: null, 1509 | }, 1510 | ], 1511 | tagName: "BODY", 1512 | type: "childList", 1513 | }, 1514 | ]); 1515 | }); 1516 | 1517 | it("should diff with body without div wrapper and with div wrapper", async () => { 1518 | const [newHTML] = await testDiff({ 1519 | oldHTMLString: ` 1520 | 1521 | 1522 | 1523 | 1524 |
    1525 | This will be a landingpage. But you can go to the admin for now login page 1526 |
    1527 | 1528 | 1529 | 1530 | `, 1531 | newHTMLStringChunks: [ 1532 | "", 1533 | "", 1534 | "", 1535 | "
    ", 1536 | "", 1537 | "
    ", 1538 | "This will be a Admin Page. But you can go to the admin for now home page", 1539 | "
    ", 1540 | "
    ", 1541 | '', 1542 | "", 1543 | "", 1544 | ], 1545 | }); 1546 | 1547 | expect(newHTML).toBe( 1548 | normalize(` 1549 | 1550 | 1551 | 1552 |
    1553 | 1554 |
    1555 | This will be a Admin Page. But you can go to the admin for now home page 1556 |
    1557 |
    1558 | 1559 | 1560 | `), 1561 | ); 1562 | }); 1563 | 1564 | it('should not add again the "data-action" attribute after diff to avoid registering server actions twice', async () => { 1565 | const [newHTML, mutations] = await testDiff({ 1566 | oldHTMLString: ` 1567 |
    foo
    1568 | `, 1569 | newHTMLStringChunks: ['
    foo
    '], 1570 | }); 1571 | expect(newHTML).toBe( 1572 | normalize(` 1573 | 1574 | 1575 | 1576 |
    foo
    1577 | 1578 | 1579 | `), 1580 | ); 1581 | expect(mutations).toEqual([]); 1582 | }); 1583 | 1584 | it("should change the content of the BODY but keep the old attributes (theme, etc)", async () => { 1585 | const [newHTML] = await testDiff({ 1586 | oldHTMLString: ` 1587 | 1588 | 1589 | 1590 |
    foo
    1591 | 1592 | 1593 | `, 1594 | newHTMLStringChunks: [ 1595 | "
    bar
    ", 1596 | ], 1597 | }); 1598 | expect(newHTML).toBe( 1599 | normalize(` 1600 | 1601 | 1602 | 1603 |
    bar
1604 | 1605 | 1606 | `), 1607 | ); 1608 | }); 1609 | 1610 | it("should make options.shouldIgnoreNode work", async () => { 1611 | const [newHTML] = await testDiff({ 1612 | oldHTMLString: ` 1613 |
    1614 |
    foo
    1615 |
    bar
    1616 |
    1617 | `, 1618 | newHTMLStringChunks: [ 1619 | "", 1620 | "", 1621 | "", 1622 | "
    bar
    ", 1623 | "
    bazz!
    ", 1624 | "", 1625 | "", 1626 | ], 1627 | ignoreId: true, 1628 | }); 1629 | expect(newHTML).toBe( 1630 | normalize(` 1631 | 1632 | 1633 | 1634 |
    bar
1635 | 1636 | 1637 | `), 1638 | ); 1639 | }); 1640 | 1641 | it("should add WC that modifies the DOM when it is connected", async () => { 1642 | const [newHTML] = await testDiff({ 1643 | oldHTMLString: ` 1644 |
    foo
1645 | `, 1646 | newHTMLStringChunks: ["foo"], 1647 | registerWC: true, 1648 | }); 1649 | 1650 | expect(newHTML).toBe( 1651 | normalize(` 1652 | 1653 | 1654 | 1655 | foo 1656 | 1657 | 1658 | `), 1659 | ); 1660 | }); 1661 | 1662 | it("should execute onNextNode sequentially when it is async", async () => { 1663 | const results = await testDiff({ 1664 | oldHTMLString: ` 1665 |
    foo
    1666 | `, 1667 | newHTMLStringChunks: ["
    first
    ", "
    second
    ", "
    third
    "], 1668 | registerWC: true, 1669 | onNextNode: `async (n) => { 1670 | if (!n?.hasAttribute?.('scan')) return 1671 | window.index ??= 1; 1672 | window.logs ??= ''; 1673 | await new Promise((r) => setTimeout(() => { 1674 | window.logs += n.innerText + ' '; 1675 | r(true); 1676 | }, ++window.index * 50)); 1677 | }` 1678 | }); 1679 | 1680 | expect(results[0]).toBe( 1681 | normalize(` 1682 | 1683 | 1684 | 1685 |
    first
    1686 |
    second
    1687 |
    third
1688 | 1689 | 1690 | `), 1691 | ); 1692 | 1693 | expect(results.at(-1)).toBe('first second third '); 1694 | }); 1695 | 1696 | it("should add WC that modifies the DOM when it is connected (old with key)", async () => { 1697 | const [newHTML] = await testDiff({ 1698 | oldHTMLString: ` 1699 |
    foo
    1700 | `, 1701 | newHTMLStringChunks: ["foo"], 1702 | registerWC: true, 1703 | }); 1704 | 1705 | expect(newHTML).toBe( 1706 | normalize(` 1707 | 1708 | 1709 | 1710 | foo 1711 | 1712 | 1713 | `), 1714 | ); 1715 | }); 1716 | }); 1717 | 1718 | async function testDiff({ 1719 | oldHTMLString, 1720 | newHTMLStringChunks, 1721 | useForEeachStreamNode = false, 1722 | slowChunks = false, 1723 | transition = false, 1724 | ignoreId = false, 1725 | registerWC = false, 1726 | onNextNode, 1727 | }: { 1728 | oldHTMLString: string; 1729 | newHTMLStringChunks: string[]; 1730 | useForEeachStreamNode?: boolean; 1731 | slowChunks?: boolean; 1732 | transition?: boolean; 1733 | ignoreId?: boolean; 1734 | registerWC?: boolean; 1735 | onNextNode?: string 1736 | }): Promise<[string, any[], Node[], boolean, string]> { 1737 | await page.setContent(normalize(oldHTMLString)); 1738 | const [mutations, streamNodes, transitionApplied, logs] = await page.evaluate( 1739 | async ([ 1740 | diffCode, 1741 | newHTMLStringChunks, 1742 | useForEeachStreamNode, 1743 | slowChunks, 1744 | transition, 1745 | ignoreId, 1746 | registerWC, 1747 | onNextNode, 1748 | ]) => { 1749 | eval(diffCode as string); 1750 | const encoder = new TextEncoder(); 1751 | const readable = new ReadableStream({ 1752 | start: async (controller) => { 1753 | for (const chunk of newHTMLStringChunks as string[]) { 1754 | if (slowChunks) 1755 | await new Promise((resolve) => setTimeout(resolve, 100)); 1756 | controller.enqueue(encoder.encode(chunk)); 1757 | } 1758 | controller.close(); 1759 | }, 1760 | }); 1761 | const allMutations: any[] = []; 1762 | const observer = new MutationObserver((mutations) => { 1763 | allMutations.push( 1764 | ...mutations.map((mutation, mutationIndex) => ({ 1765 | type: mutation.type, 1766 | addedNodes: Array.from(mutation.addedNodes).map( 1767 | (node, index) => ({ 1768 | nodeName: node.nodeName, 1769 | nodeValue: node.nodeValue, 1770 | keepsExistingNodeReference: node.isSameNode( 1771 | mutations[mutationIndex - 1]?.removedNodes?.[index], 1772 | ), 1773 | }), 1774 | ), 1775 | removedNodes: Array.from(mutation.removedNodes).map( 1776 | (node) => ({ 1777 | nodeName: node.nodeName, 1778 | nodeValue: node.nodeValue, 1779 | }), 1780 | ), 1781 | attributeName: mutation.attributeName, 1782 | tagName: (mutation.target as Element).tagName, 1783 | outerHTML: (mutation.target as Element).outerHTML, 1784 | oldValue: mutation.oldValue, 1785 | })), 1786 | ); 1787 | }); 1788 | 1789 | observer.observe(document.documentElement, { 1790 | childList: true, 1791 | attributes: true, 1792 | subtree: true, 1793 | attributeOldValue: true, 1794 | characterData: true, 1795 | characterDataOldValue: true, 1796 | }); 1797 | 1798 | const streamNodes: Node[] = []; 1799 | 1800 | const forEachStreamNode = useForEeachStreamNode 1801 | ? 
(node: Node) => { 1802 | streamNodes.push({ 1803 | nodeName: node.nodeName, 1804 | nodeValue: node.nodeValue, 1805 | } as Node); 1806 | } 1807 | : eval(onNextNode); 1808 | 1809 | if (registerWC) { 1810 | class TestWC extends HTMLElement { 1811 | connectedCallback() { 1812 | this.setAttribute("data-connected", "true"); 1813 | } 1814 | } 1815 | customElements.define("test-wc", TestWC); 1816 | } 1817 | 1818 | await diff(document.documentElement!, readable, { 1819 | onNextNode: forEachStreamNode, 1820 | transition: transition as boolean, 1821 | shouldIgnoreNode(node: Node | null) { 1822 | if (!ignoreId) return false; 1823 | return (node as Element)?.id === "ignore"; 1824 | }, 1825 | }); 1826 | 1827 | // @ts-ignore 1828 | const transitionApplied = !!window.lastDiffTransition; 1829 | 1830 | observer.disconnect(); 1831 | 1832 | return [allMutations, streamNodes, transitionApplied, (window as any).logs]; 1833 | }, 1834 | [ 1835 | diffCode, 1836 | newHTMLStringChunks, 1837 | useForEeachStreamNode, 1838 | slowChunks, 1839 | transition, 1840 | ignoreId, 1841 | registerWC, 1842 | onNextNode, 1843 | ], 1844 | ); 1845 | 1846 | return [ 1847 | (await page.content()).replace(/\s*\n\s*/g, "").replaceAll("'", '"'), 1848 | mutations, 1849 | streamNodes, 1850 | transitionApplied, 1851 | logs 1852 | ]; 1853 | } 1854 | }); 1855 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | /// 2 | /// 3 | /// 4 | /// 5 | /** 6 | * This file contains a diffing algorithm that is used to update the DOM 7 | * inspired by the set-dom library https://github.com/DylanPiercey/set-dom 8 | * but using HTML streaming and View Transition API. 9 | */ 10 | type Walker = { 11 | root: Node | null; 12 | [FIRST_CHILD]: (node: Node) => Promise; 13 | [NEXT_SIBLING]: (node: Node) => Promise; 14 | [APPLY_TRANSITION]: (v: () => void) => void; 15 | }; 16 | 17 | type NextNodeCallback = (node: Node) => void; 18 | 19 | type Options = { 20 | onNextNode?: NextNodeCallback; 21 | transition?: boolean; 22 | shouldIgnoreNode?: (node: Node | null) => boolean; 23 | }; 24 | 25 | const ELEMENT_TYPE = 1; 26 | const DOCUMENT_TYPE = 9; 27 | const DOCUMENT_FRAGMENT_TYPE = 11; 28 | const APPLY_TRANSITION = 0; 29 | const FIRST_CHILD = 1; 30 | const NEXT_SIBLING = 2; 31 | const SPECIAL_TAGS = new Set(["HTML", "HEAD", "BODY"]); 32 | const wait = () => new Promise((resolve) => requestAnimationFrame(resolve)); 33 | 34 | export default async function diff( 35 | oldNode: Node, 36 | stream: ReadableStream, 37 | options?: Options, 38 | ) { 39 | const walker = await htmlStreamWalker(stream, options); 40 | const newNode = walker.root!; 41 | 42 | if (oldNode.nodeType === DOCUMENT_TYPE) { 43 | oldNode = (oldNode as Document).documentElement; 44 | } 45 | 46 | if (newNode.nodeType === DOCUMENT_FRAGMENT_TYPE) { 47 | await setChildNodes(oldNode, newNode, walker); 48 | } else { 49 | await updateNode(oldNode, newNode, walker); 50 | } 51 | } 52 | 53 | /** 54 | * Updates a specific htmlNode and does whatever it takes to convert it to another one. 
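 * In short: when the node types differ, the old node is replaced with a clone of the new one;
 * when both are elements, their children are diffed first and then their attributes are synced
 * (except for BODY, whose existing attributes are kept, e.g. a theme attribute); when only the
 * tag name changes, the element is swapped for the new tag while keeping its existing children;
 * otherwise only the nodeValue is updated.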
55 | */ 56 | async function updateNode(oldNode: Node, newNode: Node, walker: Walker) { 57 | if (oldNode.nodeType !== newNode.nodeType) { 58 | return walker[APPLY_TRANSITION](() => 59 | oldNode.parentNode!.replaceChild(newNode.cloneNode(true), oldNode), 60 | ); 61 | } 62 | 63 | if (oldNode.nodeType === ELEMENT_TYPE) { 64 | await setChildNodes(oldNode, newNode, walker); 65 | 66 | walker[APPLY_TRANSITION](() => { 67 | if (oldNode.nodeName === newNode.nodeName) { 68 | if (newNode.nodeName !== "BODY") { 69 | setAttributes( 70 | (oldNode as Element).attributes, 71 | (newNode as Element).attributes, 72 | ); 73 | } 74 | } else { 75 | const hasDocumentFragmentInside = newNode.nodeName === "TEMPLATE"; 76 | const clonedNewNode = newNode.cloneNode(hasDocumentFragmentInside); 77 | while (oldNode.firstChild) 78 | clonedNewNode.appendChild(oldNode.firstChild); 79 | oldNode.parentNode!.replaceChild(clonedNewNode, oldNode); 80 | } 81 | }); 82 | } else if (oldNode.nodeValue !== newNode.nodeValue) { 83 | walker[APPLY_TRANSITION](() => (oldNode.nodeValue = newNode.nodeValue)); 84 | } 85 | } 86 | 87 | /** 88 | * Utility that will update one list of attributes to match another. 89 | */ 90 | function setAttributes( 91 | oldAttributes: NamedNodeMap, 92 | newAttributes: NamedNodeMap, 93 | ) { 94 | let i, oldAttribute, newAttribute, namespace, name; 95 | 96 | // Remove old attributes. 97 | for (i = oldAttributes.length; i--; ) { 98 | oldAttribute = oldAttributes[i]; 99 | namespace = oldAttribute.namespaceURI; 100 | name = oldAttribute.localName; 101 | newAttribute = newAttributes.getNamedItemNS(namespace, name); 102 | 103 | if (!newAttribute) oldAttributes.removeNamedItemNS(namespace, name); 104 | } 105 | 106 | // Set new attributes. 107 | for (i = newAttributes.length; i--; ) { 108 | oldAttribute = newAttributes[i]; 109 | namespace = oldAttribute.namespaceURI; 110 | name = oldAttribute.localName; 111 | newAttribute = oldAttributes.getNamedItemNS(namespace, name); 112 | 113 | // Avoid re-registering an already registered server action in frameworks like Brisa 114 | if (oldAttribute.name === "data-action") continue; 115 | 116 | if (!newAttribute) { 117 | // Add a new attribute. 118 | newAttributes.removeNamedItemNS(namespace, name); 119 | oldAttributes.setNamedItemNS(oldAttribute); 120 | } else if (newAttribute.value !== oldAttribute.value) { 121 | // Update existing attribute. 122 | newAttribute.value = oldAttribute.value; 123 | } 124 | } 125 | } 126 | 127 | /** 128 | * Utility that updates a node's children to match another node's children. 129 | */ 130 | async function setChildNodes(oldParent: Node, newParent: Node, walker: Walker) { 131 | let checkOld; 132 | let oldKey; 133 | let newKey; 134 | let foundNode; 135 | let keyedNodes: Record<string, Node> | null = null; 136 | let oldNode = oldParent.firstChild; 137 | let newNode = await walker[FIRST_CHILD](newParent); 138 | let extra = 0; 139 | 140 | // Extract keyed nodes from previous children and keep track of total count. 141 | while (oldNode) { 142 | extra++; 143 | checkOld = oldNode; 144 | oldKey = getKey(checkOld); 145 | oldNode = oldNode.nextSibling; 146 | 147 | if (oldKey) { 148 | if (!keyedNodes) keyedNodes = {}; 149 | keyedNodes[oldKey] = checkOld; 150 | } 151 | } 152 | 153 | oldNode = oldParent.firstChild; 154 | 155 | // Loop over new nodes and perform updates.
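// For each streamed new node: a keyed match reuses and moves the existing DOM node before updating it;
// an unkeyed old node is updated in place; a keyed old node that does not match is preserved by
// inserting a clone of the new node before it; and when no old nodes remain, a clone of the new node
// is appended. The `extra` counter tracks how many old nodes are left to remove at the end.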
156 | while (newNode) { 157 | let insertedNode; 158 | 159 | if ( 160 | keyedNodes && 161 | (newKey = getKey(newNode)) && 162 | (foundNode = keyedNodes[newKey]) 163 | ) { 164 | delete keyedNodes[newKey]; 165 | if (foundNode !== oldNode) { 166 | walker[APPLY_TRANSITION](() => 167 | oldParent.insertBefore(foundNode!, oldNode), 168 | ); 169 | } else { 170 | oldNode = oldNode.nextSibling; 171 | } 172 | 173 | await updateNode(foundNode, newNode, walker); 174 | } else if (oldNode) { 175 | checkOld = oldNode; 176 | oldNode = oldNode.nextSibling; 177 | if (getKey(checkOld)) { 178 | insertedNode = newNode.cloneNode(true); 179 | walker[APPLY_TRANSITION](() => 180 | oldParent.insertBefore(insertedNode!, checkOld!), 181 | ); 182 | } else { 183 | await updateNode(checkOld, newNode, walker); 184 | } 185 | } else { 186 | insertedNode = newNode.cloneNode(true); 187 | walker[APPLY_TRANSITION](() => oldParent.appendChild(insertedNode!)); 188 | } 189 | 190 | newNode = (await walker[NEXT_SIBLING](newNode)) as ChildNode; 191 | 192 | // If we didn't insert a node this means we are updating an existing one, so we 193 | // need to decrement the extra counter, so we can skip removing the old node. 194 | if (!insertedNode) extra--; 195 | } 196 | 197 | walker[APPLY_TRANSITION](() => { 198 | // Remove old keyed nodes. 199 | for (oldKey in keyedNodes) { 200 | extra--; 201 | oldParent.removeChild(keyedNodes![oldKey]!); 202 | } 203 | 204 | // If we have any remaining unkeyed nodes remove them from the end. 205 | while (--extra >= 0) oldParent.removeChild(oldParent.lastChild!); 206 | }); 207 | } 208 | 209 | function getKey(node: Node) { 210 | return (node as Element)?.getAttribute?.("key") || (node as Element).id; 211 | } 212 | 213 | /** 214 | * Utility that will walk a html stream and call a callback for each node. 
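 * The stream is decoded and written chunk by chunk into a detached document created with
 * document.implementation.createHTMLDocument(), so nodes become walkable as soon as they are parsed.
 * The returned walker exposes firstChild / nextSibling accessors that skip ignored nodes, invoke
 * options.onNextNode, and wait (one animation frame at a time) whenever the requested node is still
 * the last parsed node of an in-progress chunk, plus an apply-transition helper that wraps DOM
 * patches in document.startViewTransition when the transition option is enabled.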
215 | */ 216 | async function htmlStreamWalker( 217 | stream: ReadableStream, 218 | options: Options = {}, 219 | ): Promise { 220 | const doc = document.implementation.createHTMLDocument(); 221 | 222 | doc.open(); 223 | const decoderStream = new TextDecoderStream(); 224 | const decoderStreamReader = decoderStream.readable.getReader(); 225 | let streamInProgress = true; 226 | 227 | stream.pipeTo(decoderStream.writable); 228 | processStream(); 229 | 230 | async function processStream() { 231 | try { 232 | while (true) { 233 | const { done, value } = await decoderStreamReader.read(); 234 | if (done) { 235 | streamInProgress = false; 236 | break; 237 | } 238 | 239 | doc.write(value); 240 | } 241 | } finally { 242 | doc.close(); 243 | } 244 | } 245 | 246 | while (!doc.documentElement || isLastNodeOfChunk(doc.documentElement)) { 247 | await wait(); 248 | } 249 | 250 | function next(field: "firstChild" | "nextSibling") { 251 | return async (node: Node) => { 252 | if (!node) return null; 253 | 254 | let nextNode = node[field]; 255 | 256 | while (options.shouldIgnoreNode?.(nextNode)) { 257 | nextNode = nextNode![field]; 258 | } 259 | 260 | if (nextNode) await options.onNextNode?.(nextNode); 261 | 262 | const waitChildren = field === "firstChild"; 263 | 264 | while (isLastNodeOfChunk(nextNode as Element, waitChildren)) { 265 | await wait(); 266 | } 267 | 268 | return nextNode; 269 | }; 270 | } 271 | 272 | function isLastNodeOfChunk(node: Node, waitChildren?: boolean) { 273 | if (!node || !streamInProgress || node.nextSibling) { 274 | return false; 275 | } 276 | 277 | if (SPECIAL_TAGS.has(node.nodeName)) { 278 | return !doc.body?.hasChildNodes?.(); 279 | } 280 | 281 | let parent = node.parentElement; 282 | 283 | while (parent) { 284 | if (parent.nextSibling) return false; 285 | parent = parent.parentElement; 286 | } 287 | 288 | // Related issues to this ternary (hard to reproduce in a test): 289 | // https://github.com/brisa-build/diff-dom-streaming/pull/15 290 | // https://github.com/brisa-build/brisa/issues/739 291 | return waitChildren 292 | ? streamInProgress && !node.hasChildNodes?.() 293 | : streamInProgress 294 | } 295 | 296 | return { 297 | root: doc.documentElement, 298 | [FIRST_CHILD]: next("firstChild"), 299 | [NEXT_SIBLING]: next("nextSibling"), 300 | [APPLY_TRANSITION]: (v) => { 301 | if (options.transition && document.startViewTransition) { 302 | // @ts-ignore 303 | window.lastDiffTransition = document.startViewTransition(v); 304 | } else v(); 305 | }, 306 | }; 307 | } 308 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "lib": ["ESNext", "dom", "dom.iterable"], 4 | "module": "esnext", 5 | "baseUrl": "./src", 6 | "target": "esnext", 7 | "moduleResolution": "bundler", 8 | "moduleDetection": "force", 9 | "allowImportingTsExtensions": true, 10 | "verbatimModuleSyntax": true, 11 | "noFallthroughCasesInSwitch": true, 12 | "noEmit": true, 13 | "composite": true, 14 | "strict": true, 15 | "downlevelIteration": true, 16 | "skipLibCheck": true, 17 | "allowSyntheticDefaultImports": true, 18 | "forceConsistentCasingInFileNames": true, 19 | "allowJs": true 20 | }, 21 | "exclude": ["node_modules", "build"] 22 | } 23 | --------------------------------------------------------------------------------