├── .dockerignore ├── .env.example ├── .github └── workflows │ └── main.yml ├── .gitignore ├── .node-version ├── .npmrc ├── .prettierrc.json ├── Dockerfile ├── LICENSE.txt ├── README.md ├── app.test.ts ├── app.ts ├── docker-compose.yml ├── docs ├── README.md └── getting_started.md ├── extension ├── .gitignore ├── Dockerfile ├── README.md ├── addon │ ├── background.html │ ├── icons │ │ ├── LICENSE │ │ ├── logo.png │ │ └── logo_dark.png │ ├── manifest.json │ ├── popup │ │ ├── index.html │ │ └── style.css │ └── single-file │ │ ├── dist │ │ ├── extension-core.js │ │ ├── extension-frames.js │ │ ├── single-file-bootstrap.js │ │ ├── single-file-frames.js │ │ ├── single-file.js │ │ └── web │ │ │ └── hooks │ │ │ ├── hooks-frames-web.js │ │ │ └── hooks-web.js │ │ └── lib │ │ └── single-file │ │ ├── LICENSE.md │ │ ├── fetch.js │ │ ├── frame-tree.js │ │ └── lazy-timeout.js ├── content_scripts │ └── archive.js ├── package-lock.json ├── package.json ├── popup │ └── popup.js └── webpack.config.js ├── index.ts ├── jest.config.ts ├── package-lock.json ├── package.json ├── src ├── helpers.ts ├── ledger │ └── index.ts ├── qldb │ └── index.ts ├── s3 │ ├── index.ts │ └── s3.test.ts ├── store │ ├── index.ts │ └── store.test.ts ├── types │ ├── Annotations.ts │ ├── Bundle.ts │ ├── Data.ts │ ├── File.test.ts │ ├── File.ts │ └── Record.ts └── verify │ ├── index.ts │ └── verify.test.ts ├── tsconfig.json └── ui ├── .dockerignore ├── .gitignore ├── .node-version ├── .npmrc ├── .prettierrc.json ├── Dockerfile ├── default.conf ├── package-lock.json ├── package.json ├── src ├── app.css ├── app.html ├── global.d.ts ├── hooks.ts ├── lib │ ├── Ledger │ │ ├── EditingPanel.svelte │ │ ├── EntryMetadata.svelte │ │ ├── EntryThumbnail.svelte │ │ ├── History.svelte │ │ ├── LedgerEntry.svelte │ │ └── index.ts │ ├── header │ │ ├── Header.svelte │ │ └── logo.svg │ ├── helpers.ts │ ├── stores.ts │ └── types.d.ts └── routes │ ├── __layout.svelte │ ├── index.svelte │ ├── library.svelte │ └── verify.svelte ├── static ├── favicon.png └── robots.txt ├── svelte.config.js └── tsconfig.json /.dockerignore: -------------------------------------------------------------------------------- 1 | ui/ 2 | node_modules/ 3 | out/ -------------------------------------------------------------------------------- /.env.example: -------------------------------------------------------------------------------- 1 | AWS_ACCESS_KEY_ID="" 2 | AWS_SECRET_ACCESS_KEY="" 3 | AWS_REGION="" 4 | LEDGER_NAME="" 5 | BUCKET_NAME="" 6 | DOC_TABLE_NAME="" 7 | 8 | SOURCE_FILES_DIRECTORY="" 9 | SOURCE_FILES_BUCKET="" -------------------------------------------------------------------------------- /.github/workflows/main.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | on: [push] 3 | 4 | permissions: 5 | id-token: write 6 | contents: read 7 | 8 | jobs: 9 | run-tests: 10 | runs-on: ubuntu-latest 11 | 12 | steps: 13 | - uses: actions/checkout@v2 14 | - uses: actions/setup-node@v2 15 | with: 16 | node-version: '16' 17 | - name: Install modules 18 | run: npm install 19 | - name: Configure AWS Credentials 20 | uses: aws-actions/configure-aws-credentials@v4 21 | with: 22 | aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} 23 | aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} 24 | aws-region: eu-central-1 25 | - name: Run tests 26 | run: npm run test 27 | env: 28 | BUCKET_NAME: ${{ secrets.BUCKET_NAME }} 29 | -------------------------------------------------------------------------------- /.gitignore: 
-------------------------------------------------------------------------------- 1 | .DS_Store 2 | function.zip 3 | node_modules 4 | .env 5 | out/ -------------------------------------------------------------------------------- /.node-version: -------------------------------------------------------------------------------- 1 | 16.13.0 2 | -------------------------------------------------------------------------------- /.npmrc: -------------------------------------------------------------------------------- 1 | engine-strict=true 2 | -------------------------------------------------------------------------------- /.prettierrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "singleQuote": true, 3 | "arrowParens": "avoid", 4 | "jsxSingleQuote": true, 5 | "svelteSortOrder": "options-scripts-styles-markup" 6 | } 7 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:16.13.0-alpine3.14 2 | 3 | LABEL version="0.1" 4 | LABEL description="API handling interactiong with Amazon QLDB" 5 | 6 | WORKDIR /app 7 | COPY ["package.json", "package-lock.json", "./"] 8 | RUN npm install 9 | COPY . . 10 | 11 | EXPOSE 3000 12 | 13 | CMD ["npm", "run", "start"] -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | 2 | The Digital Evidence Preservation Toolkit 3 | Copyright (C) 2021 • Basile Simon 4 | 5 | This program is free software: you can redistribute it and/or modify 6 | it under the terms of the GNU General Public License as published by 7 | the Free Software Foundation, either version 3 of the License, or 8 | (at your option) any later version. 9 | 10 | This program is distributed in the hope that it will be useful, 11 | but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | GNU General Public License for more details. 14 | 15 | GNU GENERAL PUBLIC LICENSE 16 | Version 3, 29 June 2007 17 | 18 | Copyright (C) 2007 Free Software Foundation, Inc. 19 | Everyone is permitted to copy and distribute verbatim copies 20 | of this license document, but changing it is not allowed. 21 | 22 | Preamble 23 | 24 | The GNU General Public License is a free, copyleft license for 25 | software and other kinds of works. 26 | 27 | The licenses for most software and other practical works are designed 28 | to take away your freedom to share and change the works. By contrast, 29 | the GNU General Public License is intended to guarantee your freedom to 30 | share and change all versions of a program--to make sure it remains free 31 | software for all its users. We, the Free Software Foundation, use the 32 | GNU General Public License for most of our software; it applies also to 33 | any other work released this way by its authors. You can apply it to 34 | your programs, too. 35 | 36 | When we speak of free software, we are referring to freedom, not 37 | price. Our General Public Licenses are designed to make sure that you 38 | have the freedom to distribute copies of free software (and charge for 39 | them if you wish), that you receive source code or can get it if you 40 | want it, that you can change the software or use pieces of it in new 41 | free programs, and that you know you can do these things. 
42 | 43 | To protect your rights, we need to prevent others from denying you 44 | these rights or asking you to surrender the rights. Therefore, you have 45 | certain responsibilities if you distribute copies of the software, or if 46 | you modify it: responsibilities to respect the freedom of others. 47 | 48 | For example, if you distribute copies of such a program, whether 49 | gratis or for a fee, you must pass on to the recipients the same 50 | freedoms that you received. You must make sure that they, too, receive 51 | or can get the source code. And you must show them these terms so they 52 | know their rights. 53 | 54 | Developers that use the GNU GPL protect your rights with two steps: 55 | (1) assert copyright on the software, and (2) offer you this License 56 | giving you legal permission to copy, distribute and/or modify it. 57 | 58 | For the developers' and authors' protection, the GPL clearly explains 59 | that there is no warranty for this free software. For both users' and 60 | authors' sake, the GPL requires that modified versions be marked as 61 | changed, so that their problems will not be attributed erroneously to 62 | authors of previous versions. 63 | 64 | Some devices are designed to deny users access to install or run 65 | modified versions of the software inside them, although the manufacturer 66 | can do so. This is fundamentally incompatible with the aim of 67 | protecting users' freedom to change the software. The systematic 68 | pattern of such abuse occurs in the area of products for individuals to 69 | use, which is precisely where it is most unacceptable. Therefore, we 70 | have designed this version of the GPL to prohibit the practice for those 71 | products. If such problems arise substantially in other domains, we 72 | stand ready to extend this provision to those domains in future versions 73 | of the GPL, as needed to protect the freedom of users. 74 | 75 | Finally, every program is threatened constantly by software patents. 76 | States should not allow patents to restrict development and use of 77 | software on general-purpose computers, but in those that do, we wish to 78 | avoid the special danger that patents applied to a free program could 79 | make it effectively proprietary. To prevent this, the GPL assures that 80 | patents cannot be used to render the program non-free. 81 | 82 | The precise terms and conditions for copying, distribution and 83 | modification follow. 84 | 85 | TERMS AND CONDITIONS 86 | 87 | 0. Definitions. 88 | 89 | "This License" refers to version 3 of the GNU General Public License. 90 | 91 | "Copyright" also means copyright-like laws that apply to other kinds of 92 | works, such as semiconductor masks. 93 | 94 | "The Program" refers to any copyrightable work licensed under this 95 | License. Each licensee is addressed as "you". "Licensees" and 96 | "recipients" may be individuals or organizations. 97 | 98 | To "modify" a work means to copy from or adapt all or part of the work 99 | in a fashion requiring copyright permission, other than the making of an 100 | exact copy. The resulting work is called a "modified version" of the 101 | earlier work or a work "based on" the earlier work. 102 | 103 | A "covered work" means either the unmodified Program or a work based 104 | on the Program. 
105 | 106 | To "propagate" a work means to do anything with it that, without 107 | permission, would make you directly or secondarily liable for 108 | infringement under applicable copyright law, except executing it on a 109 | computer or modifying a private copy. Propagation includes copying, 110 | distribution (with or without modification), making available to the 111 | public, and in some countries other activities as well. 112 | 113 | To "convey" a work means any kind of propagation that enables other 114 | parties to make or receive copies. Mere interaction with a user through 115 | a computer network, with no transfer of a copy, is not conveying. 116 | 117 | An interactive user interface displays "Appropriate Legal Notices" 118 | to the extent that it includes a convenient and prominently visible 119 | feature that (1) displays an appropriate copyright notice, and (2) 120 | tells the user that there is no warranty for the work (except to the 121 | extent that warranties are provided), that licensees may convey the 122 | work under this License, and how to view a copy of this License. If 123 | the interface presents a list of user commands or options, such as a 124 | menu, a prominent item in the list meets this criterion. 125 | 126 | 1. Source Code. 127 | 128 | The "source code" for a work means the preferred form of the work 129 | for making modifications to it. "Object code" means any non-source 130 | form of a work. 131 | 132 | A "Standard Interface" means an interface that either is an official 133 | standard defined by a recognized standards body, or, in the case of 134 | interfaces specified for a particular programming language, one that 135 | is widely used among developers working in that language. 136 | 137 | The "System Libraries" of an executable work include anything, other 138 | than the work as a whole, that (a) is included in the normal form of 139 | packaging a Major Component, but which is not part of that Major 140 | Component, and (b) serves only to enable use of the work with that 141 | Major Component, or to implement a Standard Interface for which an 142 | implementation is available to the public in source code form. A 143 | "Major Component", in this context, means a major essential component 144 | (kernel, window system, and so on) of the specific operating system 145 | (if any) on which the executable work runs, or a compiler used to 146 | produce the work, or an object code interpreter used to run it. 147 | 148 | The "Corresponding Source" for a work in object code form means all 149 | the source code needed to generate, install, and (for an executable 150 | work) run the object code and to modify the work, including scripts to 151 | control those activities. However, it does not include the work's 152 | System Libraries, or general-purpose tools or generally available free 153 | programs which are used unmodified in performing those activities but 154 | which are not part of the work. For example, Corresponding Source 155 | includes interface definition files associated with source files for 156 | the work, and the source code for shared libraries and dynamically 157 | linked subprograms that the work is specifically designed to require, 158 | such as by intimate data communication or control flow between those 159 | subprograms and other parts of the work. 160 | 161 | The Corresponding Source need not include anything that users 162 | can regenerate automatically from other parts of the Corresponding 163 | Source. 
164 | 165 | The Corresponding Source for a work in source code form is that 166 | same work. 167 | 168 | 2. Basic Permissions. 169 | 170 | All rights granted under this License are granted for the term of 171 | copyright on the Program, and are irrevocable provided the stated 172 | conditions are met. This License explicitly affirms your unlimited 173 | permission to run the unmodified Program. The output from running a 174 | covered work is covered by this License only if the output, given its 175 | content, constitutes a covered work. This License acknowledges your 176 | rights of fair use or other equivalent, as provided by copyright law. 177 | 178 | You may make, run and propagate covered works that you do not 179 | convey, without conditions so long as your license otherwise remains 180 | in force. You may convey covered works to others for the sole purpose 181 | of having them make modifications exclusively for you, or provide you 182 | with facilities for running those works, provided that you comply with 183 | the terms of this License in conveying all material for which you do 184 | not control copyright. Those thus making or running the covered works 185 | for you must do so exclusively on your behalf, under your direction 186 | and control, on terms that prohibit them from making any copies of 187 | your copyrighted material outside their relationship with you. 188 | 189 | Conveying under any other circumstances is permitted solely under 190 | the conditions stated below. Sublicensing is not allowed; section 10 191 | makes it unnecessary. 192 | 193 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law. 194 | 195 | No covered work shall be deemed part of an effective technological 196 | measure under any applicable law fulfilling obligations under article 197 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or 198 | similar laws prohibiting or restricting circumvention of such 199 | measures. 200 | 201 | When you convey a covered work, you waive any legal power to forbid 202 | circumvention of technological measures to the extent such circumvention 203 | is effected by exercising rights under this License with respect to 204 | the covered work, and you disclaim any intention to limit operation or 205 | modification of the work as a means of enforcing, against the work's 206 | users, your or third parties' legal rights to forbid circumvention of 207 | technological measures. 208 | 209 | 4. Conveying Verbatim Copies. 210 | 211 | You may convey verbatim copies of the Program's source code as you 212 | receive it, in any medium, provided that you conspicuously and 213 | appropriately publish on each copy an appropriate copyright notice; 214 | keep intact all notices stating that this License and any 215 | non-permissive terms added in accord with section 7 apply to the code; 216 | keep intact all notices of the absence of any warranty; and give all 217 | recipients a copy of this License along with the Program. 218 | 219 | You may charge any price or no price for each copy that you convey, 220 | and you may offer support or warranty protection for a fee. 221 | 222 | 5. Conveying Modified Source Versions. 223 | 224 | You may convey a work based on the Program, or the modifications to 225 | produce it from the Program, in the form of source code under the 226 | terms of section 4, provided that you also meet all of these conditions: 227 | 228 | a) The work must carry prominent notices stating that you modified 229 | it, and giving a relevant date. 
230 | 231 | b) The work must carry prominent notices stating that it is 232 | released under this License and any conditions added under section 233 | 7. This requirement modifies the requirement in section 4 to 234 | "keep intact all notices". 235 | 236 | c) You must license the entire work, as a whole, under this 237 | License to anyone who comes into possession of a copy. This 238 | License will therefore apply, along with any applicable section 7 239 | additional terms, to the whole of the work, and all its parts, 240 | regardless of how they are packaged. This License gives no 241 | permission to license the work in any other way, but it does not 242 | invalidate such permission if you have separately received it. 243 | 244 | d) If the work has interactive user interfaces, each must display 245 | Appropriate Legal Notices; however, if the Program has interactive 246 | interfaces that do not display Appropriate Legal Notices, your 247 | work need not make them do so. 248 | 249 | A compilation of a covered work with other separate and independent 250 | works, which are not by their nature extensions of the covered work, 251 | and which are not combined with it such as to form a larger program, 252 | in or on a volume of a storage or distribution medium, is called an 253 | "aggregate" if the compilation and its resulting copyright are not 254 | used to limit the access or legal rights of the compilation's users 255 | beyond what the individual works permit. Inclusion of a covered work 256 | in an aggregate does not cause this License to apply to the other 257 | parts of the aggregate. 258 | 259 | 6. Conveying Non-Source Forms. 260 | 261 | You may convey a covered work in object code form under the terms 262 | of sections 4 and 5, provided that you also convey the 263 | machine-readable Corresponding Source under the terms of this License, 264 | in one of these ways: 265 | 266 | a) Convey the object code in, or embodied in, a physical product 267 | (including a physical distribution medium), accompanied by the 268 | Corresponding Source fixed on a durable physical medium 269 | customarily used for software interchange. 270 | 271 | b) Convey the object code in, or embodied in, a physical product 272 | (including a physical distribution medium), accompanied by a 273 | written offer, valid for at least three years and valid for as 274 | long as you offer spare parts or customer support for that product 275 | model, to give anyone who possesses the object code either (1) a 276 | copy of the Corresponding Source for all the software in the 277 | product that is covered by this License, on a durable physical 278 | medium customarily used for software interchange, for a price no 279 | more than your reasonable cost of physically performing this 280 | conveying of source, or (2) access to copy the 281 | Corresponding Source from a network server at no charge. 282 | 283 | c) Convey individual copies of the object code with a copy of the 284 | written offer to provide the Corresponding Source. This 285 | alternative is allowed only occasionally and noncommercially, and 286 | only if you received the object code with such an offer, in accord 287 | with subsection 6b. 288 | 289 | d) Convey the object code by offering access from a designated 290 | place (gratis or for a charge), and offer equivalent access to the 291 | Corresponding Source in the same way through the same place at no 292 | further charge. 
You need not require recipients to copy the 293 | Corresponding Source along with the object code. If the place to 294 | copy the object code is a network server, the Corresponding Source 295 | may be on a different server (operated by you or a third party) 296 | that supports equivalent copying facilities, provided you maintain 297 | clear directions next to the object code saying where to find the 298 | Corresponding Source. Regardless of what server hosts the 299 | Corresponding Source, you remain obligated to ensure that it is 300 | available for as long as needed to satisfy these requirements. 301 | 302 | e) Convey the object code using peer-to-peer transmission, provided 303 | you inform other peers where the object code and Corresponding 304 | Source of the work are being offered to the general public at no 305 | charge under subsection 6d. 306 | 307 | A separable portion of the object code, whose source code is excluded 308 | from the Corresponding Source as a System Library, need not be 309 | included in conveying the object code work. 310 | 311 | A "User Product" is either (1) a "consumer product", which means any 312 | tangible personal property which is normally used for personal, family, 313 | or household purposes, or (2) anything designed or sold for incorporation 314 | into a dwelling. In determining whether a product is a consumer product, 315 | doubtful cases shall be resolved in favor of coverage. For a particular 316 | product received by a particular user, "normally used" refers to a 317 | typical or common use of that class of product, regardless of the status 318 | of the particular user or of the way in which the particular user 319 | actually uses, or expects or is expected to use, the product. A product 320 | is a consumer product regardless of whether the product has substantial 321 | commercial, industrial or non-consumer uses, unless such uses represent 322 | the only significant mode of use of the product. 323 | 324 | "Installation Information" for a User Product means any methods, 325 | procedures, authorization keys, or other information required to install 326 | and execute modified versions of a covered work in that User Product from 327 | a modified version of its Corresponding Source. The information must 328 | suffice to ensure that the continued functioning of the modified object 329 | code is in no case prevented or interfered with solely because 330 | modification has been made. 331 | 332 | If you convey an object code work under this section in, or with, or 333 | specifically for use in, a User Product, and the conveying occurs as 334 | part of a transaction in which the right of possession and use of the 335 | User Product is transferred to the recipient in perpetuity or for a 336 | fixed term (regardless of how the transaction is characterized), the 337 | Corresponding Source conveyed under this section must be accompanied 338 | by the Installation Information. But this requirement does not apply 339 | if neither you nor any third party retains the ability to install 340 | modified object code on the User Product (for example, the work has 341 | been installed in ROM). 342 | 343 | The requirement to provide Installation Information does not include a 344 | requirement to continue to provide support service, warranty, or updates 345 | for a work that has been modified or installed by the recipient, or for 346 | the User Product in which it has been modified or installed. 
Access to a 347 | network may be denied when the modification itself materially and 348 | adversely affects the operation of the network or violates the rules and 349 | protocols for communication across the network. 350 | 351 | Corresponding Source conveyed, and Installation Information provided, 352 | in accord with this section must be in a format that is publicly 353 | documented (and with an implementation available to the public in 354 | source code form), and must require no special password or key for 355 | unpacking, reading or copying. 356 | 357 | 7. Additional Terms. 358 | 359 | "Additional permissions" are terms that supplement the terms of this 360 | License by making exceptions from one or more of its conditions. 361 | Additional permissions that are applicable to the entire Program shall 362 | be treated as though they were included in this License, to the extent 363 | that they are valid under applicable law. If additional permissions 364 | apply only to part of the Program, that part may be used separately 365 | under those permissions, but the entire Program remains governed by 366 | this License without regard to the additional permissions. 367 | 368 | When you convey a copy of a covered work, you may at your option 369 | remove any additional permissions from that copy, or from any part of 370 | it. (Additional permissions may be written to require their own 371 | removal in certain cases when you modify the work.) You may place 372 | additional permissions on material, added by you to a covered work, 373 | for which you have or can give appropriate copyright permission. 374 | 375 | Notwithstanding any other provision of this License, for material you 376 | add to a covered work, you may (if authorized by the copyright holders of 377 | that material) supplement the terms of this License with terms: 378 | 379 | a) Disclaiming warranty or limiting liability differently from the 380 | terms of sections 15 and 16 of this License; or 381 | 382 | b) Requiring preservation of specified reasonable legal notices or 383 | author attributions in that material or in the Appropriate Legal 384 | Notices displayed by works containing it; or 385 | 386 | c) Prohibiting misrepresentation of the origin of that material, or 387 | requiring that modified versions of such material be marked in 388 | reasonable ways as different from the original version; or 389 | 390 | d) Limiting the use for publicity purposes of names of licensors or 391 | authors of the material; or 392 | 393 | e) Declining to grant rights under trademark law for use of some 394 | trade names, trademarks, or service marks; or 395 | 396 | f) Requiring indemnification of licensors and authors of that 397 | material by anyone who conveys the material (or modified versions of 398 | it) with contractual assumptions of liability to the recipient, for 399 | any liability that these contractual assumptions directly impose on 400 | those licensors and authors. 401 | 402 | All other non-permissive additional terms are considered "further 403 | restrictions" within the meaning of section 10. If the Program as you 404 | received it, or any part of it, contains a notice stating that it is 405 | governed by this License along with a term that is a further 406 | restriction, you may remove that term. 
If a license document contains 407 | a further restriction but permits relicensing or conveying under this 408 | License, you may add to a covered work material governed by the terms 409 | of that license document, provided that the further restriction does 410 | not survive such relicensing or conveying. 411 | 412 | If you add terms to a covered work in accord with this section, you 413 | must place, in the relevant source files, a statement of the 414 | additional terms that apply to those files, or a notice indicating 415 | where to find the applicable terms. 416 | 417 | Additional terms, permissive or non-permissive, may be stated in the 418 | form of a separately written license, or stated as exceptions; 419 | the above requirements apply either way. 420 | 421 | 8. Termination. 422 | 423 | You may not propagate or modify a covered work except as expressly 424 | provided under this License. Any attempt otherwise to propagate or 425 | modify it is void, and will automatically terminate your rights under 426 | this License (including any patent licenses granted under the third 427 | paragraph of section 11). 428 | 429 | However, if you cease all violation of this License, then your 430 | license from a particular copyright holder is reinstated (a) 431 | provisionally, unless and until the copyright holder explicitly and 432 | finally terminates your license, and (b) permanently, if the copyright 433 | holder fails to notify you of the violation by some reasonable means 434 | prior to 60 days after the cessation. 435 | 436 | Moreover, your license from a particular copyright holder is 437 | reinstated permanently if the copyright holder notifies you of the 438 | violation by some reasonable means, this is the first time you have 439 | received notice of violation of this License (for any work) from that 440 | copyright holder, and you cure the violation prior to 30 days after 441 | your receipt of the notice. 442 | 443 | Termination of your rights under this section does not terminate the 444 | licenses of parties who have received copies or rights from you under 445 | this License. If your rights have been terminated and not permanently 446 | reinstated, you do not qualify to receive new licenses for the same 447 | material under section 10. 448 | 449 | 9. Acceptance Not Required for Having Copies. 450 | 451 | You are not required to accept this License in order to receive or 452 | run a copy of the Program. Ancillary propagation of a covered work 453 | occurring solely as a consequence of using peer-to-peer transmission 454 | to receive a copy likewise does not require acceptance. However, 455 | nothing other than this License grants you permission to propagate or 456 | modify any covered work. These actions infringe copyright if you do 457 | not accept this License. Therefore, by modifying or propagating a 458 | covered work, you indicate your acceptance of this License to do so. 459 | 460 | 10. Automatic Licensing of Downstream Recipients. 461 | 462 | Each time you convey a covered work, the recipient automatically 463 | receives a license from the original licensors, to run, modify and 464 | propagate that work, subject to this License. You are not responsible 465 | for enforcing compliance by third parties with this License. 466 | 467 | An "entity transaction" is a transaction transferring control of an 468 | organization, or substantially all assets of one, or subdividing an 469 | organization, or merging organizations. 
If propagation of a covered 470 | work results from an entity transaction, each party to that 471 | transaction who receives a copy of the work also receives whatever 472 | licenses to the work the party's predecessor in interest had or could 473 | give under the previous paragraph, plus a right to possession of the 474 | Corresponding Source of the work from the predecessor in interest, if 475 | the predecessor has it or can get it with reasonable efforts. 476 | 477 | You may not impose any further restrictions on the exercise of the 478 | rights granted or affirmed under this License. For example, you may 479 | not impose a license fee, royalty, or other charge for exercise of 480 | rights granted under this License, and you may not initiate litigation 481 | (including a cross-claim or counterclaim in a lawsuit) alleging that 482 | any patent claim is infringed by making, using, selling, offering for 483 | sale, or importing the Program or any portion of it. 484 | 485 | 11. Patents. 486 | 487 | A "contributor" is a copyright holder who authorizes use under this 488 | License of the Program or a work on which the Program is based. The 489 | work thus licensed is called the contributor's "contributor version". 490 | 491 | A contributor's "essential patent claims" are all patent claims 492 | owned or controlled by the contributor, whether already acquired or 493 | hereafter acquired, that would be infringed by some manner, permitted 494 | by this License, of making, using, or selling its contributor version, 495 | but do not include claims that would be infringed only as a 496 | consequence of further modification of the contributor version. For 497 | purposes of this definition, "control" includes the right to grant 498 | patent sublicenses in a manner consistent with the requirements of 499 | this License. 500 | 501 | Each contributor grants you a non-exclusive, worldwide, royalty-free 502 | patent license under the contributor's essential patent claims, to 503 | make, use, sell, offer for sale, import and otherwise run, modify and 504 | propagate the contents of its contributor version. 505 | 506 | In the following three paragraphs, a "patent license" is any express 507 | agreement or commitment, however denominated, not to enforce a patent 508 | (such as an express permission to practice a patent or covenant not to 509 | sue for patent infringement). To "grant" such a patent license to a 510 | party means to make such an agreement or commitment not to enforce a 511 | patent against the party. 512 | 513 | If you convey a covered work, knowingly relying on a patent license, 514 | and the Corresponding Source of the work is not available for anyone 515 | to copy, free of charge and under the terms of this License, through a 516 | publicly available network server or other readily accessible means, 517 | then you must either (1) cause the Corresponding Source to be so 518 | available, or (2) arrange to deprive yourself of the benefit of the 519 | patent license for this particular work, or (3) arrange, in a manner 520 | consistent with the requirements of this License, to extend the patent 521 | license to downstream recipients. "Knowingly relying" means you have 522 | actual knowledge that, but for the patent license, your conveying the 523 | covered work in a country, or your recipient's use of the covered work 524 | in a country, would infringe one or more identifiable patents in that 525 | country that you have reason to believe are valid. 
526 | 527 | If, pursuant to or in connection with a single transaction or 528 | arrangement, you convey, or propagate by procuring conveyance of, a 529 | covered work, and grant a patent license to some of the parties 530 | receiving the covered work authorizing them to use, propagate, modify 531 | or convey a specific copy of the covered work, then the patent license 532 | you grant is automatically extended to all recipients of the covered 533 | work and works based on it. 534 | 535 | A patent license is "discriminatory" if it does not include within 536 | the scope of its coverage, prohibits the exercise of, or is 537 | conditioned on the non-exercise of one or more of the rights that are 538 | specifically granted under this License. You may not convey a covered 539 | work if you are a party to an arrangement with a third party that is 540 | in the business of distributing software, under which you make payment 541 | to the third party based on the extent of your activity of conveying 542 | the work, and under which the third party grants, to any of the 543 | parties who would receive the covered work from you, a discriminatory 544 | patent license (a) in connection with copies of the covered work 545 | conveyed by you (or copies made from those copies), or (b) primarily 546 | for and in connection with specific products or compilations that 547 | contain the covered work, unless you entered into that arrangement, 548 | or that patent license was granted, prior to 28 March 2007. 549 | 550 | Nothing in this License shall be construed as excluding or limiting 551 | any implied license or other defenses to infringement that may 552 | otherwise be available to you under applicable patent law. 553 | 554 | 12. No Surrender of Others' Freedom. 555 | 556 | If conditions are imposed on you (whether by court order, agreement or 557 | otherwise) that contradict the conditions of this License, they do not 558 | excuse you from the conditions of this License. If you cannot convey a 559 | covered work so as to satisfy simultaneously your obligations under this 560 | License and any other pertinent obligations, then as a consequence you may 561 | not convey it at all. For example, if you agree to terms that obligate you 562 | to collect a royalty for further conveying from those to whom you convey 563 | the Program, the only way you could satisfy both those terms and this 564 | License would be to refrain entirely from conveying the Program. 565 | 566 | 13. Use with the GNU Affero General Public License. 567 | 568 | Notwithstanding any other provision of this License, you have 569 | permission to link or combine any covered work with a work licensed 570 | under version 3 of the GNU Affero General Public License into a single 571 | combined work, and to convey the resulting work. The terms of this 572 | License will continue to apply to the part which is the covered work, 573 | but the special requirements of the GNU Affero General Public License, 574 | section 13, concerning interaction through a network will apply to the 575 | combination as such. 576 | 577 | 14. Revised Versions of this License. 578 | 579 | The Free Software Foundation may publish revised and/or new versions of 580 | the GNU General Public License from time to time. Such new versions will 581 | be similar in spirit to the present version, but may differ in detail to 582 | address new problems or concerns. 583 | 584 | Each version is given a distinguishing version number. 
If the 585 | Program specifies that a certain numbered version of the GNU General 586 | Public License "or any later version" applies to it, you have the 587 | option of following the terms and conditions either of that numbered 588 | version or of any later version published by the Free Software 589 | Foundation. If the Program does not specify a version number of the 590 | GNU General Public License, you may choose any version ever published 591 | by the Free Software Foundation. 592 | 593 | If the Program specifies that a proxy can decide which future 594 | versions of the GNU General Public License can be used, that proxy's 595 | public statement of acceptance of a version permanently authorizes you 596 | to choose that version for the Program. 597 | 598 | Later license versions may give you additional or different 599 | permissions. However, no additional obligations are imposed on any 600 | author or copyright holder as a result of your choosing to follow a 601 | later version. 602 | 603 | 15. Disclaimer of Warranty. 604 | 605 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY 606 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT 607 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY 608 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, 609 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 610 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM 611 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF 612 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 613 | 614 | 16. Limitation of Liability. 615 | 616 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING 617 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS 618 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY 619 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE 620 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF 621 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD 622 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), 623 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF 624 | SUCH DAMAGES. 625 | 626 | 17. Interpretation of Sections 15 and 16. 627 | 628 | If the disclaimer of warranty and limitation of liability provided 629 | above cannot be given local legal effect according to their terms, 630 | reviewing courts shall apply local law that most closely approximates 631 | an absolute waiver of all civil liability in connection with the 632 | Program, unless a warranty or assumption of liability accompanies a 633 | copy of the Program in return for a fee. 634 | 635 | END OF TERMS AND CONDITIONS 636 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # The Digital Evidence Preservation Toolkit 2 | 3 | _(Or **DEPToolkit** for short. Bit of a mouthful otherwise)_ 4 | 5 |
6 | 7 |
8 | 
9 | A proof-of-concept tool for researchers and small teams sifting through online material. With only one click of the mouse, the material will be **archived in a framework demonstrating chain of custody and stored durably**. Once included in the growing database, users will be able to go back to search through and **annotate the material**, and to **export working copies** of that material for publication and dissemination.
10 | 
11 | A database built this way can be handed to a prosecutor ten years down the line, and they will be able to say with mathematical certainty: “the material in this archive is identical and contemporary to the one saved at the time, ten years ago.”
12 | 
13 | Built with the support of **Prototype Fund**, the **German Federal Ministry for Education and Research**, the **Open Knowledge Foundation**, and **Amazon Web Services**.
14 | 
15 | ### Architecture
16 | 
17 | The Docker Compose setup comprises three services:
18 | 
19 | - An Express/TypeScript API,
20 | - A plain JS browser extension,
21 | - And a frontend.
22 | 
23 | To start the whole app:
24 | 
25 | ```sh
26 | $ docker-compose up
27 | ```
28 | 
29 | ### Help & Contact
30 | 
31 | - [Getting started](docs/getting_started.md)
32 | - [Technical documentation](docs/)
33 | - Email: basile at digitalevidencetoolkit dot org
34 | 
--------------------------------------------------------------------------------
/app.test.ts:
--------------------------------------------------------------------------------
1 | import supertest from 'supertest';
2 | import { ImportMock } from 'ts-mock-imports';
3 | 
4 | import { pprint } from './src/helpers';
5 | import * as Ledger from './src/ledger';
6 | 
7 | import app from './app';
8 | 
9 | // TODO
10 | // - isolate test env (pass outDir as a param to app, create a specific outDir
11 | //   for test)
12 | // - test that an existing file is returned
13 | // - test that an existing dotFile is not returned
14 | describe('The file endpoint', () => {
15 |   it("should 404 if the file doesn't exist.", async () => {
16 |     // return supertest(app)
17 |     //   .get('/file/foo')
18 |     //   .then(response => {
19 |     //     expect(response.status).toBe(500);
20 |     //   });
21 |   });
22 | 
23 |   it('should 200 with the file if it exists', () => {
24 |     // create a file in the test dir
25 |     // ask for it
26 |     // file is returned
27 |   });
28 | 
29 |   it('should not return an existing dot file', () => {
30 |     // create a dot file in the test dir
31 |     // ask for it
32 |     // file is not returned
33 |   });
34 | });
35 | 
36 | describe('The history endpoint', () => {
37 |   afterEach(() => {
38 |     ImportMock.restore();
39 |   });
40 | 
41 |   it("should use the ledger's history API", () => {
42 |     const result = { foo: 'bar' };
43 | 
44 |     const getListHistoryMock = ImportMock.mockFunction(
45 |       Ledger,
46 |       'listDocHistory',
47 |       Promise.resolve(result)
48 |     );
49 | 
50 |     const name = 'foo';
51 | 
52 |     return supertest(app)
53 |       .get(`/history/${name}`)
54 |       .then(response => {
55 |         expect(getListHistoryMock.calledOnce).toBe(true);
56 |         expect(getListHistoryMock.calledWith(name)).toBe(true);
57 | 
58 |         expect(response.status).toBe(200);
59 |         expect(response.text).toBe(pprint(result));
60 |       });
61 |   });
62 | });
63 | 
64 | describe('The export-copy endpoint', () => {});
65 | 
--------------------------------------------------------------------------------
/app.ts:
--------------------------------------------------------------------------------
1 | import express, { Application, Request, Response } from 'express';
2 | import { config }
from 'dotenv'; 3 | import { join } from 'path'; 4 | import cors from 'cors'; 5 | import sharp from 'sharp'; 6 | import formidable, { Fields, Files } from 'formidable'; 7 | import chalk from 'chalk'; 8 | import fs from 'fs/promises'; 9 | import { parse } from 'path'; 10 | 11 | import * as Ledger from './src/ledger'; 12 | import * as Store from './src/store'; 13 | import * as Bundle from './src/types/Bundle'; 14 | import * as Record from './src/types/Record'; 15 | import * as Verify from './src/verify'; 16 | 17 | import { pprint, cleanupBase64 } from './src/helpers'; 18 | 19 | // set up .env variables as environment variables 20 | config(); 21 | 22 | const outDir = join(__dirname, './out'); 23 | 24 | const app: Application = express(); 25 | 26 | app.use(cors()); 27 | 28 | // Body parsing Middleware 29 | app.use(express.json()); 30 | app.use(express.urlencoded({ extended: true })); 31 | 32 | app.get('/file/:id', async (req: Request, res: Response) => { 33 | const { id } = req.params; 34 | return Store.getFile(id, res); 35 | }); 36 | 37 | app.get( 38 | '/history/:sku', 39 | async (req: Request, res: Response): Promise => { 40 | const { sku } = req.params; 41 | const result = await Ledger.listDocHistory(sku); 42 | console.log(chalk.bold(`GET /history/${sku}`)); 43 | return res.status(200).send(pprint(result)); 44 | } 45 | ); 46 | 47 | app.get( 48 | '/export-copy/:sku', 49 | async (req: Request, res: Response): Promise => { 50 | const { sku } = req.params; 51 | const rootDir = outDir; 52 | const options = { 53 | root: rootDir, 54 | dotfiles: 'deny', 55 | }; 56 | // `sku` comes with .zip at the end, which we must remove 57 | const cleanSku = parse(sku).name; 58 | const result = await Ledger.getDoc(cleanSku, 'id'); 59 | if (result === null) { 60 | res.status(404).send(`Could not find the resource you asked for: ${sku}`); 61 | } else { 62 | await Store.makeZip(result, rootDir, rootDir); 63 | res 64 | .set(`Content-Type`, `application/octet-stream`) 65 | .set(`Content-Disposition`, `attachment; filename=${sku}`) 66 | .sendFile(`${sku}`, options); 67 | } 68 | } 69 | ); 70 | 71 | app.get( 72 | '/list-docs', 73 | async (req: Request, res: Response): Promise => { 74 | const result = await Ledger.listDocs(); 75 | console.log(chalk.bold(`GET /list-docs`)); 76 | return res.status(200).send(pprint(result)); 77 | } 78 | ); 79 | 80 | app.post('/form', async (req: Request, res: Response): Promise => { 81 | console.log(chalk.bold(`POST /form`)); 82 | return new Promise((resolve, reject) => { 83 | const form = new formidable.IncomingForm(); 84 | form.parse(req, async (err: Error, fields: Fields): Promise => { 85 | if (err) { 86 | resolve(res.status(400).send(`${err.name}: ${err.message}`)); // FIXME: don't expose js errors to public 87 | } 88 | const { url, title, scr } = fields; 89 | const base64Data = cleanupBase64(scr as string); // FIXME: don't use 'as string', instead make sure we have string instead of string[] 90 | const screenshotData = new Buffer(base64Data, 'base64'); 91 | const thumbnailData = await sharp(screenshotData) 92 | .resize(320, 240, { fit: 'inside' }) 93 | .toBuffer(); 94 | let onefileData: string; 95 | onefileData = fields.onefile as string; 96 | 97 | const screenshot = { kind: 'screenshot' as const, data: screenshotData }; 98 | const thumbnail = { 99 | kind: 'screenshot_thumbnail' as const, 100 | data: thumbnailData, 101 | }; 102 | const onefile = { kind: 'one_file' as const, data: onefileData }; 103 | 104 | await Store.newBundle([screenshot, thumbnail, onefile], { 105 | type: 'local', 
106 | directory: outDir, 107 | }) 108 | .then((bundle: Bundle.Bundle) => { 109 | const record = { 110 | bundle, 111 | annotations: { description: '' }, 112 | data: { url: url as string, title: title as string }, 113 | }; 114 | return record; 115 | }) 116 | .then((r: Record.Record) => { 117 | return Ledger.insertDoc(r); 118 | }) 119 | .then(_ => { 120 | console.log(`ledger inserted correctly`); 121 | resolve(res.status(200).send('Received POST on /form')); 122 | }) 123 | .catch(e => { 124 | resolve( 125 | res.status(422).send(`${e.name} (type ${e.type}): ${e.message}`) 126 | ); 127 | }); 128 | }); 129 | }); 130 | }); 131 | 132 | app.post( 133 | '/edit-description/:sku', 134 | async (req: Request, res: Response): Promise => { 135 | const { sku } = req.params; 136 | console.log(chalk.bold(`POST /edit-description/${sku}`)); 137 | return new Promise((resolve, reject) => { 138 | const form = new formidable.IncomingForm(); 139 | form.parse(req, async (err: Error, fields: Fields): Promise => { 140 | if (err) { 141 | resolve(res.status(400).send(`${err.name}: ${err.message}`)); // FIXME: don't expose js errors to public 142 | } 143 | const { description } = fields; 144 | const update = { description: description as string }; 145 | Ledger.updateDoc(sku, update); 146 | 147 | resolve(res.status(200).send(`Wrote description to ${sku}`)); 148 | }); 149 | }); 150 | } 151 | ); 152 | 153 | app.post('/verify', async (req: Request, res: Response): Promise => { 154 | console.log(chalk.bold(`POST /verify`)); 155 | return new Promise((resolve, reject) => { 156 | const form = new formidable.IncomingForm(); 157 | form.parse(req, async (err: Error, fields: Fields, files: Files) => { 158 | // verify each file by opening it and sending it to check 159 | // (which involves hashing and comparing to QLDB), then building 160 | // the Response one file at a time and sending it once 161 | Promise.all( 162 | Object.keys(files).map(async i => { 163 | await fs 164 | // @ts-expect-error 165 | .readFile(files[i].path) 166 | .then(f => Verify.verifyFile(f)) 167 | .then(result => 168 | res.write( 169 | result 170 | ? JSON.stringify(result) 171 | : JSON.stringify({ not_found: 'item not found' }) 172 | ) 173 | ); 174 | }) 175 | ).then(() => resolve(res.send())); 176 | }); 177 | }); 178 | }); 179 | 180 | export default app; 181 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3' 2 | 3 | services: 4 | endpoint: 5 | restart: always 6 | build: 7 | context: . 8 | dockerfile: ./Dockerfile 9 | ports: 10 | - '3000:3000' 11 | networks: 12 | - app 13 | volumes: 14 | - './out:/app/out' 15 | 16 | ui: 17 | build: 18 | context: ui/ 19 | dockerfile: Dockerfile 20 | env_file: .env 21 | logging: 22 | driver: none 23 | ports: 24 | - '8000:80' 25 | networks: 26 | - app 27 | depends_on: 28 | - endpoint 29 | 30 | networks: 31 | app: 32 | -------------------------------------------------------------------------------- /docs/README.md: -------------------------------------------------------------------------------- 1 | Welcome to the documentation of the Digital Evidence Preservation Toolkit, a one-click tool to archive and annotate webpages while demonstrating chain of custody throughout. The Toolkit is a proof-of-concept software for researchers and small teams sifting through online material. 
2 | 
3 | With only one click of the mouse, the material will be **archived in a framework demonstrating chain of custody** and **stored durably**. Once included in the growing database, users will be able to **go back to search through** and **annotate the material**, and to **export working copies** of that material for publication and dissemination.
4 | 
5 | A database built this way can be handed to a prosecutor ten years down the line, and they will be able to say with mathematical certainty: **“the material in this archive is identical and contemporary to the one saved at the time, ten years ago.”**
6 | 
7 | ---
8 | 
9 | ### **The flow from 30,000ft:**
10 | 
11 | A **browser extension** is tasked with passing data from the user to the system.
12 | The system receives this data through HTTP requests and **records it into the ledger**.
13 | A **GUI of the library** is served by the system, which can also add data to the ledger. **Annotations** can be added to the archive through the UI. **Working copies**, true to the originals, can be exported through the UI.
14 | 
15 | ---
16 | 
17 | ### 🤔 What is where?
18 | 
19 | To install and hit the ground running, see [Getting Started](./getting_started.md).
20 | 
21 | **The browser extension** is currently written in **plain JS** (as well as some HTML/CSS). The JS assets are bundled and moved into place by Webpack, which also provides auto-reloading of the extension in-browser.
22 | 
23 | **The app and API** are currently written in (mostly) **Node & TypeScript**. They expose REST endpoints (such as `/list-docs`, `/form`, etc.) and handle the interfacing with QLDB.
24 | 
25 | An **example UI** is included, built in **Svelte**, an amazing frontend framework. It demonstrates how some of the above API endpoints can be consumed, and shows off some of the capabilities of the tool.
26 | 
27 | All of the above runs with `docker-compose`, as well as through standalone `npm` scripts.
28 | 
29 | ---
30 | 
31 | ### 🥱 So, where are we ${today}?
32 | 
33 | #### The API
34 | 
35 | Both the browser extension and the app/API are in a functioning state, though features need to be developed in sync to be considered complete.
36 | 
37 | Among other things, the browser extension is able to POST an object of the following shape to the API endpoint `/form` (ed: this name is terrible):
38 | 
39 | ```tsx
40 | { url: string,
41 |   title: string,
42 |   scr: Base64DataURI,
43 |   onefile: HTMLCodeString }
44 | ```
45 | 
46 | We're including:
47 | 
48 | - **a base64-encoded screenshot PNG** which, disappointingly, is only the visible part of the screen (see [`browser.tabs.captureVisibleTab`](https://developer.mozilla.org/en-US/docs/Mozilla/Add-ons/WebExtensions/API/Tabs/captureVisibleTab)).
49 |   Moving to a full-page screenshot will involve some fiddling: simulating a scroll while capturing with the Screen Capture API, I'm told.
50 |   These screenshots can be quite large (from a few hundred KB to a couple of MB), so on the app/API side we account for a chunked, streamed payload. All data from the browser is grouped in a `FormData()`.
51 | - **a long string of HTML code** which contains all HTML, inlined styles and JavaScript, as well as encoded images where possible.
52 |   This is most definitely not quite `.mhtml`, which apparently is no longer supported in Firefox since Quantum. Go figure!
53 | 
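For illustration, here is a rough sketch of what that client-side POST could look like. The function name, the endpoint URL and the way the screenshot and one-file strings are obtained are assumptions; only the field names (`url`, `title`, `scr`, `onefile`) and the use of `FormData` reflect the actual flow:

```tsx
// Hypothetical sketch of the client-side POST to /form (illustrative only).
async function archivePage(
  url: string,
  title: string,
  scr: string, // base64 data URI, e.g. from browser.tabs.captureVisibleTab()
  onefile: string // single-file HTML string produced by SingleFile
): Promise<void> {
  const form = new FormData();
  form.append('url', url);
  form.append('title', title);
  form.append('scr', scr);
  form.append('onefile', onefile);

  const res = await fetch('http://localhost:3000/form', {
    method: 'POST',
    body: form, // multipart/form-data, parsed server-side by formidable
  });
  if (!res.ok) throw new Error(`Archiving failed with status ${res.status}`);
}
```

Posting `FormData` rather than a JSON body is what lets the API stream and parse the potentially large screenshot payload with formidable.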
54 | ---
55 | 
56 | A main `Record` type is defined as the central data structure flowing through the application.
57 | 
58 | ```tsx
59 | type Record = {
60 | 
61 |   // list of files preserved and hashed
62 |   // type Bundle
63 |   bundle: [
64 |     { kind: 'screenshot' | 'one_file' | 'screenshot_thumbnail',
65 |       hash: 'aaaaaaa'
66 |     }, {...}
67 |   ],
68 | 
69 |   // user-created data about the record
70 |   // most probably after the original archive
71 |   // type Annotations
72 |   annotations: {
73 |     description: 'description',
74 |     other_key: 'other val'
75 |   },
76 | 
77 |   // data points about the page saved
78 |   // type Data
79 |   data: {
80 |     title: 'page title',
81 |     url: 'https://foo.bar.com'
82 |   }
83 | 
84 | }
85 | ```
86 | 
87 | **Examples of this data flowing:**
88 | 
89 | - Upon receiving `POST /form`, the API wrangles the payload data into this shape, which can then be passed to `Ledger.insertDoc` to be added to the ledger.
90 |   (this includes side effects such as the writing to disk of screenshots and of the one-file archive)
91 | - The frontend consumes the result of `GET /list-docs`, which fetches data from QLDB and passes it through two successive formatting functions:
92 |   - `Record.fromLedger`, which takes QLDB-shaped data and builds a nice `Record` as defined above,
93 |   - then `Record.toFrontend`, which takes a `Record` and builds a simplified shape for the frontend.
94 | 
95 | **Central to this type definition is the _Bundle:_**
96 | 
97 | ```tsx
98 | type Bundle = File.File[];
99 | 
100 | type File = {
101 |   kind: "screenshot" | "one_file" | "screenshot_thumbnail";
102 |   hash: string;
103 | };
104 | ```
105 | 
106 | A Bundle is a list of files, which can only be of certain kinds. At the back of our minds, these are the three kinds of files we're interested in for now:
107 | 
108 | - a page screenshot,
109 | - and its thumbnail for rendering in the UI,
110 | - as well as a one-file download of all the HTML/CSS/JS assets
111 | 
112 | The QLDB logic can be found under the `QLDB.*` namespace.
113 | 
114 | ### The UI
115 | 
116 | The webapp uses [SvelteKit](https://kit.svelte.dev), a JS framework. It implements two notable routes – the two main user stories:
117 | 
118 | - The Library: `src/routes/library.svelte`
119 | - The Verification: `src/routes/verify.svelte`
120 | 
121 | **Library** renders a list of ledger entries, with their accompanying metadata. It supports the querying of a record's history, as well as the addition of metadata (i.e. a "description" field).
122 | 
123 | - details about how each of these features is replicated through the API
124 | 
125 | **Verification** implements the lookup process and surfacing of information made possible by the tool.
126 | 
127 | ---
128 | 
129 | ### Miscellaneous
130 | 
131 | #### On uniqueness
132 | 
133 | Each record in our database contains a list of files that make it up (as of Aug 10th: a screenshot, its thumbnail, and a one-file HTML archive). Each is represented by its _kind_ and its hash (sha256).
134 | 
135 | The ID of the record is the hash of the concatenated hashes of its files:
136 | 
137 | `Record.id = hash(Record.files.sort().map(File.id).join(''))`
138 | 
139 | With self-identifiable data, it is possible to associate files to their ledger entries, since the ID can be computed from the files.
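For the curious, a minimal sketch of that derivation (the names and the exact sort order are assumptions; the real implementation lives under `src/types/` and may differ):

```tsx
import { createHash } from 'crypto';

// Hypothetical sketch of the Record ID derivation described above.
type BundleFile = { kind: string; hash: string };

const sha256 = (data: string | Buffer): string =>
  createHash('sha256').update(data).digest('hex');

// Sort the per-file hashes for a stable order, concatenate, then hash again.
const recordId = (files: BundleFile[]): string =>
  sha256(
    files
      .map(f => f.hash)
      .sort()
      .join('')
  );
```

Sorting before concatenating keeps the ID independent of the order in which the files happen to be listed, so the same set of files always produces the same record ID.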
140 | 141 | #### On ledgers 142 | 143 | "A non ledger database is table-centric. A ledger database is log-centric. **The log is the database.**" ([Ivan Moto](https://ivan.mw/2019-11-24/what-is-a-ledger-database)) 144 | 145 | "Standard databases track a sequence of transactions that add, delete, or change entries. Ledger databases add a layer of digital signatures for each transaction so anyone can audit the list and see that it was constructed correctly. More importantly, no one has gone back to adjust a previous transaction — to change history, so to speak." ([VentureBeat](https://venturebeat.com/2021/01/18/database-trends-why-you-need-a-ledger-database/)) 146 | 147 | -------------------------------------------------------------------------------- /docs/getting_started.md: -------------------------------------------------------------------------------- 1 | ### Getting started 2 | 3 | If the instructions in this guide feel a bit much, it's likely because the Toolkit is still alpha-stage software which assumes a certain level of technical knowledge. There are technical solutions for simplifying this setup, but these were not prioritised. 4 | 5 | 9 | 10 | --- 11 | 12 | #### Setting up the ledger 13 | 14 | The Toolkit requires a working connection to Amazon Web Services, which means you need some kind of well-permissioned account or IAM role. 15 | 16 | In short, you will need: 17 | 18 | 1. the AWS CLI and an authorised profile in `~/.aws/credentials` (see [“Installing the AWS CLI v2”](https://docs.aws.amazon.com/cli/latest/userguide/install-cliv2.html) - _docs.aws.amazon.com_) 19 | 2. an existing QLDB ledger, with a blank table in it (see [“Creating a QLDB ledger from the AWS Console”](https://qldbguide.com/docs/guide/getting-started/#using-aws-console) - _qldbguide.com_) 20 | 21 | Not required but recommended is an S3 bucket in which to store Toolkit data. 22 | 23 | **Remember the names** of the ledger and of its table. You'll need them shortly (see "Environment" below). 24 | 25 | --- 26 | 27 | #### Environment 28 | 29 | After cloning the repository, create a `.env` file at the root (or copy `.env.example`). The job of this file is to hold variables you really don't want to share publicly, so keep it out of version control. 30 | 31 | This file **must contain:** 32 | 33 | - AWS access credentials and preferred region 34 | - Details about the ledger and S3 bucket 35 | 36 | ```bash 37 | AWS_ACCESS_KEY_ID="your aws access key" 38 | AWS_SECRET_ACCESS_KEY="your aws secret key" 39 | AWS_REGION="eu-central-1 (or another region)" 40 | BUCKET_NAME="anS3BucketName" 41 | LEDGER_NAME="yourLedgerName" 42 | DOC_TABLE_NAME="yourTableName" 43 | ``` 44 | 45 | --- 46 | 47 | #### Recommended way of running the Toolkit 48 | 49 | 52 | 53 | The Docker Compose orchestration comprises three services: 54 | 55 | 1. An Express/TypeScript API 56 | 2. A plain JS browser extension 57 | 3. A frontend 58 | 59 | To start the whole app: 60 | 61 | ```bash 62 | $ docker-compose up 63 | ``` 64 | 65 | --- 66 | 67 | #### Running without Docker 68 | 69 | Ensure you're running `node > 10.0` — the recommended version is the LTS, i.e. `node v16`. If you are using `nvm`: 70 | 71 | ```bash 72 | $ nvm use --lts 73 | > Now using node v16.13.0 74 | ``` 75 | 76 | Manually install dependencies for each service: 77 | 78 | ```bash 79 | $ (cd ui/ && npm install) 80 | $ (cd extension/ && npm install) 81 | $ npm install 82 | ``` 83 | 84 | Then use the npm script that starts all services: 85 | 86 | ```bash 87 | $ npm run all 88 | ``` 89 | 90 | --- 91 | 92 | #### Storage options 93 | 94 | By including a bucket in the `.env` config, you're choosing to replicate your archived material to S3. 95 | 96 | Namely, the Store (`src/store/index.ts`) will: 97 | 98 | - upon receiving an archive request, store the Bundle files both locally and on S3, 99 | - and upon receiving a file request (e.g. the UI fetching thumbnails), serve it from S3.
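For illustration only (this is a hypothetical sketch, not a copy of `src/store/index.ts`), that dual write could look roughly like the following with the AWS SDK v3, assuming a file's contents and its hash are already in hand:

```tsx
import { writeFile } from 'fs/promises';
import { S3Client, PutObjectCommand } from '@aws-sdk/client-s3';

// Hypothetical sketch of the dual write described above: keep a local copy
// and mirror the same bytes to the configured S3 bucket.
const s3 = new S3Client({ region: process.env.AWS_REGION });

async function storeFile(hash: string, contents: Buffer): Promise<void> {
  await writeFile(`out/${hash}`, contents); // local copy; the path is illustrative
  await s3.send(
    new PutObjectCommand({
      Bucket: process.env.BUCKET_NAME,
      Key: hash,
      Body: contents,
    })
  );
}
```

Serving a file request is then the mirror image: a `GetObjectCommand` for the same key.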
100 | 101 | --- 102 | 103 | #### Is there anybody out there? 104 | 105 | ##### API and frontend 106 | 107 | The API should be available at `http://localhost:3000` — verify this by running: 108 | 109 | ```bash 110 | $ curl http://localhost:3000/list-docs 111 | > [ {...}, {...} ] 112 | ``` 113 | 114 | The UI should be available at `http://localhost:8000` in your web browser of choice. API requests are proxied through the UI. Thus, the following queries are equivalent: 115 | 116 | ```bash 117 | $ curl http://localhost:3000/list-docs # as before 118 | $ curl http://localhost:8000/api/list-docs 119 | ``` 120 | 121 | ##### Browser extension 122 | 123 | The extension should now be bundled on your filesystem. Pop open your browser's extension runtime by pasting this in the URL bar: 124 | 125 | `about:debugging#/runtime/this-firefox` 126 | 127 | Click _“Load temporary Add-on...”_ and navigate to `extension/addon` to select `manifest.json`. 128 | 129 | The extension should now appear in your browser's toolbar, as shown below: 130 | 131 | ![Untitled](/images/dept-untitled.png) 132 | -------------------------------------------------------------------------------- /extension/.gitignore: -------------------------------------------------------------------------------- 1 | addon/content_scripts/ 2 | addon/popup/index.js 3 | 4 | -------------------------------------------------------------------------------- /extension/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:14.16.1-alpine3.12 2 | LABEL version="0.1" 3 | LABEL description="Firefox extension for the DEPToolkit" 4 | 5 | WORKDIR /extension 6 | COPY ["package.json", "package-lock.json", "./"] 7 | 8 | RUN apk --no-cache add git 9 | RUN apk --no-cache --virtual build-dependencies add \ 10 | g++ gcc libgcc \ 11 | libstdc++ linux-headers \ 12 | make python3 \ 13 | && npm install \ 14 | && apk del build-dependencies 15 | 16 | COPY . . 17 | 18 | EXPOSE 9000 19 | 20 | CMD ["npm", "run", "watch"] -------------------------------------------------------------------------------- /extension/README.md: -------------------------------------------------------------------------------- 1 | # Digital Evidence Toolkit Firefox extension 2 | 3 | Browser extension used to trigger the archiving of a webpage, according to the (coming) Digital Evidence Toolkit's standard. 4 | 5 | ## Install 6 | 7 | ```sh 8 | $ npm install 9 | $ npm run watch 10 | ``` 11 | 12 | Then open `about:debugging` in Firefox and pick `manifest.json`. 13 | 14 | The Webpack task bundles dependencies and moves `popup/popup.js` and `content_scripts/archive.js` into their proper place in `addon/`. 15 | 16 | Your changes will automatically reload. 17 | 18 | ## Usage 19 | 20 | Use it while the Node API is running in parallel.
21 | -------------------------------------------------------------------------------- /extension/addon/background.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | -------------------------------------------------------------------------------- /extension/addon/icons/LICENSE: -------------------------------------------------------------------------------- 1 | 2 | The icon "beasts-32.png" is taken from the IconBeast Lite iconset, and used under the terms of its license (http://www.iconbeast.com/faq/), with a link back to the website: http://www.iconbeast.com/free/. 3 | 4 | The icon "beasts-48.png" is taken from Aha-Soft’s Free Retina iconset, and used under the terms of its license (http://www.aha-soft.com/free-icons/free-retina-icon-set/), with a link back to the website: http://www.aha-soft.com/. 5 | -------------------------------------------------------------------------------- /extension/addon/icons/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/digitalevidencetoolkit/deptoolkit/348a9ed840a2771e3d08b83d122acdbc3cb9667c/extension/addon/icons/logo.png -------------------------------------------------------------------------------- /extension/addon/icons/logo_dark.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/digitalevidencetoolkit/deptoolkit/348a9ed840a2771e3d08b83d122acdbc3cb9667c/extension/addon/icons/logo_dark.png -------------------------------------------------------------------------------- /extension/addon/manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "manifest_version": 2, 3 | "name": "deptoolkit-extension", 4 | "description": "A Firefox extension to archive digital media content", 5 | "version": "1.0", 6 | "homepage_url": "https://digitalevidencetoolkit.org", 7 | "icons": { 8 | "48": "icons/logo.png" 9 | }, 10 | 11 | "permissions": ["activeTab", "*://localhost/*", "<all_urls>"], 12 | 13 | "browser_action": { 14 | "default_icon": "icons/logo.png", 15 | "theme_icons": [ 16 | { 17 | "light": "icons/logo_dark.png", 18 | "dark": "icons/logo.png", 19 | "size": 32 20 | } 21 | ], 22 | "default_title": "Archiver", 23 | "default_popup": "popup/index.html" 24 | }, 25 | 26 | "content_scripts": [ 27 | { 28 | "matches": ["<all_urls>"], 29 | "run_at": "document_start", 30 | "js": [ 31 | "single-file/dist/single-file-frames.js", 32 | "single-file/dist/extension-frames.js" 33 | ], 34 | "all_frames": true, 35 | "match_about_blank": true 36 | }, 37 | { 38 | "matches": ["<all_urls>"], 39 | "run_at": "document_start", 40 | "js": [ 41 | "single-file/dist/single-file-bootstrap.js", 42 | "single-file/dist/extension-core.js", 43 | "single-file/dist/single-file.js" 44 | ], 45 | "all_frames": false 46 | } 47 | ], 48 | "background": { 49 | "page": "background.html", 50 | "persistent": false 51 | }, 52 | "web_accessible_resources": [ 53 | "single-file/dist/web/hooks/hooks-web.js", 54 | "single-file/dist/web/hooks/hooks-frames-web.js" 55 | ] 56 | } 57 | -------------------------------------------------------------------------------- /extension/addon/popup/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 14 | 18 | 19 | 20 | 21 | -------------------------------------------------------------------------------- /extension/addon/popup/style.css:
-------------------------------------------------------------------------------- 1 | :root { 2 | --text-color: rgb(94, 94, 94); 3 | --text-color-emphasis: #111; 4 | --text-color-muted: #ddd; 5 | --white: #fff; 6 | } 7 | html, 8 | body { 9 | width: 200px; 10 | font-family: sans-serif; 11 | color: var(--text-color); 12 | } 13 | 14 | .hidden { 15 | display: none; 16 | } 17 | 18 | .button { 19 | margin: 3% auto; 20 | padding: 4px; 21 | text-align: center; 22 | font-size: 1rem; 23 | cursor: pointer; 24 | border: 1px solid #ddd; 25 | background-color: #ececec; 26 | 27 | transition: background-color 0.2s, border 0.2s, color 0.3s, opacity 0.2s; 28 | } 29 | 30 | .button:hover { 31 | color: var(--text-color-emphasis); 32 | background-color: var(--white); 33 | border: 1px solid #333; 34 | text-decoration: underline; 35 | } 36 | .button.selected { 37 | color: var(--text-color-muted); 38 | background-color: var(--white); 39 | border: 1px solid var(--white); 40 | } 41 | .button.archived { 42 | background-color: var(--white); 43 | border: none; 44 | cursor: default; 45 | } 46 | .button.archived:hover { 47 | color: var(--text-color); 48 | background-color: var(--white); 49 | text-decoration: none; 50 | } 51 | -------------------------------------------------------------------------------- /extension/addon/single-file/dist/extension-core.js: -------------------------------------------------------------------------------- 1 | !function(e,t){"object"==typeof exports&&"undefined"!=typeof module?t(exports):"function"==typeof define&&define.amd?define(["exports"],t):t((e="undefined"!=typeof globalThis?globalThis:e||self).extension={})}(this,(function(e){"use strict";let t,r;const a=["dist/chrome-browser-polyfill.js","dist/single-file.js"],n=["dist/chrome-browser-polyfill.js","dist/single-file-frames.js"];async function s(e,s){let o;if(await async function(e){const s=e.extensionScriptFiles||[];t||r||([t,r]=await Promise.all([i(a.concat(s)),i(n)]))}(s),!s.removeFrames)try{await browser.tabs.executeScript(e,{code:r,allFrames:!0,matchAboutBlank:!0,runAt:"document_start"})}catch(e){}try{await browser.tabs.executeScript(e,{code:t,allFrames:!1,runAt:"document_idle"}),o=!0}catch(e){}return o&&s.frameId&&await browser.tabs.executeScript(e,{code:"document.documentElement.dataset.requestedFrameId = true",frameId:s.frameId,matchAboutBlank:!0,runAt:"document_start"}),o}async function i(e){const t=e.map((async e=>{if("function"==typeof e)return"("+e.toString()+")();";{const t=await fetch(browser.runtime.getURL("../../../"+e));return(new TextDecoder).decode(await t.arrayBuffer())}}));let r="";for(const e of t)r+=await e;return r}const o="single-file-response-fetch",c=window.fetch;async function f(e,t={}){try{let t=await c(e,{cache:"force-cache"});return 401!=t.status&&403!=t.status&&404!=t.status||(t=await l(e)),t}catch(r){const a=await d({method:"singlefile.fetch",url:e,referrer:t.referrer});return{status:a.status,headers:{get:e=>a.headers&&a.headers[e]},arrayBuffer:async()=>new Uint8Array(a.array).buffer}}}async function u(e,t){const r=await d({method:"singlefile.fetchFrame",url:e,frameId:t.frameId,referrer:t.referrer});return{status:r.status,headers:new Map(r.headers),arrayBuffer:async()=>new Uint8Array(r.array).buffer}}async function d(e){const t=await browser.runtime.sendMessage(e);if(!t||t.error)throw new Error(t&&t.error&&t.error.toString());return t}function l(e){return new Promise(((t,r)=>{var a,n,s,i;a=new CustomEvent("single-file-request-fetch",{detail:e}),window.dispatchEvent(a),n=o,s=function a(n){var 
s,i,c;n.detail?n.detail.url==e&&(s=o,i=a,c=!1,window.removeEventListener(s,i,c),n.detail.response?t({status:n.detail.status,headers:new Map(n.detail.headers),arrayBuffer:async()=>n.detail.response}):r(n.detail.error)):r()},i=!1,window.addEventListener(n,s,i)}))}browser.runtime.onMessage.addListener((e=>{if("singlefile.fetchFrame"==e.method&&window.frameId&&window.frameId==e.frameId)return async function(e){try{let t=await c(e.url,{cache:"force-cache"});return 401!=t.status&&403!=t.status&&404!=t.status||(t=await Promise.race([l(e.url),new Promise(((e,t)=>setTimeout((()=>t()),5e3)))])),{status:t.status,headers:[...t.headers],array:Array.from(new Uint8Array(await t.arrayBuffer()))}}catch(e){return{error:e&&e.toString()}}}(e)})),e.getPageData=function(e,t,r,a={fetch:f,frameFetch:u}){return globalThis.singlefile.getPageData(e,a,t,r)},e.injectScript=function(e,t){return s(e,t)},Object.defineProperty(e,"__esModule",{value:!0})})); 2 | -------------------------------------------------------------------------------- /extension/addon/single-file/dist/extension-frames.js: -------------------------------------------------------------------------------- 1 | !function(){"use strict";const e="single-file-response-fetch",t=window.fetch;function r(t){return new Promise(((r,a)=>{var s,n,i,o;s=new CustomEvent("single-file-request-fetch",{detail:t}),window.dispatchEvent(s),n=e,i=function s(n){var i,o,d;n.detail?n.detail.url==t&&(i=e,o=s,d=!1,window.removeEventListener(i,o,d),n.detail.response?r({status:n.detail.status,headers:new Map(n.detail.headers),arrayBuffer:async()=>n.detail.response}):a(n.detail.error)):a()},o=!1,window.addEventListener(n,i,o)}))}browser.runtime.onMessage.addListener((e=>{if("singlefile.fetchFrame"==e.method&&window.frameId&&window.frameId==e.frameId)return async function(e){try{let a=await t(e.url,{cache:"force-cache"});return 401!=a.status&&403!=a.status&&404!=a.status||(a=await Promise.race([r(e.url),new Promise(((e,t)=>setTimeout((()=>t()),5e3)))])),{status:a.status,headers:[...a.headers],array:Array.from(new Uint8Array(await a.arrayBuffer()))}}catch(e){return{error:e&&e.toString()}}}(e)}))}(); 2 | -------------------------------------------------------------------------------- /extension/addon/single-file/dist/single-file-bootstrap.js: -------------------------------------------------------------------------------- 1 | !function(e,t){"object"==typeof exports&&"undefined"!=typeof module?t(exports):"function"==typeof define&&define.amd?define(["exports"],t):t((e="undefined"!=typeof globalThis?globalThis:e||self).singlefileBootstrap={})}(this,(function(e){"use strict";const t="single-file-load-image",s="single-file-image-loaded",o=globalThis.browser,n=e=>globalThis.dispatchEvent(e),a=globalThis.CustomEvent,i=globalThis.document,r=globalThis.HTMLDocument;let l;if(l=window._singleFile_fontFaces?window._singleFile_fontFaces:window._singleFile_fontFaces=[],i instanceof r&&o&&o.runtime&&o.runtime.getURL){d="single-file-new-font-face",c=e=>{const t=e.detail;l.find((e=>JSON.stringify(e)==JSON.stringify(t)))||l.push(e.detail)},globalThis.addEventListener(d,c,m);let e=i.createElement("script");e.textContent="("+function(){"undefined"==typeof globalThis&&(window.globalThis=window);const 
e=globalThis.console,t=e=>globalThis.dispatchEvent(e),s=globalThis.CustomEvent,o=globalThis.FileReader,n=globalThis.Blob,a=e&&e.warn&&((...t)=>e.warn(...t))||(()=>{}),i="single-file-new-font-face",r={family:"font-family",style:"font-style",weight:"font-weight",stretch:"font-stretch",unicodeRange:"unicode-range",variant:"font-variant",featureSettings:"font-feature-settings"};if(globalThis.FontFace){const e=globalThis.FontFace;let l;globalThis.FontFace=function(){l||(a("SingleFile is hooking the FontFace constructor to get font URLs."),l=!0);const d={};d["font-family"]=arguments[0],d.src=arguments[1];const c=arguments[2];if(c&&Object.keys(c).forEach((e=>{r[e]&&(d[r[e]]=c[e])})),d.src instanceof ArrayBuffer){const e=new o;e.readAsDataURL(new n([d.src])),e.addEventListener("load",(()=>{d.src="url("+e.result+")",t(new s(i,{detail:d}))}))}else t(new s(i,{detail:d}));return new e(...arguments)},globalThis.FontFace.toString=function(){return"function FontFace() { [native code] }"}}}.toString()+")()",(i.documentElement||i).appendChild(e),e.remove(),e=i.createElement("script"),e.src=o.runtime.getURL("/single-file/dist/web/hooks/hooks-frames-web.js"),e.async=!1,(i.documentElement||i).appendChild(e),e.remove()}var d,c,m;const u=new RegExp("\\\\([\\da-f]{1,6}[\\x20\\t\\r\\n\\f]?|([\\x20\\t\\r\\n\\f])|.)","ig");const g="single-file-on-before-capture",p="single-file-on-after-capture",f="data-single-file-removed-content",h="data-single-file-hidden-content",E="data-single-file-kept-content",T="data-single-file-hidden-frame",b="data-single-file-preserved-space-element",y="data-single-file-shadow-root-element",w="data-single-file-image",I="data-single-file-poster",A="data-single-file-canvas",N="data-single-file-import",v="data-single-file-input-value",S="data-single-file-lazy-loaded-src",R="data-single-file-stylesheet",F="data-single-file-disabled-noscript",C="data-single-file-async-script",_="*:not(base):not(link):not(meta):not(noscript):not(script):not(style):not(template):not(title)",x=["NOSCRIPT","DISABLED-NOSCRIPT","META","LINK","STYLE","TITLE","TEMPLATE","SOURCE","OBJECT","SCRIPT","HEAD"],P=/^'(.*?)'$/,M=/^"(.*?)"$/,O={regular:"400",normal:"400",bold:"700",bolder:"700",lighter:"100"},L="single-file-ui-element",D=(e,t,s)=>globalThis.addEventListener(e,t,s);function k(e,t,s){let o;return e.querySelectorAll("noscript:not([data-single-file-disabled-noscript])").forEach((e=>{e.setAttribute(F,e.textContent),e.textContent=""})),function(e){e.querySelectorAll("meta[http-equiv=refresh]").forEach((e=>{e.removeAttribute("http-equiv"),e.setAttribute("disabled-http-equiv","refresh")}))}(e),e.head&&e.head.querySelectorAll(_).forEach((e=>e.hidden=!0)),e.querySelectorAll("svg foreignObject").forEach((e=>{const t=e.querySelectorAll("html > head > "+_+", html > body > "+_);t.length&&(Array.from(e.childNodes).forEach((e=>e.remove())),t.forEach((t=>e.appendChild(t))))})),o=t&&e.documentElement?q(t,e,e.documentElement,s):{canvases:[],images:[],posters:[],usedFonts:[],shadowRoots:[],imports:[],markedElements:[]},{canvases:o.canvases,fonts:l,stylesheets:z(e),images:o.images,posters:o.posters,usedFonts:Array.from(o.usedFonts.values()),shadowRoots:o.shadowRoots,imports:o.imports,referrer:e.referrer,markedElements:o.markedElements}}function q(e,t,s,o,n={usedFonts:new Map,canvases:[],images:[],posters:[],shadowRoots:[],imports:[],markedElements:[]},a){return Array.from(s.childNodes).filter((t=>t instanceof e.HTMLElement||t instanceof e.SVGElement)).forEach((s=>{let 
i,r,l;if(!o.autoSaveExternalSave&&(o.removeHiddenElements||o.removeUnusedFonts||o.compressHTML)&&(l=e.getComputedStyle(s),s instanceof e.HTMLElement&&o.removeHiddenElements&&(r=(a||s.closest("html > head"))&&x.includes(s.tagName)||s.closest("details"),r||(i=a||function(e,t){let s=!1;if(t){const o=t.getPropertyValue("display"),n=t.getPropertyValue("opacity"),a=t.getPropertyValue("visibility");if(s="none"==o,!s&&("0"==n||"hidden"==a)&&e.getBoundingClientRect){const t=e.getBoundingClientRect();s=!t.width&&!t.height}}return Boolean(s)}(s,l),i&&(s.setAttribute(h,""),n.markedElements.push(s)))),!i)){if(o.compressHTML&&l){const e=l.getPropertyValue("white-space");e&&e.startsWith("pre")&&(s.setAttribute(b,""),n.markedElements.push(s))}o.removeUnusedFonts&&(U(l,o,n.usedFonts),U(e.getComputedStyle(s,":first-letter"),o,n.usedFonts),U(e.getComputedStyle(s,":before"),o,n.usedFonts),U(e.getComputedStyle(s,":after"),o,n.usedFonts))}!function(e,t,s,o,n,a,i){if("CANVAS"==s.tagName)try{const t=W(e,s,i);n.canvases.push({dataURI:s.toDataURL("image/png",""),width:t.width,height:t.height}),s.setAttribute(A,n.canvases.length-1),n.markedElements.push(s)}catch(e){}if("IMG"==s.tagName){const t={currentSrc:a?"data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==":o.loadDeferredImages&&s.getAttribute(S)||s.currentSrc};if(n.images.push(t),s.setAttribute(w,n.images.length-1),n.markedElements.push(s),s.removeAttribute(S),i=i||e.getComputedStyle(s)){t.size=W(e,s,i);const o=i.getPropertyValue("box-shadow"),n=i.getPropertyValue("background-image");o&&"none"!=o||n&&"none"!=n||!(t.size.pxWidth>1||t.size.pxHeight>1)||(t.replaceable=!0,t.backgroundColor=i.getPropertyValue("background-color"),t.objectFit=i.getPropertyValue("object-fit"),t.boxSizing=i.getPropertyValue("box-sizing"),t.objectPosition=i.getPropertyValue("object-position"))}}if("VIDEO"==s.tagName&&!s.poster){const e=t.createElement("canvas"),o=e.getContext("2d");e.width=s.clientWidth,e.height=s.clientHeight;try{o.drawImage(s,0,0,e.width,e.height),n.posters.push(e.toDataURL("image/png","")),s.setAttribute(I,n.posters.length-1),n.markedElements.push(s)}catch(e){}}"IFRAME"==s.tagName&&a&&o.removeHiddenElements&&(s.setAttribute(T,""),n.markedElements.push(s));"LINK"==s.tagName&&s.import&&s.import.documentElement&&(n.imports.push({content:J(s.import)}),s.setAttribute(N,n.imports.length-1),n.markedElements.push(s));"INPUT"==s.tagName&&("password"!=s.type&&(s.setAttribute(v,s.value),n.markedElements.push(s)),"radio"!=s.type&&"checkbox"!=s.type||(s.setAttribute(v,s.checked),n.markedElements.push(s)));"TEXTAREA"==s.tagName&&(s.setAttribute(v,s.value),n.markedElements.push(s));"SELECT"==s.tagName&&s.querySelectorAll("option").forEach((e=>{e.selected&&(e.setAttribute(v,""),n.markedElements.push(e))}));"SCRIPT"==s.tagName&&(s.async&&""!=s.getAttribute("async")&&"async"!=s.getAttribute("async")&&(s.setAttribute(C,""),n.markedElements.push(s)),s.textContent=s.textContent.replace(/<\/script>/gi,"<\\/script>"))}(e,t,s,o,n,i,l);const d=!(s instanceof e.SVGElement)&&H(s);if(d&&!s.classList.contains(L)){const 
a={};s.setAttribute(y,n.shadowRoots.length),n.markedElements.push(s),n.shadowRoots.push(a),q(e,t,d,o,n,i),a.content=d.innerHTML,a.delegatesFocus=d.delegatesFocus,a.mode=d.mode,d.adoptedStyleSheets&&d.adoptedStyleSheets.length&&(a.adoptedStyleSheets=Array.from(d.adoptedStyleSheets).map((e=>Array.from(e.cssRules).map((e=>e.cssText)).join("\n"))))}q(e,t,s,o,n,i),!o.autoSaveExternalSave&&o.removeHiddenElements&&a&&(r||""==s.getAttribute(E)?s.parentElement&&(s.parentElement.setAttribute(E,""),n.markedElements.push(s.parentElement)):i&&(s.setAttribute(f,""),n.markedElements.push(s)))})),n}function U(e,t,s){if(e){const o=e.getPropertyValue("font-style")||"normal";e.getPropertyValue("font-family").split(",").forEach((n=>{if(n=V(n),!t.loadedFonts||t.loadedFonts.find((e=>V(e.family)==n&&e.style==o))){const t=(a=e.getPropertyValue("font-weight"),O[a.toLowerCase().trim()]||a),i=e.getPropertyValue("font-variant")||"normal",r=[n,t,o,i];s.set(JSON.stringify(r),[n,t,o,i])}var a}))}}function H(e){const t=globalThis.chrome;if(e.openOrClosedShadowRoot)return e.openOrClosedShadowRoot;if(!(t&&t.dom&&t.dom.openOrClosedShadowRoot))return e.shadowRoot;try{return t.dom.openOrClosedShadowRoot(e)}catch(t){return e.shadowRoot}}function V(e=""){return function(e){e=e.match(P)?e.replace(P,"$1"):e.replace(M,"$1");return e.trim()}((t=e.trim(),t.replace(u,((e,t,s)=>{const o="0x"+t-65536;return o!=o||s?t:o<0?String.fromCharCode(o+65536):String.fromCharCode(o>>10|55296,1023&o|56320)})))).toLowerCase();var t}function B(e,t){if(e.querySelectorAll("[data-single-file-disabled-noscript]").forEach((e=>{e.textContent=e.getAttribute(F),e.removeAttribute(F)})),e.querySelectorAll("meta[disabled-http-equiv]").forEach((e=>{e.setAttribute("http-equiv",e.getAttribute("disabled-http-equiv")),e.removeAttribute("disabled-http-equiv")})),e.head&&e.head.querySelectorAll("*:not(base):not(link):not(meta):not(noscript):not(script):not(style):not(template):not(title)").forEach((e=>e.removeAttribute("hidden"))),!t){const s=[f,T,h,b,w,I,A,v,y,N,R,C];t=e.querySelectorAll(s.map((e=>"["+e+"]")).join(","))}t.forEach((e=>{e.removeAttribute(f),e.removeAttribute(h),e.removeAttribute(E),e.removeAttribute(T),e.removeAttribute(b),e.removeAttribute(w),e.removeAttribute(I),e.removeAttribute(A),e.removeAttribute(v),e.removeAttribute(y),e.removeAttribute(N),e.removeAttribute(R),e.removeAttribute(C)}))}function z(e){if(e){const t=[];return e.querySelectorAll("style").forEach(((s,o)=>{try{const n=e.createElement("style");n.textContent=s.textContent,e.body.appendChild(n);const a=n.sheet;n.remove(),a&&a.cssRules.length==s.sheet.cssRules.length||(s.setAttribute(R,o),t[o]=Array.from(s.sheet.cssRules).map((e=>e.cssText)).join("\n"))}catch(e){}})),t}}function W(e,t,s){let o=t.naturalWidth,n=t.naturalHeight;if(!o&&!n){let a,i,r,l,d,c,m,u,g=!1;if("content-box"==(s=s||e.getComputedStyle(t)).getPropertyValue("box-sizing")){const e=t.style.getPropertyValue("box-sizing"),s=t.style.getPropertyPriority("box-sizing"),o=t.clientWidth;t.style.setProperty("box-sizing","border-box","important"),g=t.clientWidth!=o,e?t.style.setProperty("box-sizing",e,s):t.style.removeProperty("box-sizing")}a=j("padding-left",s),i=j("padding-right",s),r=j("padding-top",s),l=j("padding-bottom",s),g?(d=j("border-left-width",s),c=j("border-right-width",s),m=j("border-top-width",s),u=j("border-bottom-width",s)):d=c=m=u=0,o=Math.max(0,t.clientWidth-a-i-d-c),n=Math.max(0,t.clientHeight-r-l-m-u)}return{pxWidth:o,pxHeight:n}}function j(e,t){if(t.getPropertyValue(e).endsWith("px"))return 
parseFloat(t.getPropertyValue(e))}function J(e){const t=e.doctype;let s="";return t&&(s=" "),s+e.documentElement.outerHTML}const G=S,Y=L,K="attributes",$=globalThis.browser,Q=globalThis.document,X=globalThis.MutationObserver,Z=(e,t,s)=>globalThis.addEventListener(e,t,s),ee=(e,t,s)=>globalThis.removeEventListener(e,t,s),te=new Map;async function se(e){if(Q.documentElement){te.clear();const o=Math.max(Q.documentElement.scrollHeight-1.5*Q.documentElement.clientHeight,0),i=Math.max(Q.documentElement.scrollWidth-1.5*Q.documentElement.clientWidth,0);if(globalThis.scrollY<=o&&globalThis.scrollX<=i)return function(e){return new Promise((async o=>{let i;const r=new Set,l=new X((async t=>{if((t=t.filter((e=>e.type==K))).length){t.filter((e=>{if("src"==e.attributeName&&(e.target.setAttribute(G,e.target.src),e.target.addEventListener("load",c)),"src"==e.attributeName||"srcset"==e.attributeName||"SOURCE"==e.target.tagName)return!e.target.classList||!e.target.classList.contains(Y)})).length&&(i=!0,await ne(l,e,g),r.size||await oe(l,e,g))}}));async function d(t){await ie("idleTimeout",(async()=>{i?(le("idleTimeout"),await d(Math.max(500,t/2))):(le("loadTimeout"),le("maxTimeout"),ae(l,e,g))}),t)}function c(e){const t=e.target;t.removeAttribute(G),t.removeEventListener("load",c)}async function m(t){i=!0,await ne(l,e,g),await oe(l,e,g),t.detail&&r.add(t.detail)}async function u(t){await ne(l,e,g),await oe(l,e,g),r.delete(t.detail),r.size||await oe(l,e,g)}function g(e){l.disconnect(),ee(t,m),ee(s,u),o(e)}await d(2*e.loadDeferredImagesMaxIdleTime),await ne(l,e,g),l.observe(Q,{subtree:!0,childList:!0,attributes:!0}),Z(t,m),Z(s,u),function(e){e.loadDeferredImagesBlockCookies&&n(new a("single-file-block-cookies-start")),e.loadDeferredImagesBlockStorage&&n(new a("single-file-block-storage-start")),e.loadDeferredImagesKeepZoomLevel?n(new a("single-file-load-deferred-images-keep-zoom-level-start")):n(new a("single-file-load-deferred-images-start"))}(e)}))}(e)}}async function oe(e,t,s){await ie("loadTimeout",(()=>ae(e,t,s)),t.loadDeferredImagesMaxIdleTime)}async function ne(e,t,s){await ie("maxTimeout",(async()=>{await le("loadTimeout"),await ae(e,t,s)}),10*t.loadDeferredImagesMaxIdleTime)}async function ae(e,t,s){await le("idleTimeout"),function(e){e.loadDeferredImagesBlockCookies&&n(new a("single-file-block-cookies-end")),e.loadDeferredImagesBlockStorage&&n(new a("single-file-block-storage-end")),e.loadDeferredImagesKeepZoomLevel?n(new a("single-file-load-deferred-images-keep-zoom-level-end")):n(new a("single-file-load-deferred-images-end"))}(t),await ie("endTimeout",(async()=>{await le("maxTimeout"),s()}),t.loadDeferredImagesMaxIdleTime/2),e.disconnect()}async function ie(e,t,s){if($&&$.runtime&&$.runtime.sendMessage){if(!te.get(e)||!te.get(e).pending){const o={callback:t,pending:!0};te.set(e,o);try{await $.runtime.sendMessage({method:"singlefile.lazyTimeout.setTimeout",type:e,delay:s})}catch(o){re(e,t,s)}o.pending=!1}}else re(e,t,s)}function re(e,t,s){const o=te.get(e);o&&globalThis.clearTimeout(o),te.set(e,t),globalThis.setTimeout(t,s)}async function le(e){if($&&$.runtime&&$.runtime.sendMessage)try{await $.runtime.sendMessage({method:"singlefile.lazyTimeout.clearTimeout",type:e})}catch(t){de(e)}else de(e)}function de(e){const t=te.get(e);te.delete(e),t&&globalThis.clearTimeout(t)}$&&$.runtime&&$.runtime.onMessage&&$.runtime.onMessage.addListener&&$.runtime.onMessage.addListener((e=>{if("singlefile.lazyTimeout.onTimeout"==e.method){const 
t=te.get(e.type);if(t){te.delete(e.type);try{t.callback()}catch(t){de(e.type)}}}}));const ce={ON_BEFORE_CAPTURE_EVENT_NAME:g,ON_AFTER_CAPTURE_EVENT_NAME:p,WIN_ID_ATTRIBUTE_NAME:"data-single-file-win-id",preProcessDoc:k,serialize:J,postProcessDoc:B,getShadowRoot:H},me="__frameTree__::",ue='iframe, frame, object[type="text/html"][data]',ge="singlefile.frameTree.initRequest",pe="singlefile.frameTree.ackInitRequest",fe="singlefile.frameTree.cleanupRequest",he="singlefile.frameTree.initResponse",Ee=".",Te=globalThis.window==globalThis.top,be=globalThis.browser,ye=globalThis.top,we=globalThis.MessageChannel,Ie=globalThis.document,Ae=new Map;let Ne;function ve(){return globalThis.crypto.getRandomValues(new Uint32Array(32)).join("")}async function Se(e){const t=e.sessionId,s=globalThis._singleFile_waitForUserScript;Te||(Ne=globalThis.frameId=e.windowId),Ce(Ie,e.options,Ne,t),Te||(e.options.userScriptEnabled&&s&&await s(ce.ON_BEFORE_CAPTURE_EVENT_NAME),Me({frames:[Le(Ie,globalThis,Ne,e.options)],sessionId:t,requestedFrameId:Ie.documentElement.dataset.requestedFrameId&&Ne}),e.options.userScriptEnabled&&s&&await s(ce.ON_AFTER_CAPTURE_EVENT_NAME),delete Ie.documentElement.dataset.requestedFrameId)}function Re(e){const t=e.sessionId;Pe(De(Ie),e.windowId,t)}function Fe(e){e.frames.forEach((t=>_e("responseTimeouts",e.sessionId,t.windowId)));const t=Ae.get(e.sessionId);if(t){e.requestedFrameId&&(t.requestedFrameId=e.requestedFrameId),e.frames.forEach((e=>{let s=t.frames.find((t=>e.windowId==t.windowId));s||(s={windowId:e.windowId},t.frames.push(s)),s.processed||(s.content=e.content,s.baseURI=e.baseURI,s.title=e.title,s.canvases=e.canvases,s.fonts=e.fonts,s.stylesheets=e.stylesheets,s.images=e.images,s.posters=e.posters,s.usedFonts=e.usedFonts,s.shadowRoots=e.shadowRoots,s.imports=e.imports,s.processed=e.processed)}));t.frames.filter((e=>!e.processed)).length||(t.frames=t.frames.sort(((e,t)=>t.windowId.split(Ee).length-e.windowId.split(Ee).length)),t.resolve&&(t.requestedFrameId&&t.frames.forEach((e=>{e.windowId==t.requestedFrameId&&(e.requestedFrame=!0)})),t.resolve(t.frames)))}}function Ce(e,t,s,o){const n=De(e);!function(e,t,s,o,n){const a=[];let i;Ae.get(n)?i=Ae.get(n).requestTimeouts:(i={},Ae.set(n,{requestTimeouts:i}));t.forEach(((e,t)=>{const s=o+Ee+t;e.setAttribute(ce.WIN_ID_ATTRIBUTE_NAME,s),a.push({windowId:s})})),Me({frames:a,sessionId:n,requestedFrameId:e.documentElement.dataset.requestedFrameId&&o}),t.forEach(((e,t)=>{const a=o+Ee+t;try{Oe(e.contentWindow,{method:ge,windowId:a,sessionId:n,options:s})}catch(e){}i[a]=globalThis.setTimeout((()=>Me({frames:[{windowId:a,processed:!0}],sessionId:n})),750)})),delete e.documentElement.dataset.requestedFrameId}(e,n,t,s,o),n.length&&function(e,t,s,o,n){const a=[];t.forEach(((e,t)=>{const i=o+Ee+t;let r;try{r=e.contentDocument}catch(e){}if(r)try{const t=e.contentWindow;t.stop(),_e("requestTimeouts",n,i),Ce(r,s,i,n),a.push(Le(r,t,i,s))}catch(e){a.push({windowId:i,processed:!0})}})),Me({frames:a,sessionId:n,requestedFrameId:e.documentElement.dataset.requestedFrameId&&o}),delete e.documentElement.dataset.requestedFrameId}(e,n,t,s,o)}function _e(e,t,s){const o=Ae.get(t);if(o&&o[e]){const t=o[e][s];t&&(globalThis.clearTimeout(t),delete o[e][s])}}function xe(e,t){const s=Ae.get(e);s&&s.responseTimeouts&&(s.responseTimeouts[t]=globalThis.setTimeout((()=>Me({frames:[{windowId:t,processed:!0}],sessionId:e})),1e4))}function Pe(e,t,s){e.forEach(((e,o)=>{const 
n=t+Ee+o;e.removeAttribute(ce.WIN_ID_ATTRIBUTE_NAME);try{Oe(e.contentWindow,{method:fe,windowId:n,sessionId:s})}catch(e){}})),e.forEach(((e,o)=>{const n=t+Ee+o;let a;try{a=e.contentDocument}catch(e){}if(a)try{Pe(De(a),n,s)}catch(e){}}))}function Me(e){e.method=he;try{ye.singlefile.processors.frameTree.initResponse(e)}catch(t){Oe(ye,e,!0)}}function Oe(e,t,s){if(e==ye&&be&&be.runtime&&be.runtime.sendMessage)be.runtime.sendMessage(t);else if(s){const s=new we;e.postMessage(me+JSON.stringify({method:t.method,sessionId:t.sessionId}),"*",[s.port2]),s.port1.postMessage(t)}else e.postMessage(me+JSON.stringify(t),"*")}function Le(e,t,s,o){const n=ce.preProcessDoc(e,t,o),a=ce.serialize(e);ce.postProcessDoc(e,n.markedElements);return{windowId:s,content:a,baseURI:e.baseURI.split("#")[0],title:e.title,canvases:n.canvases,fonts:n.fonts,stylesheets:n.stylesheets,images:n.images,posters:n.posters,usedFonts:n.usedFonts,shadowRoots:n.shadowRoots,imports:n.imports,processed:!0}}function De(e){let t=Array.from(e.querySelectorAll(ue));return e.querySelectorAll("*").forEach((e=>{const s=ce.getShadowRoot(e);s&&(t=t.concat(...s.querySelectorAll(ue)))})),t}Te&&(Ne="0",be&&be.runtime&&be.runtime.onMessage&&be.runtime.onMessage.addListener&&be.runtime.onMessage.addListener((e=>e.method==he?(Fe(e),Promise.resolve({})):e.method==pe?(_e("requestTimeouts",e.sessionId,e.windowId),xe(e.sessionId,e.windowId),Promise.resolve({})):void 0))),((e,t,s)=>{globalThis.addEventListener(e,t,s)})("message",(async e=>{if("string"==typeof e.data&&e.data.startsWith(me)){e.preventDefault(),e.stopPropagation();const t=JSON.parse(e.data.substring(me.length));if(t.method==ge)e.source&&Oe(e.source,{method:pe,windowId:t.windowId,sessionId:t.sessionId}),Te||(globalThis.stop(),t.options.loadDeferredImages&&se(t.options),await Se(t));else if(t.method==pe)_e("requestTimeouts",t.sessionId,t.windowId),xe(t.sessionId,t.windowId);else if(t.method==fe)Re(t);else if(t.method==he&&Ae.get(t.sessionId)){e.ports[0].onmessage=e=>Fe(e.data)}}}),!0);const 
ke=["area","base","br","col","command","embed","hr","img","input","keygen","link","meta","param","source","track","wbr"],qe=[{tagName:"head",accept:e=>!e.childNodes.length||1==e.childNodes[0].nodeType},{tagName:"body",accept:e=>!e.childNodes.length}],Ue=[{tagName:"html",accept:e=>!e||8!=e.nodeType},{tagName:"head",accept:e=>!e||8!=e.nodeType&&(3!=e.nodeType||!Be(e.textContent))},{tagName:"body",accept:e=>!e||8!=e.nodeType},{tagName:"li",accept:(e,t)=>!e&&t.parentElement&&("UL"==t.parentElement.tagName||"OL"==t.parentElement.tagName)||e&&["LI"].includes(e.tagName)},{tagName:"dt",accept:e=>!e||["DT","DD"].includes(e.tagName)},{tagName:"p",accept:e=>e&&["ADDRESS","ARTICLE","ASIDE","BLOCKQUOTE","DETAILS","DIV","DL","FIELDSET","FIGCAPTION","FIGURE","FOOTER","FORM","H1","H2","H3","H4","H5","H6","HEADER","HR","MAIN","NAV","OL","P","PRE","SECTION","TABLE","UL"].includes(e.tagName)},{tagName:"dd",accept:e=>!e||["DT","DD"].includes(e.tagName)},{tagName:"rt",accept:e=>!e||["RT","RP"].includes(e.tagName)},{tagName:"rp",accept:e=>!e||["RT","RP"].includes(e.tagName)},{tagName:"optgroup",accept:e=>!e||["OPTGROUP"].includes(e.tagName)},{tagName:"option",accept:e=>!e||["OPTION","OPTGROUP"].includes(e.tagName)},{tagName:"colgroup",accept:e=>!e||8!=e.nodeType&&(3!=e.nodeType||!Be(e.textContent))},{tagName:"caption",accept:e=>!e||8!=e.nodeType&&(3!=e.nodeType||!Be(e.textContent))},{tagName:"thead",accept:e=>!e||["TBODY","TFOOT"].includes(e.tagName)},{tagName:"tbody",accept:e=>!e||["TBODY","TFOOT"].includes(e.tagName)},{tagName:"tfoot",accept:e=>!e},{tagName:"tr",accept:e=>!e||["TR"].includes(e.tagName)},{tagName:"td",accept:e=>!e||["TD","TH"].includes(e.tagName)},{tagName:"th",accept:e=>!e||["TD","TH"].includes(e.tagName)}],He=["style","script","xmp","iframe","noembed","noframes","plaintext","noscript"];function Ve(e,t,s){return 3==e.nodeType?function(e){const t=e.parentNode;let s;t&&1==t.nodeType&&(s=t.tagName.toLowerCase());return!s||He.includes(s)?"script"==s?e.textContent.replace(/<\//gi,"<\\/").replace(/\/>/gi,"\\/>"):e.textContent:e.textContent.replace(/&/g,"&").replace(/\u00a0/g," ").replace(//g,">")}(e):8==e.nodeType?"\x3c!--"+e.textContent+"--\x3e":1==e.nodeType?function(e,t,s){const o=e.tagName.toLowerCase(),n=t&&qe.find((t=>o==t.tagName&&t.accept(e)));let a="";n&&!e.attributes.length||(a="<"+o,Array.from(e.attributes).forEach((s=>a+=function(e,t,s){const o=e.name;let n="";if(!o.match(/["'>/=]/)){let a,i=e.value;s&&"class"==o&&(i=Array.from(t.classList).map((e=>e.trim())).join(" ")),i=i.replace(/&/g,"&").replace(/\u00a0/g," "),i.includes('"')&&(i.includes("'")||!s?i=i.replace(/"/g,"""):a=!0);const r=!s||!i.match(/^[^ \t\n\f\r'"`=<>]+$/);n+=" ",e.namespace?"http://www.w3.org/XML/1998/namespace"==e.namespaceURI?n+="xml:"+o:"http://www.w3.org/2000/xmlns/"==e.namespaceURI?("xmlns"!==o&&(n+="xmlns:"),n+=o):"http://www.w3.org/1999/xlink"==e.namespaceURI?n+="xlink:"+o:n+=o:n+=o,""!=i&&(n+="=",r&&(n+=a?"'":'"'),n+=i,r&&(n+=a?"'":'"'))}return n}(s,e,t))),a+=">");"TEMPLATE"!=e.tagName||e.childNodes.length?Array.from(e.childNodes).forEach((e=>a+=Ve(e,t,s||"svg"==o))):a+=e.innerHTML;const i=t&&Ue.find((t=>o==t.tagName&&t.accept(e.nextSibling,e)));(s||!i&&!ke.includes(o))&&(a+="");return a}(e,t,s):void 0}function Be(e){return Boolean(e.match(/^[ \t\n\f\r]/))}const ze={frameTree:Object.freeze({__proto__:null,getAsync:function(e){const t=ve();return e=JSON.parse(JSON.stringify(e)),new 
Promise((s=>{Ae.set(t,{frames:[],requestTimeouts:{},responseTimeouts:{},resolve:e=>{e.sessionId=t,s(e)}}),Se({windowId:Ne,sessionId:t,options:e})}))},getSync:function(e){const t=ve();e=JSON.parse(JSON.stringify(e)),Ae.set(t,{frames:[],requestTimeouts:{},responseTimeouts:{}}),function(e){const t=e.sessionId,s=globalThis._singleFile_waitForUserScript;Te||(Ne=globalThis.frameId=e.windowId);Ce(Ie,e.options,Ne,t),Te||(e.options.userScriptEnabled&&s&&s(ce.ON_BEFORE_CAPTURE_EVENT_NAME),Me({frames:[Le(Ie,globalThis,Ne,e.options)],sessionId:t,requestedFrameId:Ie.documentElement.dataset.requestedFrameId&&Ne}),e.options.userScriptEnabled&&s&&s(ce.ON_AFTER_CAPTURE_EVENT_NAME),delete Ie.documentElement.dataset.requestedFrameId)}({windowId:Ne,sessionId:t,options:e});const s=Ae.get(t).frames;return s.sessionId=t,s},cleanup:function(e){Ae.delete(e),Re({windowId:Ne,sessionId:e,options:{sessionId:e}})},initResponse:Fe,TIMEOUT_INIT_REQUEST_MESSAGE:750})},We={COMMENT_HEADER:"Page saved with SingleFile",COMMENT_HEADER_LEGACY:"Archive processed by SingleFile",ON_BEFORE_CAPTURE_EVENT_NAME:g,ON_AFTER_CAPTURE_EVENT_NAME:p,preProcessDoc:k,postProcessDoc:B,serialize:(e,t)=>function(e,t){const s=e.doctype;let o="";return s&&(o=" "),o+Ve(e.documentElement,t)}(e,t),getShadowRoot:H};D("single-file-user-script-init",(()=>globalThis._singleFile_waitForUserScript=async e=>{const t=new CustomEvent(e+"-request",{cancelable:!0}),s=new Promise((t=>D(e+"-response",t)));(e=>{globalThis.dispatchEvent(e)})(t),t.defaultPrevented&&await s})),e.helper=We,e.processors=ze,Object.defineProperty(e,"__esModule",{value:!0})})); 2 | -------------------------------------------------------------------------------- /extension/addon/single-file/dist/single-file-frames.js: -------------------------------------------------------------------------------- 1 | !function(e){"function"==typeof define&&define.amd?define(e):e()}((function(){"use strict";const e="single-file-load-image",t="single-file-image-loaded",s=globalThis.browser,o=e=>globalThis.dispatchEvent(e),n=globalThis.CustomEvent,i=globalThis.document,a=globalThis.HTMLDocument;let r;if(r=window._singleFile_fontFaces?window._singleFile_fontFaces:window._singleFile_fontFaces=[],i instanceof a&&s&&s.runtime&&s.runtime.getURL){l="single-file-new-font-face",d=e=>{const t=e.detail;r.find((e=>JSON.stringify(e)==JSON.stringify(t)))||r.push(e.detail)},globalThis.addEventListener(l,d,c);let e=i.createElement("script");e.textContent="("+function(){"undefined"==typeof globalThis&&(window.globalThis=window);const e=globalThis.console,t=e=>globalThis.dispatchEvent(e),s=globalThis.CustomEvent,o=globalThis.FileReader,n=globalThis.Blob,i=e&&e.warn&&((...t)=>e.warn(...t))||(()=>{}),a="single-file-new-font-face",r={family:"font-family",style:"font-style",weight:"font-weight",stretch:"font-stretch",unicodeRange:"unicode-range",variant:"font-variant",featureSettings:"font-feature-settings"};if(globalThis.FontFace){const e=globalThis.FontFace;let l;globalThis.FontFace=function(){l||(i("SingleFile is hooking the FontFace constructor to get font URLs."),l=!0);const d={};d["font-family"]=arguments[0],d.src=arguments[1];const c=arguments[2];if(c&&Object.keys(c).forEach((e=>{r[e]&&(d[r[e]]=c[e])})),d.src instanceof ArrayBuffer){const e=new o;e.readAsDataURL(new n([d.src])),e.addEventListener("load",(()=>{d.src="url("+e.result+")",t(new s(a,{detail:d}))}))}else t(new s(a,{detail:d}));return new e(...arguments)},globalThis.FontFace.toString=function(){return"function FontFace() { [native code] 
}"}}}.toString()+")()",(i.documentElement||i).appendChild(e),e.remove(),e=i.createElement("script"),e.src=s.runtime.getURL("/single-file/dist/web/hooks/hooks-frames-web.js"),e.async=!1,(i.documentElement||i).appendChild(e),e.remove()}var l,d,c;const m=new RegExp("\\\\([\\da-f]{1,6}[\\x20\\t\\r\\n\\f]?|([\\x20\\t\\r\\n\\f])|.)","ig");const u="data-single-file-removed-content",g="data-single-file-hidden-content",f="data-single-file-kept-content",h="data-single-file-hidden-frame",p="data-single-file-preserved-space-element",b="data-single-file-shadow-root-element",w="data-single-file-image",y="data-single-file-poster",E="data-single-file-canvas",T="data-single-file-import",A="data-single-file-input-value",I="data-single-file-lazy-loaded-src",v="data-single-file-stylesheet",S="data-single-file-disabled-noscript",R="data-single-file-async-script",F="*:not(base):not(link):not(meta):not(noscript):not(script):not(style):not(template):not(title)",C=["NOSCRIPT","DISABLED-NOSCRIPT","META","LINK","STYLE","TITLE","TEMPLATE","SOURCE","OBJECT","SCRIPT","HEAD"],k=/^'(.*?)'$/,M=/^"(.*?)"$/,x={regular:"400",normal:"400",bold:"700",bolder:"700",lighter:"100"},N="single-file-ui-element";function q(e,t,s,o,n={usedFonts:new Map,canvases:[],images:[],posters:[],shadowRoots:[],imports:[],markedElements:[]},i){return Array.from(s.childNodes).filter((t=>t instanceof e.HTMLElement||t instanceof e.SVGElement)).forEach((s=>{let a,r,l;if(!o.autoSaveExternalSave&&(o.removeHiddenElements||o.removeUnusedFonts||o.compressHTML)&&(l=e.getComputedStyle(s),s instanceof e.HTMLElement&&o.removeHiddenElements&&(r=(i||s.closest("html > head"))&&C.includes(s.tagName)||s.closest("details"),r||(a=i||function(e,t){let s=!1;if(t){const o=t.getPropertyValue("display"),n=t.getPropertyValue("opacity"),i=t.getPropertyValue("visibility");if(s="none"==o,!s&&("0"==n||"hidden"==i)&&e.getBoundingClientRect){const t=e.getBoundingClientRect();s=!t.width&&!t.height}}return Boolean(s)}(s,l),a&&(s.setAttribute(g,""),n.markedElements.push(s)))),!a)){if(o.compressHTML&&l){const e=l.getPropertyValue("white-space");e&&e.startsWith("pre")&&(s.setAttribute(p,""),n.markedElements.push(s))}o.removeUnusedFonts&&(P(l,o,n.usedFonts),P(e.getComputedStyle(s,":first-letter"),o,n.usedFonts),P(e.getComputedStyle(s,":before"),o,n.usedFonts),P(e.getComputedStyle(s,":after"),o,n.usedFonts))}!function(e,t,s,o,n,i,a){if("CANVAS"==s.tagName)try{const t=O(e,s,a);n.canvases.push({dataURI:s.toDataURL("image/png",""),width:t.width,height:t.height}),s.setAttribute(E,n.canvases.length-1),n.markedElements.push(s)}catch(e){}if("IMG"==s.tagName){const t={currentSrc:i?"data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==":o.loadDeferredImages&&s.getAttribute(I)||s.currentSrc};if(n.images.push(t),s.setAttribute(w,n.images.length-1),n.markedElements.push(s),s.removeAttribute(I),a=a||e.getComputedStyle(s)){t.size=O(e,s,a);const o=a.getPropertyValue("box-shadow"),n=a.getPropertyValue("background-image");o&&"none"!=o||n&&"none"!=n||!(t.size.pxWidth>1||t.size.pxHeight>1)||(t.replaceable=!0,t.backgroundColor=a.getPropertyValue("background-color"),t.objectFit=a.getPropertyValue("object-fit"),t.boxSizing=a.getPropertyValue("box-sizing"),t.objectPosition=a.getPropertyValue("object-position"))}}if("VIDEO"==s.tagName&&!s.poster){const 
e=t.createElement("canvas"),o=e.getContext("2d");e.width=s.clientWidth,e.height=s.clientHeight;try{o.drawImage(s,0,0,e.width,e.height),n.posters.push(e.toDataURL("image/png","")),s.setAttribute(y,n.posters.length-1),n.markedElements.push(s)}catch(e){}}"IFRAME"==s.tagName&&i&&o.removeHiddenElements&&(s.setAttribute(h,""),n.markedElements.push(s));"LINK"==s.tagName&&s.import&&s.import.documentElement&&(n.imports.push({content:U(s.import)}),s.setAttribute(T,n.imports.length-1),n.markedElements.push(s));"INPUT"==s.tagName&&("password"!=s.type&&(s.setAttribute(A,s.value),n.markedElements.push(s)),"radio"!=s.type&&"checkbox"!=s.type||(s.setAttribute(A,s.checked),n.markedElements.push(s)));"TEXTAREA"==s.tagName&&(s.setAttribute(A,s.value),n.markedElements.push(s));"SELECT"==s.tagName&&s.querySelectorAll("option").forEach((e=>{e.selected&&(e.setAttribute(A,""),n.markedElements.push(e))}));"SCRIPT"==s.tagName&&(s.async&&""!=s.getAttribute("async")&&"async"!=s.getAttribute("async")&&(s.setAttribute(R,""),n.markedElements.push(s)),s.textContent=s.textContent.replace(/<\/script>/gi,"<\\/script>"))}(e,t,s,o,n,a,l);const d=!(s instanceof e.SVGElement)&&L(s);if(d&&!s.classList.contains(N)){const i={};s.setAttribute(b,n.shadowRoots.length),n.markedElements.push(s),n.shadowRoots.push(i),q(e,t,d,o,n,a),i.content=d.innerHTML,i.delegatesFocus=d.delegatesFocus,i.mode=d.mode,d.adoptedStyleSheets&&d.adoptedStyleSheets.length&&(i.adoptedStyleSheets=Array.from(d.adoptedStyleSheets).map((e=>Array.from(e.cssRules).map((e=>e.cssText)).join("\n"))))}q(e,t,s,o,n,a),!o.autoSaveExternalSave&&o.removeHiddenElements&&i&&(r||""==s.getAttribute(f)?s.parentElement&&(s.parentElement.setAttribute(f,""),n.markedElements.push(s.parentElement)):a&&(s.setAttribute(u,""),n.markedElements.push(s)))})),n}function P(e,t,s){if(e){const o=e.getPropertyValue("font-style")||"normal";e.getPropertyValue("font-family").split(",").forEach((n=>{if(n=_(n),!t.loadedFonts||t.loadedFonts.find((e=>_(e.family)==n&&e.style==o))){const t=(i=e.getPropertyValue("font-weight"),x[i.toLowerCase().trim()]||i),a=e.getPropertyValue("font-variant")||"normal",r=[n,t,o,a];s.set(JSON.stringify(r),[n,t,o,a])}var i}))}}function L(e){const t=globalThis.chrome;if(e.openOrClosedShadowRoot)return e.openOrClosedShadowRoot;if(!(t&&t.dom&&t.dom.openOrClosedShadowRoot))return e.shadowRoot;try{return t.dom.openOrClosedShadowRoot(e)}catch(t){return e.shadowRoot}}function _(e=""){return function(e){e=e.match(k)?e.replace(k,"$1"):e.replace(M,"$1");return e.trim()}((t=e.trim(),t.replace(m,((e,t,s)=>{const o="0x"+t-65536;return o!=o||s?t:o<0?String.fromCharCode(o+65536):String.fromCharCode(o>>10|55296,1023&o|56320)})))).toLowerCase();var t}function D(e){if(e){const t=[];return e.querySelectorAll("style").forEach(((s,o)=>{try{const n=e.createElement("style");n.textContent=s.textContent,e.body.appendChild(n);const i=n.sheet;n.remove(),i&&i.cssRules.length==s.sheet.cssRules.length||(s.setAttribute(v,o),t[o]=Array.from(s.sheet.cssRules).map((e=>e.cssText)).join("\n"))}catch(e){}})),t}}function O(e,t,s){let o=t.naturalWidth,n=t.naturalHeight;if(!o&&!n){let i,a,r,l,d,c,m,u,g=!1;if("content-box"==(s=s||e.getComputedStyle(t)).getPropertyValue("box-sizing")){const 
e=t.style.getPropertyValue("box-sizing"),s=t.style.getPropertyPriority("box-sizing"),o=t.clientWidth;t.style.setProperty("box-sizing","border-box","important"),g=t.clientWidth!=o,e?t.style.setProperty("box-sizing",e,s):t.style.removeProperty("box-sizing")}i=V("padding-left",s),a=V("padding-right",s),r=V("padding-top",s),l=V("padding-bottom",s),g?(d=V("border-left-width",s),c=V("border-right-width",s),m=V("border-top-width",s),u=V("border-bottom-width",s)):d=c=m=u=0,o=Math.max(0,t.clientWidth-i-a-d-c),n=Math.max(0,t.clientHeight-r-l-m-u)}return{pxWidth:o,pxHeight:n}}function V(e,t){if(t.getPropertyValue(e).endsWith("px"))return parseFloat(t.getPropertyValue(e))}function U(e){const t=e.doctype;let s="";return t&&(s=" "),s+e.documentElement.outerHTML}const z=I,B=N,H="attributes",W=globalThis.browser,j=globalThis.document,J=globalThis.MutationObserver,K=(e,t,s)=>globalThis.addEventListener(e,t,s),G=(e,t,s)=>globalThis.removeEventListener(e,t,s),Y=new Map;async function $(s){if(j.documentElement){Y.clear();const i=Math.max(j.documentElement.scrollHeight-1.5*j.documentElement.clientHeight,0),a=Math.max(j.documentElement.scrollWidth-1.5*j.documentElement.clientWidth,0);if(globalThis.scrollY<=i&&globalThis.scrollX<=a)return function(s){return new Promise((async i=>{let a;const r=new Set,l=new J((async e=>{if((e=e.filter((e=>e.type==H))).length){e.filter((e=>{if("src"==e.attributeName&&(e.target.setAttribute(z,e.target.src),e.target.addEventListener("load",c)),"src"==e.attributeName||"srcset"==e.attributeName||"SOURCE"==e.target.tagName)return!e.target.classList||!e.target.classList.contains(B)})).length&&(a=!0,await Z(l,s,g),r.size||await X(l,s,g))}}));async function d(e){await ee("idleTimeout",(async()=>{a?(se("idleTimeout"),await d(Math.max(500,e/2))):(se("loadTimeout"),se("maxTimeout"),Q(l,s,g))}),e)}function c(e){const t=e.target;t.removeAttribute(z),t.removeEventListener("load",c)}async function m(e){a=!0,await Z(l,s,g),await X(l,s,g),e.detail&&r.add(e.detail)}async function u(e){await Z(l,s,g),await X(l,s,g),r.delete(e.detail),r.size||await X(l,s,g)}function g(s){l.disconnect(),G(e,m),G(t,u),i(s)}await d(2*s.loadDeferredImagesMaxIdleTime),await Z(l,s,g),l.observe(j,{subtree:!0,childList:!0,attributes:!0}),K(e,m),K(t,u),function(e){e.loadDeferredImagesBlockCookies&&o(new n("single-file-block-cookies-start")),e.loadDeferredImagesBlockStorage&&o(new n("single-file-block-storage-start")),e.loadDeferredImagesKeepZoomLevel?o(new n("single-file-load-deferred-images-keep-zoom-level-start")):o(new n("single-file-load-deferred-images-start"))}(s)}))}(s)}}async function X(e,t,s){await ee("loadTimeout",(()=>Q(e,t,s)),t.loadDeferredImagesMaxIdleTime)}async function Z(e,t,s){await ee("maxTimeout",(async()=>{await se("loadTimeout"),await Q(e,t,s)}),10*t.loadDeferredImagesMaxIdleTime)}async function Q(e,t,s){await se("idleTimeout"),function(e){e.loadDeferredImagesBlockCookies&&o(new n("single-file-block-cookies-end")),e.loadDeferredImagesBlockStorage&&o(new n("single-file-block-storage-end")),e.loadDeferredImagesKeepZoomLevel?o(new n("single-file-load-deferred-images-keep-zoom-level-end")):o(new n("single-file-load-deferred-images-end"))}(t),await ee("endTimeout",(async()=>{await se("maxTimeout"),s()}),t.loadDeferredImagesMaxIdleTime/2),e.disconnect()}async function ee(e,t,s){if(W&&W.runtime&&W.runtime.sendMessage){if(!Y.get(e)||!Y.get(e).pending){const o={callback:t,pending:!0};Y.set(e,o);try{await 
W.runtime.sendMessage({method:"singlefile.lazyTimeout.setTimeout",type:e,delay:s})}catch(o){te(e,t,s)}o.pending=!1}}else te(e,t,s)}function te(e,t,s){const o=Y.get(e);o&&globalThis.clearTimeout(o),Y.set(e,t),globalThis.setTimeout(t,s)}async function se(e){if(W&&W.runtime&&W.runtime.sendMessage)try{await W.runtime.sendMessage({method:"singlefile.lazyTimeout.clearTimeout",type:e})}catch(t){oe(e)}else oe(e)}function oe(e){const t=Y.get(e);Y.delete(e),t&&globalThis.clearTimeout(t)}W&&W.runtime&&W.runtime.onMessage&&W.runtime.onMessage.addListener&&W.runtime.onMessage.addListener((e=>{if("singlefile.lazyTimeout.onTimeout"==e.method){const t=Y.get(e.type);if(t){Y.delete(e.type);try{t.callback()}catch(t){oe(e.type)}}}}));const ne={ON_BEFORE_CAPTURE_EVENT_NAME:"single-file-on-before-capture",ON_AFTER_CAPTURE_EVENT_NAME:"single-file-on-after-capture",WIN_ID_ATTRIBUTE_NAME:"data-single-file-win-id",preProcessDoc:function(e,t,s){let o;return e.querySelectorAll("noscript:not([data-single-file-disabled-noscript])").forEach((e=>{e.setAttribute(S,e.textContent),e.textContent=""})),function(e){e.querySelectorAll("meta[http-equiv=refresh]").forEach((e=>{e.removeAttribute("http-equiv"),e.setAttribute("disabled-http-equiv","refresh")}))}(e),e.head&&e.head.querySelectorAll(F).forEach((e=>e.hidden=!0)),e.querySelectorAll("svg foreignObject").forEach((e=>{const t=e.querySelectorAll("html > head > "+F+", html > body > "+F);t.length&&(Array.from(e.childNodes).forEach((e=>e.remove())),t.forEach((t=>e.appendChild(t))))})),o=t&&e.documentElement?q(t,e,e.documentElement,s):{canvases:[],images:[],posters:[],usedFonts:[],shadowRoots:[],imports:[],markedElements:[]},{canvases:o.canvases,fonts:r,stylesheets:D(e),images:o.images,posters:o.posters,usedFonts:Array.from(o.usedFonts.values()),shadowRoots:o.shadowRoots,imports:o.imports,referrer:e.referrer,markedElements:o.markedElements}},serialize:U,postProcessDoc:function(e,t){if(e.querySelectorAll("[data-single-file-disabled-noscript]").forEach((e=>{e.textContent=e.getAttribute(S),e.removeAttribute(S)})),e.querySelectorAll("meta[disabled-http-equiv]").forEach((e=>{e.setAttribute("http-equiv",e.getAttribute("disabled-http-equiv")),e.removeAttribute("disabled-http-equiv")})),e.head&&e.head.querySelectorAll("*:not(base):not(link):not(meta):not(noscript):not(script):not(style):not(template):not(title)").forEach((e=>e.removeAttribute("hidden"))),!t){const s=[u,h,g,p,w,y,E,A,b,T,v,R];t=e.querySelectorAll(s.map((e=>"["+e+"]")).join(","))}t.forEach((e=>{e.removeAttribute(u),e.removeAttribute(g),e.removeAttribute(f),e.removeAttribute(h),e.removeAttribute(p),e.removeAttribute(w),e.removeAttribute(y),e.removeAttribute(E),e.removeAttribute(A),e.removeAttribute(b),e.removeAttribute(T),e.removeAttribute(v),e.removeAttribute(R)}))},getShadowRoot:L},ie="__frameTree__::",ae='iframe, frame, object[type="text/html"][data]',re="singlefile.frameTree.initRequest",le="singlefile.frameTree.ackInitRequest",de="singlefile.frameTree.cleanupRequest",ce="singlefile.frameTree.initResponse",me=".",ue=globalThis.window==globalThis.top,ge=globalThis.browser,fe=globalThis.top,he=globalThis.MessageChannel,pe=globalThis.document,be=new Map;let we;function ye(e){e.frames.forEach((t=>Te("responseTimeouts",e.sessionId,t.windowId)));const t=be.get(e.sessionId);if(t){e.requestedFrameId&&(t.requestedFrameId=e.requestedFrameId),e.frames.forEach((e=>{let 
s=t.frames.find((t=>e.windowId==t.windowId));s||(s={windowId:e.windowId},t.frames.push(s)),s.processed||(s.content=e.content,s.baseURI=e.baseURI,s.title=e.title,s.canvases=e.canvases,s.fonts=e.fonts,s.stylesheets=e.stylesheets,s.images=e.images,s.posters=e.posters,s.usedFonts=e.usedFonts,s.shadowRoots=e.shadowRoots,s.imports=e.imports,s.processed=e.processed)}));t.frames.filter((e=>!e.processed)).length||(t.frames=t.frames.sort(((e,t)=>t.windowId.split(me).length-e.windowId.split(me).length)),t.resolve&&(t.requestedFrameId&&t.frames.forEach((e=>{e.windowId==t.requestedFrameId&&(e.requestedFrame=!0)})),t.resolve(t.frames)))}}function Ee(e,t,s,o){const n=Fe(e);!function(e,t,s,o,n){const i=[];let a;be.get(n)?a=be.get(n).requestTimeouts:(a={},be.set(n,{requestTimeouts:a}));t.forEach(((e,t)=>{const s=o+me+t;e.setAttribute(ne.WIN_ID_ATTRIBUTE_NAME,s),i.push({windowId:s})})),ve({frames:i,sessionId:n,requestedFrameId:e.documentElement.dataset.requestedFrameId&&o}),t.forEach(((e,t)=>{const i=o+me+t;try{Se(e.contentWindow,{method:re,windowId:i,sessionId:n,options:s})}catch(e){}a[i]=globalThis.setTimeout((()=>ve({frames:[{windowId:i,processed:!0}],sessionId:n})),750)})),delete e.documentElement.dataset.requestedFrameId}(e,n,t,s,o),n.length&&function(e,t,s,o,n){const i=[];t.forEach(((e,t)=>{const a=o+me+t;let r;try{r=e.contentDocument}catch(e){}if(r)try{const t=e.contentWindow;t.stop(),Te("requestTimeouts",n,a),Ee(r,s,a,n),i.push(Re(r,t,a,s))}catch(e){i.push({windowId:a,processed:!0})}})),ve({frames:i,sessionId:n,requestedFrameId:e.documentElement.dataset.requestedFrameId&&o}),delete e.documentElement.dataset.requestedFrameId}(e,n,t,s,o)}function Te(e,t,s){const o=be.get(t);if(o&&o[e]){const t=o[e][s];t&&(globalThis.clearTimeout(t),delete o[e][s])}}function Ae(e,t){const s=be.get(e);s&&s.responseTimeouts&&(s.responseTimeouts[t]=globalThis.setTimeout((()=>ve({frames:[{windowId:t,processed:!0}],sessionId:e})),1e4))}function Ie(e,t,s){e.forEach(((e,o)=>{const n=t+me+o;e.removeAttribute(ne.WIN_ID_ATTRIBUTE_NAME);try{Se(e.contentWindow,{method:de,windowId:n,sessionId:s})}catch(e){}})),e.forEach(((e,o)=>{const n=t+me+o;let i;try{i=e.contentDocument}catch(e){}if(i)try{Ie(Fe(i),n,s)}catch(e){}}))}function ve(e){e.method=ce;try{fe.singlefile.processors.frameTree.initResponse(e)}catch(t){Se(fe,e,!0)}}function Se(e,t,s){if(e==fe&&ge&&ge.runtime&&ge.runtime.sendMessage)ge.runtime.sendMessage(t);else if(s){const s=new he;e.postMessage(ie+JSON.stringify({method:t.method,sessionId:t.sessionId}),"*",[s.port2]),s.port1.postMessage(t)}else e.postMessage(ie+JSON.stringify(t),"*")}function Re(e,t,s,o){const n=ne.preProcessDoc(e,t,o),i=ne.serialize(e);ne.postProcessDoc(e,n.markedElements);return{windowId:s,content:i,baseURI:e.baseURI.split("#")[0],title:e.title,canvases:n.canvases,fonts:n.fonts,stylesheets:n.stylesheets,images:n.images,posters:n.posters,usedFonts:n.usedFonts,shadowRoots:n.shadowRoots,imports:n.imports,processed:!0}}function Fe(e){let t=Array.from(e.querySelectorAll(ae));return e.querySelectorAll("*").forEach((e=>{const s=ne.getShadowRoot(e);s&&(t=t.concat(...s.querySelectorAll(ae)))})),t}ue&&(we="0",ge&&ge.runtime&&ge.runtime.onMessage&&ge.runtime.onMessage.addListener&&ge.runtime.onMessage.addListener((e=>e.method==ce?(ye(e),Promise.resolve({})):e.method==le?(Te("requestTimeouts",e.sessionId,e.windowId),Ae(e.sessionId,e.windowId),Promise.resolve({})):void 0))),((e,t,s)=>{globalThis.addEventListener(e,t,s)})("message",(async e=>{if("string"==typeof 
e.data&&e.data.startsWith(ie)){e.preventDefault(),e.stopPropagation();const t=JSON.parse(e.data.substring(ie.length));if(t.method==re)e.source&&Se(e.source,{method:le,windowId:t.windowId,sessionId:t.sessionId}),ue||(globalThis.stop(),t.options.loadDeferredImages&&$(t.options),await async function(e){const t=e.sessionId,s=globalThis._singleFile_waitForUserScript;ue||(we=globalThis.frameId=e.windowId);Ee(pe,e.options,we,t),ue||(e.options.userScriptEnabled&&s&&await s(ne.ON_BEFORE_CAPTURE_EVENT_NAME),ve({frames:[Re(pe,globalThis,we,e.options)],sessionId:t,requestedFrameId:pe.documentElement.dataset.requestedFrameId&&we}),e.options.userScriptEnabled&&s&&await s(ne.ON_AFTER_CAPTURE_EVENT_NAME),delete pe.documentElement.dataset.requestedFrameId)}(t));else if(t.method==le)Te("requestTimeouts",t.sessionId,t.windowId),Ae(t.sessionId,t.windowId);else if(t.method==de)!function(e){const t=e.sessionId;Ie(Fe(pe),e.windowId,t)}(t);else if(t.method==ce&&be.get(t.sessionId)){e.ports[0].onmessage=e=>ye(e.data)}}}),!0)})); 2 | -------------------------------------------------------------------------------- /extension/addon/single-file/dist/web/hooks/hooks-frames-web.js: -------------------------------------------------------------------------------- 1 | !function(){"use strict";(e=>{const t="single-file-lazy-load",n="single-file-load-image",i="single-file-image-loaded",o="single-file-new-font-face",r={family:"font-family",style:"font-style",weight:"font-weight",stretch:"font-stretch",unicodeRange:"unicode-range",variant:"font-variant",featureSettings:"font-feature-settings"},l=(t,n,i)=>e.addEventListener(t,n,i),s=t=>e.dispatchEvent(t),c=e.CustomEvent,d=e.document,g=e.screen,a=e.Element,m=e.UIEvent,_=e.FileReader,f=e.Blob,u=e.console,y=u&&u.warn&&((...e)=>u.warn(...e))||(()=>{}),h=new Map,p=new Map;function F(o){const r=d.scrollingElement||d.documentElement,l=r.clientHeight,_=r.clientWidth,f=Math.max(r.scrollHeight-l,l),u=Math.max(r.scrollWidth-_,_);if(d.querySelectorAll("[loading=lazy]").forEach((e=>{e.loading="eager",e.setAttribute(t,"")})),r.__defineGetter__("clientHeight",(()=>f)),r.__defineGetter__("clientWidth",(()=>u)),g.__defineGetter__("height",(()=>f)),g.__defineGetter__("width",(()=>u)),e._singleFile_innerHeight=e.innerHeight,e._singleFile_innerWidth=e.innerWidth,e.__defineGetter__("innerHeight",(()=>f)),e.__defineGetter__("innerWidth",(()=>u)),o||e._singleFile_getBoundingClientRect||(e._singleFile_getBoundingClientRect=a.prototype.getBoundingClientRect,a.prototype.getBoundingClientRect=function(){const t=e._singleFile_getBoundingClientRect.call(this);return this==r&&(t.__defineGetter__("height",(()=>f)),t.__defineGetter__("bottom",(()=>f+t.top)),t.__defineGetter__("width",(()=>u)),t.__defineGetter__("right",(()=>u+t.left))),t}),!e._singleFileImage){const t=e.Image;e._singleFileImage=e.Image,e.__defineGetter__("Image",(function(){return function(){const e=new t(...arguments),o=new t(...arguments);return o.__defineSetter__("src",(function(t){e.src=t,s(new c(n,{detail:e.src}))})),o.__defineGetter__("src",(function(){return e.src})),o.__defineSetter__("srcset",(function(t){s(new c(n)),e.srcset=t})),o.__defineGetter__("srcset",(function(){return e.srcset})),e.onload=e.onloadend=e.onerror=t=>{s(new c(i,{detail:e.src})),o.dispatchEvent(new m(t.type,t))},e.decode&&(o.decode=()=>e.decode()),o}}))}let y,F;o?(y=l/f,F=_/u):(y=(l+e.scrollY)/f,F=(_+e.scrollX)/u);const E=Math.min(y,F);if(E<1){const 
e=d.documentElement.style.getPropertyValue("transform"),t=d.documentElement.style.getPropertyPriority("transform"),n=d.documentElement.style.getPropertyValue("transform-origin"),i=d.documentElement.style.getPropertyPriority("transform-origin"),r=d.documentElement.style.getPropertyValue("min-height"),l=d.documentElement.style.getPropertyPriority("min-height");d.documentElement.style.setProperty("transform-origin",(y<1?"50%":"0")+" "+(F<1?"50%":"0")+" 0","important"),d.documentElement.style.setProperty("transform","scale3d("+E+", "+E+", 1)","important"),d.documentElement.style.setProperty("min-height",100/E+"vh","important"),P(),o?(d.documentElement.style.setProperty("-sf-transform",e,t),d.documentElement.style.setProperty("-sf-transform-origin",n,i),d.documentElement.style.setProperty("-sf-min-height",r,l)):(d.documentElement.style.setProperty("transform",e,t),d.documentElement.style.setProperty("transform-origin",n,i),d.documentElement.style.setProperty("min-height",r,l))}if(!o){P();const e=r.getBoundingClientRect();[...h].forEach((([t,n])=>{const i=n.options&&n.options.root&&n.options.root.getBoundingClientRect,o=i&&n.options.root.getBoundingClientRect(),r=p.get(t);r&&n.callback(r.map((t=>{const n=t.getBoundingClientRect();return{target:t,intersectionRatio:1,boundingClientRect:n,intersectionRect:n,isIntersecting:!0,rootBounds:i?o:e,time:0}})),t)}))}}function E(n){d.querySelectorAll("[single-file-lazy-load]").forEach((e=>{e.loading="lazy",e.removeAttribute(t)})),n||e._singleFile_getBoundingClientRect&&(a.prototype.getBoundingClientRect=e._singleFile_getBoundingClientRect,delete e._singleFile_getBoundingClientRect),e._singleFileImage&&(delete e.Image,e.Image=e._singleFileImage,delete e._singleFileImage),n||P()}function b(){const t=d.scrollingElement||d.documentElement;null!=e._singleFile_innerHeight&&(e.innerHeight=e._singleFile_innerHeight,delete e._singleFile_innerHeight),null!=e._singleFile_innerWidth&&(e.innerWidth=e._singleFile_innerWidth,delete e._singleFile_innerWidth),delete t.clientHeight,delete t.clientWidth,delete g.height,delete g.width}if(l("single-file-load-deferred-images-start",(()=>F())),l("single-file-load-deferred-images-keep-zoom-level-start",(()=>F(!0))),l("single-file-load-deferred-images-end",(()=>E())),l("single-file-load-deferred-images-keep-zoom-level-end",(()=>E(!0))),l("single-file-load-deferred-images-reset",b),l("single-file-load-deferred-images-keep-zoom-level-reset",(()=>{const e=d.documentElement.style.getPropertyValue("-sf-transform"),t=d.documentElement.style.getPropertyPriority("-sf-transform"),n=d.documentElement.style.getPropertyValue("-sf-transform-origin"),i=d.documentElement.style.getPropertyPriority("-sf-transform-origin"),o=d.documentElement.style.getPropertyValue("-sf-min-height"),r=d.documentElement.style.getPropertyPriority("-sf-min-height");d.documentElement.style.setProperty("transform",e,t),d.documentElement.style.setProperty("transform-origin",n,i),d.documentElement.style.setProperty("min-height",o,r),d.documentElement.style.removeProperty("-sf-transform"),d.documentElement.style.removeProperty("-sf-transform-origin"),d.documentElement.style.removeProperty("-sf-min-height"),b()})),l("single-file-block-cookies-start",(()=>{try{d.__defineGetter__("cookie",(()=>{throw new Error("document.cookie temporary blocked by SingleFile")}))}catch(e){}})),l("single-file-block-cookies-end",(()=>{delete 
d.cookie})),l("single-file-block-storage-start",(()=>{e._singleFile_localStorage||(e._singleFile_localStorage=e.localStorage,e.__defineGetter__("localStorage",(()=>{throw new Error("localStorage temporary blocked by SingleFile")}))),e._singleFile_indexedDB||(e._singleFile_indexedDB=e.indexedDB,e.__defineGetter__("indexedDB",(()=>{throw new Error("indexedDB temporary blocked by SingleFile")})))})),l("single-file-block-storage-end",(()=>{e._singleFile_localStorage&&(delete e.localStorage,e.localStorage=e._singleFile_localStorage,delete e._singleFile_localStorage),e._singleFile_indexedDB||(delete e.indexedDB,e.indexedDB=e._singleFile_indexedDB,delete e._singleFile_indexedDB)})),e.FontFace){const t=e.FontFace;let n;e.FontFace=function(){n||(y("SingleFile is hooking the FontFace constructor to get font URLs."),n=!0);const e={};e["font-family"]=arguments[0],e.src=arguments[1];const i=arguments[2];if(i&&Object.keys(i).forEach((t=>{r[t]&&(e[r[t]]=i[t])})),e.src instanceof ArrayBuffer){const t=new _;t.readAsDataURL(new f([e.src])),t.addEventListener("load",(()=>{e.src="url("+t.result+")",s(new c(o,{detail:e}))}))}else s(new c(o,{detail:e}));return new t(...arguments)},e.FontFace.toString=function(){return"function FontFace() { [native code] }"}}if(e.IntersectionObserver){const t=e.IntersectionObserver;let n;e.IntersectionObserver=function(){n||(y("SingleFile is hooking the IntersectionObserver API to detect and load deferred images."),n=!0);const e=new t(...arguments),i=t.prototype.observe||e.observe,o=t.prototype.unobserve||e.unobserve,r=arguments[0],l=arguments[1];return i&&(e.observe=function(t){let n=p.get(e);return n||(n=[],p.set(e,n)),n.push(t),i.call(e,t)}),o&&(e.unobserve=function(t){let n=p.get(e);return n&&(n=n.filter((e=>e!=t)),n.length?p.set(e,n):(p.delete(e),h.delete(e))),o.call(e,t)}),h.set(e,{callback:r,options:l}),e},e.IntersectionObserver.prototype=t.prototype,e.IntersectionObserver.toString=function(){return"function IntersectionObserver() { [native code] }"}}function P(){try{s(new m("resize"))}catch(e){}}})("object"==typeof globalThis?globalThis:window)}(); 2 | -------------------------------------------------------------------------------- /extension/addon/single-file/dist/web/hooks/hooks-web.js: -------------------------------------------------------------------------------- 1 | !function(){"use strict";(e=>{const t=e.CustomEvent,s=e.fetch;var a,r,c;a="single-file-request-fetch",r=async a=>{const r=a.detail;let c;try{const e=await s(r,{cache:"force-cache"});c={url:r,response:await e.arrayBuffer(),headers:[...e.headers],status:e.status}}catch(e){c={url:r,error:e&&e.toString()}}(t=>{e.dispatchEvent(t)})(new t("single-file-response-fetch",{detail:c}))},e.addEventListener(a,r,c)})("object"==typeof globalThis?globalThis:window)}(); 2 | -------------------------------------------------------------------------------- /extension/addon/single-file/lib/single-file/LICENSE.md: -------------------------------------------------------------------------------- 1 | Copyright 2010-2020 Gildas Lormeau 2 | contact : gildas.lormeau gmail.com 3 | 4 | The files in this directory are part of SingleFile. 5 | 6 | The code in this dir is free software: you can redistribute it and/or 7 | modify it under the terms of the GNU Affero General Public License 8 | (GNU AGPL) as published by the Free Software Foundation, either version 3 9 | of the License, or (at your option) any later version. 
10 | 11 | The code in this dir is distributed in the hope that it will be useful, 12 | but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero 14 | General Public License for more details. 15 | 16 | As additional permission under GNU AGPL version 3 section 7, you may 17 | distribute UNMODIFIED VERSIONS OF THIS file without the copy of the GNU 18 | AGPL normally required by section 4, provided you include this license 19 | notice and a URL through which recipients can access the Corresponding 20 | Source. 21 | -------------------------------------------------------------------------------- /extension/addon/single-file/lib/single-file/fetch.js: -------------------------------------------------------------------------------- 1 | /* global browser, XMLHttpRequest */ 2 | 3 | const referrers = new Map(); 4 | const REQUEST_ID_HEADER_NAME = 'x-single-file-request-id'; 5 | // export { 6 | // REQUEST_ID_HEADER_NAME, 7 | // referrers 8 | // }; 9 | 10 | browser.runtime.onMessage.addListener((message, sender) => { 11 | if (message.method && message.method.startsWith('singlefile.fetch')) { 12 | return new Promise(resolve => { 13 | onRequest(message, sender) 14 | .then(resolve) 15 | .catch(error => resolve({ error: error && error.toString() })); 16 | }); 17 | } 18 | }); 19 | 20 | function onRequest(message, sender) { 21 | if (message.method == 'singlefile.fetch') { 22 | return fetchResource(message.url, { referrer: message.referrer }); 23 | } else if (message.method == 'singlefile.fetchFrame') { 24 | return browser.tabs.sendMessage(sender.tab.id, message); 25 | } 26 | } 27 | 28 | function fetchResource(url, options, includeRequestId) { 29 | return new Promise((resolve, reject) => { 30 | const xhrRequest = new XMLHttpRequest(); 31 | xhrRequest.withCredentials = true; 32 | xhrRequest.responseType = 'arraybuffer'; 33 | xhrRequest.onerror = event => reject(new Error(event.detail)); 34 | xhrRequest.onreadystatechange = () => { 35 | if (xhrRequest.readyState == XMLHttpRequest.DONE) { 36 | if (xhrRequest.status || xhrRequest.response.byteLength) { 37 | if ( 38 | (xhrRequest.status == 401 || 39 | xhrRequest.status == 403 || 40 | xhrRequest.status == 404) && 41 | !includeRequestId 42 | ) { 43 | fetchResource(url, options, true).then(resolve).catch(reject); 44 | } else { 45 | resolve({ 46 | array: Array.from(new Uint8Array(xhrRequest.response)), 47 | headers: { 48 | 'content-type': xhrRequest.getResponseHeader('Content-Type'), 49 | }, 50 | status: xhrRequest.status, 51 | }); 52 | } 53 | } else { 54 | reject(); 55 | } 56 | } 57 | }; 58 | xhrRequest.open('GET', url, true); 59 | if (includeRequestId) { 60 | const randomId = String(Math.random()).substring(2); 61 | setReferrer(randomId, options.referrer); 62 | xhrRequest.setRequestHeader(REQUEST_ID_HEADER_NAME, randomId); 63 | } 64 | xhrRequest.send(); 65 | }); 66 | } 67 | 68 | function setReferrer(requestId, referrer) { 69 | referrers.set(requestId, referrer); 70 | } 71 | -------------------------------------------------------------------------------- /extension/addon/single-file/lib/single-file/frame-tree.js: -------------------------------------------------------------------------------- 1 | /* global browser */ 2 | 3 | browser.runtime.onMessage.addListener((message, sender) => { 4 | if ( 5 | message.method == 'singlefile.frameTree.initResponse' || 6 | message.method == 'singlefile.frameTree.ackInitRequest' 7 | ) { 8 | browser.tabs.sendMessage(sender.tab.id, message, { frameId: 0 
}); 9 | return Promise.resolve({}); 10 | } 11 | }); 12 | -------------------------------------------------------------------------------- /extension/addon/single-file/lib/single-file/lazy-timeout.js: -------------------------------------------------------------------------------- 1 | /* global browser, setTimeout, clearTimeout */ 2 | 3 | const timeouts = new Map(); 4 | 5 | browser.runtime.onMessage.addListener((message, sender) => { 6 | if (message.method == 'singlefile.lazyTimeout.setTimeout') { 7 | let tabTimeouts = timeouts.get(sender.tab.id); 8 | let frameTimeouts; 9 | if (tabTimeouts) { 10 | frameTimeouts = tabTimeouts.get(sender.frameId); 11 | if (frameTimeouts) { 12 | const previousTimeoutId = frameTimeouts.get(message.type); 13 | if (previousTimeoutId) { 14 | clearTimeout(previousTimeoutId); 15 | } 16 | } else { 17 | frameTimeouts = new Map(); 18 | } 19 | } 20 | const timeoutId = setTimeout(async () => { 21 | try { 22 | const tabTimeouts = timeouts.get(sender.tab.id); 23 | const frameTimeouts = tabTimeouts.get(sender.frameId); 24 | if (tabTimeouts && frameTimeouts) { 25 | deleteTimeout(frameTimeouts, message.type); 26 | } 27 | await browser.tabs.sendMessage(sender.tab.id, { 28 | method: 'singlefile.lazyTimeout.onTimeout', 29 | type: message.type, 30 | }); 31 | } catch (error) { 32 | // ignored 33 | } 34 | }, message.delay); 35 | if (!tabTimeouts) { 36 | tabTimeouts = new Map(); 37 | frameTimeouts = new Map(); 38 | tabTimeouts.set(sender.frameId, frameTimeouts); 39 | timeouts.set(sender.tab.id, tabTimeouts); 40 | } 41 | frameTimeouts.set(message.type, timeoutId); 42 | return Promise.resolve({}); 43 | } 44 | if (message.method == 'singlefile.lazyTimeout.clearTimeout') { 45 | let tabTimeouts = timeouts.get(sender.tab.id); 46 | if (tabTimeouts) { 47 | const frameTimeouts = tabTimeouts.get(sender.frameId); 48 | if (frameTimeouts) { 49 | const timeoutId = frameTimeouts.get(message.type); 50 | if (timeoutId) { 51 | clearTimeout(timeoutId); 52 | } 53 | deleteTimeout(frameTimeouts, message.type); 54 | } 55 | } 56 | return Promise.resolve({}); 57 | } 58 | }); 59 | 60 | browser.tabs.onRemoved.addListener(tabId => timeouts.delete(tabId)); 61 | 62 | function deleteTimeout(framesTimeouts, type) { 63 | framesTimeouts.delete(type); 64 | } 65 | -------------------------------------------------------------------------------- /extension/content_scripts/archive.js: -------------------------------------------------------------------------------- 1 | function listDocuments() { 2 | fetch('http://localhost:3000/list-docs', { 3 | method: 'GET', 4 | headers: { 5 | 'content-type': 'text/json', 6 | }, 7 | }) 8 | .then(response => response.json()) 9 | .then(data => console.log(data)) 10 | .catch(err => { 11 | console.log(err); 12 | }); 13 | } 14 | 15 | async function archiveCurrentTab(dataURI) { 16 | let form = new FormData(); 17 | form.append('url', window.location.href); 18 | form.append('title', document.title); 19 | form.append('scr', dataURI); 20 | const { content, title, filename } = await extension 21 | .getPageData({ 22 | removeHiddenElements: true, 23 | removeUnusedStyles: true, 24 | removeUnusedFonts: true, 25 | removeImports: true, 26 | removeScripts: true, 27 | compressHTML: true, 28 | removeAudioSrc: true, 29 | removeVideoSrc: true, 30 | removeAlternativeFonts: true, 31 | removeAlternativeMedias: true, 32 | removeAlternativeImages: true, 33 | groupDuplicateImages: true, 34 | }) 35 | .then(d => { 36 | form.append('onefile', d.content); 37 | fetch('http://localhost:3000/form', { 38 | method: 'POST', 
39 | body: form, 40 | }).then(res => 41 | browser.runtime.sendMessage({ 42 | command: 'response-received', 43 | data: res.status, 44 | }) 45 | ); 46 | }); 47 | console.log('FORM', [...form]); 48 | } 49 | 50 | browser.runtime.onMessage.addListener(message => { 51 | if (message.command === 'list-docs') { 52 | listDocuments(); 53 | } else if (message.command === 'archive-current-tab') { 54 | archiveCurrentTab(message.data); 55 | } 56 | }); 57 | -------------------------------------------------------------------------------- /extension/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "deptoolkit-extension", 3 | "version": "1.0.0", 4 | "scripts": { 5 | "test": "echo \"Error: no test specified\" && exit 1", 6 | "build": "webpack", 7 | "watch": "webpack -w" 8 | }, 9 | "devDependencies": { 10 | "webpack": "5.94.0", 11 | "webpack-webext-plugin": "github:rpl/webpack-webext-plugin" 12 | }, 13 | "dependencies": { 14 | "form-data": "^4.0.0" 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /extension/popup/popup.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Listen for clicks on the buttons, and send the appropriate message to 3 | * the content script in the page. 4 | */ 5 | const listenForClicks = () => { 6 | function reportError(error) { 7 | console.error(`Could not archive: ${error}`); 8 | } 9 | 10 | const listDocs = tabs => { 11 | browser.tabs.sendMessage(tabs[0].id, { 12 | command: 'list-docs', 13 | }); 14 | }; 15 | 16 | const archiveCurrentTab = tabs => { 17 | document.getElementById('archive-current-tab').classList.add('selected'); 18 | browser.tabs.captureVisibleTab().then(res => { 19 | let screenshotData = res; 20 | browser.tabs.sendMessage(tabs[0].id, { 21 | command: 'archive-current-tab', 22 | data: screenshotData, 23 | }); 24 | }); 25 | }; 26 | 27 | document.getElementById('list-docs').addEventListener('click', e => { 28 | browser.tabs 29 | .query({ active: true, currentWindow: true }) 30 | .then(listDocs) 31 | .catch(reportError); 32 | }); 33 | document 34 | .getElementById('archive-current-tab') 35 | .addEventListener('click', e => { 36 | browser.tabs 37 | .query({ active: true, currentWindow: true }) 38 | .then(archiveCurrentTab) 39 | .catch(reportError); 40 | }); 41 | }; 42 | 43 | /** 44 | * There was an error executing the script. 45 | * Display the popup's error message, and hide the normal UI. 46 | */ 47 | function reportExecuteScriptError(error) { 48 | document.querySelector('#popup-content').classList.add('hidden'); 49 | document.querySelector('#error-content').classList.remove('hidden'); 50 | console.error(`Failed to execute archiver content script: ${error.message}`); 51 | } 52 | 53 | /** 54 | * HTML element transitions from .selected to .archived 55 | */ 56 | function changeArchiveButtonState(btn) { 57 | btn.classList.remove('selected'); 58 | btn.classList.add('archived'); 59 | btn.innerHTML = '✅ Archived'; 60 | } 61 | 62 | /** 63 | * When the popup loads, inject a content script into the active tab, 64 | * and add a click handler. 65 | * If we couldn't inject the script, handle the error. 
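 * (The path '/content_scripts/index.js' is the webpack bundle of
 * content_scripts/archive.js: webpack.config.js emits '[name]/index.js'
 * into the addon/ directory.)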
66 | */ 67 | browser.tabs 68 | .executeScript({ file: '/content_scripts/index.js' }) 69 | .then(listenForClicks) 70 | .catch(reportExecuteScriptError); 71 | 72 | browser.runtime.onMessage.addListener(message => { 73 | /** 74 | * On receiving an OK response from the API, change the CSS 75 | * and content of the button to prevent duplicate archiving. 76 | * Note: simulate a 400ms job... so it looks like the tool is 77 | * "doing something" 78 | */ 79 | if (message.command === 'response-received') { 80 | if (message.data === 200) { 81 | setTimeout(() => { 82 | changeArchiveButtonState( 83 | document.getElementById('archive-current-tab') 84 | ); 85 | }, 400); 86 | } 87 | } 88 | }); 89 | -------------------------------------------------------------------------------- /extension/webpack.config.js: -------------------------------------------------------------------------------- 1 | const path = require('path'); 2 | const WebpackWebExt = require('webpack-webext-plugin'); 3 | 4 | module.exports = { 5 | entry: { 6 | content_scripts: './content_scripts/archive.js', 7 | popup: './popup/popup.js', 8 | }, 9 | output: { 10 | path: path.resolve(__dirname, 'addon'), 11 | filename: '[name]/index.js', 12 | }, 13 | plugins: [ 14 | new WebpackWebExt({ 15 | runOnce: false, 16 | argv: ['lint', '-s', 'addon/'], 17 | }), 18 | ], 19 | }; 20 | -------------------------------------------------------------------------------- /index.ts: -------------------------------------------------------------------------------- 1 | import chalk from 'chalk'; 2 | 3 | import app from './app'; 4 | 5 | const port = 3000; 6 | 7 | try { 8 | app.listen(port, (): void => { 9 | console.log(chalk.green(`Connected successfully on port ${port} 🚀`)); 10 | }); 11 | } catch (error) { 12 | console.error(chalk.red(`❌ Error occurred: ${(error as Error).message}`)); 13 | } 14 | -------------------------------------------------------------------------------- /jest.config.ts: -------------------------------------------------------------------------------- 1 | import type { Config } from '@jest/types'; 2 | 3 | const cfg: Config.InitialOptions = { 4 | transform: { 5 | '^.+\\.(ts|tsx)$': 'ts-jest' 6 | }, 7 | testEnvironment: 'node', 8 | } 9 | 10 | export default cfg; 11 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "deptoolkit-api", 3 | "author": "Basile Simon ", 4 | "contributors": [ 5 | { 6 | "name": "Niko Para", 7 | "email": "niko@niko.io" 8 | } 9 | ], 10 | "version": "0.11.0", 11 | "repository": { 12 | "type": "git", 13 | "url": "https://github.com/digitalevidencetoolkit/deptoolkit" 14 | }, 15 | "engines": { 16 | "node": "^16" 17 | }, 18 | "license": "gpl-3.0", 19 | "dependencies": { 20 | "amazon-qldb-driver-nodejs": "^2.2.0", 21 | "aws-sdk": "^2.1354.0", 22 | "body-parser": "^1.20.3", 23 | "chalk": "^4.1.2", 24 | "cors": "^2.8.5", 25 | "dotenv": "^8.2.0", 26 | "esm": "^3.2.25", 27 | "express": "^4.21.0", 28 | "formidable": "^1.2.2", 29 | "ion-js": "^4.2.1", 30 | "jsbi": "^3.1.6", 31 | "node-fetch": "^3.1.1", 32 | "sharp": "^0.32.6" 33 | }, 34 | "scripts": { 35 | "start": "ts-node-dev ./index.ts", 36 | "server": "ts-node-dev ./index.ts", 37 | "ui": "cd ui && npm run dev", 38 | "all": "npm-run-all --parallel server ui", 39 | "test": "jest", 40 | "testw": "jest --watch" 41 | }, 42 | "devDependencies": { 43 | "@jest/types": "^27.0.6", 44 | "@types/archiver": "^5.1.1", 45 | "@types/cors": "^2.8.12", 46 | 
"@types/express": "^4.17.13", 47 | "@types/formidable": "^1.2.3", 48 | "@types/jest": "^27.0.1", 49 | "@types/node": "^15.14.7", 50 | "@types/sharp": "^0.28.5", 51 | "@types/supertest": "^2.0.11", 52 | "archiver": "^5.3.0", 53 | "autoprefixer": "^10.3.1", 54 | "jest": "^27.0.6", 55 | "node-stream-zip": "^1.14.0", 56 | "npm-run-all": "^4.1.5", 57 | "sinon": "^11.1.2", 58 | "supertest": "^6.1.6", 59 | "ts-jest": "^27.0.5", 60 | "ts-mock-imports": "^1.3.7", 61 | "ts-node-dev": "^1.1.8", 62 | "typescript": "^4.3.5", 63 | "yup": "^0.32.9" 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /src/helpers.ts: -------------------------------------------------------------------------------- 1 | import { createHash } from 'crypto'; 2 | 3 | /** 4 | * Pretty prints a JSON var 5 | * @returns A nicely-formatted string 6 | */ 7 | export const pprint = (json: {}): string => JSON.stringify(json, null, 4); 8 | 9 | export const makeHash = (str: string | Buffer): string => { 10 | let hash = createHash('sha256'); 11 | hash.update(str); 12 | return hash.digest('hex'); 13 | }; 14 | 15 | /** 16 | * Changes the format of a base64 data-URI string to one we can work with 17 | * @param s the string, as given by the formidable library, or something else 18 | * @returns a string without the leading data:image prefix, and with any + characters replaced by spaces 19 | */ 20 | export const cleanupBase64 = (s: string): string => { 21 | let base64Data: string; 22 | base64Data = s.replace(/^data:image\/png;base64,/, ''); 23 | base64Data = base64Data.replace(/\+/g, ' '); 24 | return base64Data; 25 | }; 26 | -------------------------------------------------------------------------------- /src/ledger/index.ts: -------------------------------------------------------------------------------- 1 | import * as Record from '../types/Record'; 2 | import * as Annotations from '../types/Annotations'; 3 | import type { Annotation } from '../types/Annotations'; 4 | import * as QLDB from '../qldb'; 5 | import { dom } from 'ion-js'; 6 | 7 | import { Result } from 'amazon-qldb-driver-nodejs'; 8 | 9 | export const insertDoc = (r: Record.Record): Promise => 10 | Record.validate(r) 11 | .then(Record.toLedger) 12 | .then(dbItem => 13 | QLDB.insertDocuments(process.env.DOC_TABLE_NAME as string, dbItem) 14 | ) 15 | .catch(err => console.log(`${err.name}: ${err.errors}`)); 16 | 17 | export const listDocs = async (): Promise => { 18 | const list: Result | undefined = await QLDB.listDocuments( 19 | process.env.DOC_TABLE_NAME as string 20 | ); 21 | const result = list?.getResultList() || []; 22 | return result 23 | .map(e => Record.fromLedger(e)) 24 | .map((e: Record.Record): Record.FrontEndRecord => Record.toFrontend(e)); 25 | }; 26 | 27 | export const getDoc = async ( 28 | id: string, 29 | col: string 30 | ): Promise => { 31 | const list: Result | undefined = await QLDB.getOneDocument( 32 | id, 33 | col, 34 | process.env.DOC_TABLE_NAME as string 35 | ); 36 | const result = list?.getResultList() || []; 37 | if (result.length > 0) { 38 | return Record.fromLedger(result[0]); 39 | } else { 40 | return null; 41 | } 42 | }; 43 | 44 | export const listDocHistory = async (sku: string): Promise => { 45 | const list = await QLDB.queryHistoryOfDocument(sku); 46 | const result = list?.getResultList() || []; 47 | return result; 48 | }; 49 | 50 | export const updateDoc = async (sku: string, data: Annotation) => { 51 | Annotations.validate(data) 52 | .then(annotation => annotation.description) 53 | .then(description => 54 | QLDB.updateDocument( 
55 | process.env.DOC_TABLE_NAME as string, 56 | description, 57 | sku 58 | ) 59 | ) 60 | .catch(err => console.log(`${err.name}: ${err.errors}`)); 61 | }; 62 | -------------------------------------------------------------------------------- /src/qldb/index.ts: -------------------------------------------------------------------------------- 1 | import { 2 | QldbDriver, 3 | Result, 4 | TransactionExecutor, 5 | RetryConfig, 6 | } from 'amazon-qldb-driver-nodejs'; 7 | import { config } from 'dotenv'; 8 | import { ClientConfiguration } from 'aws-sdk/clients/qldbsession'; 9 | 10 | config(); 11 | const qldbDriver: QldbDriver = createQldbDriver( 12 | process.env.LEDGER_NAME as string 13 | ); 14 | 15 | /** 16 | * Create a driver for creating sessions. 17 | * @param ledgerName The name of the ledger to create the driver on. 18 | * @param serviceConfigurationOptions The configurations for the AWS SDK client that the driver uses. 19 | * @returns The driver for creating sessions. 20 | */ 21 | function createQldbDriver( 22 | ledgerName: string, 23 | serviceConfigurationOptions: ClientConfiguration = { 24 | region: process.env.AWS_REGION, 25 | } 26 | ): QldbDriver { 27 | const retryLimit = 4; 28 | const maxConcurrentTransactions = 10; 29 | //Use driver's default backoff function (and hence, no second parameter provided to RetryConfig) 30 | const retryConfig: RetryConfig = new RetryConfig(retryLimit); 31 | const qldbDriver: QldbDriver = new QldbDriver( 32 | ledgerName, 33 | serviceConfigurationOptions, 34 | 10, 35 | retryConfig 36 | ); 37 | return qldbDriver; 38 | } 39 | 40 | function getQldbDriver(): QldbDriver { 41 | return qldbDriver; 42 | } 43 | 44 | /** 45 | * Connect to a session for a given ledger using default settings. 46 | * @returns Promise which fulfills with void. 47 | */ 48 | const ConnectToLedger = async function (): Promise { 49 | try { 50 | console.log('Listing table names...'); 51 | const tableNames: string[] = await qldbDriver.getTableNames(); 52 | tableNames.forEach((tableName: string): void => { 53 | console.log(tableName); 54 | }); 55 | } catch (e) { 56 | console.log(`Unable to create session: ${e}`); 57 | } 58 | }; 59 | 60 | /** 61 | * List all documents in a given table. 62 | * @param tableName Name of the table to look up documents from. 63 | * @returns Promise which fulfills with a {@linkcode Result} object. 64 | */ 65 | export const listDocuments = async function ( 66 | tableName: string 67 | ): Promise { 68 | try { 69 | const qldbDriver: QldbDriver = getQldbDriver(); 70 | const statement: string = `SELECT * FROM ${tableName}`; 71 | let r = qldbDriver.executeLambda(async (txn: TransactionExecutor) => { 72 | let results = await txn.execute(statement); 73 | return results; 74 | }); 75 | return r; 76 | } catch (e) { 77 | console.log(`Unable to list documents in ${tableName}: ${e}`); 78 | } 79 | }; 80 | 81 | export const getOneDocument = async function ( 82 | id: string, 83 | column: string, 84 | tableName: string 85 | ): Promise { 86 | try { 87 | const qldbDriver: QldbDriver = getQldbDriver(); 88 | const statement: string = `SELECT * FROM ${tableName} WHERE ${column}='${id}'`; 89 | let r = qldbDriver.executeLambda(async (txn: TransactionExecutor) => { 90 | let results = await txn.execute(statement); 91 | return results; 92 | }); 93 | return r; 94 | } catch (e) { 95 | console.log(`Unable to list documents in ${tableName}: ${e}`); 96 | } 97 | }; 98 | 99 | /** 100 | * Insert documents into a table in a QLDB ledger. 101 | * @param tableName Name of the table to insert documents into. 
102 | * @param documents List of documents to insert. 103 | * @returns Promise which fulfills with with a {@linkcode Result} object. 104 | */ 105 | export const insertDocuments = async function ( 106 | tableName: string, 107 | documents: object 108 | ): Promise { 109 | try { 110 | const qldbDriver: QldbDriver = getQldbDriver(); 111 | const statement: string = `INSERT INTO ${tableName} ?`; 112 | let r = qldbDriver.executeLambda(async (txn: TransactionExecutor) => { 113 | let results = await txn.execute(statement, documents); 114 | return results; 115 | }); 116 | return r; 117 | } catch (e) { 118 | console.log(`Unable to insert documents: ${e}`); 119 | } 120 | }; 121 | 122 | /** 123 | * Get the document IDs from the given table. 124 | * @param txn The {@linkcode TransactionExecutor} for lambda execute. 125 | * @param tableName The table name to query. 126 | * @param field A field to query. 127 | * @param value The key of the given field. 128 | * @returns Promise which fulfills with the document ID as a string. 129 | */ 130 | async function getDocumentIdByField( 131 | txn: TransactionExecutor, 132 | tableName: string, 133 | field: string, 134 | value: string 135 | ): Promise { 136 | const query: string = `SELECT id FROM ${tableName} AS t BY id WHERE t.${field} = ?`; 137 | let documentId: string | undefined; 138 | await txn 139 | .execute(query, value) 140 | .then((result: Result) => { 141 | const resultList = result.getResultList(); 142 | if (resultList.length === 0) { 143 | throw new Error( 144 | `Unable to retrieve document ID using ${field}: ${value}.` 145 | ); 146 | } 147 | documentId = resultList[0].get('id')?.stringValue() || undefined; 148 | }) 149 | .catch((err: any) => { 150 | console.log(`Error getting documentId: ${err}`); 151 | }); 152 | return documentId; 153 | } 154 | 155 | /** 156 | * List the history of a particular document. 157 | * @param sku Unique identifier of the document. 158 | * @returns Promise which fulfills with a list of results 159 | */ 160 | export const queryHistoryOfDocument = async function ( 161 | sku: string 162 | ): Promise { 163 | const tableName = process.env.DOC_TABLE_NAME as string; 164 | try { 165 | const qldbDriver: QldbDriver = getQldbDriver(); 166 | const statement: string = `SELECT * from history (${tableName}) AS h WHERE h.metadata.id = ?`; 167 | 168 | let r = qldbDriver.executeLambda(async (txn: TransactionExecutor) => { 169 | const documentId: string | undefined = await getDocumentIdByField( 170 | txn, 171 | tableName, 172 | 'id', 173 | sku 174 | ); 175 | 176 | // @TODO handle results = undefined because sku can't be found 177 | // current solution doesn't feel like the correct way to handle errors 178 | if (documentId) { 179 | let results = await txn.execute(statement, documentId); 180 | return results; 181 | } 182 | }); 183 | return r; 184 | } catch (e) { 185 | console.log(`Unable to query history of document in ${tableName}`); 186 | } 187 | }; 188 | 189 | /** 190 | * Update a particular document's description field 191 | * @param tableName Name of the table to insert documents into. 
192 | * @param description String to add to the ledger 193 | * @param id Unique identifier of the document 194 | * @returns Promise which fulfills with a list of result(s) 195 | */ 196 | export const updateDocument = async function ( 197 | tableName: string, 198 | description: string, 199 | id: string 200 | ): Promise { 201 | try { 202 | const qldbDriver: QldbDriver = getQldbDriver(); 203 | const statement: string = `UPDATE ${tableName} AS r 204 | SET r.description = ? 205 | WHERE r.id = ?`; 206 | let r = qldbDriver.executeLambda(async (txn: TransactionExecutor) => { 207 | let results = await txn.execute(statement, description, id); 208 | return results; 209 | }); 210 | return r; 211 | } catch (e) { 212 | console.log(`Unable to update document: ${e}`); 213 | } 214 | }; 215 | -------------------------------------------------------------------------------- /src/s3/index.ts: -------------------------------------------------------------------------------- 1 | import { S3 } from 'aws-sdk'; 2 | import { Readable } from 'stream'; 3 | import { config } from 'dotenv'; 4 | 5 | config(); 6 | 7 | export const getFileInBucket = (id: string, bucket: string): Readable => { 8 | const s3 = new S3({ region: process.env.AWS_REGION }); 9 | const params = { Bucket: bucket as string, Key: id }; 10 | return s3.getObject(params).createReadStream(); 11 | }; 12 | 13 | export const writeFileInBucket = ( 14 | id: string, 15 | body: Buffer | string, 16 | bucket: string 17 | ) => { 18 | const s3 = new S3({ region: process.env.AWS_REGION }); 19 | const params = { Bucket: bucket, Body: body, Key: id }; 20 | return s3.upload(params, (err: Error, data: S3.ManagedUpload.SendData) => { 21 | if (err) { 22 | throw err; 23 | } 24 | console.log(`☁️ S3 upload OK`); 25 | }); 26 | }; 27 | -------------------------------------------------------------------------------- /src/s3/s3.test.ts: -------------------------------------------------------------------------------- 1 | // good test suite from AWS: 2 | // https://github.com/aws/aws-sdk-js-v3/blob/main/clients/client-s3/test/e2e/S3.ispec.ts 3 | 4 | import * as S3 from './index'; 5 | import { PassThrough } from 'stream'; 6 | 7 | describe('uploading files to S3', () => { 8 | it('should be able to upload an object', async () => { 9 | // 10 | }); 11 | }); 12 | 13 | describe('finding files on S3', () => { 14 | it('should find a known object', async () => { 15 | const key = 16 | 'a8eab0456eb979653a3c1ca77c37239100ef7300c9026db868ad9dc57f5aa580.png'; 17 | expect(S3.getFileInBucket(key, 'deptoolkit-public')).toBeInstanceOf( 18 | PassThrough 19 | ); 20 | }); 21 | it('should error on an unknown object', async () => { 22 | const key = 'foo.png'; 23 | const r = S3.getFileInBucket(key, 'deptoolkit-public'); 24 | r.on('error', e => expect(e).toBeInstanceOf(Error)); 25 | }); 26 | }); 27 | -------------------------------------------------------------------------------- /src/store/index.ts: -------------------------------------------------------------------------------- 1 | import archiver from 'archiver'; 2 | import { Response } from 'express'; 3 | import { config } from 'dotenv'; 4 | import * as fs from 'fs'; 5 | import * as fsp from 'fs/promises'; 6 | import * as path from 'path'; 7 | import { makeHash, pprint } from '../helpers'; 8 | import * as File from '../types/File'; 9 | import * as Bundle from '../types/Bundle'; 10 | import * as Record from '../types/Record'; 11 | import * as S3 from '../s3'; 12 | // import * as s3storage from s3storage 13 | 14 | // set up .env variables as environment 
variables 15 | config(); 16 | 17 | type WriteConfiguration = { 18 | type: 'local' | 'S3' | undefined; 19 | directory: string; 20 | bucket?: string; 21 | }; 22 | 23 | /** 24 | * Write the specified `data` in the specified `directory`, under the specified 25 | * `fileName`. 26 | * 27 | * If `directory` doesn't exist, it is created. If there is already a file 28 | * named `fileName` in that location, it is silently overwritten. 29 | * 30 | * @returns a Promise resolving once the write is done. 31 | */ 32 | const writeToDisk = async ( 33 | data: Buffer | string, 34 | directory: string, 35 | fileName: string 36 | ): Promise => { 37 | // create the directory if it does not yet exist 38 | await fsp.mkdir(directory, { recursive: true }); 39 | await fsp.writeFile(path.join(directory, fileName), data); 40 | }; 41 | 42 | /** 43 | * Write the specified `newFile` to disk or the cloud, according to the specified 44 | * `configuration`. 45 | * 46 | * If the same file is written twice, the old version is replaced silently. 47 | * 48 | * @param newFile object holding and describing the data to be written 49 | * @param configuration object describing how the new file should be written 50 | * @returns a promise for the resulting file, resolving once the write is 51 | * complete. 52 | **/ 53 | const writeOne = async ( 54 | newFile: File.NewFile, 55 | configuration: WriteConfiguration 56 | ): Promise => { 57 | const destination = sourceToFavour(); 58 | const { directory } = configuration; 59 | const { kind, data } = newFile; 60 | 61 | const hash = makeHash(data); 62 | const result: File.File = { kind, hash }; 63 | 64 | if (destination === 'directory') { 65 | await writeToDisk(data, directory, File.fileName(result)); 66 | } else if (destination === 'bucket') { 67 | await writeToDisk(data, directory, File.fileName(result)); 68 | await S3.writeFileInBucket( 69 | File.fileName(result), 70 | data, 71 | process.env.SOURCE_FILES_BUCKET as string 72 | ); 73 | } 74 | 75 | return result; 76 | }; 77 | 78 | /** 79 | * Writes to disk the data contained in the specified `newBundle`, following the 80 | * specified `configuration`. 81 | * 82 | * If the same new bundle is written twice (or part of a new bundle is 83 | * re-written as part of another new bundle), the relevant files are replaced 84 | * silently. 85 | * 86 | * @param newBundle a NewBundle, made of base64 string data 87 | * @param configuration object describing how to write the new bundle 88 | * @returns a promise for the resulting bundle, resolving once the write is 89 | * complete. 90 | **/ 91 | export const newBundle = ( 92 | newBundle: Bundle.NewBundle, 93 | configuration: WriteConfiguration 94 | ): Promise => { 95 | return Promise.all( 96 | newBundle.map(newFile => writeOne(newFile, configuration)) 97 | ); 98 | }; 99 | 100 | /** 101 | * Generates a string describing the specified Record `r`. 
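 * The string lists the export timestamp, the record's title and URL, and the
 * names of the two files included in the bundle (screenshot and one-file
 * archive); it throws if either of those files is missing from the bundle.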
102 | */ 103 | export const generateAboutString = (r: Record.Record): string => { 104 | const screenshotFile = r.bundle.find(e => e.kind === 'screenshot'); 105 | const oneFileFile = r.bundle.find(e => e.kind === 'one_file'); 106 | if (screenshotFile === undefined || oneFileFile === undefined) { 107 | throw new Error( 108 | `This record's bundle is malformed (missing either screenshot or oneFile): ${pprint( 109 | r 110 | )}` 111 | ); 112 | } 113 | 114 | const screenshot = File.fileName(screenshotFile); 115 | const one_file = File.fileName(oneFileFile); 116 | 117 | return `THE DIGITAL EVIDENCE PRESERVATION TOOLKIT 118 | ============ 119 | Working copy export generated on ${Date.now()} 120 | 121 | ${r.data.title} 122 | ${r.data.url} 123 | 124 | Files included: 125 | ${screenshot} 126 | ${one_file}`; 127 | }; 128 | 129 | /** 130 | * Builds a ZIP file from the specified Record `r`, in the specified 131 | * `outDirectory`. The produced file will use the record's bundle id (as produced 132 | * by `Bundle.id`) as its name, and will be located in the specified 133 | * `outDirectory`. The function will use the specified `bundleRootDirectory` as the root 134 | * where to read the files listed in `r`. 135 | * 136 | * The behaviour is unspecified unless: 137 | * - `r` specifies one 'one_file' and one 'screenshot' files 138 | * - the screenshot and one_file files exist and can be read in `bundleRootDirectory` 139 | * - `outDirectory` exists and is writable 140 | * 141 | * @returns a promise resolving once the ZIP file has been created, or rejecting 142 | * if an error happened. 143 | */ 144 | export const makeZip = ( 145 | r: Record.Record, 146 | bundleRootDirectory: string, 147 | outDirectory: string 148 | ): Promise => { 149 | const b = r.bundle; 150 | const zip = archiver('zip', { zlib: { level: 0 } }); 151 | const out = `${outDirectory}/${Bundle.id(b)}.zip`; 152 | 153 | return new Promise((resolve, reject) => { 154 | const screenshotFile = r.bundle.find(e => e.kind === 'screenshot'); 155 | const oneFileFile = r.bundle.find(e => e.kind === 'one_file'); 156 | if (screenshotFile === undefined || oneFileFile === undefined) { 157 | reject( 158 | new Error( 159 | `This record's bundle is malformed (missing either screenshot or oneFile): ${pprint( 160 | r 161 | )}` 162 | ) 163 | ); 164 | return; 165 | } 166 | // presently the ZIP file only includes the full screenshot and one-file 167 | // HTML archive. isolating them like so isn't the most elegant. 168 | // maybe replace with a function from `Bundle`? 169 | const screenshotName = File.fileName(screenshotFile); 170 | const one_fileName = File.fileName(oneFileFile); 171 | const sidecarTextFile = generateAboutString(r); 172 | 173 | const stream = fs.createWriteStream(out); 174 | stream.on('error', e => { 175 | console.error(e); 176 | reject(); 177 | }); 178 | stream.on('close', () => { 179 | console.log(`${zip.pointer()} bytes written`); 180 | resolve(); 181 | }); 182 | zip 183 | .append( 184 | fs 185 | .createReadStream(`${bundleRootDirectory}/${screenshotName}`) 186 | .on('error', reject), 187 | { 188 | name: screenshotName, 189 | } 190 | ) 191 | .append( 192 | fs 193 | .createReadStream(`${bundleRootDirectory}/${one_fileName}`) 194 | .on('error', reject), 195 | { 196 | name: one_fileName, 197 | } 198 | ) 199 | .append(sidecarTextFile, { name: `about-this-export.txt` }) 200 | .on('error', reject) 201 | .pipe(stream); 202 | zip.finalize(); 203 | }); 204 | }; 205 | 206 | /** 207 | * Handles how to access a file depending on its source, i.e. 
dispatch between 208 | * the file system or an S3 bucket (and hopefully more to come) 209 | * @returns a Response containing the file 210 | */ 211 | export const getFile = (id: string, res: Response) => { 212 | const source = sourceToFavour(); 213 | if (source === 'directory') { 214 | const outDir = path.join( 215 | __dirname, 216 | `./../../${process.env.SOURCE_FILES_DIRECTORY}` 217 | ); 218 | const options = { 219 | root: outDir, 220 | dotfiles: 'deny', 221 | }; 222 | res.sendFile(`${id}`, options); 223 | } else if (source === 'bucket') { 224 | const result = S3.getFileInBucket( 225 | id, 226 | process.env.SOURCE_FILES_BUCKET as string 227 | ); 228 | result.on('error', err => { 229 | const { name } = err; 230 | res.status(500).send(`Error getting file: S3 ${name}`); 231 | }); 232 | result.pipe(res); 233 | } 234 | }; 235 | 236 | /** 237 | * Handles dispatch between local and cloud resources. 238 | * If a bucket name is present in the config, then this 239 | * data source will be preferred over local directory. 240 | * @returns a string: 'bucket' or 'directory' 241 | */ 242 | export const sourceToFavour = (): 'bucket' | 'directory' | null => { 243 | const hasBucket = 244 | process.env.SOURCE_FILES_BUCKET && process.env.SOURCE_FILES_BUCKET != ''; 245 | const hasDirectory = 246 | process.env.SOURCE_FILES_DIRECTORY && 247 | process.env.SOURCE_FILES_DIRECTORY != ''; 248 | if (hasBucket === true) { 249 | return 'bucket'; 250 | } else if (hasDirectory === true) { 251 | return 'directory'; 252 | } else { 253 | return null; 254 | } 255 | }; 256 | 257 | /** 258 | * if cfg.s3 259 | * ... 260 | * if cfg.local 261 | * ... 262 | **/ 263 | -------------------------------------------------------------------------------- /src/store/store.test.ts: -------------------------------------------------------------------------------- 1 | import 'buffer'; 2 | import * as fs from 'fs'; 3 | import * as fsp from 'fs/promises'; 4 | import * as path from 'path'; 5 | import Zip from 'node-stream-zip'; 6 | import { config } from 'dotenv'; 7 | 8 | import * as Bundle from '../types/Bundle'; 9 | import * as Record from '../types/Record'; 10 | import * as File from '../types/File'; 11 | 12 | import * as Store from './index'; 13 | 14 | describe('write a file locally', () => { 15 | let outDir = ''; 16 | config(); 17 | const old_env = process.env; 18 | 19 | beforeEach(() => { 20 | //jest.resetModules(); 21 | process.env = { ...old_env }; 22 | process.env.SOURCE_FILES_DIRECTORY = 'out'; 23 | process.env.SOURCE_FILES_BUCKET = ''; 24 | outDir = fs.mkdtempSync('out-'); 25 | }); 26 | afterEach(() => { 27 | fs.rmSync(outDir, { recursive: true, force: true }); 28 | }); 29 | afterAll(() => { 30 | process.env = old_env; 31 | }); 32 | 33 | const validateFile = async ( 34 | { kind, hash }: File.File, 35 | expectedPath: string, 36 | expectedData: string | Buffer 37 | ): Promise => { 38 | const name = `${hash}.${kind === 'one_file' ? 
'html' : 'png'}`; 39 | const actual = await fsp.readFile(path.join(expectedPath, name)); 40 | const expected = Buffer.from(expectedData); 41 | expect(actual.compare(expected)).toBe(0); 42 | }; 43 | 44 | it('should write to disk the specified new bundle', async () => { 45 | const newBundle: Bundle.NewBundle = [ 46 | { kind: 'one_file', data: 'jeejtuut' }, 47 | { kind: 'screenshot', data: 'foobar' }, 48 | ]; 49 | 50 | const bundle = await Store.newBundle(newBundle, { 51 | type: 'local', 52 | directory: outDir, 53 | }); 54 | 55 | expect(bundle.length).toBe(2); 56 | await Promise.all( 57 | bundle.map((file, index) => 58 | validateFile(file, outDir, newBundle[index].data) 59 | ) 60 | ); 61 | }); 62 | 63 | it('should create the specified directory if it doesnt exist', async () => { 64 | const nestedDir = path.join(outDir, 'jeej-tuut'); 65 | 66 | const newBundle: Bundle.NewBundle = [ 67 | { kind: 'one_file', data: 'jeejtuut' }, 68 | { kind: 'screenshot', data: 'foobar' }, 69 | ]; 70 | 71 | const bundle = await Store.newBundle(newBundle, { 72 | type: 'local', 73 | directory: nestedDir, 74 | }); 75 | 76 | expect(bundle.length).toBe(2); 77 | await Promise.all( 78 | bundle.map((file, index) => 79 | validateFile(file, nestedDir, newBundle[index].data) 80 | ) 81 | ); 82 | }); 83 | 84 | it('should be robust to writing the same file twice', async () => { 85 | const newBundle1: Bundle.NewBundle = [ 86 | { kind: 'one_file', data: 'jeejtuut' }, 87 | { kind: 'screenshot', data: 'foobar' }, 88 | ]; 89 | const newBundle2: Bundle.NewBundle = [ 90 | { kind: 'one_file', data: 'jeejtuut' }, 91 | { kind: 'screenshot', data: 'souce' }, 92 | ]; 93 | 94 | const bundle1 = await Store.newBundle(newBundle1, { 95 | type: 'local', 96 | directory: outDir, 97 | }); 98 | const bundle2 = await Store.newBundle(newBundle2, { 99 | type: 'local', 100 | directory: outDir, 101 | }); 102 | const bundle3 = await Store.newBundle(newBundle1, { 103 | type: 'local', 104 | directory: outDir, 105 | }); 106 | 107 | expect(bundle1[0]).toEqual(bundle2[0]); 108 | expect(bundle1).toEqual(bundle3); 109 | }); 110 | }); 111 | 112 | describe('write a file to S3', () => { 113 | // or maybe test in src/s3 directly 114 | }); 115 | 116 | describe('generateAboutString', () => { 117 | it('should throw if the record is missing the screenshot or one_file', () => { 118 | const bundles: Bundle.Bundle[] = [ 119 | [], 120 | [{ hash: 'jeej', kind: 'one_file' }], 121 | [{ hash: 'tuut', kind: 'screenshot' }], 122 | ]; 123 | 124 | bundles.forEach(bundle => 125 | expect(() => 126 | Store.generateAboutString({ 127 | bundle, 128 | annotations: { 129 | description: '', 130 | }, 131 | data: { 132 | title: 'foo', 133 | url: 'http://jeej.tuut', 134 | }, 135 | }) 136 | ).toThrow() 137 | ); 138 | }); 139 | 140 | it('should generate a string describing the given bundle', () => { 141 | const title = 'Win big money in no time thanks to this one simple trick'; 142 | const url = 'https://www.youtube.com/watch?v=dQw4w9WgXcQ'; 143 | const oneFile: File.File = { hash: 'this-is-the-file', kind: 'one_file' }; 144 | const screenshot: File.File = { 145 | hash: 'pretty-picture', 146 | kind: 'screenshot', 147 | }; 148 | const ogNow = Date.now; 149 | Date.now = () => 42; 150 | 151 | const result = Store.generateAboutString({ 152 | bundle: [oneFile, screenshot], 153 | annotations: { 154 | description: '', 155 | }, 156 | data: { 157 | title, 158 | url, 159 | }, 160 | }); 161 | 162 | const expected = `THE DIGITAL EVIDENCE PRESERVATION TOOLKIT 163 | ============ 164 | Working copy export 
generated on 42 165 | 166 | ${title} 167 | ${url} 168 | 169 | Files included: 170 | ${File.fileName(screenshot)} 171 | ${File.fileName(oneFile)}`; 172 | 173 | expect(result).toEqual(expected); 174 | 175 | Date.now = ogNow; 176 | }); 177 | }); 178 | 179 | describe('makeZip', () => { 180 | let bundleRootDir = ''; 181 | let outDir = ''; 182 | 183 | beforeEach(() => { 184 | bundleRootDir = fs.mkdtempSync('in-'); 185 | outDir = fs.mkdtempSync('out-'); 186 | }); 187 | 188 | afterEach(() => { 189 | [bundleRootDir, outDir].forEach(p => 190 | fs.rmSync(p, { force: true, recursive: true }) 191 | ); 192 | }); 193 | 194 | it('should reject if the record is missing the screenshot or one_file', () => { 195 | const bundles: Bundle.Bundle[] = [ 196 | [], 197 | [{ hash: 'jeej', kind: 'one_file' }], 198 | [{ hash: 'tuut', kind: 'screenshot' }], 199 | ]; 200 | 201 | bundles.forEach(async bundle => { 202 | let zip = undefined; 203 | try { 204 | zip = await Store.makeZip( 205 | { 206 | bundle, 207 | annotations: { 208 | description: '', 209 | }, 210 | data: { 211 | title: 'foo', 212 | url: 'http://jeej.tuut', 213 | }, 214 | }, 215 | bundleRootDir, 216 | outDir 217 | ); 218 | } catch (e) {} 219 | expect(zip).toBeUndefined(); 220 | }); 221 | }); 222 | 223 | it('should produce a zip of the given record, in the correct location', async () => { 224 | const oneFile: File.File = { hash: 'this-is-the-file', kind: 'one_file' }; 225 | const screenshot: File.File = { 226 | hash: 'pretty-picture', 227 | kind: 'screenshot', 228 | }; 229 | const oneFileName = File.fileName(oneFile); 230 | const screenshotName = File.fileName(screenshot); 231 | 232 | fs.writeFileSync(path.join(bundleRootDir, oneFileName), 'jeej'); 233 | fs.writeFileSync(path.join(bundleRootDir, screenshotName), 'tuut'); 234 | 235 | const record: Record.Record = { 236 | data: { 237 | title: 'Non Stop Nyan Cat', 238 | url: 'http://www.nyan.cat/', 239 | }, 240 | annotations: { 241 | description: 'A cat farting an infinite rainbow', 242 | }, 243 | bundle: [oneFile, screenshot], 244 | }; 245 | 246 | await Store.makeZip(record, bundleRootDir, outDir); 247 | 248 | const zipPath = path.join(outDir, `${Bundle.id(record.bundle)}.zip`); 249 | 250 | // check for the zip's existence and contents 251 | // TODO: for this test to be complete, we should also check the files 252 | // contents themselves. 
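// A possible follow-up (sketch only, not wired in here): node-stream-zip's
// async API also exposes entryData(), so the archived bytes could be compared
// directly, e.g.
//   expect((await z.entryData(oneFileName)).toString()).toBe('jeej');
//   expect((await z.entryData(screenshotName)).toString()).toBe('tuut');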
253 | const z = new Zip.async({ file: zipPath }); 254 | const entries = await z.entries(); 255 | expect(entries).toEqual({ 256 | [oneFileName]: expect.any(Object), 257 | [screenshotName]: expect.any(Object), 258 | 'about-this-export.txt': expect.any(Object), 259 | }); 260 | z.close(); 261 | }); 262 | 263 | it('should return a rejected promise when a file is missing on disk', async () => { 264 | const oneFile: File.File = { hash: 'this-is-the-file', kind: 'one_file' }; 265 | const screenshot: File.File = { 266 | hash: 'pretty-picture', 267 | kind: 'screenshot', 268 | }; 269 | const screenshotName = File.fileName(screenshot); 270 | 271 | // simulate a missing file 272 | // fs.writeFileSync(path.join(bundleRootDir, oneFileName), 'jeej'); 273 | fs.writeFileSync(path.join(bundleRootDir, screenshotName), 'tuut'); 274 | 275 | const record: Record.Record = { 276 | data: { 277 | title: 'Non Stop Nyan Cat', 278 | url: 'http://www.nyan.cat/', 279 | }, 280 | annotations: { 281 | description: 'A cat farting an infinite rainbow', 282 | }, 283 | bundle: [oneFile, screenshot], 284 | }; 285 | 286 | await Store.makeZip(record, bundleRootDir, outDir) 287 | .then(() => { 288 | throw new Error('hello error, bad luck'); 289 | }) 290 | .catch(() => { 291 | // this is the expected behavior 292 | }); 293 | }); 294 | }); 295 | 296 | describe('sourceToFavour', () => { 297 | config(); 298 | const old_env = process.env; 299 | 300 | beforeEach(() => { 301 | jest.resetModules(); 302 | process.env = { ...old_env }; 303 | }); 304 | afterAll(() => { 305 | process.env = old_env; 306 | }); 307 | 308 | it('should return a value based on env', () => { 309 | process.env.SOURCE_FILES_DIRECTORY = 'out'; 310 | const source = Store.sourceToFavour(); 311 | expect(typeof source).toBe('string'); 312 | }); 313 | it('should be null if no config', () => { 314 | process.env.SOURCE_FILES_DIRECTORY = ''; 315 | process.env.SOURCE_FILES_BUCKET = ''; 316 | const source = Store.sourceToFavour(); 317 | expect(source).toBe(null); 318 | }); 319 | it('should favour bucket over local dir', () => { 320 | process.env.SOURCE_FILES_DIRECTORY = 'dir'; 321 | process.env.SOURCE_FILES_BUCKET = 'bucket'; 322 | const source = Store.sourceToFavour(); 323 | expect(source).toBe('bucket'); 324 | }); 325 | it('should be dir if bucket is empty', () => { 326 | process.env.SOURCE_FILES_DIRECTORY = 'dir'; 327 | process.env.SOURCE_FILES_BUCKET = ''; 328 | const source = Store.sourceToFavour(); 329 | expect(source).toBe('directory'); 330 | }); 331 | }); 332 | -------------------------------------------------------------------------------- /src/types/Annotations.ts: -------------------------------------------------------------------------------- 1 | import * as yup from 'yup'; 2 | 3 | /** 4 | * Annotations are user-created data _about_ the record, 5 | * most probably after the original archive through the UI 6 | */ 7 | export type Annotation = { description: string; [key: string]: any }; 8 | 9 | const DocSchema = yup 10 | .object() 11 | .shape({ 12 | description: yup.string(), 13 | }) 14 | .strict() 15 | .noUnknown(); 16 | 17 | export const validate = (obj: Annotation): Promise => 18 | DocSchema.validate(obj); 19 | -------------------------------------------------------------------------------- /src/types/Bundle.ts: -------------------------------------------------------------------------------- 1 | import { makeHash } from '../helpers'; 2 | import * as File from './File'; 3 | 4 | /** 5 | * A bundle is a list of files, which belong to a discrete 6 | * number of "kinds": 
screenshots, thumbnails, one-file downloads... 7 | */ 8 | export type Bundle = File.File[]; 9 | 10 | /** 11 | * list of File-like things with base64-encoded strings, 12 | * that should be stored but haven't been yet 13 | */ 14 | export type NewBundle = Array<File.NewFile>; 15 | 16 | /** 17 | * Ensure the bundle (a list of files) is _always_ sorted 18 | * the same way to preserve identity. 19 | * @param b a Bundle 20 | * @returns a Bundle, sorted alphabetically by its ID 21 | */ 22 | const sortedBundle = (b: Bundle): Bundle => { 23 | const sortedArray = [...b].sort((a, b) => 24 | File.id(a).localeCompare(File.id(b)) 25 | ); 26 | return sortedArray; 27 | }; 28 | 29 | /** 30 | * A bundle's ID is the hash of its composing parts' IDs (or hashes), joined in sorted order 31 | * @param b a Bundle 32 | * @returns a string 33 | */ 34 | // @TODO: confirm this is accurate 35 | export const id = (b: Bundle): string => 36 | makeHash(sortedBundle(b).map(File.id).join('')); 37 | -------------------------------------------------------------------------------- /src/types/Data.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Data saved from the archived webpage 3 | **/ 4 | export type Data = { title: string; url: string }; 5 | -------------------------------------------------------------------------------- /src/types/File.test.ts: -------------------------------------------------------------------------------- 1 | import * as File from './File'; 2 | 3 | describe('id', () => { 4 | it('should return the file hash', () => { 5 | const hash = 'I should have been a pair of ragged claws'; 6 | expect(File.id({ kind: 'one_file', hash })).toBe(hash); 7 | }); 8 | }); 9 | 10 | describe('fileName', () => { 11 | it('should generate the file name by adding the correct extension to the hash', () => { 12 | const cases: Array<File.File & { expected: string }> = [ 13 | { hash: 'The gate is straight', kind: 'one_file', expected: 'html' }, 14 | { hash: 'Deep and wide', kind: 'screenshot', expected: 'png' }, 15 | { 16 | hash: 'Break on through to the other side', 17 | kind: 'screenshot_thumbnail', 18 | expected: 'png', 19 | }, 20 | ]; 21 | 22 | cases.forEach(c => { 23 | expect(File.fileName(c)).toBe(`${c.hash}.${c.expected}`); 24 | }); 25 | }); 26 | }); 27 | -------------------------------------------------------------------------------- /src/types/File.ts: -------------------------------------------------------------------------------- 1 | type Hash = string; 2 | 3 | export type Process = 'screenshot' | 'one_file' | 'screenshot_thumbnail'; 4 | 5 | export type File = { kind: Process; hash: Hash }; 6 | 7 | export type NewFile = { kind: Process; data: Buffer | string }; 8 | 9 | /** 10 | * A File's ID is its hash 11 | * @param a a File 12 | * @returns string 13 | */ 14 | export const id = (a: File) => a.hash; 15 | 16 | /** 17 | * Generates a complete filename from a given file. 18 | * @returns A complete filename, with the correct extension according to the 19 | * file's kind. 20 | */ 21 | export const fileName: (f: File) => string = ({ kind, hash }) => 22 | `${hash}.${kind === 'one_file' ? 
'html' : 'png'}`; 23 | -------------------------------------------------------------------------------- /src/types/Record.ts: -------------------------------------------------------------------------------- 1 | import * as yup from 'yup'; 2 | import * as Annotations from './Annotations'; 3 | import * as Data from './Data'; 4 | import * as Bundle from './Bundle'; 5 | 6 | export type Record = { 7 | bundle: Bundle.Bundle; 8 | annotations: Annotations.Annotation; 9 | data: Data.Data; 10 | }; 11 | 12 | export type FrontEndRecord = { 13 | title: string; 14 | sku: string; 15 | url: string; 16 | thumb_hash?: string; 17 | screenshot_hash?: string; 18 | one_file_hash?: string; 19 | description?: string; 20 | }; 21 | 22 | const DocSchema = yup 23 | .object() 24 | .shape({ 25 | bundle: yup 26 | .array() 27 | .of(yup.object().shape({ kind: yup.string(), hash: yup.string() })), 28 | annotations: yup.object().shape({ description: yup.string() }), 29 | data: yup.object().shape({ title: yup.string(), url: yup.string() }), 30 | }) 31 | .strict() 32 | .noUnknown(); 33 | 34 | export const validate = (r: Record): Promise<Record> => DocSchema.validate(r); 35 | 36 | type ArbitraryObject = { [key: string]: any }; 37 | 38 | /** 39 | * A Record's ID is the same ID as its bundle 40 | * @param r a Record 41 | * @returns string 42 | */ 43 | export const id = (r: Record) => Bundle.id(r.bundle); 44 | 45 | /** 46 | * Converts a Record data structure ahead of sending to the ledger 47 | * @param r a Record 48 | * @returns an un-nested Record structure 49 | */ 50 | export const toLedger = (r: Record) => ({ 51 | id: id(r), 52 | ...r.data, 53 | ...r.annotations, 54 | /** 55 | * [{kind: 'a', hash: 'aaa'}, {kind: 'b', hash: 'bbb'}] 56 | * --> { a: 'aaa', b: 'bbb' } 57 | */ 58 | ...r.bundle.reduce((a, { kind, hash }) => ({ ...a, [kind]: hash }), {}), 59 | }); 60 | 61 | /** 62 | * Converts a ledger data structure to a Record one 63 | * @param o an object from the ledger 64 | * @returns a Record 65 | */ 66 | export const fromLedger = (o: ArbitraryObject): Record => { 67 | return { 68 | data: { url: o?.url, title: o?.title }, 69 | annotations: { description: o?.description }, 70 | bundle: [ 71 | { kind: 'screenshot' as const, hash: o?.screenshot }, 72 | { 73 | kind: 'screenshot_thumbnail' as const, 74 | hash: o?.screenshot_thumbnail, 75 | }, 76 | { kind: 'one_file' as const, hash: o?.one_file }, 77 | ], 78 | }; 79 | 80 | }; 81 | 82 | /** 83 | * Converts a Record data structure ahead of rendering by the frontend 84 | * @param r a Record 85 | * @returns a FrontEndRecord 86 | */ 87 | export const toFrontend = (r: Record): FrontEndRecord => ({ 88 | title: r.data.title, 89 | sku: id(r), 90 | url: r.data.url, 91 | thumb_hash: r.bundle.find(f => f.kind === 'screenshot_thumbnail')?.hash, 92 | screenshot_hash: r.bundle.find(f => f.kind === 'screenshot')?.hash, 93 | one_file_hash: r.bundle.find(f => f.kind === 'one_file')?.hash, 94 | description: r.annotations?.description, 95 | }); 96 | -------------------------------------------------------------------------------- /src/verify/index.ts: -------------------------------------------------------------------------------- 1 | import { makeHash } from '../helpers'; 2 | import * as Record from '../types/Record'; 3 | import * as Ledger from '../ledger'; 4 | 5 | /** 6 | * Verify whether or not the specified `file` is present in the Ledger. 7 | * 8 | * @return a Promise resolving to: 9 | * - `null` if the file isn't present, or 10 | * - the corresponding record, converted to
Frontend format, otherwise. 11 | **/ 12 | export const verifyFile = async ( 13 | file: Buffer 14 | ): Promise<Record.FrontEndRecord | null> => { 15 | const hash = makeHash(file); 16 | const record = await Ledger.getDoc(hash, 'screenshot'); 17 | return record === null ? record : Record.toFrontend(record); 18 | }; 19 | -------------------------------------------------------------------------------- /src/verify/verify.test.ts: -------------------------------------------------------------------------------- 1 | 2 | import { Buffer } from 'buffer'; 3 | import { ImportMock } from 'ts-mock-imports'; 4 | import * as Ledger from '../ledger'; 5 | import * as Record from '../types/Record'; 6 | 7 | import * as Verify from './index'; 8 | 9 | describe('verify', () => { 10 | const buffer = Buffer.from('foo-bar'); 11 | 12 | afterEach(() => { 13 | ImportMock.restore(); 14 | }); 15 | 16 | it('should return null when the file could not be found on the ledger', async () => { 17 | ImportMock.mockFunction(Ledger, 'getDoc', Promise.resolve(null)); 18 | 19 | const result = await Verify.verifyFile(buffer); 20 | 21 | expect(result).toBeNull(); 22 | }); 23 | 24 | it('should return the corresponding record when the file is found', async () => { 25 | const foundRecord: Record.Record = { 26 | data: { 27 | title: 'Async/Await - The modern Javascript tutorial', 28 | url: 'https://javascript.info/async-await', 29 | }, 30 | annotations: { 31 | description: 'Learn this one weird trick', 32 | }, 33 | bundle: [], 34 | }; 35 | 36 | ImportMock.mockFunction(Ledger, 'getDoc', Promise.resolve(foundRecord)); 37 | 38 | const result = await Verify.verifyFile(buffer); 39 | 40 | expect(result).toEqual(Record.toFrontend(foundRecord)); 41 | }); 42 | }); 43 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "es5", 4 | "module": "commonjs", 5 | "strict": true, 6 | "esModuleInterop": true, 7 | "skipLibCheck": true, 8 | "forceConsistentCasingInFileNames": true 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /ui/.dockerignore: -------------------------------------------------------------------------------- 1 | node_modules/ 2 | out/ -------------------------------------------------------------------------------- /ui/.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | node_modules 3 | /build 4 | /.svelte-kit 5 | /package 6 | -------------------------------------------------------------------------------- /ui/.node-version: -------------------------------------------------------------------------------- 1 | 16.13.0 2 | -------------------------------------------------------------------------------- /ui/.npmrc: -------------------------------------------------------------------------------- 1 | engine-strict=true 2 | -------------------------------------------------------------------------------- /ui/.prettierrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "singleQuote": true, 3 | "arrowParens": "avoid", 4 | "jsxSingleQuote": true, 5 | "svelteSortOrder": "options-scripts-styles-markup" 6 | } 7 | -------------------------------------------------------------------------------- /ui/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM
node:16.13.0-alpine3.14 as builder 2 | LABEL version="0.1" 3 | LABEL description="GUI from the QLDB ledger" 4 | 5 | WORKDIR /app 6 | COPY . . 7 | RUN npm install 8 | RUN node node_modules/esbuild/install.js 9 | RUN npm run build 10 | 11 | FROM nginx:alpine 12 | COPY --from=builder /app/out /assets 13 | 14 | COPY ./default.conf /etc/nginx/conf.d/default.conf -------------------------------------------------------------------------------- /ui/default.conf: -------------------------------------------------------------------------------- 1 | upstream endpoint { 2 | server endpoint:3000; 3 | } 4 | 5 | server { 6 | listen 80; 7 | client_max_body_size 64M; 8 | client_body_buffer_size 64M; 9 | 10 | location / { 11 | root /assets; 12 | try_files $uri $uri/ /index.html; 13 | } 14 | 15 | location /api { 16 | rewrite /api/(.*) /$1 break; 17 | proxy_pass http://endpoint; 18 | proxy_buffering off; 19 | proxy_request_buffering off; 20 | } 21 | } -------------------------------------------------------------------------------- /ui/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "deptoolkit-ui", 3 | "version": "2.0.0", 4 | "engines": { 5 | "node": "^16" 6 | }, 7 | "scripts": { 8 | "dev": "svelte-kit dev --port 8000", 9 | "build": "svelte-kit build", 10 | "preview": "svelte-kit preview", 11 | "check": "svelte-check --tsconfig ./tsconfig.json", 12 | "check:watch": "svelte-check --tsconfig ./tsconfig.json --watch" 13 | }, 14 | "devDependencies": { 15 | "@sveltejs/adapter-node": "^1.0.0-next.67", 16 | "@sveltejs/adapter-static": "^1.0.0-next.26", 17 | "@sveltejs/kit": "^1.30.4", 18 | "@types/cookie": "^0.4.0", 19 | "attractions": "^3.3.0", 20 | "postcss": "^8.4.31", 21 | "prettier": "~2.2.1", 22 | "prettier-plugin-svelte": "^2.2.0", 23 | "sass": "^1.37.5", 24 | "svelte": "^4.2.19", 25 | "svelte-check": "^2.0.0", 26 | "svelte-feather-icons": "^3.5.0", 27 | "svelte-preprocess": "^4.7.4", 28 | "tslib": "^2.0.0", 29 | "typescript": "^4.0.0" 30 | }, 31 | "type": "module", 32 | "dependencies": { 33 | "@fontsource/fira-mono": "^4.2.2", 34 | "@fontsource/roboto": "^4.5.0", 35 | "@lukeed/uuid": "^2.0.0", 36 | "@types/d3-scale": "^4.0.1", 37 | "cookie": "^0.4.1", 38 | "d3-scale": "^4.0.0", 39 | "d3-time": "^3.0.0", 40 | "d3-time-format": "^4.0.0" 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /ui/src/app.css: -------------------------------------------------------------------------------- 1 | @import '@fontsource/fira-mono'; 2 | @import '@fontsource/roboto'; 3 | 4 | :root { 5 | font-family: Roboto, Arial, -apple-system, BlinkMacSystemFont, 'Segoe UI', 6 | Roboto, Oxygen, Ubuntu, Cantarell, 'Open Sans', 'Helvetica Neue', sans-serif; 7 | --font-mono: 'Fira Mono', monospace; 8 | --pure-white: #ffffff; 9 | --primary-color: #fff; 10 | --secondary-color: #d0dde9; 11 | --tertiary-color: #edf0f8; 12 | --accent-color: #b10dc9; 13 | --muted-grey: #ddd; 14 | --heading-color: rgba(0, 0, 0, 0.7); 15 | --text-color: #444444; 16 | --background-without-opacity: rgba(255, 255, 255, 0.7); 17 | --column-width: 42rem; 18 | --column-margin-top: 4rem; 19 | } 20 | 21 | body { 22 | min-height: 100vh; 23 | margin: 0; 24 | background-color: var(--primary-color); 25 | -moz-osx-font-smoothing: grayscale; 26 | text-rendering: optimizeLegibility; 27 | } 28 | 29 | body::before { 30 | content: ''; 31 | width: 80vw; 32 | height: 100vh; 33 | position: absolute; 34 | top: 0; 35 | left: 10vw; 36 | z-index: -1; 37 | opacity: 0.05; 38 | } 39 
| 40 | #svelte { 41 | min-height: 100vh; 42 | display: flex; 43 | flex-direction: column; 44 | } 45 | 46 | h1 { 47 | font-weight: 400; 48 | color: var(--accent-color); 49 | } 50 | h2, 51 | p { 52 | font-weight: 400; 53 | color: var(--heading-color); 54 | } 55 | 56 | p { 57 | line-height: 1; 58 | } 59 | 60 | a { 61 | color: var(--accent-color); 62 | text-decoration: none; 63 | } 64 | 65 | a:hover { 66 | text-decoration: underline; 67 | } 68 | 69 | h1 { 70 | font-size: 2rem; 71 | text-align: center; 72 | } 73 | 74 | h2 { 75 | font-size: 1rem; 76 | } 77 | 78 | pre { 79 | font-size: 16px; 80 | font-family: var(--font-mono); 81 | padding: 0.1em; 82 | overflow-x: auto; 83 | color: var(--text-color); 84 | } 85 | 86 | input, 87 | button { 88 | font-size: inherit; 89 | font-family: inherit; 90 | } 91 | 92 | button:focus:not(:focus-visible) { 93 | outline: none; 94 | } 95 | 96 | @media (min-width: 720px) { 97 | h1 { 98 | font-size: 2.4rem; 99 | } 100 | } 101 | -------------------------------------------------------------------------------- /ui/src/app.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | %svelte.head% 9 | 10 | 11 |
%svelte.body%
12 | 13 | 14 | -------------------------------------------------------------------------------- /ui/src/global.d.ts: -------------------------------------------------------------------------------- 1 | /// 2 | -------------------------------------------------------------------------------- /ui/src/hooks.ts: -------------------------------------------------------------------------------- 1 | import type { Handle } from '@sveltejs/kit'; 2 | 3 | export const handle: Handle = async ({ event, resolve }) => { 4 | const response = await resolve(event); 5 | return response; 6 | }; 7 | -------------------------------------------------------------------------------- /ui/src/lib/Ledger/EditingPanel.svelte: -------------------------------------------------------------------------------- 1 | 25 | 26 | 48 | 49 |
54 |
55 | 56 | 57 |
58 |
59 | 60 |
61 |
62 | -------------------------------------------------------------------------------- /ui/src/lib/Ledger/EntryMetadata.svelte: -------------------------------------------------------------------------------- 1 | 21 | 22 | 44 | 45 |

{title.length > 100 ? title_short : title}

46 |
 {pretty_domain.length > 18 ? pretty_domain_short : pretty_domain}
49 |
50 | 51 |
52 |      {shortHash(sku)}
53 | 54 |
55 |      {shortHash(screenshot_hash)}
56 |   
57 | {#if entry.one_file_hash} 58 | 59 |
60 |        {shortHash(one_file_hash)}
61 |     
62 | {/if} 63 |
64 | -------------------------------------------------------------------------------- /ui/src/lib/Ledger/EntryThumbnail.svelte: -------------------------------------------------------------------------------- 1 | 5 | 6 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /ui/src/lib/Ledger/History.svelte: -------------------------------------------------------------------------------- 1 | 24 | 25 | 48 | 49 | 50 | 51 | {#each history as item, i} 52 | 53 | {/each} 54 | 55 | {#each ticks as tick, i} 56 | 57 | {format(tick)} 58 | {/each} 59 | 60 | 61 | -------------------------------------------------------------------------------- /ui/src/lib/Ledger/LedgerEntry.svelte: -------------------------------------------------------------------------------- 1 | 29 | 30 | 75 | 76 | 77 |
81 | 82 |
83 | 84 |
85 | 86 | 121 |
122 | -------------------------------------------------------------------------------- /ui/src/lib/Ledger/index.ts: -------------------------------------------------------------------------------- 1 | export { default as LedgerEntryComponent } from './LedgerEntry.svelte'; 2 | import { ledgerData } from '../stores'; 3 | 4 | export type LedgerEntry = { 5 | title: string; 6 | url: string; 7 | sku: string; 8 | screenshot_hash?: string; 9 | thumb_hash: string; 10 | one_file_hash?: string; 11 | history?: QLDBHistory; 12 | description?: string; 13 | }; 14 | 15 | export type OriginalTx = { 16 | originalTxDate: string; 17 | originalTxTime: string; 18 | }; 19 | 20 | export type QLDBHistory = QLDBHistoryItem[]; 21 | 22 | type QLDBHistoryItem = { 23 | blockAddress: {}; 24 | data: {}; 25 | metadata: { txTime: string }; 26 | }; 27 | 28 | // @TODO: error handling! 29 | export async function fetchData(): Promise<LedgerEntry[]> { 30 | const res = await fetch('/api/list-docs'); 31 | const data = await res.json(); 32 | return data; 33 | } 34 | 35 | /** 36 | * Handle the convoluted querying of a record's history 37 | * @param id string - the record's ID 38 | * @returns a promise of a history (an array of revisions) 39 | */ 40 | // @TODO: error handling! 41 | async function fetchItemHistory(id: string): Promise<QLDBHistory> { 42 | const res = await fetch(`/api/history/${id}`); 43 | const data = await res.json(); 44 | return data; 45 | } 46 | 47 | /** 48 | * Show a record's history in the UI: fetch this history 49 | * and add it to the `ledgerData` store 50 | * @param entry object - the record as represented in QLDB 51 | */ 52 | export async function addHistoryTo(entry: LedgerEntry) { 53 | const { sku } = entry; 54 | const itemHistoryData = await fetchItemHistory(sku); 55 | 56 | ledgerData.update(async (d: Promise<LedgerEntry[]>) => { 57 | let newStore = await d.then(data => { 58 | const itemToUpdate = data.find(e => e.sku === sku); 59 | const updatedItem = { ...itemToUpdate, history: itemHistoryData }; 60 | 61 | return data.map((e: LedgerEntry) => { 62 | if (e.sku === sku) { 63 | return updatedItem; 64 | } else { 65 | return e; 66 | } 67 | }); 68 | }); 69 | return newStore; 70 | }); 71 | } 72 | 73 | export const getTXDateFromBlock = (b: QLDBHistoryItem): Date => 74 | new Date(b.metadata.txTime); 75 | 76 | export const getOriginalTX = (h: QLDBHistory): OriginalTx => { 77 | const originalTx = h[0]; 78 | const originalTxDate = getTXDateFromBlock(originalTx); 79 | return { 80 | originalTxDate: originalTxDate.toDateString(), 81 | originalTxTime: originalTxDate.toLocaleTimeString(), 82 | }; 83 | }; 84 | 85 | /** 86 | * Sends the edits to a document to the API 87 | * @param thing a FormData from the form popping up for each document 88 | * @param id the ID of the document to edit 89 | **/ 90 | // @TODO: make this function return a fulfilling or rejecting promise 91 | export async function postDocumentRevision(thing: FormData, id: string) { 92 | const res = await fetch(`/api/edit-description/${id}`, { 93 | method: 'POST', 94 | body: thing, 95 | }); 96 | 97 | // @TODO: implement store.update() to avoid a full page refresh 98 | if (res.ok === true) { 99 | ledgerData.set(fetchData()); 100 | } 101 | } 102 | 103 | /** 104 | * Sends a FormData containing a single File to the API 105 | * @param payload a FormData containing a File 106 | * @returns a promise of a response 107 | */ 108 | export async function verifyFile(payload: FormData): Promise<Response> { 109 | return await fetch(`/api/verify`, { 110 | method: 'POST', 111 | body: payload, 112 | }); 113 | } 114 | 115 |
export async function requestWorkingCopy(sku: string) { 116 | downloadAFile(`/api/export-copy/${sku}.zip`, `${sku}.zip`); 117 | } 118 | 119 | const downloadAFile = (fileUrl: string, fileName: string) => { 120 | const a = document.createElement('a'); 121 | a.href = fileUrl; 122 | a.setAttribute('download', fileName); 123 | a.click(); 124 | }; 125 | -------------------------------------------------------------------------------- /ui/src/lib/header/Header.svelte: -------------------------------------------------------------------------------- 1 | 5 | 6 |
7 |
8 | 9 | DigitalEvidenceToolkit 10 | 11 |
12 | 13 | 32 | 33 |
34 | 35 |
36 |
37 | 38 | 128 | -------------------------------------------------------------------------------- /ui/src/lib/header/logo.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | -------------------------------------------------------------------------------- /ui/src/lib/helpers.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * shorten a string to a desired length 3 | * @param str a string, ideally too long to your taste 4 | * @param desiredLength a number, the length to which to cut the string 5 | **/ 6 | export const shortenStringToLength = ( 7 | str: string, 8 | desiredLength: number 9 | ): string => str.substring(0, desiredLength - 1); 10 | 11 | /** 12 | * from https://stackoverflow.com/questions/34818020/javascript-regex-url-extract-domain-only/34818545 13 | * @param url a string representing a URL 14 | * @return the domain 15 | **/ 16 | export const domainFromUrl = (url: string): string => { 17 | let result: string; 18 | let match: Array<string>; 19 | if ( 20 | (match = url.match( 21 | /^(?:https?:\/\/)?(?:[^@\n]+@)?(?:www\.)?([^:\/\n\?\=]+)/im 22 | )) 23 | ) { 24 | result = match[1]; 25 | if ((match = result.match(/^[^\.]+\.(.+\..+)$/))) { 26 | result = match[1]; 27 | } 28 | } 29 | return result; 30 | }; 31 | 32 | /** 33 | * shorten a sha256 string 34 | * @param h string, sha256 35 | **/ 36 | export const shortHash = (h: string): string => h.substr(0, 6); 37 | 38 | export const putFileinFormData = (f: File): Promise<FormData> => { 39 | return new Promise(resolve => { 40 | const form = new FormData(); 41 | form.append(f.name, f, f.name); 42 | resolve(form); 43 | }); 44 | }; 45 | 46 | export const wait = (n: number) => 47 | new Promise(resolve => setTimeout(resolve, n)); 48 | -------------------------------------------------------------------------------- /ui/src/lib/stores.ts: -------------------------------------------------------------------------------- 1 | import { writable } from 'svelte/store'; 2 | import type { Writable } from 'svelte/store'; 3 | import type { LedgerEntry } from './Ledger/index'; 4 | 5 | export const ledgerData: Writable<Promise<LedgerEntry[]>> = writable(null); 6 | -------------------------------------------------------------------------------- /ui/src/lib/types.d.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Can be made globally available by placing this 3 | * inside `global.d.ts` and removing `export` keyword 4 | */ 5 | export interface Locals { 6 | userid: string; 7 | } 8 | -------------------------------------------------------------------------------- /ui/src/routes/__layout.svelte: -------------------------------------------------------------------------------- 1 | 5 | 6 | 32 | 33 |
34 | 35 |
36 | 37 |
38 | 39 |
40 | 43 |
44 | -------------------------------------------------------------------------------- /ui/src/routes/index.svelte: -------------------------------------------------------------------------------- 1 | 6 | 7 | 12 | 13 | 95 | 96 | 97 | Home 98 | 102 | 103 | 104 |
105 |
106 |

The Digital Evidence Preservation Toolkit

107 |

108 | An archiving and annotation tool demonstrating chain of custody in view of 109 | international prosecutions 110 |

111 |
112 | 113 |
114 | 115 | 129 | 130 | 147 |
148 | -------------------------------------------------------------------------------- /ui/src/routes/library.svelte: -------------------------------------------------------------------------------- 1 | 10 | 11 | 30 | 31 | 32 | Library 33 | 37 | 38 | 39 |
40 |

Library

41 |
42 | {#await $ledgerData} 43 |
44 | 45 | 46 |
47 | {:then data} 48 | {#each data as item, i} 49 | 50 | {/each} 51 | {:catch error} 52 |

{error.message}

53 | {/await} 54 |
55 | -------------------------------------------------------------------------------- /ui/src/routes/verify.svelte: -------------------------------------------------------------------------------- 1 | 27 | 28 | 60 | 61 | 62 | Verify 63 | 67 | 68 | 69 |
70 |

Verifying an archive

71 | 72 |

73 | Content dragged-and-dropped into this page will be cross-checked against the 74 | database and will be sent over the internet to the Toolkit. 75 |

76 |

77 | The SHA256 signatures of the items to cross-reference are generated upon 78 | reception server-side and looked up in the ledger.

80 |

Matches will be surfaced below. Non-matches won't appear.

81 | 82 |
83 | 84 |
85 | 86 | {#if uploads.length > 0} 87 |
88 | 89 |
90 | {/if} 91 | 92 | 93 | {#if matches.length > 0} 94 |
95 | 96 |

Positive database matches:

97 | {#each matches as item, i} 98 | 99 | {/each} 100 | {/if} 101 |
102 | -------------------------------------------------------------------------------- /ui/static/favicon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/digitalevidencetoolkit/deptoolkit/348a9ed840a2771e3d08b83d122acdbc3cb9667c/ui/static/favicon.png -------------------------------------------------------------------------------- /ui/static/robots.txt: -------------------------------------------------------------------------------- 1 | # https://www.robotstxt.org/robotstxt.html 2 | User-agent: * 3 | Disallow: 4 | -------------------------------------------------------------------------------- /ui/svelte.config.js: -------------------------------------------------------------------------------- 1 | import preprocess from 'svelte-preprocess'; 2 | import adapter from '@sveltejs/adapter-static'; 3 | 4 | /** @type {import('@sveltejs/kit').Config} */ 5 | const config = { 6 | preprocess: preprocess(), 7 | 8 | kit: { 9 | target: '#svelte', 10 | adapter: adapter({ 11 | pages: 'out/', 12 | assets: 'out/', 13 | }), 14 | vite: () => ({ 15 | clearScreen: false, 16 | server: { 17 | proxy: { 18 | '/api': { 19 | target: 'http://localhost:3000', 20 | rewrite: path => path.replace(/^\/api/, ''), 21 | }, 22 | }, 23 | }, 24 | }), 25 | }, 26 | }; 27 | 28 | export default config; 29 | -------------------------------------------------------------------------------- /ui/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "moduleResolution": "node", 4 | "module": "es2020", 5 | "lib": ["es2015", "es2020", "DOM", "dom.iterable"], 6 | "target": "es2019", 7 | /** 8 | svelte-preprocess cannot figure out whether you have a value or a type, so tell TypeScript 9 | to enforce using \`import type\` instead of \`import\` for Types. 10 | */ 11 | "importsNotUsedAsValues": "error", 12 | "isolatedModules": true, 13 | "resolveJsonModule": true, 14 | /** 15 | To have warnings/errors of the Svelte compiler at the correct position, 16 | enable source maps by default. 17 | */ 18 | "sourceMap": true, 19 | "esModuleInterop": true, 20 | "skipLibCheck": true, 21 | "forceConsistentCasingInFileNames": true, 22 | "baseUrl": ".", 23 | "allowJs": true, 24 | "checkJs": true, 25 | "paths": { 26 | "$lib": ["src/lib"], 27 | "$lib/*": ["src/lib/*"] 28 | }, 29 | "types": ["svelte"], 30 | }, 31 | "include": ["src/**/*.d.ts", "src/**/*.js", "src/**/*.ts", "src/**/*.svelte"] 32 | } 33 | --------------------------------------------------------------------------------
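A minimal usage sketch of the data model above. It is not a file in the repository: the file name, its placement next to src/ at the repository root, and the sample hashes are assumptions made for illustration. It shows that Bundle.id ignores the order of files in a bundle, and that Record.toLedger flattens a record into the single flat object which Record.fromLedger rebuilds.

// sketch.ts — illustrative only; run with ts-node from the repository root (assumed)
import * as Bundle from './src/types/Bundle';
import * as Record from './src/types/Record';

const record: Record.Record = {
  data: { title: 'Non Stop Nyan Cat', url: 'http://www.nyan.cat/' },
  annotations: { description: 'A cat farting an infinite rainbow' },
  bundle: [
    // sample hashes, not real SHA256 digests
    { kind: 'one_file', hash: 'aaa' },
    { kind: 'screenshot', hash: 'bbb' },
    { kind: 'screenshot_thumbnail', hash: 'ccc' },
  ],
};

// sortedBundle() orders files by hash before hashing, so the ID is
// independent of how the bundle was assembled
const shuffled = [...record.bundle].reverse();
console.log(Bundle.id(record.bundle) === Bundle.id(shuffled)); // true

// toLedger() un-nests data, annotations and bundle into one flat object,
// keyed by file kind, e.g.
// { id, title, url, description, one_file: 'aaa', screenshot: 'bbb', screenshot_thumbnail: 'ccc' }
const flat = Record.toLedger(record);

// fromLedger() rebuilds the nested Record, and the ID survives the round trip
const roundTripped = Record.fromLedger(flat);
console.log(Record.id(roundTripped) === Record.id(record)); // true

Because the bundle is sorted before being hashed, the ledger ID stays stable for the same set of files regardless of insertion order, which is what makes the round trip through QLDB identity-preserving.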