├── .gitignore ├── .npmignore ├── CLI_README.md ├── README.md ├── bun.lock ├── docs ├── architecture.md └── article.md ├── examples ├── iridium.user.js ├── json-formatter.user.js ├── material-design-fileicons.user.js ├── modern-hackernews.user.js ├── modern-wikipedia.user.js ├── return-dislikes.user.js ├── ublacklist.user.js └── web-search-navigator.user.js ├── gen_examples.sh ├── package.json └── src ├── abstractionLayer.js ├── assetsGenerator.js ├── buildPolyfillString.js ├── cli ├── download.js ├── downloadExt.js ├── index.js ├── minify.js ├── require.js ├── unpack.js └── workflow.js ├── convert.js ├── getIcon.js ├── locales.js ├── manifestParser.js ├── metadataGenerator.js ├── outputBuilder.js ├── patches └── ExtPay.js ├── resourceProcessor.js ├── runtimePolyfill.js ├── scriptAssembler.js ├── templateManager.js ├── templates ├── abstractionLayer.handle_postmessage.template.js ├── abstractionLayer.postmessage.template.js ├── abstractionLayer.userscript.template.js ├── abstractionLayer.vanilla.template.js ├── messaging.template.js ├── orchestration.template.js ├── polyfill.template.js └── trustedTypes.template.js └── utils.js /.gitignore: -------------------------------------------------------------------------------- 1 | extensions 2 | node_modules 3 | .DS_Store 4 | *.user.js 5 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | examples 2 | extensions 3 | *.user.js 4 | node_modules 5 | .DS_Store -------------------------------------------------------------------------------- /CLI_README.md: -------------------------------------------------------------------------------- 1 | # Extension to Userscript CLI 2 | 3 | A powerful command-line tool to convert browser extensions to userscripts with support for downloading from web stores. 
4 | 5 | ## Installation 6 | 7 | ```bash 8 | npm install -g extension-to-userscript 9 | ``` 10 | 11 | Or run directly: 12 | 13 | ```bash 14 | npx extension-to-userscript 15 | ``` 16 | 17 | ## Quick Start 18 | 19 | ```bash 20 | # Convert from Chrome Web Store 21 | ext2us convert https://chrome.google.com/webstore/detail/extension-id 22 | 23 | # Convert local extension folder 24 | ext2us convert ./my-extension/ 25 | 26 | # Convert CRX/XPI/ZIP file 27 | ext2us convert extension.crx -o my-script.user.js 28 | 29 | # Download only (no conversion) 30 | ext2us download extension-id -s chrome 31 | ``` 32 | 33 | ## Commands 34 | 35 | ### `convert <source>` 36 | 37 | Convert an extension to a userscript from various sources: 38 | 39 | - **Web Store URL**: `https://chrome.google.com/webstore/detail/...` 40 | - **Extension ID**: `abcdefghijklmnopqrstuvwxyz123456` 41 | - **Local Archive**: `extension.crx`, `extension.xpi`, `extension.zip` 42 | - **Local Directory**: `./extension-folder/` 43 | 44 | #### Options 45 | 46 | - `-o, --output <path>` - Output file path (default: `extension.user.js`) 47 | - `-s, --store <type>` - Web store type: `chrome` or `firefox` (default: `chrome`) 48 | - `-t, --target <target>` - Build target: `userscript` or `vanilla` (default: `userscript`) 49 | - `-m, --minify` - Minify output (default: `true`) 50 | - `--no-minify` - Disable minification 51 | - `--temp-dir <path>` - Custom temporary directory 52 | - `--keep-temp` - Keep temporary files after conversion 53 | - `-v, --verbose` - Verbose output with detailed information 54 | 55 | #### Examples 56 | 57 | ```bash 58 | # Convert Chrome extension with custom output 59 | ext2us convert "https://chrome.google.com/webstore/detail/name/abcd..." -o my-script.user.js 60 | 61 | # Convert Firefox addon 62 | ext2us convert ublock-origin -s firefox --verbose 63 | 64 | # Convert local extension without minification 65 | ext2us convert ./extension-folder/ --no-minify 66 | 67 | # Convert to vanilla JS target 68 | ext2us convert extension.crx -t vanilla -o extension.js 69 | ``` 70 | 71 | ### `download <source>` 72 | 73 | Download an extension from a web store without converting it: 74 | 75 | #### Options 76 | 77 | - `-o, --output <path>` - Output file path (auto-generated if not specified) 78 | - `-s, --store <type>` - Web store type: `chrome` or `firefox` (default: `chrome`) 79 | 80 | #### Examples 81 | 82 | ```bash 83 | # Download Chrome extension 84 | ext2us download extension-id 85 | 86 | # Download Firefox addon 87 | ext2us download addon-name -s firefox -o addon.xpi 88 | 89 | # Download from URL 90 | ext2us download "https://addons.mozilla.org/addon/name/" 91 | ``` 92 | 93 | ## Supported Formats 94 | 95 | ### Input Sources 96 | 97 | - **Chrome Web Store**: URLs or extension IDs 98 | - **Firefox Add-ons**: URLs or addon names/IDs 99 | - **Archive Files**: `.crx`, `.xpi`, `.zip` 100 | - **Directories**: Local extension folders with `manifest.json` 101 | 102 | ### Output Targets 103 | 104 | - **Userscript** (default): `.user.js` files for Tampermonkey, Greasemonkey, etc.
105 | - **Vanilla JS**: Standalone `.js` files using browser APIs (limited functionality) 106 | 107 | ## Features 108 | 109 | ### Web Store Integration 110 | 111 | - **Chrome Web Store**: Direct download via CRX URLs 112 | - **Firefox Add-ons**: API integration for latest versions 113 | - **Progress tracking**: Real-time download progress 114 | - **Validation**: File type and size validation 115 | 116 | ### Archive Handling 117 | 118 | - **CRX Support**: Chrome extension format with header parsing 119 | - **XPI Support**: Firefox addon format 120 | - **ZIP Support**: Generic ZIP archives 121 | - **Security**: Path traversal protection 122 | - **Validation**: Manifest.json verification 123 | 124 | ### Conversion Features 125 | 126 | - **Content Scripts**: Full support with proper timing 127 | - **Background Scripts**: Auto-execution in polyfilled environment 128 | - **Options Pages**: Modal iframe implementation 129 | - **Popup Pages**: Modal iframe implementation 130 | - **Assets**: Automatic inlining and resource management 131 | - **Permissions**: Smart grant detection and metadata generation 132 | - **Icons**: Embedded in userscript metadata 133 | 134 | ### Code Processing 135 | 136 | - **Minification**: Prettier-based code formatting (can be disabled) 137 | - **Error Handling**: Comprehensive error reporting 138 | - **Validation**: Input and output validation 139 | - **Cleanup**: Automatic temporary file management 140 | 141 | ## Advanced Usage 142 | 143 | ### Verbose Mode 144 | 145 | Use `-v` or `--verbose` for detailed information: 146 | 147 | ```bash 148 | ext2us convert extension.crx -v 149 | ``` 150 | 151 | Output includes: 152 | 153 | - Extension metadata 154 | - File processing details 155 | - Conversion summary 156 | - Performance metrics 157 | 158 | ### Custom Temporary Directory 159 | 160 | ```bash 161 | ext2us convert extension.crx --temp-dir /custom/temp --keep-temp 162 | ``` 163 | 164 | ### Batch Processing 165 | 166 | Process multiple extensions: 167 | 168 | ```bash 169 | ext2us convert ext1.crx -o script1.user.js 170 | ext2us convert ext2.crx -o script2.user.js 171 | ``` 172 | 173 | ## Troubleshooting 174 | 175 | ### Common Issues 176 | 177 | 1. **"Extension not found"** 178 | 179 | - Verify the extension ID or URL 180 | - Check if the extension is publicly available 181 | - Try using the full web store URL 182 | 183 | 2. **"Archive extraction failed"** 184 | 185 | - Ensure the file is a valid extension archive 186 | - Check file permissions 187 | - Verify the file isn't corrupted 188 | 189 | 3. **"Manifest parsing error"** 190 | 191 | - Ensure the extension contains a valid manifest.json 192 | - Check for JSON syntax errors 193 | - Verify required manifest fields are present 194 | 195 | 4. 
**"Download failed"** 196 | - Check internet connection 197 | - Verify the extension is still available 198 | - Try again later if web store is temporarily unavailable 199 | 200 | ### Debug Options 201 | 202 | ```bash 203 | # Maximum verbosity 204 | ext2us convert extension.crx -v --keep-temp 205 | 206 | # Check temporary files 207 | ls /tmp/ext2us-* 208 | ``` 209 | 210 | ## Technical Details 211 | 212 | ### Supported Manifest Versions 213 | 214 | - **Manifest V2**: Full support 215 | - **Manifest V3**: Basic support (limitations apply) 216 | 217 | ### Browser Compatibility 218 | 219 | Generated userscripts work with: 220 | 221 | - Tampermonkey (Chrome, Firefox, Safari, Edge) 222 | - Greasemonkey (Firefox) 223 | - Violentmonkey (Chrome, Firefox, Edge) 224 | 225 | ### API Coverage 226 | 227 | The converter provides polyfills for: 228 | 229 | - `chrome.storage` (local, sync, managed) 230 | - `chrome.runtime` (sendMessage, onMessage, getURL, getManifest) 231 | - `chrome.tabs` (query, create, sendMessage) 232 | - `chrome.notifications` (create, clear, onClicked) 233 | - `chrome.contextMenus` (create, remove, onClicked) 234 | - And more... 235 | 236 | ### Limitations 237 | 238 | - Service workers (MV3) have limited support 239 | - Some privileged APIs cannot be polyfilled 240 | - Cross-origin requests require proper @connect grants 241 | - Vanilla target has significant CORS limitations 242 | 243 | ## Development 244 | 245 | ### Project Structure 246 | 247 | ``` 248 | src/cli/ 249 | ├── index.js # Main CLI entry point 250 | ├── inputValidator.js # Input type detection and validation 251 | ├── storeDownloader.js # Web store download functionality 252 | ├── archiveExtractor.js # Archive extraction (CRX/XPI/ZIP) 253 | ├── workspaceManager.js # Temporary directory management 254 | ├── extensionConverter.js # Conversion orchestration 255 | └── downloadExt.js # CRX URL generation 256 | ``` 257 | 258 | ### Contributing 259 | 260 | 1. Fork the repository 261 | 2. Create a feature branch 262 | 3. Add tests for new functionality 263 | 4. Submit a pull request 264 | 265 | ## License 266 | 267 | ISC License - see LICENSE file for details 268 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | # to-userscript 5 | 6 |
7 | 8 | Demo 9 | 10 | 11 | ##### 2x Speed (conversion takes 4s total normal speed, incl. downloading) 12 | 13 | https://github.com/user-attachments/assets/e0adebcb-843d-4b04-830b-0b6ef5344652 14 | 15 | 16 | ##### Demo but normal speed 17 | 18 | https://github.com/user-attachments/assets/874dc6fd-ad6c-4e07-9d27-da1184f3036d 19 | 20 | 21 |
22 | 23 | A powerful CLI for converting browser extensions into standalone userscripts. 24 | 25 | ## Examples: 26 | 27 | | Extension | View output code | Install | 28 | | ---------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------- | 29 | | [Material Design File Icons](https://chromewebstore.google.com/detail/material-icons-for-github/bggfcpfjbdkhfhfmkjpbhnkhnpjjeomc/) | [View](https://github.com/Explosion-Scratch/to-userscript/blob/main/examples/material-design-fileicons.user.js) | [Install](https://raw.githubusercontent.com/Explosion-Scratch/to-userscript/main/examples/material-design-fileicons.user.js) | 30 | | [JSON Formatter](https://chromewebstore.google.com/detail/json-formatter/bcjindcccaagfpapjjmafapmmgkkhgoa) | [View](https://github.com/Explosion-Scratch/to-userscript/blob/main/examples/json-formatter.user.js) | [Install](https://raw.githubusercontent.com/Explosion-Scratch/to-userscript/main/examples/json-formatter.user.js) | 31 | | [Modern for Wikipedia](https://chromewebstore.google.com/detail/modern-for-wikipedia/emdkdnnopdnajipoapepbeeiemahbjcn?hl=en) | [View](https://github.com/Explosion-Scratch/to-userscript/blob/main/examples/modern-wikipedia.user.js) | [Install](https://raw.githubusercontent.com/Explosion-Scratch/to-userscript/main/examples/modern-wikipedia.user.js) | 32 | | [Return Dislikes](https://chromewebstore.google.com/detail/return-youtube-dislike/gebbhagfogifgggkldgodflihgfeippi) | [View](https://github.com/Explosion-Scratch/to-userscript/blob/main/examples/return-dislikes.user.js) | [Install](https://raw.githubusercontent.com/Explosion-Scratch/to-userscript/main/examples/return-dislikes.user.js) | 33 | | [Web Search Navigator](https://chromewebstore.google.com/detail/web-search-navigator/cohamjploocgoejdfanacfgkhjkhdkek?hl=en) | [View](https://github.com/Explosion-Scratch/to-userscript/blob/main/examples/web-search-navigator.user.js) | [Install](https://raw.githubusercontent.com/Explosion-Scratch/to-userscript/main/examples/web-search-navigator.user.js) | 34 | | [uBlacklist](https://chromewebstore.google.com/detail/ublacklist/pncfbmialoiaghdehhbnbhkkgmjanfhe) | [View](https://github.com/Explosion-Scratch/to-userscript/blob/main/examples/ublacklist.user.js) | [Install](https://raw.githubusercontent.com/Explosion-Scratch/to-userscript/main/examples/ublacklist.user.js) | 35 | | [Iridium](https://chromewebstore.google.com/detail/iridium-for-youtube/gbjmgndncjkjfcnpfhgidhbgokofegbl?hl=en-US) | [View](https://github.com/Explosion-Scratch/to-userscript/blob/main/examples/iridium.user.js) | [Install](https://raw.githubusercontent.com/Explosion-Scratch/to-userscript/main/examples/iridium.user.js) | 36 | | [Modern for Hackernews](https://chromewebstore.google.com/detail/modern-for-hacker-news/dabkegjlekdcmefifaolmdhnhdcplklo/) | [View](https://github.com/Explosion-Scratch/to-userscript/blob/main/examples/modern-hackernews.user.js) | [Install](https://raw.githubusercontent.com/Explosion-Scratch/to-userscript/main/examples/modern-hackernews.user.js) | 37 | 38 | ## What is this? 39 | 40 | `to-userscript` converts browser extensions into portable userscripts. 
It takes an extension from the Chrome or Firefox store, a local directory, or a zip/xpi, and creates a single `.user.js` file that can be run in any userscript manager. This allows you to use, or modify extensions on browsers or platforms that might not natively support them. 41 | 42 | ## Features 43 | 44 | - Convert from Chrome Web Store, Firefox Add-ons site, direct URLs, local directories, or archive files (`.crx`, `.xpi`, `.zip`). 45 | - Replaces WebExtension APIs (`chrome.storage`, `chrome.runtime`, `chrome.tabs`, etc.) with userscript-compatible equivalents (`GM_*` functions, IndexedDB, and custom event buses). 46 | - Asset inlining: Embeds CSS, images, fonts, and other resources into the userscript (replacing runtime.getURL and options/popup pages). 47 | - Renders extension popup and options pages within an embedded modal (after inlining all asset links), preserving most UI functionality. 48 | - Emulates the background script environment, allowing persistent logic to run as intended and two way messaging. 49 | - Respects `_locales` directories and uses the specified or default locale for manifest, text, etc. 50 | - Optional minification (`terser`) and code formatting (`prettier`) for the final output. 51 | 52 | ## Installation 53 | 54 | `to-userscript` requires Node.js v16 or higher. 55 | 56 | You can install it globally using your preferred package manager: 57 | 58 | ```bash 59 | # npm 60 | npm install -g to-userscript 61 | ``` 62 | 63 | ```bash 64 | # pnpm 65 | pnpm add -g to-userscript 66 | ``` 67 | 68 | ```bash 69 | # bun 70 | bun install -g to-userscript 71 | ``` 72 | 73 | Alternatively, you can run it directly without a global installation using `npx`, `pnpm dlx`, or `bunx`. 74 | 75 | ## Polyfill status: 76 | 77 | ### Supported APIs 78 | 79 | - [x] `chrome.storage` 80 | - [x] `local` (backed by `GM_*` storage or IndexedDB) 81 | - [x] `sync` (maps to local storage) 82 | - [x] `managed` (shimmed; read-only and empty) 83 | - [x] `onChanged` 84 | - [x] `chrome.runtime` 85 | - [x] `sendMessage`/`onMessage` (in-page/iframe communication only) 86 | - [x] `getURL` (for bundled assets only) 87 | - [x] `getManifest` 88 | - [x] `openOptionsPage`: Opens options popup 89 | - [x] `chrome.tabs` 90 | - [x] `create` (maps to `GM_openInTab` or `window.open`) 91 | - [x] `query` (shimmed; returns current tab only) 92 | - [x] `sendMessage` (shimmed; redirects to `runtime.sendMessage`) 93 | - [x] `chrome.i18n` 94 | - [x] `getMessage` 95 | - [x] `getUILanguage` 96 | - [x] `chrome.contextMenus` (emulated via `GM_registerMenuCommand`; limited functionality) 97 | - [x] `chrome.permissions` 98 | - [x] `request`/`contains`/`getAll` (shimmed) 99 | - [x] `chrome.notifications` (maps to native Web Notifications API) 100 | - [x] `chrome.cookies` 101 | - [x] `get` 102 | - [x] `getAll` 103 | - [x] `set` 104 | - [x] `remove` 105 | - [x] `getAllCookieStores` 106 | - [x] `getPartitionKey` 107 | 108 | ### Planned APIs 109 | 110 | - [ ] `chrome.browserAction` 111 | - [ ] `chrome.tabs.insertCSS` 112 | - [ ] `chrome.tabs.reload` 113 | - [ ] `chrome.tabs.onActivated` 114 | - [ ] `chrome.scripting.insertCSS` 115 | - [ ] `chrome.scripting.executeScript` 116 | - [ ] `chrome.identity`: 117 | - [ ] `getAuthToken` 118 | - [ ] `getProfileUserInfo` 119 | - [ ] `clearAllCachedAuthTokens` 120 | - [ ] `chrome.runtime.onMessageExternal`: Shim 121 | - [ ] `chrome.runtime.sendMessageExternal`: Shim 122 | - [ ] `chrome.webNavigation` 123 | - [ ] (partial) `chrome.permissions`: 124 | - Get listeners working 125 | - Removing 
a permission for an origin should persist that change and stop the script from matching that origin 126 | - [ ] (mostly implemented) `chrome.storage`: Proper listeners 127 | - [ ] `chrome.action` 128 | - `onClicked.addListener`: Add menu item for click 129 | - [ ] Keyboard shortcuts (from manifest) 130 | - [ ] `background.page` support 131 | 132 | ## General tasks 133 | 134 | - [ ] Make `vanilla` target better and more capable 135 | - [ ] API bindings: `import makeUserscript from 'to-userscript';` 136 | - [ ] Vite plugin 137 | - [ ] Take dir and output 138 | - [ ] Create reusable listener logic for supporting callbacks/promises 139 | 140 | ## Usage & Commands 141 | 142 | ### `convert` 143 | 144 | Converts an extension into a userscript. This is the primary command. 145 | 146 | **Syntax:** `to-userscript convert <source> [options]` 147 | 148 | **`<source>` Types:** 149 | 150 | - **URL:** A Chrome Web Store or Firefox Add-ons URL. 151 | - **Direct URL:** A direct link to a `.crx`, `.xpi`, or `.zip` file. 152 | - **Local Directory:** A path to an unpacked extension directory. 153 | - **Local Archive:** A path to a local `.crx`, `.xpi`, or `.zip` file. 154 | 155 | | Option | Alias | Description | 156 | | :---------------- | :---- | :-------------------------------------------------------------------------------------- | 157 | | `--output` | `-o` | Specify the output `.user.js` file path. | 158 | | `--target` | `-t` | Set build target. `userscript` (default) includes metadata, `vanilla` outputs plain JS. | 159 | | `--minify` | | Minify the JavaScript output using `terser`. | 160 | | `--beautify` | | Beautify the JavaScript output using `prettier`. | 161 | | `--locale` | `-l` | Preferred locale for name/description (e.g., `en`, `fr`, `de`). | 162 | | `--ignore-assets` | | Asset extensions to ignore during inlining (e.g., `mp4,webm,ttf`). | 163 | | `--force` | `-f` | Overwrite the output file if it exists. | 164 | | `--keep-temp` | | Keep temporary files for debugging purposes. | 165 | 166 | #### Examples 167 | 168 | **1. Convert from the Chrome Web Store with minification:** 169 | 170 | ```bash 171 | to-userscript convert "https://chromewebstore.google.com/detail/modern-for-wikipedia/emdkdnnopdnajipoapepbeeiemahbjcn?hl=en" -o modern-wikipedia.user.js --minify 172 | ``` 173 | 174 | **2. Convert a local directory with French localization:** 175 | 176 | ```bash 177 | to-userscript convert ./my-unpacked-extension/ -o my-script.user.js --locale fr 178 | ``` 179 | 180 | **3. Convert a downloaded XPI to vanilla JS (using `pnpm dlx`):** 181 | 182 | ```bash 183 | pnpm dlx to-userscript convert ./my-addon.xpi --target vanilla -o my-addon.js 184 | ``` 185 | 186 | ### `download` 187 | 188 | **Syntax:** `to-userscript download <source>` 189 | 190 | #### Example 191 | 192 | ```bash 193 | # Using bunx to download Material Design File Icons 194 | bunx to-userscript download "https://chromewebstore.google.com/detail/material-icons-for-github/bggfcpfjbdkhfhfmkjpbhnkhnpjjeomc/" -o material-icons.crx 195 | ``` 196 | 197 | ### `require` 198 | 199 | Generates a metadata block to `@require` another local userscript. 200 | 201 | **Syntax:** `to-userscript require <path>` 202 | 203 | #### Example 204 | 205 | ```bash 206 | # Generate a main script that @requires the converted script 207 | to-userscript require ./material-design-fileicons.user.js | pbcopy 208 | ``` 209 | 210 | ## How It Works 211 | 212 | For a detailed explanation of the internal conversion pipeline, see the **[Architecture Guide](docs/architecture.md)** _note: AI generated but proofread_.
213 | 214 | ## What it can (and can't/shouldn't) convert 215 | 216 | - Extensions intended for one site should work well; extensions meant to apply some enhancement across the entire web may not work as well 217 | - Extensions whose functionality is primarily based on privileged browser APIs are a poor fit (even though most of these APIs are polyfilled, the polyfills often can't provide the behavior the extension actually relies on) 218 | - E.g. an ad blocker is a bad use case for this, as is something like a custom new tab page, whereas an extension like "YouTube auto HD" might work really well 219 | 220 | 221 | 222 | ## Troubleshooting & Advanced Usage 223 | 224 | ### Content Security Policy (CSP) Issues 225 | 226 | Some websites have strict CSP rules that can prevent converted userscripts from functioning correctly (blocking `data:` URLs and blobs). If your script isn't working as expected on a specific site, you may need to adjust Tampermonkey's security settings. 227 | 228 | 1. In Tampermonkey, go to the **Dashboard**. 229 | 2. Click the **Settings** tab. 230 | 3. Change **Config mode** from "Beginner" to **"Advanced"**. 231 | 4. Scroll down to the **Security** section. 232 | 5. For the option **Modify existing Content Security headers**, select **"Remove entirely"**. 233 | 234 | ## Resulting userscript output format: 235 | 236 | ```js 237 | // ==UserScript== 238 | // ... Metadata Block ... 239 | // ==/UserScript== 240 | 241 | (function () { 242 | // IIFE for scope isolation 243 | "use strict"; 244 | 245 | // 1. UNIFIED POLYFILL is defined here 246 | // - messaging.template.js -> createEventBus, createRuntime 247 | // - abstractionLayer.*.template.js -> _storageSet, _fetch, etc. 248 | // - assetsGenerator code -> EXTENSION_ASSETS_MAP, _createAssetUrl 249 | // - polyfill.template.js -> buildPolyfill() which creates chrome.* 250 | 251 | // 2. BACKGROUND SCRIPT ENVIRONMENT is defined and executed 252 | // - Runs all background scripts inside the polyfill's scope. 253 | // - This happens immediately on script start. 254 | 255 | // 3. ORCHESTRATION LOGIC is defined and executed 256 | // - Checks if location.href matches a content_script pattern. 257 | // - If it matches: 258 | // - Calls `executeAllScripts()`. 259 | // - This function injects CSS and runs JS in phases: 260 | // - document-start 261 | // - document-end 262 | // - document-idle 263 | // - Registers GM_registerMenuCommand for options/popup pages. 264 | // - Options/Popup pages are rendered in a modal with an iframe. 265 | // - The iframe's content is populated with the inlined HTML and 266 | // a specialized 'postmessage' version of the polyfill. 267 | })(); 268 | ``` 269 | 270 | ## Contributing 271 | 272 | Contributions are welcome, especially new `chrome.*` API implementations and bug fixes.
273 | -------------------------------------------------------------------------------- /bun.lock: -------------------------------------------------------------------------------- 1 | { 2 | "lockfileVersion": 1, 3 | "workspaces": { 4 | "": { 5 | "name": "extension-to-userscript", 6 | "dependencies": { 7 | "chalk": "^4.1.2", 8 | "cli-progress": "^3.12.0", 9 | "debug": "^4.3.4", 10 | "fs-extra": "^11.2.0", 11 | "isomorphic-fetch": "^3.0.0", 12 | "ora": "^5.4.1", 13 | "prettier": "^3.5.3", 14 | "terser": "^5.36.0", 15 | "tmp": "^0.2.3", 16 | "yargs": "^17.7.2", 17 | "yauzl": "^3.0.0", 18 | }, 19 | }, 20 | }, 21 | "packages": { 22 | "@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.3.8", "", { "dependencies": { "@jridgewell/set-array": "^1.2.1", "@jridgewell/sourcemap-codec": "^1.4.10", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA=="], 23 | 24 | "@jridgewell/resolve-uri": ["@jridgewell/resolve-uri@3.1.2", "", {}, "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw=="], 25 | 26 | "@jridgewell/set-array": ["@jridgewell/set-array@1.2.1", "", {}, "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A=="], 27 | 28 | "@jridgewell/source-map": ["@jridgewell/source-map@0.3.6", "", { "dependencies": { "@jridgewell/gen-mapping": "^0.3.5", "@jridgewell/trace-mapping": "^0.3.25" } }, "sha512-1ZJTZebgqllO79ue2bm3rIGud/bOe0pP5BjSRCRxxYkEZS8STV7zN84UBbiYu7jy+eCKSnVIUgoWWE/tt+shMQ=="], 29 | 30 | "@jridgewell/sourcemap-codec": ["@jridgewell/sourcemap-codec@1.5.0", "", {}, "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ=="], 31 | 32 | "@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.25", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ=="], 33 | 34 | "acorn": ["acorn@8.15.0", "", { "bin": { "acorn": "bin/acorn" } }, "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg=="], 35 | 36 | "ansi-regex": ["ansi-regex@5.0.1", "", {}, "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="], 37 | 38 | "ansi-styles": ["ansi-styles@4.3.0", "", { "dependencies": { "color-convert": "^2.0.1" } }, "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg=="], 39 | 40 | "base64-js": ["base64-js@1.5.1", "", {}, "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA=="], 41 | 42 | "bl": ["bl@4.1.0", "", { "dependencies": { "buffer": "^5.5.0", "inherits": "^2.0.4", "readable-stream": "^3.4.0" } }, "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w=="], 43 | 44 | "buffer": ["buffer@5.7.1", "", { "dependencies": { "base64-js": "^1.3.1", "ieee754": "^1.1.13" } }, "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ=="], 45 | 46 | "buffer-crc32": ["buffer-crc32@0.2.13", "", {}, "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ=="], 47 | 48 | "buffer-from": ["buffer-from@1.1.2", "", {}, "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ=="], 49 | 50 | "chalk": ["chalk@4.1.2", "", { "dependencies": { "ansi-styles": 
"^4.1.0", "supports-color": "^7.1.0" } }, "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="], 51 | 52 | "cli-cursor": ["cli-cursor@3.1.0", "", { "dependencies": { "restore-cursor": "^3.1.0" } }, "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw=="], 53 | 54 | "cli-progress": ["cli-progress@3.12.0", "", { "dependencies": { "string-width": "^4.2.3" } }, "sha512-tRkV3HJ1ASwm19THiiLIXLO7Im7wlTuKnvkYaTkyoAPefqjNg7W7DHKUlGRxy9vxDvbyCYQkQozvptuMkGCg8A=="], 55 | 56 | "cli-spinners": ["cli-spinners@2.9.2", "", {}, "sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg=="], 57 | 58 | "cliui": ["cliui@8.0.1", "", { "dependencies": { "string-width": "^4.2.0", "strip-ansi": "^6.0.1", "wrap-ansi": "^7.0.0" } }, "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ=="], 59 | 60 | "clone": ["clone@1.0.4", "", {}, "sha512-JQHZ2QMW6l3aH/j6xCqQThY/9OH4D/9ls34cgkUBiEeocRTU04tHfKPBsUK1PqZCUQM7GiA0IIXJSuXHI64Kbg=="], 61 | 62 | "color-convert": ["color-convert@2.0.1", "", { "dependencies": { "color-name": "~1.1.4" } }, "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ=="], 63 | 64 | "color-name": ["color-name@1.1.4", "", {}, "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="], 65 | 66 | "commander": ["commander@2.20.3", "", {}, "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ=="], 67 | 68 | "debug": ["debug@4.4.1", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ=="], 69 | 70 | "defaults": ["defaults@1.0.4", "", { "dependencies": { "clone": "^1.0.2" } }, "sha512-eFuaLoy/Rxalv2kr+lqMlUnrDWV+3j4pljOIJgLIhI058IQfWJ7vXhyEIHu+HtC738klGALYxOKDO0bQP3tg8A=="], 71 | 72 | "emoji-regex": ["emoji-regex@8.0.0", "", {}, "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="], 73 | 74 | "escalade": ["escalade@3.2.0", "", {}, "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA=="], 75 | 76 | "fs-extra": ["fs-extra@11.3.0", "", { "dependencies": { "graceful-fs": "^4.2.0", "jsonfile": "^6.0.1", "universalify": "^2.0.0" } }, "sha512-Z4XaCL6dUDHfP/jT25jJKMmtxvuwbkrD1vNSMFlo9lNLY2c5FHYSQgHPRZUjAB26TpDEoW9HCOgplrdbaPV/ew=="], 77 | 78 | "get-caller-file": ["get-caller-file@2.0.5", "", {}, "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg=="], 79 | 80 | "graceful-fs": ["graceful-fs@4.2.11", "", {}, "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ=="], 81 | 82 | "has-flag": ["has-flag@4.0.0", "", {}, "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ=="], 83 | 84 | "ieee754": ["ieee754@1.2.1", "", {}, "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA=="], 85 | 86 | "inherits": ["inherits@2.0.4", "", {}, "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="], 87 | 88 | "is-fullwidth-code-point": ["is-fullwidth-code-point@3.0.0", "", {}, "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg=="], 89 | 90 | "is-interactive": ["is-interactive@1.0.0", "", {}, 
"sha512-2HvIEKRoqS62guEC+qBjpvRubdX910WCMuJTZ+I9yvqKU2/12eSL549HMwtabb4oupdj2sMP50k+XJfB/8JE6w=="], 91 | 92 | "is-unicode-supported": ["is-unicode-supported@0.1.0", "", {}, "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw=="], 93 | 94 | "isomorphic-fetch": ["isomorphic-fetch@3.0.0", "", { "dependencies": { "node-fetch": "^2.6.1", "whatwg-fetch": "^3.4.1" } }, "sha512-qvUtwJ3j6qwsF3jLxkZ72qCgjMysPzDfeV240JHiGZsANBYd+EEuu35v7dfrJ9Up0Ak07D7GGSkGhCHTqg/5wA=="], 95 | 96 | "jsonfile": ["jsonfile@6.1.0", "", { "dependencies": { "universalify": "^2.0.0" }, "optionalDependencies": { "graceful-fs": "^4.1.6" } }, "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ=="], 97 | 98 | "log-symbols": ["log-symbols@4.1.0", "", { "dependencies": { "chalk": "^4.1.0", "is-unicode-supported": "^0.1.0" } }, "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg=="], 99 | 100 | "mimic-fn": ["mimic-fn@2.1.0", "", {}, "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg=="], 101 | 102 | "ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="], 103 | 104 | "node-fetch": ["node-fetch@2.7.0", "", { "dependencies": { "whatwg-url": "^5.0.0" }, "peerDependencies": { "encoding": "^0.1.0" }, "optionalPeers": ["encoding"] }, "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A=="], 105 | 106 | "onetime": ["onetime@5.1.2", "", { "dependencies": { "mimic-fn": "^2.1.0" } }, "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg=="], 107 | 108 | "ora": ["ora@5.4.1", "", { "dependencies": { "bl": "^4.1.0", "chalk": "^4.1.0", "cli-cursor": "^3.1.0", "cli-spinners": "^2.5.0", "is-interactive": "^1.0.0", "is-unicode-supported": "^0.1.0", "log-symbols": "^4.1.0", "strip-ansi": "^6.0.0", "wcwidth": "^1.0.1" } }, "sha512-5b6Y85tPxZZ7QytO+BQzysW31HJku27cRIlkbAXaNx+BdcVi+LlRFmVXzeF6a7JCwJpyw5c4b+YSVImQIrBpuQ=="], 109 | 110 | "pend": ["pend@1.2.0", "", {}, "sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg=="], 111 | 112 | "prettier": ["prettier@3.5.3", "", { "bin": { "prettier": "bin/prettier.cjs" } }, "sha512-QQtaxnoDJeAkDvDKWCLiwIXkTgRhwYDEQCghU9Z6q03iyek/rxRh/2lC3HB7P8sWT2xC/y5JDctPLBIGzHKbhw=="], 113 | 114 | "readable-stream": ["readable-stream@3.6.2", "", { "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } }, "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA=="], 115 | 116 | "require-directory": ["require-directory@2.1.1", "", {}, "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q=="], 117 | 118 | "restore-cursor": ["restore-cursor@3.1.0", "", { "dependencies": { "onetime": "^5.1.0", "signal-exit": "^3.0.2" } }, "sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA=="], 119 | 120 | "safe-buffer": ["safe-buffer@5.2.1", "", {}, "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="], 121 | 122 | "signal-exit": ["signal-exit@3.0.7", "", {}, "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ=="], 123 | 124 | "source-map": ["source-map@0.6.1", "", {}, 
"sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g=="], 125 | 126 | "source-map-support": ["source-map-support@0.5.21", "", { "dependencies": { "buffer-from": "^1.0.0", "source-map": "^0.6.0" } }, "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w=="], 127 | 128 | "string-width": ["string-width@4.2.3", "", { "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" } }, "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g=="], 129 | 130 | "string_decoder": ["string_decoder@1.3.0", "", { "dependencies": { "safe-buffer": "~5.2.0" } }, "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA=="], 131 | 132 | "strip-ansi": ["strip-ansi@6.0.1", "", { "dependencies": { "ansi-regex": "^5.0.1" } }, "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="], 133 | 134 | "supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="], 135 | 136 | "terser": ["terser@5.43.0", "", { "dependencies": { "@jridgewell/source-map": "^0.3.3", "acorn": "^8.14.0", "commander": "^2.20.0", "source-map-support": "~0.5.20" }, "bin": { "terser": "bin/terser" } }, "sha512-CqNNxKSGKSZCunSvwKLTs8u8sGGlp27sxNZ4quGh0QeNuyHM0JSEM/clM9Mf4zUp6J+tO2gUXhgXT2YMMkwfKQ=="], 137 | 138 | "tmp": ["tmp@0.2.3", "", {}, "sha512-nZD7m9iCPC5g0pYmcaxogYKggSfLsdxl8of3Q/oIbqCqLLIO9IAF0GWjX1z9NZRHPiXv8Wex4yDCaZsgEw0Y8w=="], 139 | 140 | "tr46": ["tr46@0.0.3", "", {}, "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw=="], 141 | 142 | "universalify": ["universalify@2.0.1", "", {}, "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw=="], 143 | 144 | "util-deprecate": ["util-deprecate@1.0.2", "", {}, "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="], 145 | 146 | "wcwidth": ["wcwidth@1.0.1", "", { "dependencies": { "defaults": "^1.0.3" } }, "sha512-XHPEwS0q6TaxcvG85+8EYkbiCux2XtWG2mkc47Ng2A77BQu9+DqIOJldST4HgPkuea7dvKSj5VgX3P1d4rW8Tg=="], 147 | 148 | "webidl-conversions": ["webidl-conversions@3.0.1", "", {}, "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ=="], 149 | 150 | "whatwg-fetch": ["whatwg-fetch@3.6.20", "", {}, "sha512-EqhiFU6daOA8kpjOWTL0olhVOF3i7OrFzSYiGsEMB8GcXS+RrzauAERX65xMeNWVqxA6HXH2m69Z9LaKKdisfg=="], 151 | 152 | "whatwg-url": ["whatwg-url@5.0.0", "", { "dependencies": { "tr46": "~0.0.3", "webidl-conversions": "^3.0.0" } }, "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw=="], 153 | 154 | "wrap-ansi": ["wrap-ansi@7.0.0", "", { "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", "strip-ansi": "^6.0.0" } }, "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q=="], 155 | 156 | "y18n": ["y18n@5.0.8", "", {}, "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA=="], 157 | 158 | "yargs": ["yargs@17.7.2", "", { "dependencies": { "cliui": "^8.0.1", "escalade": "^3.1.1", "get-caller-file": "^2.0.5", "require-directory": "^2.1.1", "string-width": "^4.2.3", "y18n": "^5.0.5", "yargs-parser": "^21.1.1" } }, 
"sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w=="], 159 | 160 | "yargs-parser": ["yargs-parser@21.1.1", "", {}, "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw=="], 161 | 162 | "yauzl": ["yauzl@3.2.0", "", { "dependencies": { "buffer-crc32": "~0.2.3", "pend": "~1.2.0" } }, "sha512-Ow9nuGZE+qp1u4JIPvg+uCiUr7xGQWdff7JQSk5VGYTAZMDe2q8lxJ10ygv10qmSj031Ty/6FNJpLO4o1Sgc+w=="], 163 | } 164 | } 165 | -------------------------------------------------------------------------------- /docs/architecture.md: -------------------------------------------------------------------------------- 1 | > NOTE: Generated by AI but should help you understand what's going on should you wish to contribute/are curious. As always the actual code, or emailing me is a way to be sure =) 2 | 3 | # Architecture Document: `to-userscript` 4 | 5 | This document provides a detailed overview of the architecture for the `to-userscript` converter. It is intended for developers looking to understand the project's structure, data flow, and core design principles. 6 | 7 | ## 1. High-Level Overview 8 | 9 | The `to-userscript` project is a Node.js command-line tool designed to convert a standard WebExtension into a single, self-contained userscript (`.user.js`) or a vanilla JavaScript file. Its primary goal is to emulate the WebExtension environment (APIs, resources, execution lifecycle) within the constraints of a userscript engine like Tampermonkey or Greasemonkey. 10 | 11 | The tool achieves this by: 12 | 13 | 1. **Parsing** the extension's `manifest.json`. 14 | 2. **Reading** all specified JavaScript and CSS files. 15 | 3. **Inlining** all local assets (images, fonts, HTML, CSS) as Data URLs to make the script self-contained. 16 | 4. **Polyfilling** common WebExtension APIs (`chrome.storage`, `chrome.runtime`, `chrome.i18n`, etc.). 17 | 5. **Orchestrating** the execution of content scripts and injection of styles according to the manifest's `run_at` rules. 18 | 6. **Generating** a single output file with a proper userscript metadata block. 19 | 20 | ## 2. Core Concepts & Design Patterns 21 | 22 | The project is built around several key architectural concepts: 23 | 24 | ### 2.1. Abstraction Layer (Adapter Pattern) 25 | 26 | The most critical part of the polyfill is the abstraction layer. It provides a common internal interface for core functionalities that have different implementations depending on the target environment. 27 | 28 | - **Interface**: `_storageSet`, `_storageGet`, `_fetch`, `_openTab`, etc. 29 | - **Implementations**: 30 | - **`userscript` target**: Maps to functions (`GM_setValue`, `GM_xmlhttpRequest`, etc.). This is defined in `templates/abstractionLayer.userscript.template.js`. 31 | - **`vanilla` target**: Maps to browser-native APIs like IndexedDB for storage and `fetch`. This is defined in `templates/abstractionLayer.vanilla.template.js`. 32 | - **`postmessage` target**: For code running inside an `iframe` (like an options or popup page). It forwards all API calls to the parent window via `postMessage`. This is defined in `templates/abstractionLayer.postmessage.template.js`. 33 | - **`handle_postmessage`**: The counterpart to the above, it runs in the main userscript context to listen for and handle API requests from iframes. 34 | 35 | This design decouples the WebExtension API polyfill from the underlying execution environment, making the system extensible. 36 | 37 | ### 2.2. 
WebExtension API Polyfill 38 | 39 | The tool constructs a `chrome` and `browser` object that mimics the real WebExtension APIs. 40 | 41 | - **Source**: `templates/polyfill.template.js` 42 | - **Functionality**: It provides stubs and working implementations for APIs like `runtime`, `storage`, `i18n`, `tabs`, `contextMenus`, and `notifications`. 43 | - **Dependency**: It relies on the **Abstraction Layer** to perform its tasks. For example, `chrome.storage.local.set()` calls `_storageSet()`. 44 | - **Context Isolation**: The polyfill uses a `with` block and proxies to create a sandboxed global scope for the extension's scripts. This ensures that `window`, `chrome`, etc., refer to the polyfilled versions, minimizing conflicts with the host page. 45 | 46 | ### 2.3. Asset Inlining and Management 47 | 48 | A key feature is making the script self-contained. `assetsGenerator.js` is the engine for this. 49 | 50 | 1. **Recursive Processing**: It starts with top-level files (like options/popup HTML) and recursively scans them for asset references (`src`, `href`, `url()`). 51 | 2. **Asset Conversion**: 52 | - **Binary assets** (images, fonts) are read and converted to Base64 Data URLs. 53 | - **Text assets** (CSS, HTML) are read, and their contents are also recursively scanned for more assets before being inlined. 54 | 3. **Asset Map**: All processed assets are stored in a large JavaScript object `EXTENSION_ASSETS_MAP`, which is injected into the final script. 55 | 4. **`runtime.getURL` Polyfill**: The polyfilled `chrome.runtime.getURL` function does not return a relative path. Instead, it looks up the requested path in the `EXTENSION_ASSETS_MAP` and generates a `blob:` or `data:` URL from the in-memory content. This allows the extension's code to access its resources as if they were files. 56 | 57 | ### 2.4. Execution Orchestration 58 | 59 | The generated userscript doesn't just dump all the code into the page. It follows the execution logic defined in the manifest. 60 | 61 | - **Source**: `templates/orchestration.template.js` and `scriptAssembler.js`. 62 | - **Lifecycle**: The orchestration logic is the `main` function of the generated script. 63 | - **Matching**: It first checks if the current page URL matches any of the `content_scripts` patterns from the manifest. 64 | - **Phased Execution**: If there's a match, it executes code in the order defined by `run_at`: 65 | 1. `document-start` 66 | 2. `document-end` 67 | 3. `document-idle` 68 | - **Assembly**: `scriptAssembler.js` is responsible for taking all the individual script contents and generating a single `executeAllScripts` function string, which neatly orders the code and CSS injections for each phase. 69 | 70 | ### 2.5. Inter-Context Communication (Message Bus) 71 | 72 | Since UI pages (options, popup) are rendered in sandboxed `iframe`s, a message bus is required to emulate `chrome.runtime.sendMessage` and other cross-context communication. 73 | 74 | - **Source**: `templates/messaging.template.js` 75 | - **Mechanism**: It uses `window.postMessage` to send events between the main userscript context and any iframe contexts. 76 | - **`createEventBus`**: Sets up the low-level `on`/`emit` listeners. 77 | - **`createRuntime`**: Builds a `chrome.runtime`-like object on top of the event bus, handling request/response logic for `sendMessage`. 78 | 79 | ## 3. Architectural Flow & Module Breakdown 80 | 81 | The project has two main workflows, driven by the CLI commands `convert` and `download`. 82 | 83 | ### 3.1. 
`convert` Workflow 84 | 85 | This is the primary workflow for converting an extension. 86 | 87 | **`cli/workflow.js -> run()`** acts as the main conductor. 88 | 89 | 1. **Source Analysis (`determineSourceType`)**: Determines if the source is a URL, local archive (`.crx`, `.xpi`, `.zip`), or a directory. 90 | 2. **Preparation (Download/Unpack)**: 91 | - If the source is a URL (e.g., Chrome/Firefox store), `cli/download.js` is used to fetch the extension archive. It contains specific logic (`getCrxUrl`, `getFirefoxAddonUrl`) to find the direct download link. 92 | - If the source is an archive, `cli/unpack.js` is used to extract its contents into a temporary directory. It can handle `.zip`, `.xpi`, and CRXv2/v3 formats. 93 | 3. **Core Conversion (`convert.js -> convertExtension()`)**: This is the pure, library-level conversion function. 94 | - **Manifest Parsing (`manifestParser.js`)**: Reads `manifest.json`, applies localization from `_locales` using `locales.js`, and normalizes the structure. 95 | - **Resource Processing (`resourceProcessor.js`)**: Reads all JS and CSS files listed in `content_scripts` and `background` into memory maps, keyed by their relative paths. 96 | - **Output Building (`outputBuilder.js`)**: This is the assembler. It orchestrates the creation of the final script. 97 | - It initializes `assetsGenerator.js` to process all assets and create the `EXTENSION_ASSETS_MAP`. 98 | - It calls `metadataGenerator.js` to create the `// ==UserScript==` block, including resolving the best icon with `getIcon.js`. 99 | - It calls `buildPolyfillString.js` which combines the abstraction layer and polyfill templates. 100 | - It calls `scriptAssembler.js` to create the ordered script execution logic. 101 | - It injects all these generated parts into the master `orchestration.template.js`. 102 | 4. **Post-processing (`cli/minify.js`)**: Optionally, the final script is minified with `terser` or beautified with `prettier`. 103 | 5. **File Output**: The final string is written to the specified output file. 104 | 105 | ### 3.2. File-by-File Module Responsibilities 106 | 107 | #### `src/cli/` - Command-Line Interface Layer 108 | 109 | - `index.js`: The CLI entry point. Uses `yargs` to define commands (`convert`, `download`, `require`) and their options. Delegates execution to `workflow.js`. 110 | - `workflow.js`: The high-level orchestrator for CLI commands. It manages temporary directories, spinners, and the step-by-step flow of downloading, unpacking, and converting. It separates CLI concerns from the core conversion logic. 111 | - `download.js`: Handles downloading files from URLs. It includes a progress bar and logic to determine the downloadable URL from store pages. 112 | - `downloadExt.js`: A helper specifically for constructing the direct download URL for a Chrome Web Store extension. 113 | - `unpack.js`: Extracts extension archives (`.crx`, `.xpi`, `.zip`) using `yauzl`. Contains logic to handle the CRX header. 114 | - `minify.js`: A wrapper around `terser` and `prettier` to provide minification and beautification, correctly preserving the userscript metadata block. 115 | - `require.js`: Logic for the `require` command, which generates a metadata block that `@require`s another userscript. 116 | 117 | #### `src/` - Core Logic Layer 118 | 119 | - `convert.js`: A high-level library function that encapsulates the entire conversion process. It's the main entry point for using the converter programmatically. 
120 | - `manifestParser.js`: Responsible for reading, parsing, and normalizing `manifest.json`. It integrates with `locales.js` to provide localized names and descriptions. 121 | - `resourceProcessor.js`: Reads the content of all JS and CSS files specified in the manifest. 122 | - `assetsGenerator.js`: The powerful asset inlining engine. Recursively finds and converts all referenced assets to be self-contained within the script. 123 | - `scriptAssembler.js`: Organizes the JS and CSS from content scripts into an `executeAllScripts` function, respecting the `run_at` order. 124 | - `outputBuilder.js`: The master assembler. It takes the output from all other core modules and uses templates to build the final script string. 125 | - `buildPolyfillString.js`: Specifically responsible for constructing the complete polyfill code by combining the messaging, abstraction layer, and assets helper templates. 126 | - `abstractionLayer.js`: Selects the correct abstraction layer code based on the target and determines the necessary `@grant` permissions for userscripts. 127 | - `locales.js`: Handles loading `_locales/` directories and replacing `__MSG_...__` placeholders. 128 | - `getIcon.js`: Finds the most appropriate icon from the manifest and converts it to a Data URL. 129 | - `templateManager.js`: A simple manager to read and cache the `.template.js` files. 130 | - `utils.js`: A collection of utility functions used across the project (e.g., `normalizePath`, `convertMatchPatternToRegExp`). 131 | 132 | #### `src/templates/` - Generated Code Blueprint Layer 133 | 134 | These files are not executed by the tool itself; they are the source code for the _generated_ userscript. 135 | 136 | - `orchestration.template.js`: The main runtime logic of the final script. It contains the logic to check URL matches, trigger phased execution, and handle UI (popup/options modals). 137 | - `polyfill.template.js`: The core `chrome.*` API polyfill. 138 | - `abstractionLayer.*.template.js`: The different backends for the polyfill (Greasemonkey, Vanilla JS, PostMessage). 139 | - `messaging.template.js`: The `postMessage`-based event bus for communication between the main script and iframes. 140 | - `trustedTypes.template.js`: A small script injected via `@require` to bypass Trusted Types security policies on some websites. 141 | 142 | ## 4. The Generated Userscript Architecture 143 | 144 | The final output file has its own internal architecture, composed from the templates: 145 | 146 | ``` 147 | // ==UserScript== 148 | // ... Metadata Block ... 149 | // ==/UserScript== 150 | 151 | (function() { // IIFE for scope isolation 152 | 'use strict'; 153 | 154 | // 1. UNIFIED POLYFILL is defined here 155 | // - messaging.template.js -> createEventBus, createRuntime 156 | // - abstractionLayer.*.template.js -> _storageSet, _fetch, etc. 157 | // - assetsGenerator code -> EXTENSION_ASSETS_MAP, _createAssetUrl 158 | // - polyfill.template.js -> buildPolyfill() which creates chrome.* 159 | 160 | // 2. BACKGROUND SCRIPT ENVIRONMENT is defined and executed 161 | // - Runs all background scripts inside the polyfill's scope. 162 | // - This happens immediately on script start. 163 | 164 | // 3. ORCHESTRATION LOGIC is defined and executed 165 | // - Checks if location.href matches a content_script pattern. 166 | // - If it matches: 167 | // - Calls `executeAllScripts()`. 
168 | // - This function injects CSS and runs JS in phases: 169 | // - document-start 170 | // - document-end 171 | // - document-idle 172 | // - Registers GM_registerMenuCommand for options/popup pages. 173 | // - Options/Popup pages are rendered in a modal with an iframe. 174 | // - The iframe's content is populated with the inlined HTML and 175 | // a specialized 'postmessage' version of the polyfill. 176 | })(); 177 | ``` 178 | -------------------------------------------------------------------------------- /docs/article.md: -------------------------------------------------------------------------------- 1 | # Converting browser extensions to userscripts 2 | 3 | ## TL;DR: 4 | 5 | - **GitHub Repository**: [Explosion-Scratch/to-userscript](https://github.com/explosion-scratch/to-userscript) 6 | - **Quick start**: 7 | 8 | ```bash 9 | # Install the tool 10 | bun i -g to-userscript 11 | # or pnpx/npx to-userscript 12 | # Convert your favorite extension from the Chrome Store 13 | to-userscript convert "https://chrome.google.com/webstore/detail/..." --minify -o my-script.user.js 14 | 15 | # Or download an addon from Mozilla 16 | to-userscript download "https://addons.mozilla.org/en-US/firefox/addon/..." 17 | ``` 18 | 19 | There are a lot of browser extensions out there, and many of them are relatively simple, tweaking one small thing about a page or adding a minor convenience. There are also more complex ones that need bundling to be built, rely on libraries, and that type of thing. Being a programmer and seeing many popular open-source browser extensions on GitHub, I noticed one common feature request: a userscript version. 20 | 21 | Userscripts offer a lot of power for users: there are userscript managers for almost every browser, they are self-contained, and they are less ambiguous than Chrome extensions. They are also by their nature open source (at least if the code is not heavily obfuscated). In the past I've sunk hours into converting Chrome extensions into [userscripts](https://github.com/infokiller/web-search-navigator/issues/564#issuecomment-1759550504) [manually](https://github.com/Explosion-Scratch/userscripts/blob/main/github_file_icons_material_design_beta.user.js). I always thought it would be too difficult to make a general tool to do something like this due to the complexity of browser APIs, so I kind of tabled the idea - until now (yay). 22 | 23 | The goals of my project were as follows: 24 | 25 | - Convert browser extensions to userscripts with minimal changes to the actual code 26 | - Make the code as environment agnostic as possible — e.g. allow building to vanilla JS if possible 27 | - The resulting userscript should support as many of the original features as possible. 28 | 29 | ## My first pass - Emulating `chrome` 30 | 31 | My first attempt at this involved iterating for several hours with Gemini (a few months ago, before 2.5 Pro, sadly) to create a design doc that planned out what I had in mind. This was a bit complicated because I wanted the userscripts to run in a very-close-to-browser environment; this means that variables like `chrome`, `browser`, etc. should be declared globally and polyfilled. It also meant meticulously changing the environment of these files so that libraries and things that try to define functions in the global scope work correctly. Here were some attempts I made at this: 32 | 33 | This was my first attempt.
It worked somewhat, declaring the variables I wanted, but not correctly allowing assignment: 34 | 35 | ```js 36 | function runAllTheCode({chrome, browser, window, self, ...etc}){ 37 | /* 38 | Code that goes here can now access the various things I may want to polyfill, but if there's a library like the following, it won't work: 39 | 40 | ExtPay = { 41 | 42 | } 43 | -> Now that assignment is still only local to this function 44 | */ 45 | } 46 | ``` 47 | 48 | My second attempt was essentially the same, but calling the function with `.call(polyfill)` so as to better simulate a scope. This unfortunately still didn't allow assignment in the way I wanted. 49 | 50 | What finally ended up working is a bit hacky: I created a [Proxy](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Proxy) which, on set, writes values to a custom storage object and, on get, checks that object before falling back to the target, and then used a `with (polyfill)` statement to make the code run in the polyfilled scope. 51 | 52 | ```js 53 | let customStorage = { 54 | chrome: /* ... */, 55 | browser: /* ... */, 56 | }; 57 | const polyfill = new Proxy(target, { 58 | set(target, prop, value) { 59 | customStorage[prop] = value; 60 | return Reflect.set(target, prop, value); 61 | }, 62 | get(target, prop) { 63 | return customStorage[prop] || Reflect.get(target, prop); 64 | }, 65 | }); 66 | 67 | /* 68 | Now code can run in the polyfilled scope: 69 | */ 70 | with (polyfill) { 71 | /* 72 | // Yay it works!! 73 | ExtPay = {} 74 | */ 75 | } 76 | ``` 77 | 78 | But now the problem was that code like `globalThis.chrome` also needed to function correctly, so I made a function that, when setting a value, sets it on every relevant object: for each of `[window, globalThis, customStorage, ...etc]` we set the new key to the value. 79 | 80 | Now I was free to polyfill some of the APIs and try to make extensions work, but my next big problem was sending messages. 81 | 82 | ## Sending messages across scopes 83 | 84 | Sending messages across scopes was especially tricky, because I needed to allow messages to be sent and received between the content script and the background (which are in the same scope, but need to have simulated messaging). My initial take on this was to create an `internalMessagingBus` which essentially just acted as a hub to send and receive messages. This could just be a simple object containing `.emit`, `.on`, and `.off` listeners, but it quickly failed as soon as I needed scripts to run in a separate context, e.g. the options/popup pages. 85 | 86 | ## Options + Popups 87 | 88 | This was one of the biggest hurdles of the project, and involved maybe 12 hours of messing around with various things. The complexity of this task is threefold: 89 | 90 | 1. [Options or popup pages](https://developer.chrome.com/docs/extensions/develop/ui/options-page) needed to access other resources: 91 | - These pages needed things such as scripts to make the options page function, styles, or libraries. This meant that I had to polyfill `chrome.runtime.getURL` to map to a static assets map generated at build time. This assets map is simply an object keying each file within the extension that may be accessed to its content (see the sketch after this list). 92 | 93 | 2. Access to browser APIs: 94 | - These pages needed direct access to browser APIs, and I couldn't just directly copy over the polyfill, because an iframe with `[srcdoc]` set couldn't access the `GM_*` functions. Also, to keep things working well, I didn't want two instances that had to stay in sync with each other. 95 | 3. 96 |
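To make the `getURL`-to-assets-map idea from item 1 concrete, here is a minimal sketch of the approach. The generated script really does use names like `EXTENSION_ASSETS_MAP` and `_createAssetUrl`, but the map shape, the `binary` flag, and the helper below are simplified assumptions for illustration, not the actual implementation.

```js
// Illustrative sketch only: the build step emits a map of extension file paths
// to their inlined contents, and the polyfilled chrome.runtime.getURL()
// resolves paths against this map instead of the filesystem.
const EXTENSION_ASSETS_MAP = {
  "options.html": { content: "<!doctype html><title>Options</title>", binary: false },
  "icons/logo.png": { content: "iVBORw0KGgoAAAANSUhEUg...", binary: true }, // base64 payload
};

function getURL(path) {
  const asset = EXTENSION_ASSETS_MAP[path.replace(/^\//, "")];
  if (!asset) return path; // unknown paths fall through unchanged
  if (asset.binary) {
    return "data:application/octet-stream;base64," + asset.content;
  }
  // Text assets become blob: URLs so they can be fetched/loaded like real files.
  return URL.createObjectURL(new Blob([asset.content], { type: "text/html" }));
}

// Extension code keeps calling chrome.runtime.getURL("options.html") as usual;
// it just gets back a data:/blob: URL backed by the inlined content.
```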
Also, to keep things working well, I didn't want two API instances that had to stay in sync with each other. 95 | 3. 96 | 97 | I eventually settled on a [templates system](../src/templates/) which has a series of templates for a few different "targets" for the build: 98 | 99 | - `userscript`: This is the default target, and contains all of the logic for the various APIs. 100 | - `postmessage`: This is for pages like the options page or the popup, which shouldn't work by just creating a new API instance. Instead, [their versions](../src/templates/messaging.template.js) of `chrome.runtime.sendMessage` post to the parent. 101 | - `handle_postmessage`: Simply a listener in the userscript that proxies the calls from the iframes back to the real runtime. 102 | 103 | ## Inlining Assets 104 | 105 | As I outlined earlier, for a variety of reasons (namely `runtime.getURL()` and `web_accessible_resources`), the extension needs access to the files it ships with, and since we're building the extension into a single script, these must be embedded directly. I settled on creating an `EXTENSION_ASSETS_MAP` that stores entries in `[filename]: [content]` format. Text assets are stored simply as text, and other assets are stored as base64. This presented a slight hiccup for the options page, though, as the map might sometimes contain a closing script tag, which would obviously derail the content of the script in which it's injected (in iframes). 106 | 107 | The other problem with this method was CSS `url()` imports, which might reference photos, icons, etc. throughout, so these needed to be replaced as well, recursively, since we start from an options or popup page. 108 | 109 | ## Locales 110 | 111 | Many extensions rely on Chrome's built-in locale support to be multilingual. I didn't want to incorporate every locale into the built userscript, so instead the preferred locale can be passed via the CLI. I also had to be careful when replacing message placeholders not to alter scripts or anything like that, and the chosen locale also needed to be passed down to the options and popup pages.
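To make that substitution concrete, here is a minimal sketch of how `__MSG_key__` placeholders (the format Chrome's i18n system uses in manifests and CSS) can be swapped for strings from a single locale's `messages.json`. The function name and the shape of the `messages` object here are illustrative assumptions, not the exact code in `src/locales.js`:

```js
// Minimal sketch (assumed helper, not the tool's actual implementation):
// substitute __MSG_key__ placeholders using one locale's messages.json,
// where `messages` looks like { key: { message: "..." }, ... }.
function applyLocale(text, messages) {
  return text.replace(/__MSG_([A-Za-z0-9_@]+)__/g, (match, key) => {
    const entry = messages[key];
    // Leave unknown placeholders untouched instead of breaking the text.
    return entry && typeof entry.message === "string" ? entry.message : match;
  });
}

// Hypothetical usage:
const messages = { appName: { message: "My Extension" } };
console.log(applyLocale("__MSG_appName__ Options", messages)); // "My Extension Options"
```

In practice the same substitution has to run over the manifest, CSS, and the inlined options/popup HTML while leaving JavaScript sources alone, which is the "being careful not to alter scripts" constraint mentioned above.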
112 | 113 | ## Background scripts 114 | 115 | ## Russian nesting doll polyfills 116 | 117 | - Building as strings 118 | 119 | ## Patching things that just don't work 120 | -------------------------------------------------------------------------------- /gen_examples.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | rm -rf examples/* 4 | bun src/cli/index.js convert extensions/simple -o examples/simple.user.js --force 5 | bun src/cli/index.js convert extensions/particle-iridium -o examples/iridium.user.js --force 6 | bun src/cli/index.js convert extensions/modernhn -o examples/modern-hackernews.user.js --force 7 | bun src/cli/index.js convert extensions/modern-wikipedia -o examples/modern-wikipedia.user.js --force 8 | bun src/cli/index.js convert extensions/web-search-navigator -o examples/web-search-navigator.user.js --force 9 | bun src/cli/index.js convert extensions/json-formatter -o examples/json-formatter.user.js --force 10 | bun src/cli/index.js convert extensions/return-dislikes -o examples/return-dislikes.user.js --force 11 | bun src/cli/index.js convert extensions/material-design-fileicons -o examples/material-design-fileicons.user.js --force 12 | bun src/cli/index.js convert extensions/ublacklist -o examples/ublacklist.user.js --force 13 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "to-userscript", 3 | "version": "0.1.3", 4 | "description": "Converts simple browser extensions to userscripts", 5 | "main": "src/cli/index.js", 6 | "bin": { 7 | "to-userscript": "src/cli/index.js" 8 | }, 9 | "scripts": { 10 | "test": "echo \"Error: no test specified\" && exit 1", 11 | "start": "node src/cli/index.js", 12 | "cli": "node src/cli/index.js" 13 | }, 14 | "keywords": [ 15 | "userscript", 16 | "extension", 17 | "converter", 18 | "tampermonkey", 19 | "greasemonkey" 20 | ], 21 | "author": "", 22 | "license": "ISC", 23 | "dependencies": { 24 | "chalk": "^4.1.2", 25 | "cli-progress": "^3.12.0", 26 | "debug": "^4.3.4", 27 | "fs-extra": "^11.2.0", 28 | "isomorphic-fetch": "^3.0.0", 29 | "ora": "^5.4.1", 30 | "prettier": "^3.5.3", 31 | "terser": "^5.36.0", 32 | "tmp": "^0.2.3", 33 | "yargs": "^17.7.2", 34 | "yauzl": "^3.0.0" 35 | }, 36 | "engines": { 37 | "node": ">=14.0.0" 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /src/abstractionLayer.js: -------------------------------------------------------------------------------- 1 | const templateManager = require("./templateManager"); 2 | 3 | /** 4 | * Retrieves the abstraction layer code string for the specified target. 5 | * @param {string} target - The build target ('userscript', 'vanilla', 'postmessage', or 'handle_postmessage'). 6 | * @returns {Promise} The code string for the abstraction layer. 
7 | */ 8 | async function getAbstractionLayerCode(target = "userscript") { 9 | let template = await templateManager.getAbstractionLayerTemplate(target); 10 | 11 | // For targets other than postmessage and handle_postmessage, also include the handle_postmessage template 12 | // This allows the main context to handle postmessage requests from iframes 13 | if (target !== "postmessage" && target !== "handle_postmessage") { 14 | const handlePostMessageTemplate = 15 | await templateManager.getAbstractionLayerTemplate("handle_postmessage"); 16 | template = handlePostMessageTemplate + "\n\n" + template; 17 | } 18 | 19 | return template; 20 | } 21 | 22 | // This list MUST match the GM_* functions actually used in the userscript template 23 | function getRequiredGmGrants(target = "userscript") { 24 | if (target === "userscript") { 25 | return [ 26 | "GM_setValue", 27 | "GM_getValue", 28 | "GM_listValues", // Used by _storageGet(null), _storageClear 29 | "GM_deleteValue", // Used by _storageRemove, _storageClear 30 | "GM_xmlhttpRequest", // Used by _fetch 31 | "GM_registerMenuCommand", // Used by _registerMenuCommand 32 | "GM_openInTab", // Used by _openTab 33 | ].filter(Boolean); 34 | } 35 | 36 | if (target === "postmessage" || target === "handle_postmessage") { 37 | return []; 38 | } 39 | 40 | return []; 41 | } 42 | 43 | module.exports = { getAbstractionLayerCode, getRequiredGmGrants }; 44 | -------------------------------------------------------------------------------- /src/buildPolyfillString.js: -------------------------------------------------------------------------------- 1 | const templateManager = require("./templateManager"); 2 | const abstractionLayer = require("./abstractionLayer"); 3 | 4 | /** 5 | * Generates a complete polyfill string that includes: 6 | * - Message bus implementation 7 | * - Abstraction layer functions 8 | * - Assets logic and runtime.getURL override 9 | * - buildPolyfill function with integrated runtime.getURL 10 | * 11 | * @param {string} target - The build target ('userscript', 'vanilla', 'postmessage', or 'handle_postmessage') 12 | * @param {Object} assetsMap - Map of asset paths to content (base64 for binary, text for text assets) 13 | * @param {Object} manifest - Parsed manifest object 14 | * @returns {Promise} Complete polyfill code string 15 | */ 16 | async function generateBuildPolyfillString( 17 | target = "userscript", 18 | assetsMap = {}, 19 | manifest = {}, 20 | ) { 21 | const abstractionLayerCode = 22 | await abstractionLayer.getAbstractionLayerCode(target); 23 | 24 | const assetsHelperFunctions = generateAssetsHelperFunctions( 25 | assetsMap, 26 | target === "postmessage", 27 | ); 28 | 29 | const messagingTemplate = await templateManager.getMessagingTemplate(); 30 | 31 | const polyfillTemplate = await templateManager.getPolyfillTemplate(); 32 | 33 | const combinedPolyfillString = ` 34 | ${messagingTemplate} 35 | 36 | ${abstractionLayerCode} 37 | 38 | ${assetsHelperFunctions} 39 | 40 | ${polyfillTemplate 41 | .replaceAll("{{IS_IFRAME}}", target === "postmessage" ? 
"true" : "false") 42 | .replaceAll("{{SCRIPT_ID}}", manifest._id)} 43 | 44 | if (typeof window !== 'undefined') { 45 | window.buildPolyfill = buildPolyfill; 46 | } 47 | `; 48 | 49 | return combinedPolyfillString; 50 | } 51 | 52 | /** 53 | * Generates the asset helper functions and EXTENSION_ASSETS_MAP 54 | * @param {Object} assetsMap - Map of asset paths to content 55 | * @param {boolean} inlineAssets - Whether to use placeholder for postmessage target 56 | * @returns {string} Asset helper functions code 57 | */ 58 | function generateAssetsHelperFunctions(assetsMap = {}, inlineAssets = false) { 59 | const assetMapJson = inlineAssets 60 | ? `{{EXTENSION_ASSETS_MAP}}` 61 | : JSON.stringify(assetsMap, null, 2); 62 | 63 | return `const EXTENSION_ASSETS_MAP = ${assetMapJson};`; 64 | } 65 | 66 | module.exports = { generateBuildPolyfillString }; 67 | -------------------------------------------------------------------------------- /src/cli/download.js: -------------------------------------------------------------------------------- 1 | const fs = require("fs").promises; 2 | const path = require("path"); 3 | require("isomorphic-fetch"); 4 | const chalk = require("chalk"); 5 | const debug = require("debug")("to-userscript:downloader"); 6 | const { getCrxUrl } = require("./downloadExt"); 7 | 8 | async function getDownloadableUrl(sourceInfo) { 9 | debug("Getting downloadable URL for: %o", sourceInfo); 10 | 11 | switch (sourceInfo.type) { 12 | case "chrome-store": 13 | return getCrxUrl(sourceInfo.url); 14 | 15 | case "firefox-store": 16 | return getFirefoxAddonUrl(sourceInfo.url); 17 | 18 | case "url": 19 | return sourceInfo.url; 20 | 21 | default: 22 | throw new Error( 23 | `Unsupported source type for download: ${sourceInfo.type}`, 24 | ); 25 | } 26 | } 27 | 28 | async function getFirefoxAddonUrl(webstoreUrl) { 29 | debug("Extracting Firefox addon download URL from: %s", webstoreUrl); 30 | 31 | // Extract addon ID from Firefox addon URL 32 | const match = webstoreUrl.match(/\/addon\/([^\/]+)/); 33 | if (!match) { 34 | throw new Error("Invalid Firefox addon URL format"); 35 | } 36 | 37 | const addonId = match[1]; 38 | debug("Extracted addon ID: %s", addonId); 39 | 40 | // Firefox addon API endpoint for latest version 41 | // Note: This is a simplified approach. In production, you might want to: 42 | // 1. Use the AMO API to get addon details 43 | // 2. Parse the addon page HTML to find the download link 44 | // 3. 
Handle addon permissions and compatibility 45 | 46 | try { 47 | // Try to get addon details from AMO API 48 | const apiUrl = `https://addons.mozilla.org/api/v5/addons/addon/${addonId}/`; 49 | debug("Fetching addon details from API: %s", apiUrl); 50 | 51 | const response = await fetch(apiUrl); 52 | if (!response.ok) { 53 | throw new Error( 54 | `AMO API request failed: ${response.status} ${response.statusText}`, 55 | ); 56 | } 57 | 58 | const addonData = await response.json(); 59 | debug( 60 | "Addon data received: %s v%s", 61 | addonData.name?.en || addonData.name, 62 | addonData.current_version?.version, 63 | ); 64 | 65 | if (!addonData.current_version?.file?.url) { 66 | throw new Error("No download URL found in addon data"); 67 | } 68 | 69 | return addonData.current_version.file.url; 70 | } catch (error) { 71 | debug("AMO API failed, trying direct URL construction: %s", error.message); 72 | 73 | // Fallback: construct direct download URL 74 | // This might not always work, but it's worth trying 75 | const directUrl = `https://addons.mozilla.org/firefox/downloads/latest/${addonId}/addon-${addonId}-latest.xpi`; 76 | debug("Using direct download URL: %s", directUrl); 77 | 78 | return directUrl; 79 | } 80 | } 81 | 82 | const ProgressBar = require("cli-progress"); 83 | 84 | async function downloadFile(url, destinationPath) { 85 | debug("Downloading file from: %s", url); 86 | debug("Destination: %s", destinationPath); 87 | 88 | // Ensure destination directory exists 89 | await fs.mkdir(path.dirname(destinationPath), { recursive: true }); 90 | 91 | const response = await fetch(url, { 92 | headers: { 93 | "User-Agent": 94 | "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36", 95 | }, 96 | }); 97 | 98 | if (!response.ok) { 99 | throw new Error( 100 | `Download failed: ${response.status} ${response.statusText} for ${url}`, 101 | ); 102 | } 103 | 104 | const contentLength = parseInt( 105 | response.headers.get("content-length") || "0", 106 | 10, 107 | ); 108 | const contentType = 109 | response.headers.get("content-type") || "application/octet-stream"; 110 | 111 | debug("Content-Length: %d bytes", contentLength); 112 | debug("Content-Type: %s", contentType); 113 | 114 | let progressBar = null; 115 | if (contentLength > 0) { 116 | progressBar = new ProgressBar.SingleBar({ 117 | format: "Downloading [{bar}] {percentage}% | {value}/{total} bytes", 118 | barCompleteChar: "█", 119 | barIncompleteChar: "░", 120 | hideCursor: true, 121 | }); 122 | progressBar.start(contentLength, 0); 123 | } else { 124 | console.log(chalk.yellow("Downloading... 
(size unknown)")); 125 | } 126 | 127 | const fileStream = await fs.open(destinationPath, "w"); 128 | let downloadedBytes = 0; 129 | 130 | try { 131 | for await (const chunk of response.body) { 132 | await fileStream.write(chunk); 133 | downloadedBytes += chunk.length; 134 | 135 | if (contentLength > 0 && progressBar) { 136 | progressBar.update(downloadedBytes); 137 | } 138 | } 139 | 140 | if (progressBar) { 141 | progressBar.stop(); 142 | console.log(chalk.green(`Downloaded ${downloadedBytes} bytes`)); 143 | } else { 144 | console.log(chalk.green(`Downloaded ${downloadedBytes} bytes`)); 145 | } 146 | debug( 147 | "Download completed: %d bytes written to %s", 148 | downloadedBytes, 149 | destinationPath, 150 | ); 151 | } finally { 152 | await fileStream.close(); 153 | } 154 | 155 | // Verify the downloaded file exists and has content 156 | const stats = await fs.stat(destinationPath); 157 | if (stats.size === 0) { 158 | throw new Error("Downloaded file is empty"); 159 | } 160 | 161 | debug("Download verification successful: %d bytes", stats.size); 162 | return destinationPath; 163 | } 164 | 165 | function guessFileExtension(url, contentType) { 166 | // Try to guess from URL first 167 | const urlExt = path.extname(new URL(url).pathname).toLowerCase(); 168 | if ([".crx", ".xpi", ".zip"].includes(urlExt)) { 169 | return urlExt; 170 | } 171 | 172 | // Guess from content type 173 | if (contentType) { 174 | if (contentType.includes("chrome-extension")) { 175 | return ".crx"; 176 | } else if (contentType.includes("application/zip")) { 177 | return ".zip"; 178 | } else if (contentType.includes("application/x-xpinstall")) { 179 | return ".xpi"; 180 | } 181 | } 182 | 183 | // Default to .zip as most archives can be handled as zip 184 | return ".zip"; 185 | } 186 | 187 | async function downloadExtension(sourceInfo, downloadDir) { 188 | debug("Starting extension download: %o", sourceInfo); 189 | 190 | try { 191 | // Ensure download directory exists 192 | await fs.mkdir(downloadDir, { recursive: true }); 193 | 194 | // Get the actual download URL 195 | const downloadUrl = await getDownloadableUrl(sourceInfo); 196 | debug("Download URL resolved: %s", downloadUrl); 197 | 198 | // Make a HEAD request to get content info 199 | const headResponse = await fetch(downloadUrl, { 200 | method: "HEAD", 201 | headers: { 202 | "User-Agent": 203 | "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36", 204 | }, 205 | }); 206 | 207 | if (!headResponse.ok) { 208 | debug( 209 | "HEAD request failed, proceeding with GET: %s %s", 210 | headResponse.status, 211 | headResponse.statusText, 212 | ); 213 | } 214 | 215 | const contentType = headResponse.headers.get("content-type") || ""; 216 | const extension = guessFileExtension(downloadUrl, contentType); 217 | 218 | const filename = `extension-${Date.now()}${extension}`; 219 | const destinationPath = path.join(downloadDir, filename); 220 | 221 | debug("Will save as: %s", destinationPath); 222 | 223 | // Download the file 224 | await downloadFile(downloadUrl, destinationPath); 225 | 226 | return destinationPath; 227 | } catch (error) { 228 | const errorMsg = `Failed to download extension: ${error.message}`; 229 | debug("Download error: %s", errorMsg); 230 | throw new Error(errorMsg); 231 | } 232 | } 233 | 234 | module.exports = { 235 | downloadExtension, 236 | getDownloadableUrl, 237 | downloadFile, 238 | guessFileExtension, 239 | }; 240 | -------------------------------------------------------------------------------- 
/src/cli/downloadExt.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Get the .crx download URL from a Chrome Web Store URL or extension ID. 3 | * This is a simplified version based on crxviewer/src/cws_pattern.js. 4 | * @param {string} webstoreUrlOrId - Chrome Web Store URL or extension ID 5 | * @returns {string} .crx direct download URL 6 | */ 7 | function getCrxUrl(webstoreUrlOrId) { 8 | function extractExtensionId(input) { 9 | // Try new Chrome Web Store format: /detail/extension-name/extension-id 10 | var m = input.match(/\/detail\/[^/]+\/([a-z]{32})/i); 11 | if (m) return m[1]; 12 | 13 | // Try old format: /detail/extension-id 14 | m = input.match(/\/detail\/([a-z]{32})/i); 15 | if (m) return m[1]; 16 | 17 | // Direct extension ID 18 | if (/^[a-z]{32}$/.test(input)) return input; 19 | 20 | throw new Error("Invalid Chrome Web Store URL or extension ID"); 21 | } 22 | 23 | // Fill with highest version to avoid 204 responses from CWS 24 | var product_version = "9999.0.9999.0"; // see repo comments for why 25 | 26 | // These can be further detected, but here are set to common values for broad compatibility 27 | var os = "win"; // or 'mac', 'linux', etc. 28 | var arch = "x64"; 29 | var nacl_arch = "x86-64"; 30 | var prod = "chromecrx"; // 'chromiumcrx' for Chromium 31 | var prodchannel = "unknown"; 32 | 33 | var extensionId = extractExtensionId(webstoreUrlOrId); 34 | 35 | var url = "https://clients2.google.com/service/update2/crx?response=redirect"; 36 | url += "&os=" + os; 37 | url += "&arch=" + arch; 38 | url += "&os_arch=" + arch; 39 | url += "&nacl_arch=" + nacl_arch; 40 | url += "&prod=" + prod; 41 | url += "&prodchannel=" + prodchannel; 42 | url += "&prodversion=" + product_version; 43 | url += "&acceptformat=crx2,crx3"; 44 | url += "&x=id%3D" + extensionId + "%26uc"; 45 | 46 | return url; 47 | } 48 | 49 | module.exports = { getCrxUrl }; 50 | -------------------------------------------------------------------------------- /src/cli/index.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const yargs = require("yargs/yargs"); 4 | const { hideBin } = require("yargs/helpers"); 5 | const path = require("path"); 6 | const chalk = require("chalk"); 7 | const workflow = require("./workflow"); 8 | 9 | async function main() { 10 | try { 11 | await yargs(hideBin(process.argv)) 12 | .scriptName("to-userscript") 13 | .usage("$0 [options]") 14 | .command( 15 | "convert ", 16 | "Convert an extension to userscript", 17 | (yargs) => { 18 | return yargs 19 | .positional("source", { 20 | describe: "Extension source (URL, archive file, or directory)", 21 | type: "string", 22 | demandOption: true, 23 | }) 24 | .option("output", { 25 | alias: "o", 26 | describe: "Output .user.js file path", 27 | type: "string", 28 | }) 29 | .option("minify", { 30 | describe: "Minify the JavaScript output using terser", 31 | type: "boolean", 32 | default: false, 33 | }) 34 | .option("beautify", { 35 | describe: "Beautify the JavaScript output using prettier", 36 | type: "boolean", 37 | default: false, 38 | }) 39 | .option("target", { 40 | alias: "t", 41 | describe: "Build target type", 42 | choices: ["userscript", "vanilla"], 43 | default: "userscript", 44 | }) 45 | .option("locale", { 46 | alias: "l", 47 | describe: 48 | "Preferred locale for extension name/description (e.g., 'en', 'fr', 'de')", 49 | type: "string", 50 | }) 51 | .option("ignore-assets", { 52 | describe: 53 | "Asset file extensions to ignore 
during inlining (comma-separated, e.g., 'mp4,webm,ttf')", 54 | type: "string", 55 | }) 56 | .option("force", { 57 | alias: "f", 58 | describe: "Overwrite output file if it exists", 59 | type: "boolean", 60 | default: false, 61 | }) 62 | .option("keep-temp", { 63 | describe: "Keep temporary files for debugging", 64 | type: "boolean", 65 | default: false, 66 | }) 67 | .option("temp-dir", { 68 | describe: "Custom temporary directory path", 69 | type: "string", 70 | }); 71 | }, 72 | async (argv) => { 73 | try { 74 | await workflow.run(argv); 75 | } catch (error) { 76 | console.error(chalk.red("Conversion failed:"), error.message); 77 | if (process.env.DEBUG) { 78 | console.error(error.stack); 79 | } 80 | process.exit(1); 81 | } 82 | } 83 | ) 84 | .command( 85 | "download ", 86 | "Download an extension archive from a URL", 87 | (yargs) => { 88 | return yargs 89 | .positional("source", { 90 | describe: "Extension source URL (Chrome/Firefox store or direct)", 91 | type: "string", 92 | demandOption: true, 93 | }) 94 | .option("output", { 95 | alias: "o", 96 | describe: 97 | "Output path for the downloaded file. If a directory is provided, a filename will be generated.", 98 | type: "string", 99 | }) 100 | .option("extract", { 101 | describe: 102 | "Extract the downloaded archive into a folder named after the extension", 103 | type: "boolean", 104 | default: false, 105 | }) 106 | .option("locale", { 107 | alias: "l", 108 | describe: 109 | "Preferred locale for the extracted folder name (e.g., 'en', 'fr')", 110 | type: "string", 111 | }) 112 | .option("force", { 113 | alias: "f", 114 | describe: 115 | "Overwrite output file or extracted directory if it exists", 116 | type: "boolean", 117 | default: false, 118 | }); 119 | }, 120 | async (argv) => { 121 | try { 122 | await workflow.runDownload(argv); 123 | } catch (error) { 124 | console.error(chalk.red("Download failed:"), error.message); 125 | if (process.env.DEBUG) { 126 | console.error(error.stack); 127 | } 128 | process.exit(1); 129 | } 130 | } 131 | ) 132 | .command( 133 | "require ", 134 | "Generate a metadata block with a @require pointing to the file", 135 | (yargs) => { 136 | return yargs.positional("userscript", { 137 | describe: "Path to the .user.js file to reference", 138 | type: "string", 139 | demandOption: true, 140 | normalize: true, 141 | }); 142 | }, 143 | async (argv) => { 144 | try { 145 | const { generateRequireBlock } = require("./require"); 146 | const requireBlock = await generateRequireBlock(argv.userscript); 147 | process.stdout.write(requireBlock); 148 | } catch (error) { 149 | console.error( 150 | chalk.red("Failed to generate require block:"), 151 | error.message 152 | ); 153 | if (process.env.DEBUG) { 154 | console.error(error.stack); 155 | } 156 | process.exit(1); 157 | } 158 | } 159 | ) 160 | .option("verbose", { 161 | alias: "v", 162 | describe: "Enable verbose logging", 163 | type: "boolean", 164 | global: true, 165 | }) 166 | .help() 167 | .alias("help", "h") 168 | .version() 169 | .alias("version", "V") 170 | .example([ 171 | [ 172 | '$0 convert "https://chrome.google.com/webstore/detail/..." 
-o material-design-fileicons.user.js --minify', 173 | "Convert from Chrome Web Store with minification", 174 | ], 175 | [ 176 | "$0 convert ./extension.xpi --target vanilla -o extension.js --locale fr", 177 | "Convert XPI to vanilla JS with French locale", 178 | ], 179 | [ 180 | "$0 convert ./my-extension/ -o my-script.user.js --ignore-assets mp4,webm,ttf", 181 | "Convert local directory ignoring video and font assets", 182 | ], 183 | [ 184 | "$0 convert ./extension/ --locale en --minify --keep-temp", 185 | "Convert with English locale, minification, and debug files", 186 | ], 187 | [ 188 | '$0 download "https://addons.mozilla.org/..." -o my-addon.xpi', 189 | "Download an extension from the Firefox store", 190 | ], 191 | [ 192 | '$0 download "https://chrome.google.com/webstore/detail/..." --extract --locale en', 193 | "Download and extract an extension, using English for the folder name", 194 | ], 195 | [ 196 | "$0 require ./path/to/my-script.user.js", 197 | "Outputs a metadata block that @requires my-script.user.js", 198 | ], 199 | ]) 200 | .demandCommand(1, "You must specify a command") 201 | .strict() 202 | .parse(); 203 | } catch (error) { 204 | console.error(chalk.red("CLI error:"), error.message); 205 | process.exit(1); 206 | } 207 | } 208 | 209 | if (require.main === module) { 210 | main(); 211 | } 212 | 213 | module.exports = { main }; 214 | -------------------------------------------------------------------------------- /src/cli/minify.js: -------------------------------------------------------------------------------- 1 | const fs = require("fs").promises; 2 | const debug = require("debug")("to-userscript:cli:minify"); 3 | 4 | let terser = null; 5 | try { 6 | terser = require("terser"); 7 | } catch (error) { 8 | debug("Terser not available, falling back to simple minification"); 9 | } 10 | 11 | let prettier = null; 12 | try { 13 | prettier = require("prettier"); 14 | } catch (error) { 15 | debug("Prettier not available, beautify functionality will be limited."); 16 | } 17 | 18 | async function terserMinify(code) { 19 | debug("Applying terser minification"); 20 | 21 | try { 22 | const result = await terser.minify(code, { 23 | compress: { 24 | dead_code: true, 25 | drop_debugger: true, 26 | conditionals: true, 27 | evaluate: true, 28 | booleans: true, 29 | loops: true, 30 | unused: true, 31 | hoist_funs: true, 32 | keep_fargs: false, 33 | hoist_vars: false, 34 | if_return: true, 35 | join_vars: true, 36 | side_effects: false, 37 | warnings: false, 38 | global_defs: {}, 39 | }, 40 | mangle: { 41 | toplevel: false, 42 | keep_fnames: false, 43 | reserved: ["GM_", "GM", "unsafeWindow", "cloneInto", "exportFunction"], 44 | }, 45 | format: { 46 | comments: function (node, comment) { 47 | // Keep @license, @preserve, and important comments 48 | const text = comment.value; 49 | return /@license|@preserve|@grant|@match|@include|@exclude|@name|@namespace|@version|@description|@author|@homepage|@homepageURL|@website|@source|@icon|@iconURL|@defaulticon|@icon64|@icon64URL|@run-at|@noframes|@unwrap|@connect|@require|@resource|@supportURL|@updateURL|@downloadURL|@contributionURL|@contributionAmount|@compatible|@incompatible/.test( 50 | text 51 | ); 52 | }, 53 | beautify: false, 54 | preamble: "", 55 | }, 56 | sourceMap: false, 57 | toplevel: false, 58 | parse: {}, 59 | rename: {}, 60 | }); 61 | 62 | if (result.error) { 63 | throw new Error(`Terser error: ${result.error}`); 64 | } 65 | 66 | return result.code; 67 | } catch (error) { 68 | debug( 69 | "Terser minification failed, falling back to 
simple: %s", 70 | error.message 71 | ); 72 | return code; 73 | } 74 | } 75 | 76 | async function prettierBeautify(code) { 77 | debug("Applying prettier beautification"); 78 | 79 | if (!prettier) { 80 | debug("Prettier is not available, skipping beautification."); 81 | return code; 82 | } 83 | 84 | try { 85 | const result = await prettier.format(code, { 86 | parser: "babel", 87 | printWidth: 100, 88 | tabWidth: 2, 89 | useTabs: false, 90 | semi: true, 91 | singleQuote: false, 92 | quoteProps: "as-needed", 93 | jsxSingleQuote: false, 94 | trailingComma: "es5", 95 | bracketSpacing: true, 96 | jsxBracketSameLine: false, 97 | arrowParens: "always", 98 | requirePragma: false, 99 | insertPragma: false, 100 | proseWrap: "preserve", 101 | htmlWhitespaceSensitivity: "css", 102 | vueIndentScriptAndStyle: false, 103 | endOfLine: "lf", 104 | embeddedLanguageFormatting: "auto", 105 | }); 106 | return result; 107 | } catch (error) { 108 | debug( 109 | "Prettier beautification failed, falling back to original code: %s", 110 | error.message 111 | ); 112 | return code; 113 | } 114 | } 115 | 116 | async function minifyScript(filePath) { 117 | debug("Starting minification of: %s", filePath); 118 | 119 | try { 120 | // Read the script file 121 | const content = await fs.readFile(filePath, "utf-8"); 122 | debug("Original file size: %d bytes", content.length); 123 | 124 | // Extract userscript metadata block 125 | const metadataMatch = content.match( 126 | /(^\/\/\s*==UserScript==[\s\S]*?\/\/\s*==\/UserScript==)/m 127 | ); 128 | 129 | let metadataBlock = ""; 130 | let scriptCode = content; 131 | 132 | if (metadataMatch) { 133 | metadataBlock = metadataMatch[1]; 134 | scriptCode = content.replace(metadataMatch[0], "").trim(); 135 | debug("Extracted metadata block (%d bytes)", metadataBlock.length); 136 | } else { 137 | debug("No userscript metadata block found"); 138 | } 139 | 140 | // Minify the script code (excluding metadata) 141 | const minifiedCode = await terserMinify(scriptCode); 142 | debug("Minified code size: %d bytes", minifiedCode.length); 143 | 144 | // Combine metadata and minified code 145 | const finalContent = metadataBlock 146 | ? 
`${metadataBlock}\n\n${minifiedCode}` 147 | : minifiedCode; 148 | 149 | // Write back to file 150 | await fs.writeFile(filePath, finalContent, "utf-8"); 151 | 152 | const reduction = content.length - finalContent.length; 153 | const reductionPercent = Math.round((reduction / content.length) * 100); 154 | 155 | debug( 156 | "Minification complete: %d bytes -> %d bytes (-%d bytes, -%d%%)", 157 | content.length, 158 | finalContent.length, 159 | reduction, 160 | reductionPercent 161 | ); 162 | 163 | return { 164 | originalSize: content.length, 165 | minifiedSize: finalContent.length, 166 | reduction, 167 | reductionPercent, 168 | }; 169 | } catch (error) { 170 | const errorMsg = `Failed to minify script ${filePath}: ${error.message}`; 171 | debug("Minification error: %s", errorMsg); 172 | throw new Error(errorMsg); 173 | } 174 | } 175 | 176 | async function beautifyScript(filePath) { 177 | debug("Starting beautification of: %s", filePath); 178 | 179 | try { 180 | const content = await fs.readFile(filePath, "utf-8"); 181 | debug("Original file size: %d bytes", content.length); 182 | 183 | const metadataMatch = content.match( 184 | /(^\/\/\s*==UserScript==[\s\S]*?\/\/\s*==\/UserScript==)/m 185 | ); 186 | 187 | let metadataBlock = ""; 188 | let scriptCode = content; 189 | 190 | if (metadataMatch) { 191 | metadataBlock = metadataMatch[1]; 192 | scriptCode = content.replace(metadataMatch[0], "").trim(); 193 | debug("Extracted metadata block (%d bytes)", metadataBlock.length); 194 | } else { 195 | debug("No userscript metadata block found"); 196 | } 197 | 198 | const beautifiedCode = await prettierBeautify(scriptCode); 199 | debug("Beautified code size: %d bytes", beautifiedCode.length); 200 | 201 | const finalContent = metadataBlock 202 | ? `${metadataBlock}\n\n${beautifiedCode}` 203 | : beautifiedCode; 204 | 205 | await fs.writeFile(filePath, finalContent, "utf-8"); 206 | 207 | const expansion = finalContent.length - content.length; 208 | const expansionPercent = Math.round((expansion / content.length) * 100); 209 | 210 | debug( 211 | "Beautification complete: %d bytes -> %d bytes (+%d bytes, +%d%%)", 212 | content.length, 213 | finalContent.length, 214 | expansion, 215 | expansionPercent 216 | ); 217 | 218 | return { 219 | originalSize: content.length, 220 | beautifiedSize: finalContent.length, 221 | expansion, 222 | expansionPercent, 223 | }; 224 | } catch (error) { 225 | const errorMsg = `Failed to beautify script ${filePath}: ${error.message}`; 226 | debug("Beautification error: %s", errorMsg); 227 | throw new Error(errorMsg); 228 | } 229 | } 230 | 231 | module.exports = { 232 | minifyScript, 233 | beautifyScript, 234 | }; 235 | -------------------------------------------------------------------------------- /src/cli/require.js: -------------------------------------------------------------------------------- 1 | const fs = require("fs").promises; 2 | const path = require("path"); 3 | const { normalizePath } = require("../utils"); 4 | 5 | /** 6 | * Generates a metadata block with a @require directive pointing to the specified userscript file 7 | * @param {string} filePath - Path to the .user.js file to reference 8 | * @returns {Promise} The complete metadata block with @require directive 9 | */ 10 | async function generateRequireBlock(filePath) { 11 | // File Validation 12 | try { 13 | const stats = await fs.stat(filePath); 14 | if (!stats.isFile()) { 15 | throw new Error(`Path exists but is not a file: ${filePath}`); 16 | } 17 | } catch (error) { 18 | if (error.code === "ENOENT") { 19 | throw 
new Error(`File not found: ${filePath}`); 20 | } 21 | throw new Error(`File validation failed: ${error.message}`); 22 | } 23 | 24 | // File Reading 25 | const content = await fs.readFile(filePath, "utf-8"); 26 | 27 | // Metadata Extraction 28 | const metadataMatch = content.match( 29 | /(^\/\/\s*==UserScript==[\s\S]*?\/\/\s*==\/UserScript==)/m 30 | ); 31 | 32 | if (!metadataMatch) { 33 | throw new Error(`No UserScript metadata block found in: ${filePath}`); 34 | } 35 | 36 | const existingBlock = metadataMatch[1]; 37 | 38 | // Metadata Parsing and Filtering 39 | const lines = existingBlock.split("\n"); 40 | const filteredLines = lines.filter((line) => { 41 | // Remove existing @require directives 42 | return !/\/\/\s*@require\s+/.test(line); 43 | }); 44 | 45 | // Path Resolution 46 | const absolutePath = path.resolve(filePath); 47 | const normalizedAbsolutePath = normalizePath(absolutePath); 48 | 49 | // New Block Assembly 50 | const newRequireDirective = `// @require file://${normalizedAbsolutePath}`; 51 | 52 | // Find the position to insert the new @require (after the opening tag but before the closing tag) 53 | const openingIndex = filteredLines.findIndex((line) => 54 | /\/\/\s*==UserScript==/.test(line) 55 | ); 56 | const closingIndex = filteredLines.findIndex((line) => 57 | /\/\/\s*==\/UserScript==/.test(line) 58 | ); 59 | 60 | if (openingIndex === -1 || closingIndex === -1) { 61 | throw new Error( 62 | `Invalid UserScript metadata block structure in: ${filePath}` 63 | ); 64 | } 65 | 66 | // Insert the new @require directive before the closing tag 67 | const finalLines = [ 68 | ...filteredLines.slice(0, closingIndex), 69 | newRequireDirective, 70 | ...filteredLines.slice(closingIndex), 71 | ]; 72 | 73 | return finalLines.join("\n") + "\n"; 74 | } 75 | 76 | module.exports = { generateRequireBlock }; 77 | -------------------------------------------------------------------------------- /src/cli/unpack.js: -------------------------------------------------------------------------------- 1 | const fs = require("fs").promises; 2 | const path = require("path"); 3 | const yauzl = require("yauzl"); 4 | const debug = require("debug")("to-userscript:unpacker"); 5 | 6 | async function detectArchiveType(archivePath) { 7 | debug("Detecting archive type for: %s", archivePath); 8 | 9 | const ext = path.extname(archivePath).toLowerCase(); 10 | if ([".zip", ".xpi"].includes(ext)) { 11 | return "zip"; 12 | } 13 | 14 | if (ext === ".crx") { 15 | return "crx"; 16 | } 17 | 18 | // Try to detect by reading file header 19 | try { 20 | const handle = await fs.open(archivePath, "r"); 21 | const buffer = Buffer.alloc(8); 22 | await handle.read(buffer, 0, 8, 0); 23 | await handle.close(); 24 | 25 | // Check for ZIP signature 26 | if (buffer[0] === 0x50 && buffer[1] === 0x4b) { 27 | debug("Detected ZIP signature"); 28 | return "zip"; 29 | } 30 | 31 | // Check for CRX signature 32 | if ( 33 | buffer[0] === 0x43 && 34 | buffer[1] === 0x72 && 35 | buffer[2] === 0x32 && 36 | buffer[3] === 0x34 37 | ) { 38 | debug("Detected CRX signature"); 39 | return "crx"; 40 | } 41 | } catch (error) { 42 | debug("Error reading file header: %s", error.message); 43 | } 44 | 45 | // Default to zip if we can't determine 46 | debug("Defaulting to ZIP format"); 47 | return "zip"; 48 | } 49 | 50 | async function extractZip(archivePath, destinationDir) { 51 | debug("Extracting ZIP archive: %s -> %s", archivePath, destinationDir); 52 | 53 | return new Promise((resolve, reject) => { 54 | yauzl.open(archivePath, { lazyEntries: true }, (err, 
zipfile) => { 55 | if (err) { 56 | reject(new Error(`Failed to open ZIP file: ${err.message}`)); 57 | return; 58 | } 59 | 60 | let extractedFiles = 0; 61 | 62 | zipfile.readEntry(); 63 | 64 | zipfile.on("entry", async (entry) => { 65 | const entryPath = path.join(destinationDir, entry.fileName); 66 | 67 | // Security check: ensure the entry doesn't escape the destination directory 68 | const normalizedPath = path.normalize(entryPath); 69 | if (!normalizedPath.startsWith(path.normalize(destinationDir))) { 70 | debug("Skipping potentially dangerous path: %s", entry.fileName); 71 | zipfile.readEntry(); 72 | return; 73 | } 74 | 75 | if (/\/$/.test(entry.fileName)) { 76 | // Directory entry 77 | try { 78 | await fs.mkdir(entryPath, { recursive: true }); 79 | debug("Created directory: %s", entry.fileName); 80 | } catch (mkdirError) { 81 | debug( 82 | "Error creating directory %s: %s", 83 | entry.fileName, 84 | mkdirError.message 85 | ); 86 | } 87 | zipfile.readEntry(); 88 | } else { 89 | // File entry 90 | zipfile.openReadStream(entry, async (streamErr, readStream) => { 91 | if (streamErr) { 92 | debug( 93 | "Error opening read stream for %s: %s", 94 | entry.fileName, 95 | streamErr.message 96 | ); 97 | zipfile.readEntry(); 98 | return; 99 | } 100 | 101 | try { 102 | // Ensure parent directory exists 103 | await fs.mkdir(path.dirname(entryPath), { recursive: true }); 104 | 105 | const writeStream = await fs.open(entryPath, "w"); 106 | 107 | for await (const chunk of readStream) { 108 | await writeStream.write(chunk); 109 | } 110 | 111 | await writeStream.close(); 112 | extractedFiles++; 113 | debug( 114 | "Extracted file: %s (%d bytes)", 115 | entry.fileName, 116 | entry.uncompressedSize 117 | ); 118 | } catch (fileError) { 119 | debug( 120 | "Error extracting file %s: %s", 121 | entry.fileName, 122 | fileError.message 123 | ); 124 | } 125 | 126 | zipfile.readEntry(); 127 | }); 128 | } 129 | }); 130 | 131 | zipfile.on("end", () => { 132 | debug("ZIP extraction complete: %d files extracted", extractedFiles); 133 | resolve(destinationDir); 134 | }); 135 | 136 | zipfile.on("error", (zipError) => { 137 | reject(new Error(`ZIP extraction failed: ${zipError.message}`)); 138 | }); 139 | }); 140 | }); 141 | } 142 | 143 | async function extractCrx(archivePath, destinationDir) { 144 | debug("Extracting CRX archive: %s -> %s", archivePath, destinationDir); 145 | 146 | // CRX files are ZIP files with a header 147 | // We need to skip the header and extract the ZIP portion 148 | 149 | const handle = await fs.open(archivePath, "r"); 150 | 151 | try { 152 | // Read CRX header to determine ZIP offset 153 | const headerBuffer = Buffer.alloc(16); 154 | await handle.read(headerBuffer, 0, 16, 0); 155 | 156 | // CRX3 format: "Cr24" + version (4 bytes) + header_length (4 bytes) + header_content 157 | if (headerBuffer.toString("ascii", 0, 4) !== "Cr24") { 158 | throw new Error("Invalid CRX file: missing Cr24 signature"); 159 | } 160 | 161 | const version = headerBuffer.readUInt32LE(4); 162 | debug("CRX version: %d", version); 163 | 164 | let zipOffset; 165 | 166 | if (version === 2) { 167 | // CRX2 format 168 | const publicKeyLength = headerBuffer.readUInt32LE(8); 169 | const signatureLength = headerBuffer.readUInt32LE(12); 170 | zipOffset = 16 + publicKeyLength + signatureLength; 171 | } else if (version === 3) { 172 | // CRX3 format 173 | const headerLength = headerBuffer.readUInt32LE(8); 174 | zipOffset = 12 + headerLength; 175 | } else { 176 | throw new Error(`Unsupported CRX version: ${version}`); 177 | } 178 | 
179 | debug("ZIP data starts at offset: %d", zipOffset); 180 | 181 | // Create a temporary ZIP file with just the ZIP portion 182 | const tempZipPath = archivePath + ".zip"; 183 | const stats = await fs.stat(archivePath); 184 | const zipSize = stats.size - zipOffset; 185 | 186 | debug("Creating temporary ZIP file: %s (%d bytes)", tempZipPath, zipSize); 187 | 188 | const tempZipHandle = await fs.open(tempZipPath, "w"); 189 | 190 | try { 191 | // Copy ZIP portion to temporary file 192 | const buffer = Buffer.alloc(64 * 1024); // 64KB buffer 193 | let position = zipOffset; 194 | let remaining = zipSize; 195 | 196 | while (remaining > 0) { 197 | const chunkSize = Math.min(buffer.length, remaining); 198 | const { bytesRead } = await handle.read(buffer, 0, chunkSize, position); 199 | 200 | if (bytesRead === 0) break; 201 | 202 | await tempZipHandle.write(buffer, 0, bytesRead); 203 | position += bytesRead; 204 | remaining -= bytesRead; 205 | } 206 | } finally { 207 | await tempZipHandle.close(); 208 | } 209 | 210 | // Extract the temporary ZIP file 211 | await extractZip(tempZipPath, destinationDir); 212 | 213 | // Clean up temporary file 214 | try { 215 | await fs.unlink(tempZipPath); 216 | debug("Cleaned up temporary ZIP file"); 217 | } catch (cleanupError) { 218 | debug( 219 | "Warning: failed to clean up temporary file: %s", 220 | cleanupError.message 221 | ); 222 | } 223 | 224 | return destinationDir; 225 | } finally { 226 | await handle.close(); 227 | } 228 | } 229 | 230 | async function unpack(archivePath, destinationDir) { 231 | debug("Starting unpack: %s -> %s", archivePath, destinationDir); 232 | 233 | try { 234 | // Ensure destination directory exists 235 | await fs.mkdir(destinationDir, { recursive: true }); 236 | 237 | // Detect archive type 238 | const archiveType = await detectArchiveType(archivePath); 239 | debug("Archive type detected: %s", archiveType); 240 | 241 | // Extract based on type 242 | let result; 243 | if (archiveType === "crx") { 244 | result = await extractCrx(archivePath, destinationDir); 245 | } else { 246 | result = await extractZip(archivePath, destinationDir); 247 | } 248 | 249 | // Verify manifest.json exists 250 | const manifestPath = path.join(destinationDir, "manifest.json"); 251 | try { 252 | await fs.access(manifestPath); 253 | debug("Manifest verified at: %s", manifestPath); 254 | } catch (manifestError) { 255 | throw new Error( 256 | `No manifest.json found after extraction. 
This may not be a valid browser extension.` 257 | ); 258 | } 259 | 260 | debug("Unpack completed successfully: %s", result); 261 | return result; 262 | } catch (error) { 263 | const errorMsg = `Failed to unpack archive ${archivePath}: ${error.message}`; 264 | debug("Unpack error: %s", errorMsg); 265 | throw new Error(errorMsg); 266 | } 267 | } 268 | 269 | module.exports = { 270 | unpack, 271 | detectArchiveType, 272 | extractZip, 273 | extractCrx, 274 | }; 275 | -------------------------------------------------------------------------------- /src/cli/workflow.js: -------------------------------------------------------------------------------- 1 | const fs = require("fs").promises; 2 | const path = require("path"); 3 | const tmp = require("tmp"); 4 | const chalk = require("chalk"); 5 | const ora = require("ora"); 6 | const unpack = require("./unpack"); 7 | const minify = require("./minify"); 8 | const debug = require("debug")("to-userscript:cli:workflow"); 9 | const { getLocale, getLocalizedName } = require("../locales"); 10 | const fetch = require("node-fetch"); 11 | 12 | async function determineSourceType(source) { 13 | if (!source || typeof source !== "string") { 14 | throw new Error("Source must be a non-empty string"); 15 | } 16 | 17 | const urlRegex = /^https?:\/\//; 18 | const chromeStoreRegex = 19 | /chromewebstore\.google\.com\/detail\/([^\/]+)\/([a-z]{32})/; 20 | const firefoxStoreRegex = /addons\.mozilla\.org.*\/addon\/([^\/]+)/; 21 | 22 | if (urlRegex.test(source)) { 23 | if (chromeStoreRegex.test(source)) { 24 | return { type: "chrome-store", url: source }; 25 | } else if (firefoxStoreRegex.test(source)) { 26 | return { type: "firefox-store", url: source }; 27 | } else { 28 | return { type: "url", url: source }; 29 | } 30 | } 31 | 32 | try { 33 | const stats = await fs.stat(source); 34 | if (stats.isDirectory()) { 35 | return { type: "directory", path: path.resolve(source) }; 36 | } else if (stats.isFile()) { 37 | const ext = path.extname(source).toLowerCase(); 38 | if ([".crx", ".xpi", ".zip"].includes(ext)) { 39 | return { type: "archive", path: path.resolve(source) }; 40 | } else { 41 | throw new Error( 42 | `Unsupported file type: ${ext}. Supported types: .crx, .xpi, .zip` 43 | ); 44 | } 45 | } 46 | } catch (error) { 47 | if (error.code === "ENOENT") { 48 | throw new Error(`Source not found: ${source}`); 49 | } 50 | throw error; 51 | } 52 | 53 | throw new Error(`Unable to determine source type: ${source}`); 54 | } 55 | 56 | async function generateOutputPath(config, manifest, localizedName = null) { 57 | if (config.output) { 58 | return path.resolve(config.output); 59 | } 60 | 61 | // Use localized name if available, otherwise fall back to raw manifest name 62 | const name = localizedName || manifest?.name || "converted-extension"; 63 | const version = manifest?.version || "1.0.0"; 64 | const target = config.target || "userscript"; 65 | 66 | const cleanName = name 67 | .replace(/[^a-z0-9]+/gi, "-") 68 | .replace(/^-+|-+$/g, "") 69 | .toLowerCase(); 70 | 71 | const extension = target === "userscript" ? ".user.js" : ".js"; 72 | const filename = `${cleanName}-${version}${extension}`; 73 | 74 | return path.resolve(process.cwd(), filename); 75 | } 76 | 77 | async function checkOutputFile(outputPath, force, isDirectory = false) { 78 | try { 79 | await fs.access(outputPath); 80 | if (!force) { 81 | const itemType = isDirectory ? "directory" : "file"; 82 | throw new Error( 83 | `Output ${itemType} already exists: ${outputPath}. 
Use --force to overwrite.` 84 | ); 85 | } 86 | const itemType = isDirectory ? "directory" : "file"; 87 | debug(`Output ${itemType} exists, will be overwritten due to --force flag`); 88 | } catch (error) { 89 | if (error.code !== "ENOENT") { 90 | throw error; 91 | } 92 | } 93 | } 94 | 95 | async function createTempDirectory(customTempDir) { 96 | const tempDirOptions = { 97 | prefix: "to-userscript-", 98 | unsafeCleanup: true, 99 | }; 100 | 101 | if (customTempDir) { 102 | tempDirOptions.dir = customTempDir; 103 | } 104 | 105 | return new Promise((resolve, reject) => { 106 | tmp.dir(tempDirOptions, (err, tmpPath, cleanupCallback) => { 107 | if (err) { 108 | reject( 109 | new Error(`Failed to create temporary directory: ${err.message}`) 110 | ); 111 | } else { 112 | debug("Created temporary directory: %s", tmpPath); 113 | resolve({ tmpPath, cleanupCallback }); 114 | } 115 | }); 116 | }); 117 | } 118 | 119 | async function runDownload(config) { 120 | const download = require("./download"); 121 | const spinner = ora(); 122 | let tempCleanup = null; 123 | 124 | try { 125 | debug("Starting download with config: %o", config); 126 | 127 | // Source Analysis 128 | spinner.start("Analyzing source..."); 129 | const sourceInfo = await determineSourceType(config.source); 130 | debug("Source type determined: %o", sourceInfo); 131 | 132 | // URL Resolution 133 | spinner.text = "Resolving download URL..."; 134 | const downloadUrl = await download.getDownloadableUrl(sourceInfo); 135 | debug("Download URL resolved: %s", downloadUrl); 136 | 137 | // Output Path Determination 138 | spinner.text = "Determining output path..."; 139 | let outputPath; 140 | 141 | if (config.output) { 142 | const outputStat = await fs.stat(config.output).catch(() => null); 143 | if (outputStat && outputStat.isDirectory()) { 144 | // Output is a directory, generate filename 145 | const headResponse = await fetch(downloadUrl, { 146 | method: "HEAD", 147 | headers: { 148 | "User-Agent": 149 | "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36", 150 | }, 151 | }).catch(() => null); 152 | 153 | const contentType = headResponse?.headers.get("content-type") || ""; 154 | const extension = download.guessFileExtension(downloadUrl, contentType); 155 | const filename = `extension-${Date.now()}${extension}`; 156 | outputPath = path.join(config.output, filename); 157 | } else if (config.output.endsWith("/") || config.output.endsWith("\\")) { 158 | // Treat as directory path 159 | await fs.mkdir(config.output, { recursive: true }); 160 | const headResponse = await fetch(downloadUrl, { 161 | method: "HEAD", 162 | headers: { 163 | "User-Agent": 164 | "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36", 165 | }, 166 | }).catch(() => null); 167 | 168 | const contentType = headResponse?.headers.get("content-type") || ""; 169 | const extension = download.guessFileExtension(downloadUrl, contentType); 170 | const filename = `extension-${Date.now()}${extension}`; 171 | outputPath = path.join(config.output, filename); 172 | } else { 173 | // Treat as full file path 174 | outputPath = path.resolve(config.output); 175 | } 176 | } else { 177 | // No output specified, use current working directory 178 | const headResponse = await fetch(downloadUrl, { 179 | method: "HEAD", 180 | headers: { 181 | "User-Agent": 182 | "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36", 183 | }, 
184 | }).catch(() => null); 185 | 186 | const contentType = headResponse?.headers.get("content-type") || ""; 187 | const extension = download.guessFileExtension(downloadUrl, contentType); 188 | const filename = `extension-${Date.now()}${extension}`; 189 | outputPath = path.join(process.cwd(), filename); 190 | } 191 | 192 | debug("Output path determined: %s", outputPath); 193 | 194 | // Overwrite Check 195 | await checkOutputFile(outputPath, config.force); 196 | 197 | // File Download 198 | spinner.stop(); 199 | const downloadedFilePath = await download.downloadFile( 200 | downloadUrl, 201 | outputPath 202 | ); 203 | 204 | // Completion 205 | console.log(chalk.green("Download complete!")); 206 | console.log(chalk.blue("📄 Downloaded:"), outputPath); 207 | 208 | const stats = await fs.stat(outputPath); 209 | console.log(chalk.blue("📊 Size:"), humanFileSize(stats.size, true)); 210 | 211 | let extractedDirPath = null; 212 | if (config.extract) { 213 | spinner.start("Extracting extension..."); 214 | 215 | const { tmpPath, cleanupCallback } = await createTempDirectory( 216 | config.tempDir 217 | ); 218 | tempCleanup = cleanupCallback; 219 | 220 | const unpackedDir = await unpack.unpack( 221 | downloadedFilePath, 222 | path.join(tmpPath, "unpacked") 223 | ); 224 | debug("Unpacked to: %s", unpackedDir); 225 | 226 | spinner.text = "Reading manifest..."; 227 | const manifestPath = path.join(unpackedDir, "manifest.json"); 228 | const { parseManifest } = require("../manifestParser"); 229 | 230 | const manifestResult = await parseManifest(manifestPath, config.locale); 231 | if (!manifestResult || !manifestResult.parsedManifest) { 232 | throw new Error( 233 | "Failed to parse manifest.json from the downloaded archive." 234 | ); 235 | } 236 | const { parsedManifest } = manifestResult; 237 | const version = parsedManifest.version || "1.0.0"; 238 | 239 | spinner.succeed(`Read manifest for: ${parsedManifest.name} v${version}`); 240 | spinner.start("Preparing final directory..."); 241 | 242 | const cleanName = (parsedManifest.name || "unnamed-extension") 243 | .replace(/[^a-z0-9._-]+/gi, "-") 244 | .replace(/--+/g, "-") 245 | .replace(/^-+|-+$/g, "") 246 | .toLowerCase(); 247 | 248 | const finalDirName = `${cleanName}-${version}`; 249 | extractedDirPath = path.join( 250 | path.dirname(downloadedFilePath), 251 | finalDirName 252 | ); 253 | debug("Final extraction path: %s", extractedDirPath); 254 | 255 | await checkOutputFile(extractedDirPath, config.force, true); 256 | await fs.rename(unpackedDir, extractedDirPath); 257 | 258 | spinner.succeed(chalk.green("Extraction complete!")); 259 | console.log(chalk.blue("📂 Extracted to:"), extractedDirPath); 260 | } 261 | 262 | return { 263 | success: true, 264 | outputFile: outputPath, 265 | extractedPath: extractedDirPath, 266 | }; 267 | } catch (error) { 268 | spinner.fail(chalk.red("Download failed")); 269 | debug("Download error: %o", error); 270 | throw error; 271 | } finally { 272 | if (tempCleanup && !config.keepTemp) { 273 | try { 274 | tempCleanup(); 275 | debug("Temporary files cleaned up"); 276 | } catch (cleanupError) { 277 | console.warn( 278 | chalk.yellow("Warning: Failed to clean up temporary files:"), 279 | cleanupError.message 280 | ); 281 | } 282 | } 283 | } 284 | } 285 | 286 | async function run(config) { 287 | const spinner = ora(); 288 | let tempCleanup = null; 289 | 290 | try { 291 | debug("Starting conversion with config: %o", config); 292 | 293 | // Determine source type 294 | spinner.start("Analyzing source..."); 295 | const sourceInfo = await 
determineSourceType(config.source); 296 | debug("Source type determined: %o", sourceInfo); 297 | 298 | // Create temporary directory 299 | const { tmpPath, cleanupCallback } = await createTempDirectory( 300 | config.tempDir 301 | ); 302 | tempCleanup = cleanupCallback; 303 | 304 | let inputDir; 305 | let downloadedFile = null; 306 | 307 | // Handle different source types 308 | if (sourceInfo.type === "directory") { 309 | inputDir = sourceInfo.path; 310 | spinner.succeed(`Source: Local directory (${path.basename(inputDir)})`); 311 | } else if (sourceInfo.type === "archive") { 312 | spinner.text = "Extracting archive..."; 313 | inputDir = await unpack.unpack( 314 | sourceInfo.path, 315 | path.join(tmpPath, "unpacked") 316 | ); 317 | spinner.succeed( 318 | `Source: Archive file (${path.basename(sourceInfo.path)})` 319 | ); 320 | } else if ( 321 | sourceInfo.type === "chrome-store" || 322 | sourceInfo.type === "firefox-store" || 323 | sourceInfo.type === "url" 324 | ) { 325 | const download = require("./download"); 326 | spinner.stop(); 327 | // spinner.text = "Downloading extension..."; 328 | const downloadPath = path.join(tmpPath, "download"); 329 | downloadedFile = await download.downloadExtension( 330 | sourceInfo, 331 | downloadPath 332 | ); 333 | 334 | debug("Downloaded file: %s", downloadedFile); 335 | spinner.start("Extracting downloaded archive..."); 336 | 337 | inputDir = await unpack.unpack( 338 | downloadedFile, 339 | path.join(tmpPath, "unpacked") 340 | ); 341 | spinner.succeed( 342 | `Source: Downloaded from ${sourceInfo.type} (${path.basename(downloadedFile)})` 343 | ); 344 | } 345 | 346 | // Verify manifest exists 347 | const manifestPath = path.join(inputDir, "manifest.json"); 348 | try { 349 | await fs.access(manifestPath); 350 | } catch (error) { 351 | throw new Error( 352 | `No manifest.json found in extracted extension at: ${manifestPath}` 353 | ); 354 | } 355 | 356 | // Read manifest for output path generation 357 | let manifest; 358 | try { 359 | const manifestContent = await fs.readFile(manifestPath, "utf-8"); 360 | manifest = JSON.parse(manifestContent); 361 | debug("Manifest loaded: %s v%s", manifest.name, manifest.version); 362 | } catch (error) { 363 | throw new Error(`Failed to parse manifest.json: ${error.message}`); 364 | } 365 | 366 | const locale = await getLocale(manifest, manifestPath, config.locale); 367 | // Generate output path 368 | const localizedName = getLocalizedName(manifest, locale); 369 | const outputPath = await generateOutputPath( 370 | config, 371 | manifest, 372 | localizedName 373 | ); 374 | debug("Output path: %s", outputPath); 375 | 376 | // Check if output file exists 377 | await checkOutputFile(outputPath, config.force); 378 | 379 | // Convert using existing converter 380 | spinner.start("Converting extension to userscript..."); 381 | const { convertExtension } = require("../convert"); 382 | 383 | const convertConfig = { 384 | inputDir, 385 | outputFile: outputPath, 386 | target: config.target, 387 | locale: config.locale, 388 | ignoredAssets: config.ignoreAssets, 389 | }; 390 | 391 | const result = await convertExtension(convertConfig); 392 | debug("Conversion result: %o", result); 393 | 394 | // Minify if requested 395 | if (config.minify) { 396 | spinner.text = "Minifying output..."; 397 | await minify.minifyScript(outputPath); 398 | debug("Minification complete"); 399 | } else if (config.beautify) { 400 | spinner.text = "Beautifying output..."; 401 | await minify.beautifyScript(outputPath); 402 | debug("Beautification complete"); 403 | 
} 404 | 405 | spinner.succeed(chalk.green(`Conversion complete!`)); 406 | 407 | // Display results 408 | const stats = await fs.stat(outputPath); 409 | 410 | console.log( 411 | chalk.blue("📦 Extension:"), 412 | chalk.bold(result.extension.name || "Unknown") 413 | ); 414 | console.log( 415 | chalk.blue("📋 Version:"), 416 | result.extension.version || "Unknown" 417 | ); 418 | if (result.extension.description) { 419 | console.log(chalk.blue("📝 Description:"), result.extension.description); 420 | } 421 | console.log(chalk.blue("🎯 Target:"), config.target); 422 | if (config.locale) { 423 | console.log(chalk.blue("🌐 Locale:"), config.locale); 424 | } 425 | console.log(chalk.blue("📄 Output:"), outputPath); 426 | console.log(chalk.blue("📊 Size:"), humanFileSize(stats.size, true)); 427 | 428 | if (config.minify) { 429 | console.log(chalk.blue("🗜️ Minified:"), "Yes"); 430 | } 431 | 432 | return { success: true, outputFile: outputPath }; 433 | } catch (error) { 434 | spinner.fail(chalk.red("Conversion failed")); 435 | debug("Conversion error: %o", error); 436 | throw error; 437 | } finally { 438 | // Cleanup temporary files unless requested to keep them 439 | if (tempCleanup && !config.keepTemp) { 440 | try { 441 | tempCleanup(); 442 | debug("Temporary files cleaned up"); 443 | } catch (cleanupError) { 444 | console.warn( 445 | chalk.yellow("Warning: Failed to clean up temporary files:"), 446 | cleanupError.message 447 | ); 448 | } 449 | } else if (config.keepTemp && tempCleanup) { 450 | console.log( 451 | chalk.yellow("Temporary files preserved for debugging:"), 452 | tempCleanup.path 453 | ); 454 | } 455 | } 456 | } 457 | 458 | module.exports = { run, runDownload }; 459 | 460 | /** 461 | * Format bytes as human-readable text. 462 | * 463 | * @param bytes Number of bytes. 464 | * @param si True to use metric (SI) units, aka powers of 1000. False to use 465 | * binary (IEC), aka powers of 1024. 466 | * @param dp Number of decimal places to display. 467 | * 468 | * @return Formatted string. 469 | */ 470 | function humanFileSize(bytes, si = true, dp = 1) { 471 | const thresh = si ? 1000 : 1024; 472 | 473 | if (Math.abs(bytes) < thresh) { 474 | return `${bytes} B`; 475 | } 476 | 477 | const units = si 478 | ? ["kB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"] 479 | : ["KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "ZiB", "YiB"]; 480 | let u = -1; 481 | const r = 10 ** dp; 482 | 483 | do { 484 | bytes /= thresh; 485 | ++u; 486 | } while ( 487 | Math.round(Math.abs(bytes) * r) / r >= thresh && 488 | u < units.length - 1 489 | ); 490 | 491 | return `${bytes.toFixed(dp)} ${units[u]}`; 492 | } 493 | -------------------------------------------------------------------------------- /src/convert.js: -------------------------------------------------------------------------------- 1 | const fs = require("fs").promises; 2 | const path = require("path"); 3 | const debug = require("debug")("to-userscript:convert"); 4 | const { parseManifest } = require("./manifestParser"); 5 | const { 6 | readScriptsAndStyles, 7 | readBackgroundScripts, 8 | } = require("./resourceProcessor"); 9 | const { getRequiredGmGrants } = require("./abstractionLayer"); 10 | const { generateMetadata } = require("./metadataGenerator"); 11 | const { buildUserScript } = require("./outputBuilder"); 12 | const { normalizePath } = require("./utils"); 13 | const { getLocalizedName, getLocalizedDescription } = require("./locales"); 14 | 15 | /** 16 | * Converts a browser extension to a userscript or vanilla JS file. 
17 | * This is a pure library function with no CLI dependencies. 18 | * 19 | * @param {Object} config - Configuration object 20 | * @param {string} config.inputDir - Absolute path to the extension directory 21 | * @param {string} config.outputFile - Absolute path for the output file 22 | * @param {string} [config.target='userscript'] - Build target ('userscript' or 'vanilla') 23 | * @param {string} [config.locale] - Preferred locale for extension name/description 24 | * @param {string} [config.ignoredAssets] - Comma-separated asset extensions to ignore 25 | * @returns {Promise} Result object with success status and details 26 | */ 27 | async function convertExtension(config) { 28 | const { 29 | inputDir, 30 | outputFile, 31 | target = "userscript", 32 | locale: preferredLocale, 33 | ignoredAssets, 34 | } = config; 35 | 36 | // Validate configuration 37 | if (!inputDir || typeof inputDir !== "string") { 38 | throw new Error("config.inputDir must be a non-empty string"); 39 | } 40 | 41 | if (!outputFile || typeof outputFile !== "string") { 42 | throw new Error("config.outputFile must be a non-empty string"); 43 | } 44 | 45 | if (!["userscript", "vanilla"].includes(target)) { 46 | throw new Error('config.target must be "userscript" or "vanilla"'); 47 | } 48 | 49 | // Normalize paths 50 | const normalizedInputDir = normalizePath(path.resolve(inputDir)); 51 | const normalizedOutputFile = normalizePath(path.resolve(outputFile)); 52 | const manifestPath = path.join(normalizedInputDir, "manifest.json"); 53 | 54 | try { 55 | debug("Starting conversion: %s target", target); 56 | debug("Input directory: %s", normalizedInputDir); 57 | debug("Output file: %s", normalizedOutputFile); 58 | 59 | if (preferredLocale) { 60 | debug("Preferred locale: %s", preferredLocale); 61 | } 62 | 63 | if (ignoredAssets) { 64 | debug("Ignored asset extensions: %s", ignoredAssets); 65 | } 66 | 67 | // Verify input directory exists and is accessible 68 | try { 69 | const stats = await fs.stat(normalizedInputDir); 70 | if (!stats.isDirectory()) { 71 | throw new Error(`Input path is not a directory: ${normalizedInputDir}`); 72 | } 73 | } catch (error) { 74 | if (error.code === "ENOENT") { 75 | throw new Error(`Input directory not found: ${normalizedInputDir}`); 76 | } 77 | throw new Error( 78 | `Cannot access input directory: ${normalizedInputDir} - ${error.message}` 79 | ); 80 | } 81 | 82 | // Parse manifest with preferred locale 83 | debug("Parsing manifest: %s", manifestPath); 84 | const manifestResult = await parseManifest(manifestPath, preferredLocale); 85 | if (!manifestResult) { 86 | throw new Error("Failed to parse manifest.json"); 87 | } 88 | 89 | const { parsedManifest, locale } = manifestResult; 90 | 91 | if (!parsedManifest) { 92 | throw new Error("Manifest parsing returned null"); 93 | } 94 | 95 | const localizedName = getLocalizedName(parsedManifest, locale); 96 | const localizedDescription = getLocalizedDescription( 97 | parsedManifest, 98 | locale 99 | ); 100 | 101 | debug("Manifest parsed: %s v%s", localizedName, parsedManifest.version); 102 | 103 | if (localizedDescription) { 104 | debug("Description: %s", localizedDescription); 105 | } 106 | 107 | // Validate content scripts 108 | const contentScriptConfigs = parsedManifest.content_scripts || []; 109 | if (contentScriptConfigs.length === 0) { 110 | debug( 111 | "Warning: No content scripts found in manifest. Output might be empty or non-functional." 
112 | ); 113 | } 114 | 115 | // Process resources 116 | debug("Processing content scripts and styles..."); 117 | const resourceResult = await readScriptsAndStyles( 118 | normalizedInputDir, 119 | contentScriptConfigs 120 | ); 121 | 122 | if (!resourceResult) { 123 | throw new Error("Failed to read scripts and styles"); 124 | } 125 | 126 | const { jsContents, cssContents } = resourceResult; 127 | 128 | debug( 129 | "Processed %d JS file(s) and %d CSS file(s)", 130 | Object.keys(jsContents).length, 131 | Object.keys(cssContents).length 132 | ); 133 | 134 | // Process background scripts 135 | const backgroundScriptsList = parsedManifest.background?.service_worker 136 | ? [parsedManifest.background.service_worker] 137 | : parsedManifest.background?.scripts || []; 138 | 139 | const backgroundJsContents = await readBackgroundScripts( 140 | normalizedInputDir, 141 | backgroundScriptsList 142 | ); 143 | 144 | if (backgroundScriptsList.length > 0) { 145 | debug( 146 | "Processed %d background script(s)", 147 | Object.keys(backgroundJsContents).length 148 | ); 149 | } 150 | 151 | // Generate metadata (for userscript target) 152 | let metadataBlock = ""; 153 | if (target === "userscript") { 154 | debug("Generating userscript metadata..."); 155 | const requiredGmGrants = getRequiredGmGrants(target); 156 | metadataBlock = generateMetadata( 157 | parsedManifest, 158 | requiredGmGrants, 159 | normalizedInputDir 160 | ); 161 | } 162 | 163 | // Build final script 164 | debug("Building final script..."); 165 | const finalScriptContent = await buildUserScript({ 166 | metadataBlock, 167 | jsContents, 168 | cssContents, 169 | parsedManifest, 170 | backgroundJsContents, 171 | extensionRoot: normalizedInputDir, 172 | locale, 173 | target, 174 | ignoredAssets, // Pass ignored assets to buildUserScript 175 | }); 176 | 177 | // Ensure output directory exists 178 | const outputDir = path.dirname(normalizedOutputFile); 179 | await fs.mkdir(outputDir, { recursive: true }); 180 | 181 | // Write output file 182 | debug("Writing output to: %s", normalizedOutputFile); 183 | await fs.writeFile(normalizedOutputFile, finalScriptContent, "utf-8"); 184 | 185 | // Get file stats for response 186 | const outputStats = await fs.stat(normalizedOutputFile); 187 | 188 | debug("Conversion completed successfully!"); 189 | 190 | return { 191 | success: true, 192 | outputFile: normalizedOutputFile, 193 | target, 194 | extension: { 195 | name: localizedName, 196 | version: parsedManifest.version, 197 | description: localizedDescription, 198 | }, 199 | stats: { 200 | jsFiles: Object.keys(jsContents).length, 201 | cssFiles: Object.keys(cssContents).length, 202 | backgroundScripts: Object.keys(backgroundJsContents).length, 203 | outputSize: outputStats.size, 204 | }, 205 | warnings: [], // Could be populated with non-fatal issues 206 | }; 207 | } catch (error) { 208 | debug("Conversion failed: %s", error.message); 209 | 210 | // Re-throw with additional context but preserve original error 211 | const enhancedError = new Error( 212 | `Extension conversion failed: ${error.message}` 213 | ); 214 | enhancedError.originalError = error; 215 | enhancedError.inputDir = normalizedInputDir; 216 | enhancedError.outputFile = normalizedOutputFile; 217 | enhancedError.target = target; 218 | 219 | throw enhancedError; 220 | } 221 | } 222 | 223 | module.exports = { 224 | convertExtension, 225 | }; 226 | -------------------------------------------------------------------------------- /src/getIcon.js: 
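// Usage sketch for convertExtension() exported from src/convert.js above
// (illustrative only; the input/output paths and require path are hypothetical):
//
//   const { convertExtension } = require("./convert");
//
//   convertExtension({
//     inputDir: "./my-extension",       // directory containing manifest.json
//     outputFile: "./dist/my.user.js",
//     target: "userscript",             // or "vanilla"
//   }).then((result) => {
//     console.log(result.extension.name, result.stats.outputSize);
//   });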
-------------------------------------------------------------------------------- 1 | const fs = require("fs"); 2 | const path = require("path"); 3 | const debug = require("debug")("to-userscript:icon"); 4 | 5 | function getMimeTypeFromExt(ext) { 6 | switch (ext.toLowerCase()) { 7 | case "png": 8 | return "image/png"; 9 | case "jpg": 10 | case "jpeg": 11 | return "image/jpeg"; 12 | case "gif": 13 | return "image/gif"; 14 | case "svg": 15 | return "image/svg+xml"; 16 | case "ico": 17 | return "image/x-icon"; 18 | default: 19 | return "application/octet-stream"; 20 | } 21 | } 22 | 23 | /** 24 | * Gets the best icon from the manifest, preferring size 48 or closest to it. 25 | * @param {object} manifest - The parsed manifest object 26 | * @param {string} extensionRoot - Absolute path to the extension root directory 27 | * @returns {string|null} Data URL of the icon, or null if no icon found 28 | */ 29 | function getIcon(manifest, extensionRoot) { 30 | if (!manifest.icons || !extensionRoot) { 31 | return null; 32 | } 33 | 34 | const sizes = Object.keys(manifest.icons) 35 | .map((s) => parseInt(s, 10)) 36 | .filter(Boolean) 37 | .sort((a, b) => Math.abs(a - 48) - Math.abs(b - 48)); // Sort by distance from 48 38 | 39 | if (sizes.length === 0) { 40 | return null; 41 | } 42 | 43 | // Get the size closest to 48 44 | const bestSize = sizes[0].toString(); 45 | const iconRel = manifest.icons[bestSize]; 46 | 47 | try { 48 | const iconPath = path.join(extensionRoot, iconRel); 49 | const data = fs.readFileSync(iconPath); 50 | const ext = path.extname(iconPath).substring(1); 51 | const mime = getMimeTypeFromExt(ext); 52 | const dataUri = `data:${mime};base64,${data.toString("base64")}`; 53 | debug("Selected icon: %s (size %s)", iconRel, bestSize); 54 | return dataUri; 55 | } catch (e) { 56 | debug("Failed to read icon file: %s", e.message); 57 | return null; 58 | } 59 | } 60 | 61 | module.exports = { getIcon }; 62 | -------------------------------------------------------------------------------- /src/locales.js: -------------------------------------------------------------------------------- 1 | const path = require("path"); 2 | const fs = require("fs").promises; 3 | const debug = require("debug")("to-userscript:locales"); 4 | 5 | async function getLocale(manifest, manifestPath, preferredLocale = null) { 6 | try { 7 | const LOCALE = preferredLocale || manifest.default_locale || "en"; 8 | const localePath = path.join( 9 | path.dirname(manifestPath), 10 | "_locales", 11 | LOCALE, 12 | "messages.json" 13 | ); 14 | 15 | let locale = null; 16 | try { 17 | locale = JSON.parse(await fs.readFile(localePath, "utf8")); 18 | } catch (localeError) { 19 | // If preferred locale fails, try default locale 20 | if ( 21 | preferredLocale && 22 | preferredLocale !== (manifest.default_locale || "en") 23 | ) { 24 | const fallbackLocale = manifest.default_locale || "en"; 25 | const fallbackPath = path.join( 26 | path.dirname(manifestPath), 27 | "_locales", 28 | fallbackLocale, 29 | "messages.json" 30 | ); 31 | try { 32 | locale = JSON.parse(await fs.readFile(fallbackPath, "utf8")); 33 | debug( 34 | "Locale '%s' not found, using fallback '%s'", 35 | preferredLocale, 36 | fallbackLocale 37 | ); 38 | } catch (fallbackError) { 39 | debug( 40 | "No locale files found for '%s' or '%s', using raw text", 41 | preferredLocale, 42 | fallbackLocale 43 | ); 44 | } 45 | } else { 46 | debug("No locale file found for '%s', using raw text", LOCALE); 47 | } 48 | } 49 | 50 | function processString(str) { 51 | if (!locale || !str) { 52 | return 
str; 53 | } 54 | return str.replace(/__MSG_(\w+)__/g, (match, p1) => { 55 | return locale[p1]?.message || match; 56 | }); 57 | } 58 | processString.__data = locale; 59 | processString.__locale = LOCALE; 60 | return processString; 61 | } catch (e) { 62 | debug("Error loading locale: %s", e.message); 63 | return (str) => str; 64 | } 65 | } 66 | 67 | /** 68 | * Gets localized extension name from manifest 69 | * @param {object} manifest - The extension manifest 70 | * @param {function} localeProcessor - Locale processing function from getLocale 71 | * @returns {string} Localized extension name 72 | */ 73 | function getLocalizedName(manifest, localeProcessor) { 74 | if (!manifest || !manifest.name) { 75 | return "Unknown Extension"; 76 | } 77 | 78 | const name = localeProcessor ? localeProcessor(manifest.name) : manifest.name; 79 | return name || manifest.name || "Unknown Extension"; 80 | } 81 | 82 | /** 83 | * Gets localized extension description from manifest 84 | * @param {object} manifest - The extension manifest 85 | * @param {function} localeProcessor - Locale processing function from getLocale 86 | * @returns {string} Localized extension description 87 | */ 88 | function getLocalizedDescription(manifest, localeProcessor) { 89 | if (!manifest || !manifest.description) { 90 | return ""; 91 | } 92 | 93 | const description = localeProcessor 94 | ? localeProcessor(manifest.description) 95 | : manifest.description; 96 | return description || manifest.description || ""; 97 | } 98 | 99 | /** 100 | * Lists available locales in the extension 101 | * @param {string} manifestPath - Path to the manifest file 102 | * @returns {Promise>} Array of available locale codes 103 | */ 104 | async function getAvailableLocales(manifestPath) { 105 | try { 106 | const localesDir = path.join(path.dirname(manifestPath), "_locales"); 107 | const entries = await fs.readdir(localesDir, { withFileTypes: true }); 108 | 109 | const locales = []; 110 | for (const entry of entries) { 111 | if (entry.isDirectory()) { 112 | const messagesFile = path.join(localesDir, entry.name, "messages.json"); 113 | try { 114 | await fs.access(messagesFile, fs.constants.F_OK); 115 | locales.push(entry.name); 116 | } catch { 117 | // Skip directories without messages.json 118 | } 119 | } 120 | } 121 | 122 | return locales.sort(); 123 | } catch (error) { 124 | debug("Error listing available locales: %s", error.message); 125 | return []; 126 | } 127 | } 128 | 129 | module.exports = { 130 | getLocale, 131 | getLocalizedName, 132 | getLocalizedDescription, 133 | getAvailableLocales, 134 | }; 135 | -------------------------------------------------------------------------------- /src/manifestParser.js: -------------------------------------------------------------------------------- 1 | const { normalizePath } = require("./utils"); 2 | const fs = require("fs").promises; 3 | const debug = require("debug")("to-userscript:manifest"); 4 | const { 5 | getLocale, 6 | getLocalizedName, 7 | getLocalizedDescription, 8 | } = require("./locales"); 9 | 10 | async function parseManifest(manifestPath, preferredLocale = null) { 11 | try { 12 | const content = await fs.readFile(manifestPath, "utf-8"); 13 | const manifest = JSON.parse(content); 14 | const locale = await getLocale(manifest, manifestPath, preferredLocale); 15 | 16 | // Use localization functions for name and description 17 | const localizedName = getLocalizedName(manifest, locale); 18 | const localizedDescription = getLocalizedDescription(manifest, locale); 19 | 20 | const parsed = { 21 | 
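      // Keep only the manifest fields the converter consumes; any other keys
      // in the original manifest are dropped here.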
manifest_version: manifest.manifest_version, 22 | name: localizedName, 23 | version: manifest.version || "0.0.0", 24 | description: localizedDescription, 25 | permissions: manifest.permissions || [], 26 | optional_permissions: manifest.optional_permissions || [], 27 | content_scripts: manifest.content_scripts || [], 28 | options_ui: manifest.options_ui || {}, 29 | browser_action: manifest.browser_action || {}, 30 | page_action: manifest.page_action || {}, 31 | action: manifest.action || {}, 32 | icons: manifest.icons || {}, 33 | web_accessible_resources: manifest.web_accessible_resources || [], 34 | background: manifest.background || {}, 35 | _id: localizedName 36 | ?.replace(/[^a-z0-9]+/gi, "-") 37 | .replace(/\-+$/, "") 38 | .replace(/^-+/, "") 39 | .toLowerCase(), 40 | }; 41 | 42 | if (parsed.content_scripts) { 43 | parsed.content_scripts = parsed.content_scripts.filter( 44 | (cs) => cs.matches && (cs.js || cs.css) 45 | ); 46 | parsed.content_scripts.forEach((cs) => { 47 | // Preserve the exact ordering of scripts and styles - this is critical 48 | // for proper execution where scripts depend on each other. 49 | // Normalize paths here for consistency downstream. 50 | cs.js = (cs.js || []).map(normalizePath); 51 | cs.css = (cs.css || []).map(normalizePath); 52 | }); 53 | } 54 | 55 | return { parsedManifest: parsed, locale }; 56 | } catch (error) { 57 | debug( 58 | "Error reading or parsing manifest file at %s: %s", 59 | manifestPath, 60 | error.message 61 | ); 62 | return null; 63 | } 64 | } 65 | 66 | module.exports = { parseManifest }; 67 | -------------------------------------------------------------------------------- /src/metadataGenerator.js: -------------------------------------------------------------------------------- 1 | const fs = require("fs"); 2 | const path = require("path"); 3 | const debug = require("debug")("to-userscript:metadata"); 4 | const { getIcon } = require("./getIcon"); 5 | const { minify_sync } = require("terser"); 6 | 7 | function generateMetadata( 8 | parsedManifest, 9 | requiredGmGrants = [], 10 | extensionRoot = null, 11 | ) { 12 | const { name, version, description, content_scripts, _id } = parsedManifest; 13 | 14 | const lines = ["// ==UserScript=="]; 15 | lines.push(`// @name ${name || "Converted Extension"}`); 16 | lines.push(`// @version ${version || "1.0.0"}`); 17 | if (description) { 18 | lines.push(`// @description ${description}`); 19 | } 20 | // Simple namespace generation, improve later 21 | lines.push(`// @namespace ${_id}`); 22 | lines.push(`// @author Converter Script`); // Placeholder 23 | 24 | const matches = new Set(); 25 | const MATCH_REPLACEMENTS = { 26 | "": "*://*/*", 27 | }; 28 | if (content_scripts) { 29 | content_scripts.forEach((cs) => { 30 | if (cs.matches) { 31 | cs.matches.forEach((match) => 32 | matches.add(MATCH_REPLACEMENTS[match] || match), 33 | ); 34 | } 35 | }); 36 | } 37 | if (matches.size === 0) { 38 | debug( 39 | "No @match patterns found in manifest content_scripts. 
Adding '// @match *://*/*' as a fallback.", 40 | ); 41 | lines.push("// @match *://*/*"); 42 | } else { 43 | matches.forEach((match) => lines.push(`// @match ${match}`)); 44 | } 45 | 46 | if (requiredGmGrants.length > 0) { 47 | requiredGmGrants.forEach((grant) => lines.push(`// @grant ${grant}`)); 48 | } else { 49 | lines.push("// @grant none"); 50 | } 51 | 52 | const iconDataUrl = getIcon(parsedManifest, extensionRoot); 53 | if (iconDataUrl) { 54 | lines.push(`// @icon ${iconDataUrl}`); 55 | } 56 | 57 | let earliestRunAt = "document-idle"; 58 | const runAtOrder = { 59 | "document-start": 1, 60 | "document-end": 2, 61 | "document-idle": 3, 62 | }; 63 | 64 | if (content_scripts) { 65 | content_scripts.forEach((cs) => { 66 | // Note: Can be document-start or document_start formats 67 | const runAt = cs.run_at?.replaceAll("_", "-") || "document-idle"; 68 | if (runAtOrder[runAt] < runAtOrder[earliestRunAt]) { 69 | earliestRunAt = runAt; 70 | } 71 | }); 72 | } 73 | lines.push(`// @run-at ${earliestRunAt}`); 74 | lines.push("// ==/UserScript=="); 75 | lines.push(""); 76 | lines.push( 77 | minify_sync( 78 | fs.readFileSync( 79 | path.resolve(__dirname, "templates", "trustedTypes.template.js"), 80 | "utf-8", 81 | ), 82 | { 83 | compress: true, 84 | mangle: { 85 | toplevel: true, 86 | }, 87 | }, 88 | ).code, 89 | ); 90 | lines.push(""); 91 | 92 | return lines.join("\n"); 93 | } 94 | 95 | module.exports = { generateMetadata }; 96 | -------------------------------------------------------------------------------- /src/outputBuilder.js: -------------------------------------------------------------------------------- 1 | const { 2 | normalizePath, 3 | convertMatchPatternToRegExp, 4 | convertMatchPatternToRegExpString, 5 | replaceComments, 6 | } = require("./utils"); 7 | const debug = require("debug")("to-userscript:output"); 8 | const templateManager = require("./templateManager"); 9 | const scriptAssembler = require("./scriptAssembler"); 10 | const { generateBuildPolyfillString } = require("./buildPolyfillString"); 11 | const { getIcon } = require("./getIcon"); 12 | const { AssetGenerator } = require("./assetsGenerator"); 13 | 14 | /** 15 | * Prepares CSS data for injection into the template. 16 | * @param {object} cssContents - Object mapping relative CSS paths to their content. 17 | * @returns {string} A string representation of the CSS data object for template injection. 18 | */ 19 | function prepareCssDataString(cssContents) { 20 | const extensionCss = {}; 21 | for (const [relativePath, content] of Object.entries(cssContents)) { 22 | extensionCss[normalizePath(relativePath)] = JSON.stringify(content); 23 | } 24 | // Format as key: value pairs for embedding in the template object literal 25 | // IMPORTANT OTHERWISE IT GETS DOUBLE ESCAPED 26 | return ( 27 | `{` + 28 | Object.entries(extensionCss) 29 | .map(([key, cssStr]) => ` "${key}": ${cssStr}`) 30 | .join(",\n") + 31 | `}` 32 | ); 33 | } 34 | 35 | /** 36 | * Processes manifest content scripts to structure JS and CSS data by run_at timing. 37 | * @param {object} parsedManifest - The parsed manifest object. 38 | * @param {object} jsContents - Object mapping relative JS paths to their content. 39 | * @param {object} cssContents - Object mapping relative CSS paths to their content. 40 | * @returns {{scriptsToRun: object, cssToInject: object}} - Objects mapping run_at times to JS/CSS data. 
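 * @example
 * // Illustrative input with one content script (hypothetical file names):
 * const { scriptsToRun, cssToInject } = structureScriptsAndCssByRunAt(
 *   { content_scripts: [{ js: ["main.js"], css: ["style.css"], run_at: "document_end" }] },
 *   { "main.js": "console.log('hi');" },
 *   { "style.css": "body { color: red; }" }
 * );
 * // scriptsToRun["document-end"] -> [{ path: "main.js", content: "console.log('hi');" }]
 * // cssToInject["document-end"]  -> ["style.css"]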
41 | */ 42 | function structureScriptsAndCssByRunAt( 43 | parsedManifest, 44 | jsContents, 45 | cssContents, 46 | ) { 47 | const scriptsToRun = { 48 | "document-start": [], 49 | "document-end": [], 50 | "document-idle": [], 51 | }; 52 | const cssToInject = { 53 | "document-start": [], 54 | "document-end": [], 55 | "document-idle": [], 56 | }; 57 | const processedJsPaths = new Set(); // Stores "path@runAt" 58 | const processedCssPaths = new Set(); // Stores "path@runAt" 59 | 60 | (parsedManifest.content_scripts || []).forEach((config) => { 61 | const runAt = config.run_at?.replaceAll("_", "-") || "document-idle"; 62 | const validRunAt = [ 63 | "document-start", 64 | "document-end", 65 | "document-idle", 66 | ].includes(runAt) 67 | ? runAt 68 | : "document-idle"; 69 | 70 | if (config.js) { 71 | config.js.forEach((jsPath) => { 72 | const normalizedPath = normalizePath(jsPath); 73 | const processKey = `${normalizedPath}@${validRunAt}`; 74 | if (jsContents[normalizedPath] && !processedJsPaths.has(processKey)) { 75 | scriptsToRun[validRunAt].push({ 76 | path: normalizedPath, 77 | content: jsContents[normalizedPath], 78 | }); 79 | processedJsPaths.add(processKey); 80 | } 81 | }); 82 | } 83 | 84 | if (config.css) { 85 | config.css.forEach((cssPath) => { 86 | const normalizedPath = normalizePath(cssPath); 87 | const processKey = `${normalizedPath}@${validRunAt}`; 88 | if (cssContents[normalizedPath] && !processedCssPaths.has(processKey)) { 89 | cssToInject[validRunAt].push(normalizedPath); 90 | processedCssPaths.add(processKey); 91 | } 92 | }); 93 | } 94 | }); 95 | 96 | return { scriptsToRun, cssToInject }; 97 | } 98 | 99 | function buildBackgroundExecutionString(backgroundJsContents = {}, scriptName) { 100 | const bgPaths = Object.keys(backgroundJsContents); 101 | if (bgPaths.length === 0) return ""; 102 | const sanitizedScripts = bgPaths.map((p) => ({ 103 | path: normalizePath(p), 104 | content: backgroundJsContents[p], 105 | })); 106 | const getContent = (str) => str.trim().replace(/^['"]use strict['"];?/, ""); 107 | return ` 108 | const START_BACKGROUND_SCRIPT = (function(){ 109 | const backgroundPolyfill = buildPolyfill({ isBackground: true }); 110 | const scriptName = ${JSON.stringify(scriptName)}; 111 | const debug = ${JSON.stringify(`[${scriptName}]`)}; 112 | _log(debug + ' Executing background scripts...'); 113 | 114 | function executeBackgroundScripts(){ 115 | with(backgroundPolyfill){ 116 | ${sanitizedScripts 117 | .map((s) => ` // BG: ${s.path}\n${getContent(s.content)}`) 118 | .join("\n\n")} 119 | } 120 | } 121 | 122 | executeBackgroundScripts.call(backgroundPolyfill); 123 | 124 | _log(debug + ' Background scripts execution complete.'); 125 | }); 126 | 127 | setTimeout(() => { 128 | // Wait for things to be defined 129 | START_BACKGROUND_SCRIPT(); 130 | }, 10); 131 | _log("START_BACKGROUND_SCRIPT", START_BACKGROUND_SCRIPT); 132 | // End background script environment 133 | `; 134 | } 135 | 136 | /** 137 | * Asynchronously builds the final userscript content using templates and processed data. 138 | */ 139 | async function buildUserScript({ 140 | metadataBlock, 141 | jsContents, // { 'path/to/script.js': 'content...' } 142 | cssContents, // { 'path/to/style.css': 'content...' 
} 143 | parsedManifest, // The parsed manifest object 144 | backgroundJsContents = {}, // background scripts map 145 | extensionRoot = null, 146 | locale, 147 | target = "userscript", // Build target 148 | ignoredAssets = null, // Ignored asset extensions 149 | }) { 150 | debug("Generating unified assets map..."); 151 | const assetGenerator = new AssetGenerator( 152 | extensionRoot, 153 | locale, 154 | ignoredAssets, 155 | ); 156 | const { assetsMap, optionsPagePath, popupPagePath } = 157 | await assetGenerator.generateAssetsMap(parsedManifest); 158 | 159 | const extensionCssString = prepareCssDataString(cssContents); 160 | 161 | const contentScriptConfigsForMatching = ( 162 | parsedManifest.content_scripts || [] 163 | ).map((cs) => ({ 164 | matches: cs.matches || [], 165 | // No need for js, css, run_at here, only matching patterns 166 | })); 167 | const contentScriptConfigsForMatchingString = JSON.stringify( 168 | contentScriptConfigsForMatching, 169 | null, 170 | 2, 171 | ); 172 | 173 | const injectedManifestString = JSON.stringify(parsedManifest); 174 | 175 | const { scriptsToRun, cssToInject } = structureScriptsAndCssByRunAt( 176 | parsedManifest, 177 | jsContents, 178 | cssContents, 179 | ); 180 | 181 | const scriptName = parsedManifest.name || "Converted Script"; 182 | const backgroundExecutionString = buildBackgroundExecutionString( 183 | backgroundJsContents, 184 | scriptName, 185 | ); 186 | 187 | const combinedExecutionLogicString = 188 | scriptAssembler.generateCombinedExecutionLogic( 189 | scriptsToRun, 190 | cssToInject, 191 | scriptName, 192 | ); 193 | 194 | const polyfillString = await generateBuildPolyfillString( 195 | target, 196 | assetsMap, 197 | parsedManifest, 198 | ); 199 | const optionsPolyfillString = await generateBuildPolyfillString( 200 | "postmessage", 201 | assetsMap, 202 | parsedManifest, 203 | ); 204 | 205 | const extensionIcon = extensionRoot 206 | ? getIcon(parsedManifest, extensionRoot) 207 | : null; 208 | 209 | const orchestrationTemplate = 210 | await templateManager.getOrchestrationTemplate(); 211 | 212 | const VERBOSE = false; 213 | const loggingString = VERBOSE 214 | ? ` 215 | const SCRIPT_NAME = ${JSON.stringify(scriptName)}; 216 | let lastTime = performance.now(); 217 | const __timeWrap = (func) => (...args) => { 218 | const now = performance.now(); 219 | if (now - lastTime > 100){ 220 | lastTime = performance.now(); 221 | } 222 | func(...args); 223 | console.log(\`\${now - lastTime}ms\`); 224 | lastTime = performance.now(); 225 | return func(...args); 226 | }; 227 | const _log = __timeWrap((...args) => console.log(\`[\${typeof SCRIPT_NAME === 'string' ? SCRIPT_NAME : '[USERSCRIPT_CONVERTED]'}]\`, ...args)); 228 | const _warn = __timeWrap((...args) => console.warn(\`[\${typeof SCRIPT_NAME === 'string' ? SCRIPT_NAME : '[USERSCRIPT_CONVERTED]'}]\`, ...args)); 229 | const _error = __timeWrap((...args) => console.error(\`[\${typeof SCRIPT_NAME === 'string' ? SCRIPT_NAME : '[USERSCRIPT_CONVERTED]'}]\`, ...args)); 230 | ` 231 | : ` 232 | const SCRIPT_NAME = ${JSON.stringify(scriptName)}; 233 | const _log = (...args) => {}; 234 | const _warn = (...args) => console.warn(\`[\${typeof SCRIPT_NAME === 'string' ? SCRIPT_NAME : '[USERSCRIPT_CONVERTED]'}]\`, ...args); 235 | const _error = (...args) => { 236 | let e = args[0]; 237 | console.error(\`[\${typeof SCRIPT_NAME === 'string' ? 
SCRIPT_NAME : '[USERSCRIPT_CONVERTED]'}]\`, ...args); 238 | } 239 | `; 240 | const replacements = { 241 | "{{SCRIPT_NAME}}": JSON.stringify(scriptName), 242 | "{{INJECTED_MANIFEST}}": injectedManifestString, 243 | "{{CONTENT_SCRIPT_CONFIGS_FOR_MATCHING_ONLY}}": 244 | contentScriptConfigsForMatchingString, 245 | "{{EXTENSION_CSS_DATA}}": extensionCssString, // Pass the actual CSS data object literal 246 | "{{CONVERT_MATCH_PATTERN_FUNCTION_STRING}}": 247 | convertMatchPatternToRegExpString.toString(), 248 | "{{CONVERT_MATCH_PATTERN_TO_REGEXP_FUNCTION}}": 249 | convertMatchPatternToRegExp.toString(), 250 | "{{COMBINED_EXECUTION_LOGIC}}": combinedExecutionLogicString, 251 | "{{OPTIONS_PAGE_PATH}}": optionsPagePath 252 | ? JSON.stringify(optionsPagePath) 253 | : "null", 254 | "{{POPUP_PAGE_PATH}}": popupPagePath 255 | ? JSON.stringify(popupPagePath) 256 | : "null", 257 | "{{EXTENSION_ICON}}": extensionIcon 258 | ? JSON.stringify(extensionIcon) 259 | : "null", 260 | "{{UNIFIED_POLYFILL_FOR_IFRAME}}": JSON.stringify(optionsPolyfillString), 261 | "{{LOCALE}}": JSON.stringify(locale?.__data || {}), 262 | "{{USED_LOCALE}}": JSON.stringify(locale?.__locale || "en"), 263 | }; 264 | 265 | let orchestrationLogic = orchestrationTemplate; 266 | for (const [placeholder, value] of Object.entries(replacements)) { 267 | // Use a robust regex for replacement 268 | const regex = new RegExp( 269 | placeholder.replace(/[-\/\\^$*+?.()|[\]{}]/g, "\\$&"), 270 | "g", 271 | ); 272 | // Ensure $ signs in the replacement value are properly escaped 273 | const safeValue = 274 | typeof value === "string" ? value.replace(/\$/g, "$$$$") : value; 275 | orchestrationLogic = orchestrationLogic.replace(regex, safeValue); 276 | } 277 | 278 | let finalScript = [ 279 | metadataBlock, 280 | "(function() {", 281 | " // - Logging", 282 | loggingString, 283 | " // - Unified Polyfill", 284 | polyfillString, 285 | " // - Background Script Environment", 286 | backgroundExecutionString, 287 | "", 288 | " // - Orchestration Logic", 289 | orchestrationLogic, 290 | "", 291 | "})();", 292 | ].join("\n"); 293 | 294 | finalScript = replaceComments(finalScript); 295 | 296 | return finalScript; 297 | } 298 | 299 | module.exports = { buildUserScript }; 300 | -------------------------------------------------------------------------------- /src/patches/ExtPay.js: -------------------------------------------------------------------------------- 1 | const ExtPay = (extensionId) => ({ 2 | startBackground() {}, 3 | 4 | getUser() { 5 | return new Promise((resolve) => { 6 | const dummyUser = { 7 | paid: false, 8 | paidAt: new Date(), 9 | email: "dummyuser@example.com", 10 | installedAt: new Date(), 11 | trialStartedAt: null, 12 | plan: { 13 | unitAmountCents: 1000, 14 | currency: "usd", 15 | nickname: "dummy_plan", 16 | intervalCount: 1, 17 | interval: "month", 18 | }, 19 | subscriptionStatus: "inactive", 20 | subscriptionCancelAt: null, 21 | }; 22 | resolve(dummyUser); 23 | }); 24 | }, 25 | openPaymentPage(planNickname) {}, 26 | getPlans() { 27 | return new Promise((resolve) => { 28 | const dummyPlans = [ 29 | { 30 | unitAmountCents: 1000, 31 | currency: "usd", 32 | nickname: "monthly_plan", 33 | interval: "month", 34 | intervalCount: 1, 35 | }, 36 | { 37 | unitAmountCents: 9900, 38 | currency: "usd", 39 | nickname: "yearly_plan", 40 | interval: "year", 41 | intervalCount: 1, 42 | }, 43 | ]; 44 | resolve(dummyPlans); 45 | }); 46 | }, 47 | 48 | onPaid: { 49 | addListener: () => {}, 50 | removeListener: () => {}, 51 | }, 52 | 53 | openTrialPage(displayText) 
{}, 54 | 55 | openLoginPage() { 56 | console.log("Dummy login page opened"); 57 | }, 58 | }); 59 | 60 | window.ExtPay = ExtPay; 61 | -------------------------------------------------------------------------------- /src/resourceProcessor.js: -------------------------------------------------------------------------------- 1 | const { normalizePath, scriptBlacklist } = require("./utils"); 2 | const fs = require("fs").promises; 3 | const path = require("path"); 4 | const debug = require("debug")("to-userscript:resources"); 5 | 6 | /** 7 | * Validates that a file exists and is readable 8 | * @param {string} filePath - The file path to validate 9 | * @returns {Promise} 10 | * @throws {Error} If file doesn't exist or isn't readable 11 | */ 12 | async function validateFileAccess(filePath) { 13 | try { 14 | const stats = await fs.stat(filePath); 15 | if (!stats.isFile()) { 16 | throw new Error(`Path exists but is not a file: ${filePath}`); 17 | } 18 | await fs.access(filePath, fs.constants.R_OK); 19 | } catch (error) { 20 | if (error.code === "ENOENT") { 21 | throw new Error(`File not found: ${filePath}`); 22 | } else if (error.code === "EACCES") { 23 | throw new Error(`File is not readable: ${filePath}`); 24 | } else { 25 | throw new Error( 26 | `File access validation failed: ${filePath} - ${error.message}`, 27 | ); 28 | } 29 | } 30 | } 31 | 32 | async function readScript(filePath) { 33 | try { 34 | if (!filePath || typeof filePath !== "string") { 35 | throw new Error(`Invalid file path provided: ${filePath}`); 36 | } 37 | 38 | await validateFileAccess(filePath); 39 | const content = await fs.readFile(filePath, "utf-8"); 40 | 41 | if (content === null || content === undefined) { 42 | throw new Error(`File content is null or undefined: ${filePath}`); 43 | } 44 | 45 | return content; 46 | } catch (error) { 47 | const errorMsg = `Error reading script file ${filePath}: ${error.message}`; 48 | debug("Error reading script file %s: %s", filePath, error.message); 49 | 50 | if ( 51 | error.message.includes("not found") || 52 | error.message.includes("not readable") 53 | ) { 54 | debug("Returning empty content for inaccessible script: %s", filePath); 55 | return ""; 56 | } 57 | 58 | throw new Error(errorMsg); 59 | } 60 | } 61 | 62 | async function readCSS(filePath) { 63 | try { 64 | if (!filePath || typeof filePath !== "string") { 65 | throw new Error(`Invalid CSS file path provided: ${filePath}`); 66 | } 67 | 68 | await validateFileAccess(filePath); 69 | const content = await fs.readFile(filePath, "utf-8"); 70 | 71 | if (content === null || content === undefined) { 72 | throw new Error(`CSS file content is null or undefined: ${filePath}`); 73 | } 74 | 75 | return content; 76 | } catch (error) { 77 | const errorMsg = `Error reading CSS file ${filePath}: ${error.message}`; 78 | debug("Error reading CSS file %s: %s", filePath, error.message); 79 | 80 | // For CSS files, missing files are less critical than JS files 81 | if ( 82 | error.message.includes("not found") || 83 | error.message.includes("not readable") 84 | ) { 85 | debug("Returning empty content for inaccessible CSS: %s", filePath); 86 | return ""; 87 | } 88 | 89 | throw new Error(errorMsg); 90 | } 91 | } 92 | 93 | /** 94 | * Reads JS and CSS files specified in content script configurations. 95 | * @param {string} baseDir - The root directory of the extension. 96 | * @param {Array} contentScriptConfigs - Array of content script objects from the manifest. 
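 * Missing or unreadable files are recorded as errors and replaced with empty
 * content, so a single bad path does not abort the whole conversion.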
97 | * @returns {Promise<{jsContents: object, cssContents: object}>} - Object containing script and style contents keyed by relative path. 98 | */ 99 | async function readScriptsAndStyles(baseDir, contentScriptConfigs) { 100 | if (!baseDir || typeof baseDir !== "string") { 101 | throw new Error(`Invalid base directory provided: ${baseDir}`); 102 | } 103 | 104 | if (!Array.isArray(contentScriptConfigs)) { 105 | throw new Error( 106 | `Content script configs must be an array, got: ${typeof contentScriptConfigs}`, 107 | ); 108 | } 109 | 110 | const jsContents = {}; 111 | const cssContents = {}; 112 | const processedJsPaths = new Set(); 113 | const processedCssPaths = new Set(); 114 | const errors = []; 115 | 116 | try { 117 | await validateFileAccess(baseDir); 118 | } catch (error) { 119 | try { 120 | const stats = await fs.stat(baseDir); 121 | if (!stats.isDirectory()) { 122 | throw new Error(`Base directory is not a directory: ${baseDir}`); 123 | } 124 | } catch (dirError) { 125 | throw new Error( 126 | `Base directory is not accessible: ${baseDir} - ${dirError.message}`, 127 | ); 128 | } 129 | } 130 | 131 | for (const [configIndex, config] of contentScriptConfigs.entries()) { 132 | if (!config || typeof config !== "object") { 133 | debug( 134 | "Skipping invalid content script config at index %d: %o", 135 | configIndex, 136 | config, 137 | ); 138 | continue; 139 | } 140 | 141 | // Process JS files 142 | if (config.js && Array.isArray(config.js)) { 143 | for (const [jsIndex, jsPath] of config.js.entries()) { 144 | try { 145 | if (!jsPath || typeof jsPath !== "string") { 146 | throw new Error( 147 | `Invalid JS path at config[${configIndex}].js[${jsIndex}]: ${jsPath}`, 148 | ); 149 | } 150 | 151 | const relativePath = normalizePath(jsPath); 152 | if (!processedJsPaths.has(relativePath)) { 153 | if ( 154 | Object.keys(scriptBlacklist).includes(path.basename(relativePath)) 155 | ) { 156 | debug("Skipping JS: %s (blacklisted)", relativePath); 157 | jsContents[relativePath] = 158 | scriptBlacklist[path.basename(relativePath)]; 159 | processedJsPaths.add(relativePath); 160 | } else { 161 | const fullPath = path.join(baseDir, relativePath); 162 | debug("Reading JS: %s", relativePath); 163 | 164 | try { 165 | jsContents[relativePath] = await readScript(fullPath); 166 | processedJsPaths.add(relativePath); 167 | } catch (scriptError) { 168 | errors.push( 169 | `Failed to read JS file ${relativePath}: ${scriptError.message}`, 170 | ); 171 | // Continue processing other files even if one fails 172 | jsContents[relativePath] = ""; // Use empty content as fallback 173 | processedJsPaths.add(relativePath); 174 | } 175 | } 176 | } 177 | } catch (error) { 178 | const errorMsg = `Error processing JS path at config[${configIndex}].js[${jsIndex}]: ${error.message}`; 179 | errors.push(errorMsg); 180 | debug( 181 | "Error processing JS path at config[%d].js[%d]: %s", 182 | configIndex, 183 | jsIndex, 184 | error.message, 185 | ); 186 | } 187 | } 188 | } 189 | 190 | // Process CSS files 191 | if (config.css && Array.isArray(config.css)) { 192 | for (const [cssIndex, cssPath] of config.css.entries()) { 193 | try { 194 | if (!cssPath || typeof cssPath !== "string") { 195 | throw new Error( 196 | `Invalid CSS path at config[${configIndex}].css[${cssIndex}]: ${cssPath}`, 197 | ); 198 | } 199 | 200 | const relativePath = normalizePath(cssPath); 201 | if (!processedCssPaths.has(relativePath)) { 202 | const fullPath = path.join(baseDir, relativePath); 203 | debug("Reading CSS: %s", relativePath); 204 | 205 | try { 206 | 
cssContents[relativePath] = await readCSS(fullPath); 207 | processedCssPaths.add(relativePath); 208 | } catch (cssError) { 209 | errors.push( 210 | `Failed to read CSS file ${relativePath}: ${cssError.message}`, 211 | ); 212 | // Continue processing other files even if one fails 213 | cssContents[relativePath] = ""; 214 | processedCssPaths.add(relativePath); 215 | } 216 | } 217 | } catch (error) { 218 | const errorMsg = `Error processing CSS path at config[${configIndex}].css[${cssIndex}]: ${error.message}`; 219 | errors.push(errorMsg); 220 | debug( 221 | "Error processing CSS path at config[%d].css[%d]: %s", 222 | configIndex, 223 | cssIndex, 224 | error.message, 225 | ); 226 | } 227 | } 228 | } 229 | } 230 | 231 | // Log summary of errors if any occurred 232 | if (errors.length > 0) { 233 | debug( 234 | "Encountered %d error(s) while reading scripts and styles:", 235 | errors.length, 236 | ); 237 | errors.forEach((error, index) => debug(" %d. %s", index + 1, error)); 238 | 239 | // Only throw if we couldn't read any files at all 240 | const totalExpectedFiles = contentScriptConfigs.reduce((count, config) => { 241 | return ( 242 | count + 243 | (config.js ? config.js.length : 0) + 244 | (config.css ? config.css.length : 0) 245 | ); 246 | }, 0); 247 | 248 | const totalReadFiles = 249 | Object.keys(jsContents).length + Object.keys(cssContents).length; 250 | 251 | if (totalReadFiles === 0 && totalExpectedFiles > 0) { 252 | throw new Error( 253 | `Failed to read any of the ${totalExpectedFiles} expected script/style files. Check file paths and permissions.`, 254 | ); 255 | } 256 | } 257 | 258 | debug( 259 | "Successfully processed %d JS file(s) and %d CSS file(s)", 260 | Object.keys(jsContents).length, 261 | Object.keys(cssContents).length, 262 | ); 263 | 264 | return { jsContents, cssContents }; 265 | } 266 | 267 | /** 268 | * Reads background script files specified in manifest.background.scripts. 269 | * @param {string} baseDir Root directory of the extension. 270 | * @param {Array} bgScripts Array of script paths. 271 | * @returns {Promise} Object mapping normalized path -> script content. 
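 * @example
 * // Illustrative call with a hypothetical path (inside an async function):
 * const bg = await readBackgroundScripts("/path/to/extension", ["background.js"]);
 * // -> { "background.js": "<file contents>" }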
272 | */ 273 | async function readBackgroundScripts(baseDir, bgScripts = []) { 274 | if (!baseDir || typeof baseDir !== "string") { 275 | throw new Error( 276 | `Invalid base directory for background scripts: ${baseDir}`, 277 | ); 278 | } 279 | 280 | if (!Array.isArray(bgScripts)) { 281 | throw new Error( 282 | `Background scripts must be an array, got: ${typeof bgScripts}`, 283 | ); 284 | } 285 | 286 | const bgContents = {}; 287 | const processed = new Set(); 288 | const errors = []; 289 | 290 | if (bgScripts.length === 0) { 291 | debug("No background scripts to process"); 292 | return bgContents; 293 | } 294 | 295 | debug("Processing %d background script(s)...", bgScripts.length); 296 | 297 | for (const [index, bgPath] of bgScripts.entries()) { 298 | try { 299 | if (!bgPath || typeof bgPath !== "string") { 300 | throw new Error( 301 | `Invalid background script path at index ${index}: ${bgPath}`, 302 | ); 303 | } 304 | 305 | const rel = normalizePath(bgPath); 306 | if (processed.has(rel)) { 307 | debug("Skipping duplicate background script: %s", rel); 308 | continue; 309 | } 310 | 311 | debug("Reading Background JS: %s", rel); 312 | const full = path.join(baseDir, rel); 313 | 314 | try { 315 | bgContents[rel] = await readScript(full); 316 | processed.add(rel); 317 | } catch (scriptError) { 318 | errors.push( 319 | `Failed to read background script ${rel}: ${scriptError.message}`, 320 | ); 321 | // Add empty content as fallback 322 | bgContents[rel] = ""; 323 | processed.add(rel); 324 | } 325 | } catch (error) { 326 | const errorMsg = `Error processing background script at index ${index}: ${error.message}`; 327 | errors.push(errorMsg); 328 | debug( 329 | "Error processing background script at index %d: %s", 330 | index, 331 | error.message, 332 | ); 333 | } 334 | } 335 | 336 | // Log summary 337 | if (errors.length > 0) { 338 | debug( 339 | "Encountered %d error(s) while reading background scripts:", 340 | errors.length, 341 | ); 342 | errors.forEach((error, index) => debug(" %d. %s", index + 1, error)); 343 | 344 | // Only throw if we couldn't read any background scripts at all 345 | if (Object.keys(bgContents).length === 0 && bgScripts.length > 0) { 346 | throw new Error( 347 | `Failed to read any of the ${bgScripts.length} expected background scripts. Check file paths and permissions.`, 348 | ); 349 | } 350 | } 351 | 352 | debug( 353 | "Successfully processed %d background script(s)", 354 | Object.keys(bgContents).length, 355 | ); 356 | 357 | return bgContents; 358 | } 359 | 360 | module.exports = { 361 | readScript, 362 | readCSS, 363 | readScriptsAndStyles, 364 | readBackgroundScripts, 365 | }; 366 | -------------------------------------------------------------------------------- /src/runtimePolyfill.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Minimal event bus and chrome.runtime polyfill. 3 | * @module runtimePolyfill 4 | */ 5 | 6 | /** 7 | * @typedef {"page"|"iframe"} BusEnvironment 8 | * @typedef {"background"|"tab"|"ext_page"} RuntimeType 9 | */ 10 | 11 | /** 12 | * @typedef {object} EventBus 13 | * @property {(event: string, handler: (data: any) => void) => void} on 14 | * @property {(event: string, handler: (data: any) => void) => void} off 15 | * @property {(event: string, data: any) => void} emit 16 | */ 17 | 18 | const FLAG = '__eventBus'; 19 | 20 | /** 21 | * Creates an event bus. 
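 * Events are dispatched to local listeners and mirrored across frames via
 * window.postMessage (up to the parent when running in an iframe, down to all
 * child iframes when running in the page).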
22 | * @param {BusEnvironment} env 23 | * @returns {EventBus} 24 | */ 25 | export function createEventBus(env = 'page') { 26 | const listeners = new Map(); 27 | 28 | const dispatch = (event, data) => { 29 | const subs = listeners.get(event); 30 | if (subs) subs.forEach((fn) => fn(data)); 31 | }; 32 | 33 | window.addEventListener('message', (e) => { 34 | const { data } = e; 35 | if (data && data[FLAG]) dispatch(data.event, data.payload); 36 | }); 37 | 38 | return { 39 | on(event, handler) { 40 | const set = listeners.get(event) || new Set(); 41 | set.add(handler); 42 | listeners.set(event, set); 43 | }, 44 | 45 | off(event, handler) { 46 | const set = listeners.get(event); 47 | if (!set) return; 48 | set.delete(handler); 49 | if (!set.size) listeners.delete(event); 50 | }, 51 | 52 | emit(event, payload) { 53 | dispatch(event, payload); 54 | const message = { [FLAG]: true, event, payload }; 55 | if (env === 'iframe') { 56 | if (window.parent) window.parent.postMessage(message, '*'); 57 | } else { 58 | Array.from(document.querySelectorAll('iframe')).forEach((frame) => { 59 | if (frame.contentWindow) frame.contentWindow.postMessage(message, '*'); 60 | }); 61 | } 62 | } 63 | }; 64 | } 65 | 66 | /** 67 | * Returns a chrome.runtime like object built on top of the event bus. 68 | * @param {RuntimeType} type 69 | * @param {EventBus} bus 70 | */ 71 | export function createRuntime(type = 'tab', bus) { 72 | const messageEvent = 'runtime-message'; 73 | const handlers = new Set(); 74 | 75 | bus.on(messageEvent, ({ sender, payload }) => { 76 | if (sender === type) return; 77 | handlers.forEach((fn) => fn(payload, { sender }, () => {})); 78 | }); 79 | 80 | return { 81 | sendMessage(payload) { 82 | bus.emit(messageEvent, { sender: type, payload }); 83 | }, 84 | onMessage: { 85 | addListener(fn) { 86 | handlers.add(fn); 87 | }, 88 | removeListener(fn) { 89 | handlers.delete(fn); 90 | }, 91 | hasListener(fn) { 92 | return handlers.has(fn); 93 | } 94 | } 95 | }; 96 | } -------------------------------------------------------------------------------- /src/scriptAssembler.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Generates the string for the `executeAllScripts` function, which handles 3 | * CSS injection and JS execution based on run_at timing. 4 | * 5 | * @param {object} scriptsToRun - Object mapping run_at times to arrays of { path, content }. 6 | * @param {object} cssToInject - Object mapping run_at times to arrays of CSS file paths. 7 | * @param {string} scriptName - The name of the script for logging purposes. 8 | * @returns {string} A string containing the JavaScript function definition. 
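 * @example
 * // Illustrative input; the returned string is embedded into the generated userscript:
 * const fnString = generateCombinedExecutionLogic(
 *   {
 *     "document-start": [],
 *     "document-end": [{ path: "main.js", content: "console.log('hi');" }],
 *     "document-idle": [],
 *   },
 *   { "document-start": [], "document-end": ["style.css"], "document-idle": [] },
 *   "My Extension"
 * );
 * // fnString defines `async function executeAllScripts(globalThis, extensionCssData) { ... }`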
9 | */ 10 | function generateCombinedExecutionLogic(scriptsToRun, cssToInject, scriptName) { 11 | // Helper to generate CSS injection code for a given run_at phase 12 | const generateCssInjection = (runAtKey, phaseName) => { 13 | return (cssToInject[runAtKey] || []) 14 | .map( 15 | (cssPath, idx) => ` 16 | const cssKey_${idx} = ${JSON.stringify(cssPath)}; 17 | try { 18 | if (extensionCssData[cssKey_${idx}]) { 19 | _log(\` Injecting CSS (${phaseName}): \${cssKey_${idx}}\`); 20 | const style = document.createElement('style'); 21 | style.textContent = extensionCssData[cssKey_${idx}]; 22 | (document.head || document.documentElement).appendChild(style); 23 | } else { console.warn(\` CSS not found (${phaseName}): \${cssKey_${idx}}\`); } 24 | } catch(e) { _error(\` Failed injecting CSS (\${cssKey_${idx}}) in phase ${phaseName}\`, e, extensionCssData); } 25 | ` 26 | ) 27 | .join("\n"); 28 | }; 29 | 30 | // Helper to generate JS execution code for a given run_at phase 31 | const generateJsExecution = (runAtKey, phaseName) => { 32 | const allScripts = scriptsToRun[runAtKey] || []; 33 | 34 | const getContent = (content) => 35 | content.trim().replace(/^['"]use strict['"];?/, ""); 36 | return `const scriptPaths = ${JSON.stringify( 37 | allScripts.map((script) => script.path) 38 | )}; 39 | _log(\` Executing JS (${phaseName}): \${scriptPaths}\`); 40 | 41 | try { 42 | // Keep variables from being redeclared for global scope, but also make them apply to global scope. (Theoretically) 43 | with (globalThis){;\n${allScripts 44 | .map( 45 | (script) => 46 | `// START: ${script.path}\n${getContent(script.content)}\n// END: ${ 47 | script.path 48 | }` 49 | ) 50 | .join("\n\n")}\n;} 51 | } catch(e) { _error(\` Error executing scripts \${scriptPaths}\`, e); } 52 | `; 53 | }; 54 | 55 | const functionString = ` 56 | // -- Script Execution Logic 57 | async function executeAllScripts(globalThis, extensionCssData) { 58 | const {chrome, browser, global, window, self} = globalThis; 59 | const scriptName = ${JSON.stringify(scriptName)}; 60 | _log(\`Starting execution phases...\`); 61 | 62 | // --- Document Start 63 | if (typeof document !== 'undefined') { 64 | _log(\`Executing document-start phase...\`); 65 | ${generateCssInjection("document-start", "start")} 66 | ${generateJsExecution("document-start", "start")} 67 | } else { 68 | _log(\`Skipping document-start phase (no document).\`); 69 | } 70 | 71 | 72 | // --- Wait for Document End (DOMContentLoaded) --- 73 | if (typeof document !== 'undefined' && document.readyState === 'loading') { 74 | _log(\`Waiting for DOMContentLoaded...\`); 75 | await new Promise(resolve => document.addEventListener('DOMContentLoaded', resolve, { once: true })); 76 | _log(\`DOMContentLoaded fired.\`); 77 | } else if (typeof document !== 'undefined') { 78 | _log(\`DOMContentLoaded already passed or not applicable.\`); 79 | } 80 | 81 | 82 | // --- Document End 83 | if (typeof document !== 'undefined') { 84 | _log(\`Executing document-end phase...\`); 85 | ${generateCssInjection("document-end", "end")} 86 | ${generateJsExecution("document-end", "end")} 87 | } else { 88 | _log(\`Skipping document-end phase (no document).\`); 89 | } 90 | 91 | 92 | // --- Wait for Document Idle 93 | _log(\`Waiting for document idle state...\`); 94 | if (typeof window !== 'undefined' && typeof window.requestIdleCallback === 'function') { 95 | await new Promise(resolve => window.requestIdleCallback(resolve, { timeout: 2000 })); // 2-second timeout fallback 96 | _log(\`requestIdleCallback fired or timed out.\`); 97 | 
} else { 98 | // Fallback: wait a short period after DOMContentLoaded/current execution if requestIdleCallback is unavailable 99 | await new Promise(resolve => setTimeout(resolve, 50)); 100 | _log(\`Idle fallback timer completed.\`); 101 | } 102 | 103 | 104 | // --- Document Idle 105 | if (typeof document !== 'undefined') { 106 | _log(\`Executing document-idle phase...\`); 107 | ${generateCssInjection("document-idle", "idle")} 108 | ${generateJsExecution("document-idle", "idle")} 109 | } else { 110 | _log(\`Skipping document-idle phase (no document).\`); 111 | } 112 | 113 | _log(\`All execution phases complete, re-firing load events.\`); 114 | document.dispatchEvent(new Event("DOMContentLoaded", { 115 | bubbles: true, 116 | cancelable: true 117 | })); 118 | }`; 119 | 120 | return functionString; 121 | } 122 | 123 | module.exports = { generateCombinedExecutionLogic }; 124 | -------------------------------------------------------------------------------- /src/templateManager.js: -------------------------------------------------------------------------------- 1 | const fs = require("fs").promises; 2 | const path = require("path"); 3 | const debug = require("debug")("to-userscript:templates"); 4 | 5 | // Cache templates to avoid repeated file reads 6 | const templateCache = {}; 7 | 8 | /** 9 | * Validates that template path exists and is accessible 10 | * @param {string} templatePath - The full path to the template file 11 | * @returns {Promise} Whether the template exists and is readable 12 | */ 13 | async function validateTemplatePath(templatePath) { 14 | try { 15 | const stats = await fs.stat(templatePath); 16 | if (!stats.isFile()) { 17 | throw new Error( 18 | `Template path exists but is not a file: ${templatePath}` 19 | ); 20 | } 21 | await fs.access(templatePath, fs.constants.R_OK); 22 | return true; 23 | } catch (error) { 24 | if (error.code === "ENOENT") { 25 | throw new Error(`Template file not found: ${templatePath}`); 26 | } else if (error.code === "EACCES") { 27 | throw new Error(`Template file is not readable: ${templatePath}`); 28 | } else { 29 | throw new Error( 30 | `Template path validation failed: ${templatePath} - ${error.message}` 31 | ); 32 | } 33 | } 34 | } 35 | 36 | /** 37 | * Resolves template path with fallback options 38 | * @param {string} templateName - The base name of the template file 39 | * @param {string} target - Optional target suffix 40 | * @returns {Promise<{path: string, fileName: string}>} Resolved template info 41 | */ 42 | async function resolveTemplatePath(templateName, target = null) { 43 | if (!templateName || typeof templateName !== "string") { 44 | throw new Error(`Invalid template name: ${templateName}`); 45 | } 46 | 47 | const fileNameParts = [templateName]; 48 | if (target && typeof target === "string") { 49 | fileNameParts.push(target); 50 | } 51 | fileNameParts.push("template.js"); 52 | const templateFileName = fileNameParts.join("."); 53 | 54 | const templatesDir = path.join(__dirname, "templates"); 55 | const templatePath = path.join(templatesDir, templateFileName); 56 | 57 | try { 58 | await validateTemplatePath(templatePath); 59 | return { path: templatePath, fileName: templateFileName }; 60 | } catch (error) { 61 | if (target) { 62 | const baseFileName = [templateName, "template.js"].join("."); 63 | const basePath = path.join(templatesDir, baseFileName); 64 | try { 65 | await validateTemplatePath(basePath); 66 | debug( 67 | "Target-specific template not found: %s, using base template: %s", 68 | templateFileName, 69 | baseFileName 70 | ); 
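        // Fall back to the base template when no target-specific variant exists.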
71 | return { path: basePath, fileName: baseFileName }; 72 | } catch (baseError) { 73 | throw new Error( 74 | `Template resolution failed for '${templateName}' with target '${target}': ${error.message}. Base template also failed: ${baseError.message}` 75 | ); 76 | } 77 | } 78 | throw error; 79 | } 80 | } 81 | 82 | /** 83 | * Reads and caches a template file. 84 | * @param {string} templateName - The base name of the template file (e.g., 'polyfill'). 85 | * @param {string} target - Optional target suffix (e.g., 'userscript', 'vanilla'). 86 | * @returns {Promise} The content of the template file. 87 | * @throws {Error} If the template file cannot be read. 88 | */ 89 | async function getTemplate(templateName, target = null) { 90 | try { 91 | const { path: templatePath, fileName: templateFileName } = 92 | await resolveTemplatePath(templateName, target); 93 | 94 | const cacheKey = templateFileName; // Use full filename as cache key 95 | if (templateCache[cacheKey]) { 96 | return templateCache[cacheKey]; 97 | } 98 | 99 | debug("Reading template: %s", templateFileName); 100 | const content = await fs.readFile(templatePath, "utf-8"); 101 | 102 | if (!content || content.trim().length === 0) { 103 | throw new Error(`Template file is empty: ${templateFileName}`); 104 | } 105 | 106 | templateCache[cacheKey] = content; 107 | return content; 108 | } catch (error) { 109 | const errorMsg = `Failed to load template '${templateName}'${target ? ` with target '${target}'` : ""}: ${error.message}`; 110 | debug( 111 | "Failed to load template '%s'%s: %s", 112 | templateName, 113 | target ? ` with target '${target}'` : "", 114 | error.message 115 | ); 116 | throw new Error(errorMsg); 117 | } 118 | } 119 | 120 | async function getAbstractionLayerTemplate(target = "userscript") { 121 | return getTemplate("abstractionLayer", target); 122 | } 123 | 124 | async function getPolyfillTemplate() { 125 | return getTemplate("polyfill"); 126 | } 127 | 128 | async function getOrchestrationTemplate() { 129 | return getTemplate("orchestration"); 130 | } 131 | 132 | async function getMessagingTemplate(target = null) { 133 | return getTemplate("messaging", target); 134 | } 135 | 136 | module.exports = { 137 | getAbstractionLayerTemplate, 138 | getPolyfillTemplate, 139 | getOrchestrationTemplate, 140 | getMessagingTemplate, 141 | }; 142 | -------------------------------------------------------------------------------- /src/templates/abstractionLayer.handle_postmessage.template.js: -------------------------------------------------------------------------------- 1 | // --- Abstraction layer: Handle postmesage for 2 | (function () { 3 | const pendingRequests = new Map(); // requestId -> { resolve, reject, timeout } 4 | let nextRequestId = 1; 5 | 6 | window.addEventListener("message", async (event) => { 7 | const { type, requestId, method, args } = event.data; 8 | 9 | if (type === "abstraction-request") { 10 | try { 11 | let result; 12 | 13 | switch (method) { 14 | case "_storageSet": 15 | result = await _storageSet(args[0]); 16 | break; 17 | case "_storageGet": 18 | result = await _storageGet(args[0]); 19 | break; 20 | case "_storageRemove": 21 | result = await _storageRemove(args[0]); 22 | break; 23 | case "_storageClear": 24 | result = await _storageClear(); 25 | break; 26 | case "_cookieList": 27 | result = await _cookieList(args[0]); 28 | break; 29 | case "_cookieSet": 30 | result = await _cookieSet(args[0]); 31 | break; 32 | case "_cookieDelete": 33 | result = await _cookieDelete(args[0]); 34 | break; 35 | case "_fetch": 36 | 
result = await _fetch(args[0], args[1]); 37 | break; 38 | case "_registerMenuCommand": 39 | result = _registerMenuCommand(args[0], args[1]); 40 | break; 41 | case "_openTab": 42 | result = _openTab(args[0], args[1]); 43 | break; 44 | case "_initStorage": 45 | result = await _initStorage(); 46 | break; 47 | default: 48 | throw new Error(`Unknown abstraction method: ${method}`); 49 | } 50 | 51 | event.source.postMessage({ 52 | type: "abstraction-response", 53 | requestId, 54 | success: true, 55 | result, 56 | }); 57 | } catch (error) { 58 | event.source.postMessage({ 59 | type: "abstraction-response", 60 | requestId, 61 | success: false, 62 | error: { 63 | message: error.message, 64 | stack: error.stack, 65 | }, 66 | }); 67 | } 68 | } 69 | }); 70 | 71 | _log("[PostMessage Handler] Abstraction layer message handler initialized"); 72 | })(); 73 | -------------------------------------------------------------------------------- /src/templates/abstractionLayer.postmessage.template.js: -------------------------------------------------------------------------------- 1 | // --- Abstraction Layer: PostMessage Target 2 | 3 | let nextRequestId = 1; 4 | const pendingRequests = new Map(); // requestId -> { resolve, reject, timeout } 5 | 6 | function sendAbstractionRequest(method, args = []) { 7 | return new Promise((resolve, reject) => { 8 | const requestId = nextRequestId++; 9 | 10 | const timeout = setTimeout(() => { 11 | pendingRequests.delete(requestId); 12 | reject(new Error(`PostMessage request timeout for method: ${method}`)); 13 | }, 10000); 14 | 15 | pendingRequests.set(requestId, { resolve, reject, timeout }); 16 | 17 | window.parent.postMessage({ 18 | type: "abstraction-request", 19 | requestId, 20 | method, 21 | args, 22 | }); 23 | }); 24 | } 25 | 26 | window.addEventListener("message", (event) => { 27 | const { type, requestId, success, result, error } = event.data; 28 | 29 | if (type === "abstraction-response") { 30 | const pending = pendingRequests.get(requestId); 31 | if (pending) { 32 | clearTimeout(pending.timeout); 33 | pendingRequests.delete(requestId); 34 | 35 | if (success) { 36 | pending.resolve(result); 37 | } else { 38 | const err = new Error(error.message); 39 | err.stack = error.stack; 40 | pending.reject(err); 41 | } 42 | } 43 | } 44 | }); 45 | 46 | async function _storageSet(items) { 47 | return sendAbstractionRequest("_storageSet", [items]); 48 | } 49 | 50 | async function _storageGet(keys) { 51 | return sendAbstractionRequest("_storageGet", [keys]); 52 | } 53 | 54 | async function _storageRemove(keysToRemove) { 55 | return sendAbstractionRequest("_storageRemove", [keysToRemove]); 56 | } 57 | 58 | async function _storageClear() { 59 | return sendAbstractionRequest("_storageClear"); 60 | } 61 | 62 | async function _cookieList(details) { 63 | return sendAbstractionRequest("_cookieList", [details]); 64 | } 65 | 66 | async function _cookieSet(details) { 67 | return sendAbstractionRequest("_cookieSet", [details]); 68 | } 69 | 70 | async function _cookieDelete(details) { 71 | return sendAbstractionRequest("_cookieDelete", [details]); 72 | } 73 | 74 | async function _fetch(url, options) { 75 | return sendAbstractionRequest("_fetch", [url, options]); 76 | } 77 | 78 | function _registerMenuCommand(name, func) { 79 | _warn("_registerMenuCommand called from iframe context:", name); 80 | return sendAbstractionRequest("_registerMenuCommand", [ 81 | name, 82 | func.toString(), 83 | ]); 84 | } 85 | 86 | function _openTab(url, active) { 87 | return sendAbstractionRequest("_openTab", [url, 
active]); 88 | } 89 | 90 | async function _initStorage() { 91 | return sendAbstractionRequest("_initStorage"); 92 | } 93 | -------------------------------------------------------------------------------- /src/templates/abstractionLayer.userscript.template.js: -------------------------------------------------------------------------------- 1 | // --- Abstraction Layer: Userscript Target 2 | 3 | async function _storageSet(items) { 4 | try { 5 | for (const key in items) { 6 | if (items.hasOwnProperty(key)) { 7 | await GM_setValue(key, items[key]); 8 | } 9 | } 10 | return Promise.resolve(); 11 | } catch (e) { 12 | _error("GM_setValue error:", e); 13 | return Promise.reject(e); 14 | } 15 | } 16 | 17 | async function _storageGet(keys) { 18 | if (!keys) { 19 | keys = null; 20 | } 21 | if ( 22 | Array.isArray(keys) && 23 | (keys.length === 0 || [null, undefined].includes(keys[0])) 24 | ) { 25 | keys = null; 26 | } 27 | try { 28 | const results = {}; 29 | let keyList = []; 30 | let defaults = {}; 31 | let requestedKeys = []; 32 | 33 | if (keys === null) { 34 | keyList = await GM_listValues(); 35 | requestedKeys = [...keyList]; 36 | } else if (typeof keys === "string") { 37 | keyList = [keys]; 38 | requestedKeys = [keys]; 39 | } else if (Array.isArray(keys)) { 40 | keyList = keys; 41 | requestedKeys = [...keys]; 42 | } else if (typeof keys === "object" && keys !== null) { 43 | keyList = Object.keys(keys); 44 | requestedKeys = [...keyList]; 45 | defaults = keys; 46 | } else { 47 | _error("_storageGet error: Invalid keys format", keys); 48 | return Promise.reject(new Error("Invalid keys format for get")); 49 | } 50 | 51 | for (const key of keyList) { 52 | const defaultValue = defaults.hasOwnProperty(key) 53 | ? defaults[key] 54 | : undefined; 55 | const storedValue = await GM_getValue(key, defaultValue); 56 | results[key] = storedValue; 57 | } 58 | 59 | const finalResult = {}; 60 | for (const key of requestedKeys) { 61 | if (results.hasOwnProperty(key)) { 62 | finalResult[key] = results[key]; 63 | } else if (defaults.hasOwnProperty(key)) { 64 | finalResult[key] = defaults[key]; 65 | } 66 | } 67 | 68 | return Promise.resolve(finalResult); 69 | } catch (e) { 70 | _error("GM_getValue/GM_listValues error:", e); 71 | return Promise.reject(e); 72 | } 73 | } 74 | 75 | async function _storageRemove(keysToRemove) { 76 | try { 77 | let keyList = []; 78 | if (typeof keysToRemove === "string") { 79 | keyList = [keysToRemove]; 80 | } else if (Array.isArray(keysToRemove)) { 81 | keyList = keysToRemove; 82 | } else { 83 | _error("_storageRemove error: Invalid keys format", keysToRemove); 84 | return Promise.reject(new Error("Invalid keys format for remove")); 85 | } 86 | 87 | for (const key of keyList) { 88 | await GM_deleteValue(key); 89 | } 90 | return Promise.resolve(); 91 | } catch (e) { 92 | _error("GM_deleteValue error:", e); 93 | return Promise.reject(e); 94 | } 95 | } 96 | 97 | async function _storageClear() { 98 | try { 99 | const keys = await GM_listValues(); 100 | await Promise.all(keys.map((key) => GM_deleteValue(key))); 101 | return Promise.resolve(); 102 | } catch (e) { 103 | _error("GM_listValues/GM_deleteValue error during clear:", e); 104 | return Promise.reject(e); 105 | } 106 | } 107 | 108 | async function _cookieList(details) { 109 | return new Promise((resolve, reject) => { 110 | if (typeof GM_cookie === "undefined" || !GM_cookie.list) { 111 | return reject(new Error("GM_cookie.list is not available.")); 112 | } 113 | GM_cookie.list(details, (cookies, error) => { 114 | if (error) { 115 | return 
reject(new Error(error)); 116 | } 117 | resolve(cookies); 118 | }); 119 | }); 120 | } 121 | 122 | async function _cookieSet(details) { 123 | return new Promise((resolve, reject) => { 124 | if (typeof GM_cookie === "undefined" || !GM_cookie.set) { 125 | return reject(new Error("GM_cookie.set is not available.")); 126 | } 127 | GM_cookie.set(details, (error) => { 128 | if (error) { 129 | return reject(new Error(error)); 130 | } 131 | resolve(); 132 | }); 133 | }); 134 | } 135 | 136 | async function _cookieDelete(details) { 137 | return new Promise((resolve, reject) => { 138 | if (typeof GM_cookie === "undefined" || !GM_cookie.delete) { 139 | return reject(new Error("GM_cookie.delete is not available.")); 140 | } 141 | GM_cookie.delete(details, (error) => { 142 | if (error) { 143 | return reject(new Error(error)); 144 | } 145 | resolve(); 146 | }); 147 | }); 148 | } 149 | 150 | async function _fetch(url, options = {}) { 151 | return new Promise((resolve, reject) => { 152 | try { 153 | GM_xmlhttpRequest({ 154 | method: options.method || "GET", 155 | url: url, 156 | headers: options.headers || {}, 157 | data: options.body, 158 | responseType: options.responseType, 159 | timeout: options.timeout || 0, 160 | binary: 161 | options.responseType === "blob" || 162 | options.responseType === "arraybuffer", 163 | onload: function (response) { 164 | const responseHeaders = {}; 165 | if (response.responseHeaders) { 166 | response.responseHeaders 167 | .trim() 168 | .split("\\r\\n") 169 | .forEach((header) => { 170 | const parts = header.match(/^([^:]+):\s*(.*)$/); 171 | if (parts && parts.length === 3) { 172 | responseHeaders[parts[1].toLowerCase()] = parts[2]; 173 | } 174 | }); 175 | } 176 | 177 | const mockResponse = { 178 | ok: response.status >= 200 && response.status < 300, 179 | status: response.status, 180 | statusText: 181 | response.statusText || 182 | (response.status >= 200 && response.status < 300 ? "OK" : ""), 183 | url: response.finalUrl || url, 184 | headers: new Headers(responseHeaders), 185 | text: () => Promise.resolve(response.responseText), 186 | json: () => { 187 | try { 188 | return Promise.resolve(JSON.parse(response.responseText)); 189 | } catch (e) { 190 | return Promise.reject(new SyntaxError("Could not parse JSON")); 191 | } 192 | }, 193 | blob: () => { 194 | if (response.response instanceof Blob) { 195 | return Promise.resolve(response.response); 196 | } 197 | return Promise.reject( 198 | new Error("Requires responseType:'blob' in GM_xmlhttpRequest") 199 | ); 200 | }, 201 | arrayBuffer: () => { 202 | if (response.response instanceof ArrayBuffer) { 203 | return Promise.resolve(response.response); 204 | } 205 | return Promise.reject( 206 | new Error( 207 | "Requires responseType:'arraybuffer' in GM_xmlhttpRequest" 208 | ) 209 | ); 210 | }, 211 | clone: function () { 212 | const cloned = { ...this }; 213 | cloned.text = () => Promise.resolve(response.responseText); 214 | cloned.json = () => this.json(); 215 | cloned.blob = () => this.blob(); 216 | cloned.arrayBuffer = () => this.arrayBuffer(); 217 | return cloned; 218 | }, 219 | }; 220 | 221 | if (mockResponse.ok) { 222 | resolve(mockResponse); 223 | } else { 224 | const error = new Error(`HTTP error! 
status: ${response.status}`); 225 | error.response = mockResponse; 226 | reject(error); 227 | } 228 | }, 229 | onerror: function (response) { 230 | reject( 231 | new Error( 232 | `GM_xmlhttpRequest network error: ${ 233 | response.statusText || "Unknown Error" 234 | }` 235 | ) 236 | ); 237 | }, 238 | onabort: function () { 239 | reject(new Error("GM_xmlhttpRequest aborted")); 240 | }, 241 | ontimeout: function () { 242 | reject(new Error("GM_xmlhttpRequest timed out")); 243 | }, 244 | }); 245 | } catch (e) { 246 | _error("_fetch (GM_xmlhttpRequest) error:", e); 247 | reject(e); 248 | } 249 | }); 250 | } 251 | 252 | function _registerMenuCommand(name, func) { 253 | if (typeof GM_registerMenuCommand === "function") { 254 | try { 255 | GM_registerMenuCommand(name, func); 256 | } catch (e) { 257 | _error("GM_registerMenuCommand failed:", e); 258 | } 259 | } else { 260 | _warn("GM_registerMenuCommand not available."); 261 | } 262 | } 263 | 264 | function _openTab(url, active) { 265 | if (typeof GM_openInTab === "function") { 266 | try { 267 | GM_openInTab(url, { loadInBackground: !active }); 268 | } catch (e) { 269 | _error("GM_openInTab failed:", e); 270 | } 271 | } else { 272 | _warn("GM_openInTab not available, using window.open as fallback."); 273 | try { 274 | window.open(url); 275 | } catch (e) { 276 | _error("window.open fallback failed:", e); 277 | } 278 | } 279 | } 280 | 281 | async function _initStorage() { 282 | return Promise.resolve(); 283 | } 284 | -------------------------------------------------------------------------------- /src/templates/messaging.template.js: -------------------------------------------------------------------------------- 1 | // -- Messaging implementation 2 | 3 | function createEventBus( 4 | scopeId, 5 | type = "page", // "page" or "iframe" 6 | { allowedOrigin = "*", children = [], parentWindow = null } = {} 7 | ) { 8 | if (!scopeId) throw new Error("createEventBus requires a scopeId"); 9 | 10 | const handlers = {}; 11 | 12 | function handleIncoming(ev) { 13 | if (allowedOrigin !== "*" && ev.origin !== allowedOrigin) return; 14 | 15 | const msg = ev.data; 16 | if (!msg || msg.__eventBus !== true || msg.scopeId !== scopeId) return; 17 | 18 | const { event, payload } = msg; 19 | 20 | // PAGE: if it's an INIT from an iframe, adopt it 21 | if (type === "page" && event === "__INIT__") { 22 | const win = ev.source; 23 | if (win && !children.includes(win)) { 24 | children.push(win); 25 | } 26 | return; 27 | } 28 | 29 | (handlers[event] || []).forEach((fn) => 30 | fn(payload, { origin: ev.origin, source: ev.source }) 31 | ); 32 | } 33 | 34 | window.addEventListener("message", handleIncoming); 35 | 36 | function emitTo(win, event, payload) { 37 | const envelope = { 38 | __eventBus: true, 39 | scopeId, 40 | event, 41 | payload, 42 | }; 43 | win.postMessage(envelope, allowedOrigin); 44 | } 45 | 46 | // IFRAME: announce to page on startup 47 | if (type === "iframe") { 48 | setTimeout(() => { 49 | const pw = parentWindow || window.parent; 50 | if (pw && pw.postMessage) { 51 | emitTo(pw, "__INIT__", null); 52 | } 53 | }, 0); 54 | } 55 | 56 | return { 57 | on(event, fn) { 58 | handlers[event] = handlers[event] || []; 59 | handlers[event].push(fn); 60 | }, 61 | off(event, fn) { 62 | if (!handlers[event]) return; 63 | handlers[event] = handlers[event].filter((h) => h !== fn); 64 | }, 65 | /** 66 | * Emits an event. 67 | * @param {string} event - The event name. 68 | * @param {any} payload - The event payload. 69 | * @param {object} [options] - Emission options. 
70 | * @param {Window} [options.to] - A specific window to target. If provided, message is ONLY sent to the target. 71 | */ 72 | emit(event, payload, { to } = {}) { 73 | // If a specific target window is provided, send only to it and DO NOT dispatch locally. 74 | // This prevents a port from receiving its own messages. 75 | if (to) { 76 | if (to && typeof to.postMessage === "function") { 77 | emitTo(to, event, payload); 78 | } 79 | return; // Exit after targeted send. 80 | } 81 | 82 | // For broadcast messages (no 'to' target), dispatch locally first. 83 | (handlers[event] || []).forEach((fn) => 84 | fn(payload, { origin: location.origin, source: window }) 85 | ); 86 | 87 | // Then propagate the broadcast to other windows. 88 | if (type === "page") { 89 | children.forEach((win) => emitTo(win, event, payload)); 90 | } else { 91 | const pw = parentWindow || window.parent; 92 | if (pw && pw.postMessage) { 93 | emitTo(pw, event, payload); 94 | } 95 | } 96 | }, 97 | }; 98 | } 99 | 100 | function createRuntime(type = "background", bus) { 101 | let nextId = 1; 102 | const pending = {}; 103 | const msgListeners = []; 104 | 105 | let nextPortId = 1; 106 | const ports = {}; 107 | const onConnectListeners = []; 108 | 109 | function parseArgs(args) { 110 | let target, message, options, callback; 111 | const arr = [...args]; 112 | if (arr.length === 0) { 113 | throw new Error("sendMessage requires at least one argument"); 114 | } 115 | if (arr.length === 1) { 116 | return { message: arr[0] }; 117 | } 118 | // last object could be options 119 | if ( 120 | arr.length && 121 | typeof arr[arr.length - 1] === "object" && 122 | !Array.isArray(arr[arr.length - 1]) 123 | ) { 124 | options = arr.pop(); 125 | } 126 | // last function is callback 127 | if (arr.length && typeof arr[arr.length - 1] === "function") { 128 | callback = arr.pop(); 129 | } 130 | if ( 131 | arr.length === 2 && 132 | (typeof arr[0] === "string" || typeof arr[0] === "number") 133 | ) { 134 | [target, message] = arr; 135 | } else { 136 | [message] = arr; 137 | } 138 | return { target, message, options, callback }; 139 | } 140 | 141 | if (type === "background") { 142 | bus.on("__REQUEST__", ({ id, message }, { source }) => { 143 | let responded = false, 144 | isAsync = false; 145 | function sendResponse(resp) { 146 | if (responded) return; 147 | responded = true; 148 | // Target the response directly back to the window that sent the request. 149 | bus.emit("__RESPONSE__", { id, response: resp }, { to: source }); 150 | } 151 | const results = msgListeners 152 | .map((fn) => { 153 | try { 154 | // msg, sender, sendResponse 155 | const ret = fn(message, { id, tab: { id: source } }, sendResponse); 156 | if (ret === true || (ret && typeof ret.then === "function")) { 157 | isAsync = true; 158 | return ret; 159 | } 160 | return ret; 161 | } catch (e) { 162 | _error(e); 163 | } 164 | }) 165 | .filter((r) => r !== undefined); 166 | 167 | const promises = results.filter((r) => r && typeof r.then === "function"); 168 | if (!isAsync && promises.length === 0) { 169 | const out = results.length === 1 ? results[0] : results; 170 | sendResponse(out); 171 | } else if (promises.length) { 172 | Promise.all(promises).then((vals) => { 173 | if (!responded) { 174 | const out = vals.length === 1 ? 
vals[0] : vals; 175 | sendResponse(out); 176 | } 177 | }); 178 | } 179 | }); 180 | } 181 | 182 | if (type !== "background") { 183 | bus.on("__RESPONSE__", ({ id, response }) => { 184 | const entry = pending[id]; 185 | if (!entry) return; 186 | entry.resolve(response); 187 | if (entry.callback) entry.callback(response); 188 | delete pending[id]; 189 | }); 190 | } 191 | 192 | function sendMessage(...args) { 193 | // Background should be able to send message to itself 194 | // if (type === "background") { 195 | // throw new Error("Background cannot sendMessage to itself"); 196 | // } 197 | const { target, message, callback } = parseArgs(args); 198 | const id = nextId++; 199 | const promise = new Promise((resolve) => { 200 | pending[id] = { resolve, callback }; 201 | bus.emit("__REQUEST__", { id, message }); 202 | }); 203 | return promise; 204 | } 205 | 206 | bus.on("__PORT_CONNECT__", ({ portId, name }, { source }) => { 207 | if (type !== "background") return; 208 | const backgroundPort = makePort("background", portId, name, source); 209 | ports[portId] = backgroundPort; 210 | 211 | onConnectListeners.forEach((fn) => fn(backgroundPort)); 212 | 213 | // send back a CONNECT_ACK so the client can 214 | // start listening on its end: 215 | bus.emit("__PORT_CONNECT_ACK__", { portId, name }, { to: source }); 216 | }); 217 | 218 | // Clients handle the ACK and finalize their Port object by learning the remote window. 219 | bus.on("__PORT_CONNECT_ACK__", ({ portId, name }, { source }) => { 220 | if (type === "background") return; // ignore 221 | const p = ports[portId]; 222 | if (!p) return; 223 | // Call the port's internal finalize method to complete the handshake 224 | if (p._finalize) { 225 | p._finalize(source); 226 | } 227 | }); 228 | 229 | // Any port message travels via "__PORT_MESSAGE__" 230 | bus.on("__PORT_MESSAGE__", (envelope, { source }) => { 231 | const { portId } = envelope; 232 | const p = ports[portId]; 233 | if (!p) return; 234 | p._receive(envelope, source); 235 | }); 236 | 237 | // Any port disconnect: 238 | bus.on("__PORT_DISCONNECT__", ({ portId }) => { 239 | const p = ports[portId]; 240 | if (!p) return; 241 | p._disconnect(); 242 | delete ports[portId]; 243 | }); 244 | 245 | // Refactored makePort to correctly manage internal state and the connection handshake. 246 | function makePort(side, portId, name, remoteWindow) { 247 | let onMessageHandlers = []; 248 | let onDisconnectHandlers = []; 249 | let buffer = []; 250 | // Unique instance ID for this port instance 251 | const instanceId = Math.random().toString(36).slice(2) + Date.now(); 252 | // These state variables are part of the closure and are updated by _finalize 253 | let _ready = side === "background"; 254 | 255 | function _drainBuffer() { 256 | buffer.forEach((m) => _post(m)); 257 | buffer = []; 258 | } 259 | 260 | function _post(msg) { 261 | // Always use the 'to' parameter for port messages, making them directional. 
262 | // Include senderInstanceId 263 | bus.emit( 264 | "__PORT_MESSAGE__", 265 | { portId, msg, senderInstanceId: instanceId }, 266 | { to: remoteWindow } 267 | ); 268 | } 269 | 270 | function postMessage(msg) { 271 | if (!_ready) { 272 | buffer.push(msg); 273 | } else { 274 | _post(msg); 275 | } 276 | } 277 | 278 | function _receive(envelope, source) { 279 | // envelope: { msg, senderInstanceId } 280 | if (envelope.senderInstanceId === instanceId) return; // Don't dispatch to self 281 | onMessageHandlers.forEach((fn) => 282 | fn(envelope.msg, { id: portId, tab: { id: source } }) 283 | ); 284 | } 285 | 286 | function disconnect() { 287 | // Also use the 'to' parameter for disconnect messages 288 | bus.emit("__PORT_DISCONNECT__", { portId }, { to: remoteWindow }); 289 | _disconnect(); 290 | delete ports[portId]; 291 | } 292 | 293 | function _disconnect() { 294 | onDisconnectHandlers.forEach((fn) => fn()); 295 | onMessageHandlers = []; 296 | onDisconnectHandlers = []; 297 | } 298 | 299 | // This function is called on the client port when the ACK is received from background. 300 | // It updates the port's state, completing the connection. 301 | function _finalize(win) { 302 | remoteWindow = win; // <-- This is the crucial part: learn the destination 303 | _ready = true; 304 | _drainBuffer(); 305 | } 306 | 307 | return { 308 | name, 309 | sender: { 310 | id: portId, 311 | }, 312 | onMessage: { 313 | addListener(fn) { 314 | onMessageHandlers.push(fn); 315 | }, 316 | removeListener(fn) { 317 | onMessageHandlers = onMessageHandlers.filter((x) => x !== fn); 318 | }, 319 | }, 320 | onDisconnect: { 321 | addListener(fn) { 322 | onDisconnectHandlers.push(fn); 323 | }, 324 | removeListener(fn) { 325 | onDisconnectHandlers = onDisconnectHandlers.filter((x) => x !== fn); 326 | }, 327 | }, 328 | postMessage, 329 | disconnect, 330 | // Internal methods used by the runtime 331 | _receive, 332 | _disconnect, 333 | _finalize, // Expose the finalizer for the ACK handler 334 | }; 335 | } 336 | 337 | function connect(connectInfo = {}) { 338 | if (type === "background") { 339 | throw new Error("Background must use onConnect, not connect()"); 340 | } 341 | const name = connectInfo.name || ""; 342 | const portId = nextPortId++; 343 | // create the client side port 344 | // remoteWindow is initially null; it will be set by _finalize upon ACK. 
345 | const clientPort = makePort("client", portId, name, null); 346 | ports[portId] = clientPort; 347 | 348 | // fire the connect event across the bus 349 | bus.emit("__PORT_CONNECT__", { portId, name }); 350 | return clientPort; 351 | } 352 | 353 | function onConnect(fn) { 354 | if (type !== "background") { 355 | throw new Error("connect event only fires in background"); 356 | } 357 | onConnectListeners.push(fn); 358 | } 359 | 360 | return { 361 | // rpc: 362 | sendMessage, 363 | onMessage: { 364 | addListener(fn) { 365 | msgListeners.push(fn); 366 | }, 367 | removeListener(fn) { 368 | const i = msgListeners.indexOf(fn); 369 | if (i >= 0) msgListeners.splice(i, 1); 370 | }, 371 | }, 372 | 373 | // port API: 374 | connect, 375 | onConnect: { 376 | addListener(fn) { 377 | onConnect(fn); 378 | }, 379 | removeListener(fn) { 380 | const i = onConnectListeners.indexOf(fn); 381 | if (i >= 0) onConnectListeners.splice(i, 1); 382 | }, 383 | }, 384 | }; 385 | } 386 | -------------------------------------------------------------------------------- /src/templates/trustedTypes.template.js: -------------------------------------------------------------------------------- 1 | // Needed on some sites for scripts to set .innerHTML of things. 2 | console.log("Script start:", performance.now()); 3 | const overwrite_default = true; 4 | const passThroughFunc = (string) => string; 5 | const TTPName = "passthrough"; 6 | let TTP_default, 7 | TTP = { 8 | createHTML: passThroughFunc, 9 | createScript: passThroughFunc, 10 | createScriptURL: passThroughFunc, 11 | }; 12 | let needsTrustedHTML = false; 13 | 14 | const doit = () => { 15 | try { 16 | if ( 17 | typeof window.isSecureContext !== "undefined" && 18 | window.isSecureContext 19 | ) { 20 | if (window.trustedTypes && window.trustedTypes.createPolicy) { 21 | needsTrustedHTML = true; 22 | if (trustedTypes.defaultPolicy) { 23 | log("TT Default Policy exists"); 24 | if (overwrite_default) 25 | TTP = window.trustedTypes.createPolicy("default", TTP); 26 | else TTP = window.trustedTypes.createPolicy(TTPName, TTP); 27 | TTP_default = trustedTypes.defaultPolicy; 28 | log( 29 | `Created custom passthrough policy, in case the default policy is too restrictive: Use Policy '${TTPName}' in var 'TTP':`, 30 | TTP 31 | ); 32 | } else { 33 | TTP_default = TTP = window.trustedTypes.createPolicy("default", TTP); 34 | } 35 | log("Trusted-Type Policies: TTP:", TTP, "TTP_default:", TTP_default); 36 | } 37 | } 38 | } catch (e) { 39 | log(e); 40 | } 41 | }; 42 | 43 | const log = (...args) => { 44 | console.log(...args); 45 | }; 46 | 47 | doit(); 48 | -------------------------------------------------------------------------------- /src/utils.js: -------------------------------------------------------------------------------- 1 | const { readdirSync, readFileSync } = require("fs"); 2 | const path = require("path"); 3 | const debug = require("debug")("to-userscript:utils"); 4 | 5 | /** 6 | * Escapes characters in a string that have special meaning in regular expressions. 7 | * @param {string} s The string to escape. 8 | * @returns {string} The escaped string. 9 | */ 10 | function escapeRegex(s) { 11 | return s.replace(/[.*+?^${}()|[\]\\]/g, "\\\\$&"); 12 | } 13 | 14 | /** 15 | * Converts a WebExtension match pattern string to a RegExp pattern string. 16 | * Handles schemes (*, http, https, file, ftp), host (*, *.domain, specific), and path (/*). 17 | * @param {string} pattern The match pattern string. 
18 | * @returns {string} A string representation of the RegExp pattern, suitable for new RegExp(). 19 | * Needs double escaping for embedding in generated code strings. 20 | */ 21 | function convertMatchPatternToRegExpString(pattern) { 22 | function escapeRegex(s) { 23 | return s.replace(/[.*+?^${}()|[\]\\]/g, "\\\\$&"); 24 | } 25 | 26 | if (typeof pattern !== "string" || !pattern) { 27 | return "$."; // Matches nothing 28 | } 29 | 30 | const schemeMatch = pattern.match(/^(\*|https?|file|ftp):\/\//); 31 | if (!schemeMatch) return "$."; // Invalid pattern 32 | const scheme = schemeMatch[1]; 33 | pattern = pattern.substring(schemeMatch[0].length); 34 | const schemeRegex = scheme === "*" ? "https?|file|ftp" : scheme; 35 | 36 | const hostMatch = pattern.match(/^([^\/]+)/); 37 | if (!hostMatch) return "$."; // Invalid pattern 38 | const host = hostMatch[1]; 39 | pattern = pattern.substring(host.length); // Remainder is path 40 | 41 | let hostRegex; 42 | if (host === "*") { 43 | hostRegex = "[^/]+"; // Matches any sequence of non-slash characters 44 | } else if (host.startsWith("*.")) { 45 | // Match any subdomain or the main domain 46 | hostRegex = "(?:[^\\/]+\\.)?" + escapeRegex(host.substring(2)); 47 | } else { 48 | hostRegex = escapeRegex(host); // Exact host match 49 | } 50 | 51 | let pathRegex = pattern; 52 | if (!pathRegex.startsWith("/")) { 53 | pathRegex = "/" + pathRegex; // Ensure path starts with / 54 | } 55 | // Convert glob (*) to regex (.*) and escape other special chars 56 | pathRegex = pathRegex.split("*").map(escapeRegex).join(".*"); 57 | 58 | // Ensure the pattern covers the entire path segment correctly 59 | if (pathRegex === "/.*") { 60 | // Equivalent to /* in manifest, matches the root and anything after 61 | pathRegex = "(?:/.*)?"; 62 | } else { 63 | // Match the specific path and optionally query/hash or end of string 64 | pathRegex = pathRegex + "(?:[?#]|$)"; 65 | } 66 | 67 | // Combine and return the pattern string 68 | // Needs double escaping for direct embedding in generated JS strings 69 | const finalRegexString = `^${schemeRegex}:\\/\\/${hostRegex}${pathRegex}`; 70 | return finalRegexString; 71 | } 72 | 73 | /** 74 | * Creates a RegExp object from a match pattern string. 75 | * @param {string} pattern The match pattern string. 76 | * @returns {RegExp} The corresponding regular expression. 77 | */ 78 | function convertMatchPatternToRegExp(pattern) { 79 | if (pattern === "") { 80 | return new RegExp(".*"); 81 | } 82 | try { 83 | const singleEscapedPattern = convertMatchPatternToRegExpString( 84 | pattern 85 | ).replace(/\\\\/g, "\\"); 86 | return new RegExp(singleEscapedPattern); 87 | } catch (error) { 88 | debug( 89 | "Error converting match pattern to RegExp: %s, Error: %s", 90 | pattern, 91 | error.message 92 | ); 93 | return new RegExp("$."); // Matches nothing on error 94 | } 95 | } 96 | 97 | /** 98 | * Normalizes a file path for consistent comparisons. 99 | * Replaces backslashes with forward slashes. 100 | * @param {string} filePath The file path to normalize. 101 | * @returns {string} The normalized file path. 
102 | */ 103 | function normalizePath(filePath) { 104 | return path.normalize(filePath).replace(/\\/g, "/"); 105 | } 106 | 107 | const scriptBlacklist = { 108 | "browser-polyfill.js": "", 109 | "web-ext-polyfill.js": "", 110 | ...Object.fromEntries( 111 | readdirSync(path.join(__dirname, "patches")).map((file) => [ 112 | file, 113 | readFileSync(path.join(__dirname, "patches", file), "utf8"), 114 | ]) 115 | ), 116 | }; 117 | 118 | /** 119 | * Simple glob pattern matcher for web_accessible_resources 120 | * Supports * for any characters and ** for recursive directory matching 121 | * @param {string} pattern - The glob pattern 122 | * @param {string} path - The path to test 123 | * @returns {boolean} Whether the path matches the pattern 124 | */ 125 | function matchGlobPattern(pattern, path) { 126 | if (!pattern || !path) return false; 127 | 128 | // Normalize paths to use forward slashes 129 | pattern = pattern.replace(/\\/g, "/"); 130 | path = path.replace(/\\/g, "/"); 131 | 132 | // Handle exact matches first 133 | if (pattern === path) return true; 134 | 135 | // Convert glob pattern to regex 136 | // Escape special regex chars except * and ** 137 | let regexPattern = pattern 138 | .replace(/[.+?^${}()|[\]\\]/g, "\\$&") // Escape regex chars 139 | .replace(/\*\*/g, "__DOUBLESTAR__") // Temporarily replace ** 140 | .replace(/\*/g, "[^/]*") // * matches any chars except / 141 | .replace(/__DOUBLESTAR__/g, ".*"); // ** matches any chars including / 142 | 143 | // Ensure pattern matches from start to end 144 | regexPattern = "^" + regexPattern + "$"; 145 | 146 | try { 147 | const regex = new RegExp(regexPattern); 148 | return regex.test(path); 149 | } catch (e) { 150 | debug("Invalid glob pattern: %s, Error: %s", pattern, e.message); 151 | return false; 152 | } 153 | } 154 | 155 | /** 156 | * Checks if a resource path matches any of the web_accessible_resources patterns 157 | * @param {string} resourcePath - The resource path to check 158 | * @param {Array} webAccessibleResources - Array of web_accessible_resources from manifest 159 | * @returns {boolean} Whether the resource is web accessible 160 | */ 161 | function isWebAccessibleResource(resourcePath, webAccessibleResources) { 162 | if ( 163 | !Array.isArray(webAccessibleResources) || 164 | webAccessibleResources.length === 0 165 | ) { 166 | return false; 167 | } 168 | 169 | // Normalize the resource path 170 | const normalizedPath = normalizePath(resourcePath).replace(/^\/+/, ""); 171 | 172 | for (const webAccessibleResource of webAccessibleResources) { 173 | let patterns = []; 174 | 175 | // Handle both manifest v2 and v3 formats 176 | if (typeof webAccessibleResource === "string") { 177 | // Manifest v2 format: array of strings 178 | patterns = [webAccessibleResource]; 179 | } else if ( 180 | webAccessibleResource && 181 | Array.isArray(webAccessibleResource.resources) 182 | ) { 183 | // Manifest v3 format: objects with resources array 184 | patterns = webAccessibleResource.resources; 185 | } 186 | 187 | // Check if the path matches any pattern 188 | for (const pattern of patterns) { 189 | if (matchGlobPattern(pattern, normalizedPath)) { 190 | return true; 191 | } 192 | } 193 | } 194 | 195 | return false; 196 | } 197 | 198 | function replaceComments(code) { 199 | const lines = code.split("\n"); 200 | const output = []; 201 | const stack = []; // { level, title, indent, startIndex, contentCount } 202 | 203 | const headerRegex = /^(\s*)\/\/\s*(-+)\s*(.+)$/; 204 | 205 | lines.forEach((line) => { 206 | const match = 
line.match(headerRegex); 207 | if (match) { 208 | const indent = match[1] || ""; 209 | const level = match[2].length; 210 | // Sanitize title: replace non-alphanumeric with space, collapse spaces 211 | let title = match[3] 212 | .trim() 213 | .replace(/[^a-zA-Z0-9 \-_]/g, " ") 214 | .replace(/\s+/g, " ") 215 | .trim(); 216 | 217 | // Close regions of same or deeper level 218 | while (stack.length && stack[stack.length - 1].level >= level) { 219 | const region = stack.pop(); 220 | if (region.contentCount > 0) { 221 | output.push(`${region.indent}// #endregion`); 222 | } else { 223 | // remove empty region opening 224 | output.splice(region.startIndex, 1); 225 | } 226 | } 227 | 228 | // Open new region 229 | const regionIndex = output.length; 230 | output.push(`${indent}// #region ${title}`); 231 | stack.push({ 232 | level, 233 | title, 234 | indent, 235 | startIndex: regionIndex, 236 | contentCount: 0, 237 | }); 238 | } else { 239 | // Regular line: add accumulated indentation from all open regions 240 | let accumulatedIndent = ""; 241 | stack.forEach(() => { 242 | accumulatedIndent += "\t"; 243 | }); 244 | 245 | // Apply accumulated indentation to the line 246 | const indentedLine = accumulatedIndent + line; 247 | output.push(indentedLine); 248 | stack.forEach((region) => region.contentCount++); 249 | } 250 | }); 251 | 252 | // Close any remaining open regions 253 | while (stack.length) { 254 | const region = stack.pop(); 255 | if (region.contentCount > 0) { 256 | output.push(`${region.indent}// #endregion`); 257 | } else { 258 | output.splice(region.startIndex, 1); 259 | } 260 | } 261 | 262 | return output.join("\n"); 263 | } 264 | 265 | module.exports = { 266 | convertMatchPatternToRegExpString, 267 | convertMatchPatternToRegExp, 268 | normalizePath, 269 | escapeRegex, 270 | scriptBlacklist, 271 | matchGlobPattern, 272 | isWebAccessibleResource, 273 | replaceComments, 274 | }; 275 | --------------------------------------------------------------------------------
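For quick experimentation, the exported helpers in `src/utils.js` can be driven from a small Node script. A minimal sketch follows; the file name, the require path, and the sample patterns are assumptions for illustration, not part of the repository:

```js
// usage-sketch.js — illustrative only; assumes it sits at the repo root so
// that require("./src/utils") resolves.
const {
  convertMatchPatternToRegExp,
  matchGlobPattern,
  isWebAccessibleResource,
} = require("./src/utils");

// Match patterns: "*.example.com" covers the bare domain and any subdomain.
const re = convertMatchPatternToRegExp("https://*.example.com/*");
console.log(re.test("https://docs.example.com/page")); // true
console.log(re.test("http://example.com/"));           // false (scheme mismatch)

// Glob matching for web_accessible_resources: "*" stays inside one path
// segment, "**" may cross directory boundaries.
console.log(matchGlobPattern("images/*.png", "images/logo.png"));          // true
console.log(matchGlobPattern("images/*.png", "images/icons/logo.png"));    // false
console.log(matchGlobPattern("assets/**/*.png", "assets/icons/16/a.png")); // true

// Manifest V3-style web_accessible_resources entry.
console.log(
  isWebAccessibleResource("icons/logo.png", [{ resources: ["icons/*.png"] }])
); // true
```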
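The messaging template above implements a chrome.runtime-style request/response flow on top of `window.postMessage`. A rough sketch of how the two roles could be wired up is shown below; the scope id, the `_error` stub, and running both roles in one window are illustrative assumptions — in generated output these functions are inlined and the client side runs inside iframes:

```js
// Stub for the template's logger, in case the generated bundle's helpers
// are not in scope (skip if _error is already defined).
const _error = console.error;

// --- Top page: plays the "background" role ---------------------------------
const pageBus = createEventBus("demo-scope", "page");
const background = createRuntime("background", pageBus);
background.onMessage.addListener((msg, sender, sendResponse) => {
  if (msg.type === "ping") sendResponse({ type: "pong" });
});

// --- Iframe side: plays a content-script-like client -----------------------
const frameBus = createEventBus("demo-scope", "iframe");
const client = createRuntime("content", frameBus);
client.sendMessage({ type: "ping" }).then((reply) => {
  console.log(reply); // { type: "pong" }
});
```

The `__INIT__` handshake lets the page side learn about child frames for broadcasts, while targeted replies use the `to` option so a responder never receives its own message.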