├── .gitignore ├── docs └── api_key.png ├── .eslintignore ├── .eslintrc.js ├── src ├── types.ts ├── utils.ts ├── upload.ts └── inputs.ts ├── tsconfig.json ├── package.json ├── LICENSE ├── action.yml ├── README.md └── dist └── 886.index.js /.gitignore: -------------------------------------------------------------------------------- 1 | .idea 2 | node_modules 3 | lib 4 | -------------------------------------------------------------------------------- /docs/api_key.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/EmergeTools/emerge-upload-action/HEAD/docs/api_key.png -------------------------------------------------------------------------------- /.eslintignore: -------------------------------------------------------------------------------- 1 | # Don't lint node_modules 2 | node_modules 3 | 4 | # Don't lint build output 5 | dist 6 | build 7 | 8 | # Ignore eslintrc 9 | .eslintrc.js -------------------------------------------------------------------------------- /.eslintrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | root: true, 3 | parser: '@typescript-eslint/parser', 4 | plugins: ['@typescript-eslint'], 5 | extends: ['airbnb-base', 'airbnb-typescript/base'], 6 | parserOptions: { 7 | project: ['./tsconfig.json'] 8 | }, 9 | }; -------------------------------------------------------------------------------- /src/types.ts: -------------------------------------------------------------------------------- 1 | export type Nullable<T> = T | undefined | null; 2 | 3 | export type UploadInputs = { 4 | artifactPath: string 5 | filename: string 6 | emergeApiKey: string 7 | sha: string 8 | baseSha: string 9 | repoName: string 10 | 11 | // Required for PRs 12 | prNumber: Nullable<string> 13 | 14 | // Optional args 15 | buildType: Nullable<string> 16 | branchName: Nullable<string> 17 | appIdSuffix: Nullable<string> 18 | }; 19 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "es6", 4 | "module": "commonjs", 5 | "outDir": "./lib", 6 | "rootDir": "./src", 7 | "strict": true, 8 | "noImplicitAny": false, 9 | "moduleResolution": "node", 10 | "allowSyntheticDefaultImports": true, 11 | "esModuleInterop": true, 12 | "declaration": false, 13 | "sourceMap": true, 14 | "lib": ["es6"] 15 | }, 16 | "exclude": ["node_modules", "**/*.test.ts"] 17 | } -------------------------------------------------------------------------------- /src/utils.ts: -------------------------------------------------------------------------------- 1 | import * as path from 'path'; 2 | import { Nullable } from './types'; 3 | 4 | export function getPRNumber(refName: string): Nullable<string> { 5 | if (refName === '') { 6 | return undefined; 7 | } 8 | 9 | if (!refName.includes('pull')) { 10 | return undefined; 11 | } 12 | 13 | const splits = refName.split('/'); 14 | return splits[2]; 15 | } 16 | 17 | export function getAbsoluteArtifactPath(artifactPath: string): string { 18 | let cleanedArtifactPath = artifactPath; 19 | if (cleanedArtifactPath.startsWith('.')) { 20 | cleanedArtifactPath = cleanedArtifactPath.substr(1); 21 | } 22 | 23 | return path.join( 24 | process.env.GITHUB_WORKSPACE ?? 
'', 25 | cleanedArtifactPath, 26 | ); 27 | } 28 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "emerge-upload-action", 3 | "version": "1.1.0", 4 | "description": "Upload an artifact to Emerge.", 5 | "main": "dist/index.js", 6 | "scripts": { 7 | "build": "tsc", 8 | "release": "ncc build src/upload.ts && git add -f dist/", 9 | "lint": "eslint **/*.ts" 10 | }, 11 | "author": "EmergeTools", 12 | "dependencies": { 13 | "@actions/core": "^1.10.1", 14 | "@actions/github": "^6.0.0", 15 | "node-fetch": "^3.3.2" 16 | }, 17 | "devDependencies": { 18 | "@types/node": "^18", 19 | "@types/node-fetch": "^2.5.12", 20 | "@typescript-eslint/eslint-plugin": "^5.19.0", 21 | "@typescript-eslint/parser": "^5.19.0", 22 | "@vercel/ncc": "^0.38.1", 23 | "eslint": "^8.38.0", 24 | "eslint-config-airbnb-base": "^15.0.0", 25 | "eslint-config-airbnb-typescript": "^17.0.0", 26 | "eslint-plugin-github": "^4.10.2", 27 | "typescript": "^5.1.3" 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2025 Emerge Tools 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /action.yml: -------------------------------------------------------------------------------- 1 | name: "Emerge Upload" 2 | description: "GitHub Action for uploading an artifact to Emerge. Should only be used for iOS use cases." 3 | author: "EmergeTools" 4 | inputs: 5 | artifact_path: 6 | description: "Relative path from the runner's main workspace (GITHUB_WORKSPACE) to the artifact generated in CI." 7 | required: true 8 | emerge_api_key: 9 | description: > 10 | Emerge API token used to access the Emerge API. You can generate 11 | an API key from your Profile page in the Emerge dashboard. 12 | [Learn more about generating an Emerge API key](https://docs.emergetools.com/docs/fastlane#obtain-an-api-key) 13 | 14 | Make sure to store this key using GitHub Actions encrypted secrets. 
15 | [Learn more about creating and using encrypted secrets](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/creating-and-using-encrypted-secrets) 16 | required: true 17 | build_type: 18 | description: "The name for the build type (e.g. 'release', 'debug', 'beta')" 19 | required: false 20 | app_id_suffix: 21 | description: "The suffix for the app id (e.g. 'snapshots', 'debug', 'enterprise'), used to differentiate between different builds when uploading with the same SHA." 22 | required: false 23 | runs: 24 | using: "node20" 25 | main: "dist/index.js" 26 | -------------------------------------------------------------------------------- /src/upload.ts: -------------------------------------------------------------------------------- 1 | import fetch, { Headers } from 'node-fetch'; 2 | import * as fs from 'fs'; 3 | import getInputs from './inputs'; 4 | 5 | const core = require('@actions/core'); 6 | 7 | async function run(): Promise<void> { 8 | const inputs = getInputs(); 9 | 10 | const requestBody = { 11 | filename: inputs.filename, 12 | prNumber: inputs.prNumber, 13 | branch: inputs.branchName, 14 | sha: inputs.sha, 15 | baseSha: inputs.baseSha, 16 | repoName: inputs.repoName, 17 | buildType: inputs.buildType, 18 | appIdSuffix: inputs.appIdSuffix, 19 | }; 20 | 21 | core.debug(`requestBody: ${JSON.stringify(requestBody)}`); 22 | const response = await fetch('https://api.emergetools.com/upload', { 23 | method: 'post', 24 | headers: { 25 | Accept: 'application/json', 26 | 'Content-type': 'application/json', 27 | 'X-API-Token': inputs.emergeApiKey, 28 | }, 29 | body: JSON.stringify(requestBody), 30 | }); 31 | 32 | const data: any = await response.json(); 33 | const { uploadURL } = data; 34 | if (!uploadURL || uploadURL === '') { 35 | core.setFailed('No uploadURL found in upload response.'); 36 | } 37 | 38 | const file = fs.readFileSync(inputs.artifactPath); 39 | 40 | const headers = new Headers({ 'Content-Type': 'application/zip' }); 41 | core.info(`Uploading artifact at path ${inputs.artifactPath}...`); 42 | await fetch(uploadURL, { 43 | method: 'PUT', 44 | body: file, 45 | headers, 46 | }); 47 | } 48 | 49 | run().catch((e) => { 50 | core.setFailed(`Error uploading artifact to Emerge: ${e.message}`); 51 | }); 52 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Emerge upload action 2 | 3 | > [!WARNING] 4 | > This action is **deprecated** and no longer maintained.\ 5 | > For Android, use the [Emerge Gradle plugin](https://docs.emergetools.com/docs/gradle-plugin) instead.\ 6 | > For iOS, we strongly recommend you use the [Emerge Fastlane plugin](https://docs.emergetools.com/docs/fastlane) if you don't have an existing CI setup or if you already use fastlane.\ 7 | > Note: when using the Fastlane or Gradle plugins, the GitHub action is unnecessary, as all upload functionality is built into the respective integrations. 8 | 9 | Action to upload your build directly to Emerge for analysis. 10 | 11 | ## Usage 12 | 13 | The Emerge upload action should be run on any event that should trigger a size analysis. Examples of this are: 14 | 15 | - When pushing to specific branches (main/release). 16 | - On any PR to the main branch. 17 | 18 | See [action.yml](https://github.com/EmergeTools/emerge-upload-action/blob/main/action.yml) for all inputs and 19 | descriptions. 
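For example, a trigger configuration covering both of the cases above might look like the following sketch (branch names are placeholders; adjust them to your setup):

```yaml
on:
  push:
    branches: [main, release]
  pull_request:
    branches: [main]
```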
20 | 21 | ### Create Emerge API key and add to secrets 22 | 23 | First, create an Emerge API key. You can create one from your [Emerge profile page](https://www.emergetools.com/profile). 24 | 25 | ![Create Emerge API Key](./docs/api_key.png) 26 | 27 | Add the API key to your secrets in your repository. **Do not leave this key in plain text in your workflow!** 28 | 29 | [Learn more about creating and using encrypted secrets](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/creating-and-using-encrypted-secrets) 30 | 31 | ### Incorporate in your workflow 32 | 33 | Build your artifact in a step before the Emerge upload action. Pass the generated artifact's path as the `artifact_path` 34 | argument, and your Emerge API key secret as the `emerge_api_key` argument: 35 | 36 | ```yaml 37 | name: Your workflow 38 | 39 | on: 40 | push: 41 | branches: [main] 42 | pull_request: 43 | branches: [main] 44 | 45 | jobs: 46 | build: 47 | runs-on: ubuntu-latest 48 | steps: 49 | - uses: actions/checkout@v2 50 | - name: Generate iOS archive build 51 | run: # generate release build, e.g. with xcodebuild 52 | - name: Upload artifact to Emerge 53 | uses: EmergeTools/emerge-upload-action@v1.0.4 54 | with: 55 | artifact_path: path/to/app-release.xcarchive.zip 56 | emerge_api_key: ${{ secrets.EMERGE_API_KEY }} 57 | build_type: release # Optional, change if your workflow builds a specific type 58 | app_id_suffix: snapshots # Optional, change if needed, see https://docs.emergetools.com/docs/integrate-into-ci#which-builds-should-be-uploaded 59 | ``` 60 | 61 | After uploading, Emerge will run analysis on the uploaded build. If you haven't already, set up GitHub comments by following 62 | our [GitHub documentation](https://docs.emergetools.com/docs/github). 63 | 64 | For more details about the upload process and Emerge's supported artifact types, see 65 | the [Emerge Uploading docs](https://docs.emergetools.com/docs/uploading-basics). 66 | -------------------------------------------------------------------------------- /src/inputs.ts: -------------------------------------------------------------------------------- 1 | import * as fs from 'fs'; 2 | import * as process from 'process'; 3 | import { UploadInputs } from './types'; 4 | import { getPRNumber, getAbsoluteArtifactPath } from './utils'; 5 | 6 | const core = require('@actions/core'); 7 | const github = require('@actions/github'); 8 | 9 | function getInputs(): UploadInputs { 10 | core.info('Parsing inputs...'); 11 | 12 | const artifactPath = core.getInput('artifact_path', { required: true }); 13 | if (artifactPath === '') { 14 | core.setFailed('No artifact_path argument provided.'); 15 | } 16 | 17 | const emergeApiKey = core.getInput('emerge_api_key', { required: true }); 18 | if (emergeApiKey === '') { 19 | core.setFailed('No emerge_api_key input provided.'); 20 | } 21 | 22 | // On PRs, the GITHUB_SHA refers to the merge commit instead 23 | // of the commit that triggered this action. 24 | // Therefore, on a PR we need to explicitly get the head sha from the event json data. 25 | let sha; 26 | let baseSha; 27 | let branchName; 28 | const eventFile = fs.readFileSync(process.env.GITHUB_EVENT_PATH ?? '', { 29 | encoding: 'utf8', 30 | }); 31 | const eventFileJson = JSON.parse(eventFile); 32 | if (process.env.GITHUB_EVENT_NAME === 'pull_request' || process.env.GITHUB_EVENT_NAME === 'pull_request_target') { 33 | sha = eventFileJson?.pull_request?.head?.sha ?? process.env.GITHUB_SHA ?? ''; 34 | baseSha = eventFileJson?.pull_request?.base?.sha ?? 
''; 35 | branchName = process.env.GITHUB_HEAD_REF ?? ''; 36 | } else if (process.env.GITHUB_EVENT_NAME === 'push') { 37 | sha = process.env.GITHUB_SHA ?? ''; 38 | // Get the SHA of the previous commit, which will be the baseSha in the case of a push event. 39 | baseSha = eventFileJson?.before ?? ''; 40 | 41 | const ref = process.env.GITHUB_REF ?? ''; 42 | if (ref !== '') { 43 | const refSplits = ref.split('/'); 44 | branchName = refSplits[refSplits.length - 1]; 45 | } 46 | } else { 47 | core.setFailed(`Unsupported action trigger: ${process.env.GITHUB_EVENT_NAME}`); 48 | } 49 | 50 | if (sha === '') { 51 | core.setFailed('Could not get SHA of the head branch.'); 52 | } 53 | if (baseSha === '') { 54 | core.setFailed('Could not get SHA of the base branch.'); 55 | } 56 | // branchName is optional, so we won't fail if not present 57 | if (branchName === '') { 58 | // Explicitly set to undefined so we won't send an empty string to the Emerge API 59 | branchName = undefined; 60 | } 61 | 62 | const repoName = `${github.context.repo.owner}/${github.context.repo.repo}`; 63 | if (repoName === '') { 64 | core.setFailed('Could not get repository name.'); 65 | } 66 | 67 | // Required for PRs 68 | const refName = process.env.GITHUB_REF ?? ''; 69 | let prNumber = getPRNumber(refName); 70 | if (refName.includes('pull') && !prNumber) { 71 | core.setFailed('Could not get prNumber for a PR triggered build.'); 72 | } 73 | if (!prNumber) { 74 | prNumber = eventFileJson?.number; 75 | } 76 | // Optional args 77 | let buildType = core.getInput('build_type'); 78 | if (buildType === '') { 79 | // Explicitly set to undefined so we won't send an empty string to the Emerge API 80 | buildType = undefined; 81 | } 82 | 83 | let appIdSuffix = core.getInput('app_id_suffix'); 84 | if (appIdSuffix === '') { 85 | // Explicitly set to undefined so we won't send an empty string to the Emerge API 86 | appIdSuffix = undefined; 87 | } 88 | 89 | // Pre-processing the filename 90 | const pathSplits = artifactPath.split('/'); 91 | const filename = pathSplits[pathSplits.length - 1]; 92 | const absoluteArtifactPath = getAbsoluteArtifactPath(artifactPath); 93 | 94 | return { 95 | artifactPath: absoluteArtifactPath, 96 | filename, 97 | emergeApiKey, 98 | sha, 99 | baseSha, 100 | repoName, 101 | prNumber, 102 | buildType, 103 | branchName, 104 | appIdSuffix, 105 | }; 106 | } 107 | 108 | export default getInputs; 109 | -------------------------------------------------------------------------------- /dist/886.index.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | exports.id = 886; 3 | exports.ids = [886]; 4 | exports.modules = { 5 | 6 | /***/ 4886: 7 | /***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => { 8 | 9 | /* harmony export */ __webpack_require__.d(__webpack_exports__, { 10 | /* harmony export */ toFormData: () => (/* binding */ toFormData) 11 | /* harmony export */ }); 12 | /* harmony import */ var fetch_blob_from_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(939); 13 | /* harmony import */ var formdata_polyfill_esm_min_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(8369); 14 | 15 | 16 | 17 | let s = 0; 18 | const S = { 19 | START_BOUNDARY: s++, 20 | HEADER_FIELD_START: s++, 21 | HEADER_FIELD: s++, 22 | HEADER_VALUE_START: s++, 23 | HEADER_VALUE: s++, 24 | HEADER_VALUE_ALMOST_DONE: s++, 25 | HEADERS_ALMOST_DONE: s++, 26 | PART_DATA_START: s++, 27 | PART_DATA: s++, 28 | END: s++ 29 | }; 30 | 31 | let f = 1; 32 | const F = { 33 
| PART_BOUNDARY: f, 34 | LAST_BOUNDARY: f *= 2 35 | }; 36 | 37 | const LF = 10; 38 | const CR = 13; 39 | const SPACE = 32; 40 | const HYPHEN = 45; 41 | const COLON = 58; 42 | const A = 97; 43 | const Z = 122; 44 | 45 | const lower = c => c | 0x20; 46 | 47 | const noop = () => {}; 48 | 49 | class MultipartParser { 50 | /** 51 | * @param {string} boundary 52 | */ 53 | constructor(boundary) { 54 | this.index = 0; 55 | this.flags = 0; 56 | 57 | this.onHeaderEnd = noop; 58 | this.onHeaderField = noop; 59 | this.onHeadersEnd = noop; 60 | this.onHeaderValue = noop; 61 | this.onPartBegin = noop; 62 | this.onPartData = noop; 63 | this.onPartEnd = noop; 64 | 65 | this.boundaryChars = {}; 66 | 67 | boundary = '\r\n--' + boundary; 68 | const ui8a = new Uint8Array(boundary.length); 69 | for (let i = 0; i < boundary.length; i++) { 70 | ui8a[i] = boundary.charCodeAt(i); 71 | this.boundaryChars[ui8a[i]] = true; 72 | } 73 | 74 | this.boundary = ui8a; 75 | this.lookbehind = new Uint8Array(this.boundary.length + 8); 76 | this.state = S.START_BOUNDARY; 77 | } 78 | 79 | /** 80 | * @param {Uint8Array} data 81 | */ 82 | write(data) { 83 | let i = 0; 84 | const length_ = data.length; 85 | let previousIndex = this.index; 86 | let {lookbehind, boundary, boundaryChars, index, state, flags} = this; 87 | const boundaryLength = this.boundary.length; 88 | const boundaryEnd = boundaryLength - 1; 89 | const bufferLength = data.length; 90 | let c; 91 | let cl; 92 | 93 | const mark = name => { 94 | this[name + 'Mark'] = i; 95 | }; 96 | 97 | const clear = name => { 98 | delete this[name + 'Mark']; 99 | }; 100 | 101 | const callback = (callbackSymbol, start, end, ui8a) => { 102 | if (start === undefined || start !== end) { 103 | this[callbackSymbol](ui8a && ui8a.subarray(start, end)); 104 | } 105 | }; 106 | 107 | const dataCallback = (name, clear) => { 108 | const markSymbol = name + 'Mark'; 109 | if (!(markSymbol in this)) { 110 | return; 111 | } 112 | 113 | if (clear) { 114 | callback(name, this[markSymbol], i, data); 115 | delete this[markSymbol]; 116 | } else { 117 | callback(name, this[markSymbol], data.length, data); 118 | this[markSymbol] = 0; 119 | } 120 | }; 121 | 122 | for (i = 0; i < length_; i++) { 123 | c = data[i]; 124 | 125 | switch (state) { 126 | case S.START_BOUNDARY: 127 | if (index === boundary.length - 2) { 128 | if (c === HYPHEN) { 129 | flags |= F.LAST_BOUNDARY; 130 | } else if (c !== CR) { 131 | return; 132 | } 133 | 134 | index++; 135 | break; 136 | } else if (index - 1 === boundary.length - 2) { 137 | if (flags & F.LAST_BOUNDARY && c === HYPHEN) { 138 | state = S.END; 139 | flags = 0; 140 | } else if (!(flags & F.LAST_BOUNDARY) && c === LF) { 141 | index = 0; 142 | callback('onPartBegin'); 143 | state = S.HEADER_FIELD_START; 144 | } else { 145 | return; 146 | } 147 | 148 | break; 149 | } 150 | 151 | if (c !== boundary[index + 2]) { 152 | index = -2; 153 | } 154 | 155 | if (c === boundary[index + 2]) { 156 | index++; 157 | } 158 | 159 | break; 160 | case S.HEADER_FIELD_START: 161 | state = S.HEADER_FIELD; 162 | mark('onHeaderField'); 163 | index = 0; 164 | // falls through 165 | case S.HEADER_FIELD: 166 | if (c === CR) { 167 | clear('onHeaderField'); 168 | state = S.HEADERS_ALMOST_DONE; 169 | break; 170 | } 171 | 172 | index++; 173 | if (c === HYPHEN) { 174 | break; 175 | } 176 | 177 | if (c === COLON) { 178 | if (index === 1) { 179 | // empty header field 180 | return; 181 | } 182 | 183 | dataCallback('onHeaderField', true); 184 | state = S.HEADER_VALUE_START; 185 | break; 186 | } 187 | 188 | cl = 
lower(c); 189 | if (cl < A || cl > Z) { 190 | return; 191 | } 192 | 193 | break; 194 | case S.HEADER_VALUE_START: 195 | if (c === SPACE) { 196 | break; 197 | } 198 | 199 | mark('onHeaderValue'); 200 | state = S.HEADER_VALUE; 201 | // falls through 202 | case S.HEADER_VALUE: 203 | if (c === CR) { 204 | dataCallback('onHeaderValue', true); 205 | callback('onHeaderEnd'); 206 | state = S.HEADER_VALUE_ALMOST_DONE; 207 | } 208 | 209 | break; 210 | case S.HEADER_VALUE_ALMOST_DONE: 211 | if (c !== LF) { 212 | return; 213 | } 214 | 215 | state = S.HEADER_FIELD_START; 216 | break; 217 | case S.HEADERS_ALMOST_DONE: 218 | if (c !== LF) { 219 | return; 220 | } 221 | 222 | callback('onHeadersEnd'); 223 | state = S.PART_DATA_START; 224 | break; 225 | case S.PART_DATA_START: 226 | state = S.PART_DATA; 227 | mark('onPartData'); 228 | // falls through 229 | case S.PART_DATA: 230 | previousIndex = index; 231 | 232 | if (index === 0) { 233 | // boyer-moore derrived algorithm to safely skip non-boundary data 234 | i += boundaryEnd; 235 | while (i < bufferLength && !(data[i] in boundaryChars)) { 236 | i += boundaryLength; 237 | } 238 | 239 | i -= boundaryEnd; 240 | c = data[i]; 241 | } 242 | 243 | if (index < boundary.length) { 244 | if (boundary[index] === c) { 245 | if (index === 0) { 246 | dataCallback('onPartData', true); 247 | } 248 | 249 | index++; 250 | } else { 251 | index = 0; 252 | } 253 | } else if (index === boundary.length) { 254 | index++; 255 | if (c === CR) { 256 | // CR = part boundary 257 | flags |= F.PART_BOUNDARY; 258 | } else if (c === HYPHEN) { 259 | // HYPHEN = end boundary 260 | flags |= F.LAST_BOUNDARY; 261 | } else { 262 | index = 0; 263 | } 264 | } else if (index - 1 === boundary.length) { 265 | if (flags & F.PART_BOUNDARY) { 266 | index = 0; 267 | if (c === LF) { 268 | // unset the PART_BOUNDARY flag 269 | flags &= ~F.PART_BOUNDARY; 270 | callback('onPartEnd'); 271 | callback('onPartBegin'); 272 | state = S.HEADER_FIELD_START; 273 | break; 274 | } 275 | } else if (flags & F.LAST_BOUNDARY) { 276 | if (c === HYPHEN) { 277 | callback('onPartEnd'); 278 | state = S.END; 279 | flags = 0; 280 | } else { 281 | index = 0; 282 | } 283 | } else { 284 | index = 0; 285 | } 286 | } 287 | 288 | if (index > 0) { 289 | // when matching a possible boundary, keep a lookbehind reference 290 | // in case it turns out to be a false lead 291 | lookbehind[index - 1] = c; 292 | } else if (previousIndex > 0) { 293 | // if our boundary turned out to be rubbish, the captured lookbehind 294 | // belongs to partData 295 | const _lookbehind = new Uint8Array(lookbehind.buffer, lookbehind.byteOffset, lookbehind.byteLength); 296 | callback('onPartData', 0, previousIndex, _lookbehind); 297 | previousIndex = 0; 298 | mark('onPartData'); 299 | 300 | // reconsider the current character even so it interrupted the sequence 301 | // it could be the beginning of a new sequence 302 | i--; 303 | } 304 | 305 | break; 306 | case S.END: 307 | break; 308 | default: 309 | throw new Error(`Unexpected state entered: ${state}`); 310 | } 311 | } 312 | 313 | dataCallback('onHeaderField'); 314 | dataCallback('onHeaderValue'); 315 | dataCallback('onPartData'); 316 | 317 | // Update properties for the next call 318 | this.index = index; 319 | this.state = state; 320 | this.flags = flags; 321 | } 322 | 323 | end() { 324 | if ((this.state === S.HEADER_FIELD_START && this.index === 0) || 325 | (this.state === S.PART_DATA && this.index === this.boundary.length)) { 326 | this.onPartEnd(); 327 | } else if (this.state !== S.END) { 328 | throw new 
Error('MultipartParser.end(): stream ended unexpectedly'); 329 | } 330 | } 331 | } 332 | 333 | function _fileName(headerValue) { 334 | // matches either a quoted-string or a token (RFC 2616 section 19.5.1) 335 | const m = headerValue.match(/\bfilename=("(.*?)"|([^()<>@,;:\\"/[\]?={}\s\t]+))($|;\s)/i); 336 | if (!m) { 337 | return; 338 | } 339 | 340 | const match = m[2] || m[3] || ''; 341 | let filename = match.slice(match.lastIndexOf('\\') + 1); 342 | filename = filename.replace(/%22/g, '"'); 343 | filename = filename.replace(/&#(\d{4});/g, (m, code) => { 344 | return String.fromCharCode(code); 345 | }); 346 | return filename; 347 | } 348 | 349 | async function toFormData(Body, ct) { 350 | if (!/multipart/i.test(ct)) { 351 | throw new TypeError('Failed to fetch'); 352 | } 353 | 354 | const m = ct.match(/boundary=(?:"([^"]+)"|([^;]+))/i); 355 | 356 | if (!m) { 357 | throw new TypeError('no or bad content-type header, no multipart boundary'); 358 | } 359 | 360 | const parser = new MultipartParser(m[1] || m[2]); 361 | 362 | let headerField; 363 | let headerValue; 364 | let entryValue; 365 | let entryName; 366 | let contentType; 367 | let filename; 368 | const entryChunks = []; 369 | const formData = new formdata_polyfill_esm_min_js__WEBPACK_IMPORTED_MODULE_1__/* .FormData */ .fS(); 370 | 371 | const onPartData = ui8a => { 372 | entryValue += decoder.decode(ui8a, {stream: true}); 373 | }; 374 | 375 | const appendToFile = ui8a => { 376 | entryChunks.push(ui8a); 377 | }; 378 | 379 | const appendFileToFormData = () => { 380 | const file = new fetch_blob_from_js__WEBPACK_IMPORTED_MODULE_0__/* .File */ .ZH(entryChunks, filename, {type: contentType}); 381 | formData.append(entryName, file); 382 | }; 383 | 384 | const appendEntryToFormData = () => { 385 | formData.append(entryName, entryValue); 386 | }; 387 | 388 | const decoder = new TextDecoder('utf-8'); 389 | decoder.decode(); 390 | 391 | parser.onPartBegin = function () { 392 | parser.onPartData = onPartData; 393 | parser.onPartEnd = appendEntryToFormData; 394 | 395 | headerField = ''; 396 | headerValue = ''; 397 | entryValue = ''; 398 | entryName = ''; 399 | contentType = ''; 400 | filename = null; 401 | entryChunks.length = 0; 402 | }; 403 | 404 | parser.onHeaderField = function (ui8a) { 405 | headerField += decoder.decode(ui8a, {stream: true}); 406 | }; 407 | 408 | parser.onHeaderValue = function (ui8a) { 409 | headerValue += decoder.decode(ui8a, {stream: true}); 410 | }; 411 | 412 | parser.onHeaderEnd = function () { 413 | headerValue += decoder.decode(); 414 | headerField = headerField.toLowerCase(); 415 | 416 | if (headerField === 'content-disposition') { 417 | // matches either a quoted-string or a token (RFC 2616 section 19.5.1) 418 | const m = headerValue.match(/\bname=("([^"]*)"|([^()<>@,;:\\"/[\]?={}\s\t]+))/i); 419 | 420 | if (m) { 421 | entryName = m[2] || m[3] || ''; 422 | } 423 | 424 | filename = _fileName(headerValue); 425 | 426 | if (filename) { 427 | parser.onPartData = appendToFile; 428 | parser.onPartEnd = appendFileToFormData; 429 | } 430 | } else if (headerField === 'content-type') { 431 | contentType = headerValue; 432 | } 433 | 434 | headerValue = ''; 435 | headerField = ''; 436 | }; 437 | 438 | for await (const chunk of Body) { 439 | parser.write(chunk); 440 | } 441 | 442 | parser.end(); 443 | 444 | return formData; 445 | } 446 | 447 | 448 | /***/ }) 449 | 450 | }; 451 | ; --------------------------------------------------------------------------------