├── .changeset ├── README.md └── config.json ├── .coderabbit.yml ├── .editorconfig ├── .github ├── ISSUE_TEMPLATE │ ├── 1-bug.yml │ ├── 2-feature.yml │ └── config.yml ├── contributing.md ├── dependabot.yaml ├── scripts │ └── decrypt_secret.sh └── workflows │ ├── ci.yml │ └── release.yml ├── .gitignore ├── .npmrc ├── .well-known └── funding-manifest-urls ├── LICENSE ├── README.md ├── biome.json ├── keyfile.json.gpg ├── package-lock.json ├── package.json ├── packages ├── azure-store │ ├── CHANGELOG.md │ ├── LICENSE │ ├── README.md │ ├── package.json │ ├── src │ │ ├── index.ts │ │ └── test │ │ │ └── index.ts │ └── tsconfig.json ├── file-store │ ├── CHANGELOG.md │ ├── LICENSE │ ├── README.md │ ├── package.json │ ├── src │ │ ├── configstores │ │ │ └── index.ts │ │ ├── index.ts │ │ └── test │ │ │ └── index.ts │ └── tsconfig.json ├── gcs-store │ ├── CHANGELOG.md │ ├── LICENSE │ ├── README.md │ ├── package.json │ ├── src │ │ ├── index.ts │ │ └── test │ │ │ └── index.ts │ └── tsconfig.json ├── s3-store │ ├── CHANGELOG.md │ ├── LICENSE │ ├── README.md │ ├── package.json │ ├── src │ │ ├── index.ts │ │ └── test │ │ │ └── index.ts │ └── tsconfig.json ├── server │ ├── CHANGELOG.md │ ├── LICENSE │ ├── README.md │ ├── package.json │ ├── src │ │ ├── handlers │ │ │ ├── BaseHandler.ts │ │ │ ├── DeleteHandler.ts │ │ │ ├── GetHandler.ts │ │ │ ├── HeadHandler.ts │ │ │ ├── OptionsHandler.ts │ │ │ ├── PatchHandler.ts │ │ │ └── PostHandler.ts │ │ ├── index.ts │ │ ├── lockers │ │ │ ├── MemoryLocker.ts │ │ │ └── index.ts │ │ ├── server.ts │ │ ├── test │ │ │ ├── BaseHandler.test.ts │ │ │ ├── DataStore.test.ts │ │ │ ├── DeleteHandler.test.ts │ │ │ ├── GetHandler.test.ts │ │ │ ├── HeadHandler.test.ts │ │ │ ├── HeaderValidator.test.ts │ │ │ ├── Locker.test.ts │ │ │ ├── OptionsHandler.test.ts │ │ │ ├── PatchHandler.test.ts │ │ │ ├── PostHandler.test.ts │ │ │ └── Server.test.ts │ │ ├── types.ts │ │ └── validators │ │ │ └── HeaderValidator.ts │ └── tsconfig.json └── utils │ ├── CHANGELOG.md │ ├── package.json │ ├── src │ ├── constants.ts │ ├── index.ts │ ├── kvstores │ │ ├── FileKvStore.ts │ │ ├── IoRedisKvStore.ts │ │ ├── MemoryKvStore.ts │ │ ├── RedisKvStore.ts │ │ ├── Types.ts │ │ └── index.ts │ ├── models │ │ ├── Context.ts │ │ ├── DataStore.ts │ │ ├── Locker.ts │ │ ├── Metadata.ts │ │ ├── StreamLimiter.ts │ │ ├── StreamSplitter.ts │ │ ├── Uid.ts │ │ ├── Upload.ts │ │ └── index.ts │ └── test │ │ ├── Metadata.test.ts │ │ ├── StreamSplitter.test.ts │ │ ├── Uid.test.ts │ │ ├── Upload.test.ts │ │ └── stores.ts │ └── tsconfig.json ├── test ├── fixtures │ ├── test.mp4 │ └── test.pdf ├── package.json ├── src │ ├── e2e.test.ts │ └── s3.e2e.ts └── tsconfig.json ├── tsconfig.base.json └── tsconfig.json /.changeset/README.md: -------------------------------------------------------------------------------- 1 | # Changesets 2 | 3 | Hello and welcome! This folder has been automatically generated by `@changesets/cli`, a 4 | build tool that works with multi-package repos, or single-package repos to help you 5 | version and publish your code. 
You can find the full documentation for it 6 | [in our repository](https://github.com/changesets/changesets) 7 | 8 | We have a quick list of common questions to get you started engaging with this project in 9 | [our documentation](https://github.com/changesets/changesets/blob/main/docs/common-questions.md) 10 | -------------------------------------------------------------------------------- /.changeset/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://unpkg.com/@changesets/config@3.0.0/schema.json", 3 | "changelog": "@changesets/cli/changelog", 4 | "commit": false, 5 | "fixed": [], 6 | "linked": [], 7 | "access": "public", 8 | "baseBranch": "main", 9 | "updateInternalDependencies": "patch", 10 | "ignore": ["test"] 11 | } 12 | -------------------------------------------------------------------------------- /.coderabbit.yml: -------------------------------------------------------------------------------- 1 | # yaml-language-server: $schema=https://coderabbit.ai/integrations/schema.v2.json 2 | language: "en-US" 3 | early_access: false 4 | reviews: 5 | profile: "chill" 6 | request_changes_workflow: false 7 | high_level_summary: false 8 | poem: false 9 | review_status: true 10 | sequence_diagrams: false 11 | collapse_walkthrough: false 12 | auto_review: 13 | enabled: true 14 | drafts: false 15 | chat: 16 | auto_reply: true 17 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | ; This file is for unifying the coding style for different editors and IDEs. 2 | ; More information at http://editorconfig.org 3 | 4 | root = true 5 | 6 | [*] 7 | charset = utf-8 8 | indent_style = space 9 | indent_size = 2 10 | end_of_line = lf 11 | insert_final_newline = true 12 | trim_trailing_whitespace = true 13 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/1-bug.yml: -------------------------------------------------------------------------------- 1 | name: 🐛 Bug report 2 | labels: [bug] 3 | description: Describe a bug with a project 4 | body: 5 | - type: checkboxes 6 | id: initial-checklist 7 | attributes: 8 | label: Initial checklist 9 | options: 10 | - label: 11 | I understand this is a bug report and questions should be posted in the 12 | [Community Forum](https://community.transloadit.com/c/tus/6) 13 | required: true 14 | - label: 15 | I searched 16 | [issues](https://github.com/tus/tus-node-server/issues?q=is%3Aissue) and 17 | couldn’t find anything (or linked relevant results below) 18 | required: true 19 | - type: textarea 20 | id: steps-to-reproduce 21 | attributes: 22 | label: Steps to reproduce 23 | description: 24 | How did this happen? Please provide a [minimal, reproducible 25 | example](https://stackoverflow.com/help/minimal-reproducible-example). 26 | validations: 27 | required: true 28 | - type: textarea 29 | id: expected-behavior 30 | attributes: 31 | label: Expected behavior 32 | description: What should happen? 33 | validations: 34 | required: true 35 | - type: textarea 36 | id: actual-behavior 37 | attributes: 38 | label: Actual behavior 39 | description: What happens instead? 
40 | validations: 41 | required: true 42 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/2-feature.yml: -------------------------------------------------------------------------------- 1 | name: 🚀 Feature request 2 | labels: [feature] 3 | description: Suggest an idea 4 | body: 5 | - type: checkboxes 6 | id: initial-checklist 7 | attributes: 8 | label: Initial checklist 9 | options: 10 | - label: 11 | I understand this is a feature request and questions should be posted in the 12 | [Community Forum](https://community.transloadit.com/c/tus/6) 13 | required: true 14 | - label: 15 | I searched 16 | [issues](https://github.com/tus/tus-node-server/issues?q=is%3Aissue) and 17 | couldn’t find anything (or linked relevant results below) 18 | required: true 19 | - type: textarea 20 | id: problem 21 | attributes: 22 | label: Problem 23 | description: Please describe the problem you are trying to solve here. 24 | validations: 25 | required: true 26 | - type: textarea 27 | id: solution 28 | attributes: 29 | label: Solution 30 | description: What should happen? Please describe the desired behavior. 31 | validations: 32 | required: true 33 | - type: textarea 34 | id: alternatives 35 | attributes: 36 | label: Alternatives 37 | description: 38 | What are the alternative solutions? Can this be solved in a different way? 39 | validations: 40 | required: true 41 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | blank_issues_enabled: false 2 | contact_links: 3 | - name: 🙋 Ask a question 4 | url: https://community.transloadit.com/c/tus/6 5 | about: Ask questions and discuss with other community members 6 | -------------------------------------------------------------------------------- /.github/contributing.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | ## Changesets 4 | 5 | We use [changesets](https://github.com/changesets/changesets) to manage versioning, 6 | changelogs and publishing. This means when you contribute a PR you have to run 7 | `npx changeset add` to indicate the semver bump you are making and to add a changelog 8 | entry (see the example at the end of this document). 9 | 10 | ## Tests 11 | 12 | You can run tests for individual packages by running an npm workspace command. For 13 | instance, for `@tus/server`: 14 | 15 | ```bash 16 | npm run --workspace @tus/server test 17 | ``` 18 | 19 | Running tests for `@tus/gcs-store` requires a `keyfile.json` with credentials to be 20 | present in the root. 21 | 22 | `@tus/s3-store` also requires credentials, but these should be injected. The easiest way 23 | to do this is to create a `.env.sh` (which is in `.gitignore`) with the following exports: 24 | 25 | ```bash 26 | export AWS_BUCKET="***" 27 | export AWS_ACCESS_KEY_ID="***" 28 | export AWS_SECRET_ACCESS_KEY="***" 29 | export AWS_REGION="***" 30 | ``` 31 | 32 | And run it: 33 | 34 | ```bash 35 | source .env.sh && npm run --workspace @tus/s3-store test 36 | ``` 37 | 38 | You can run all tests with (requires both S3 and GCS credentials): 39 | 40 | ```bash 41 | npm test 42 | ``` 43 | 44 | --- 45 | 46 | If setting up buckets is too much effort, create a pull request and check if GitHub 47 | Actions succeeds with your changes.
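For reference, `npx changeset add` writes a markdown file into the `.changeset` folder along these lines — the package name, bump type, and summary below are purely illustrative:

```md
---
"@tus/server": patch
---

Clarify the error message returned for unsupported extensions
```

Commit this file along with your changes; the release workflow reads it to version and publish the affected packages.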
48 | -------------------------------------------------------------------------------- /.github/dependabot.yaml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: github-actions 4 | directory: / 5 | schedule: 6 | interval: monthly 7 | 8 | - package-ecosystem: npm 9 | directory: / 10 | schedule: 11 | interval: monthly 12 | -------------------------------------------------------------------------------- /.github/scripts/decrypt_secret.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # --batch to prevent an interactive command, --yes to assume "yes" for questions 4 | gpg --quiet --batch --yes --decrypt --passphrase="$KEYFILE_PASSPHRASE" \ 5 | --output keyfile.json keyfile.json.gpg 6 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | on: 3 | push: 4 | branches: [main] 5 | pull_request_target: 6 | types: [opened, synchronize, reopened] 7 | paths-ignore: 8 | - "**.md" 9 | - ".changeset/**" 10 | pull_request: 11 | types: [opened, synchronize, reopened] 12 | paths: 13 | - .github/workflows/ci.yml 14 | 15 | concurrency: ${{ github.workflow }}-${{ github.ref }} 16 | 17 | jobs: 18 | main: 19 | name: Node.js 20 20 | runs-on: ubuntu-latest 21 | 22 | steps: 23 | - name: Checkout sources 24 | uses: actions/checkout@v4 25 | with: 26 | ref: ${{ github.event.pull_request.head.sha || github.sha }} 27 | 28 | - name: Decrypt keyfile 29 | run: ./.github/scripts/decrypt_secret.sh 30 | env: 31 | KEYFILE_PASSPHRASE: ${{secrets.KEYFILE_PASSPHRASE}} 32 | 33 | - name: Install Node.js 34 | uses: actions/setup-node@v3 35 | with: 36 | node-version: 20.19 37 | 38 | - name: Install dependencies 39 | run: npm ci --no-fund --no-audit 40 | 41 | - name: Build 42 | run: npm run build 43 | 44 | - name: Check formatting 45 | run: npm run format:check 46 | 47 | - name: Run linters 48 | run: npm run lint 49 | 50 | - name: Run tests 51 | run: npm run test 52 | env: 53 | AWS_BUCKET: ${{secrets.AWS_BUCKET}} 54 | AWS_ACCESS_KEY_ID: ${{secrets.AWS_ACCESS_KEY_ID}} 55 | AWS_SECRET_ACCESS_KEY: ${{secrets.AWS_SECRET_ACCESS_KEY}} 56 | AZURE_ACCOUNT_ID: ${{secrets.AZURE_ACCOUNT_ID}} 57 | AZURE_ACCOUNT_KEY: ${{secrets.AZURE_ACCOUNT_KEY}} 58 | AZURE_CONTAINER_NAME: ${{secrets.AZURE_CONTAINER_NAME}} 59 | AWS_REGION: ${{secrets.AWS_REGION}} 60 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: release 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | 8 | concurrency: ${{ github.workflow }}-${{ github.ref }} 9 | 10 | jobs: 11 | release: 12 | name: Release 13 | runs-on: ubuntu-latest 14 | steps: 15 | - name: Checkout Repo 16 | uses: actions/checkout@v4 17 | 18 | - name: Setup Node.js 19 | uses: actions/setup-node@v3 20 | with: 21 | node-version: lts/* 22 | 23 | - name: Install dependencies 24 | run: npm ci --no-fund --no-audit 25 | 26 | - name: Build 27 | run: npm run build 28 | 29 | - name: Create Release Pull Request or Publish 30 | id: changesets 31 | uses: changesets/action@v1 32 | with: 33 | # Note: npm install after versioning is necessary to refresh the lockfile 34 | version: npm run version 35 | publish: npm run release:local 36 | commit: '[ci] release' 37 | title: '[ci] release' 38 | env: 39 | #
Needs access to push to main 40 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 41 | # Needs access to publish to npm 42 | NPM_TOKEN: ${{ secrets.NPM_TOKEN }} 43 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .idea 2 | .eslintcache 3 | dist 4 | .turbo 5 | *.tsbuildinfo 6 | output/ 7 | 8 | # Yarn 9 | .pnp.* 10 | .yarn/* 11 | !.yarn/patches 12 | !.yarn/plugins 13 | !.yarn/releases 14 | !.yarn/sdks 15 | !.yarn/versions 16 | 17 | .env 18 | .env.sh 19 | 20 | # Logs 21 | logs 22 | *.log 23 | 24 | # Runtime data 25 | pids 26 | *.pid 27 | *.seed 28 | 29 | # Directory for instrumented libs generated by jscoverage/JSCover 30 | lib-cov 31 | 32 | # Coverage directory used by tools like istanbul 33 | coverage 34 | .nyc_output 35 | 36 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) 37 | .grunt 38 | 39 | # node-waf configuration 40 | .lock-wscript 41 | 42 | # Compiled binary addons (http://nodejs.org/api/addons.html) 43 | build/Release 44 | 45 | # Dependency directory 46 | # https://www.npmjs.org/doc/misc/npm-faq.html#should-i-check-my-node_modules-folder-into-git 47 | node_modules 48 | 49 | # Example local FileStore 50 | example/files 51 | test/output 52 | 53 | # macOS 54 | .DS_Store 55 | 56 | *.sublime-workspace 57 | 58 | # Keyfile will be decrypted from keyfile.json.gpg by GitHub Actions 59 | keyfile.json 60 | -------------------------------------------------------------------------------- /.npmrc: -------------------------------------------------------------------------------- 1 | //registry.npmjs.org/:_authToken=${NPM_TOKEN} 2 | -------------------------------------------------------------------------------- /.well-known/funding-manifest-urls: -------------------------------------------------------------------------------- 1 | https://tus.io/funding.json 2 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2015 tus - Resumable File Uploads 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE.
22 | 23 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # tus-node-server 2 | 3 | Tus logo 4 | 5 | > **tus** is a protocol based on HTTP for _resumable file uploads_. Resumable means that 6 | > an upload can be interrupted at any moment and can be resumed without re-uploading the 7 | > previous data. An interruption may happen willingly, if the user wants to pause, 8 | > or by accident in case of a network issue or server outage. 9 | 10 | tus-node-server is an official implementation of the 11 | [tus resumable upload protocol](http://www.tus.io/protocols/resumable-upload.html). The 12 | protocol specifies a flexible method to upload files to remote servers using HTTP. The 13 | special feature is the ability to pause and resume uploads at any moment, allowing uploads 14 | to continue seamlessly after e.g. network interruptions. 15 | 16 | It is capable of accepting uploads of arbitrary size and storing them locally on disk, 17 | on Google Cloud Storage or on AWS S3 (or any other S3-compatible storage system). Due to 18 | its modularization and extensibility, support for nearly any other cloud provider could 19 | easily be added to tus-node-server. 20 | 21 | > [!IMPORTANT] 22 | > Read the 2.0.0 announcement [post](https://tus.io/blog/2025/03/25/tus-node-server-v200): tus-node-server now integrates with all meta-frameworks and JS runtimes. 23 | 24 | ## Contents 25 | 26 | - [When should I use this?](#when-should-i-use-this) 27 | - [Quick start](#quick-start) 28 | - [Packages](#packages) 29 | - [Extensions](#extensions) 30 | - [Types](#types) 31 | - [Compatibility](#compatibility) 32 | - [Contribute](#contribute) 33 | - [License](#license) 34 | 35 | ## When should I use this? 36 | 37 | When you want reliable, resumable uploads. Together with a client like 38 | [tus-js-client](https://github.com/tus/tus-js-client) or [Uppy](https://uppy.io), you'll 39 | have a plug-and-play experience. 40 | 41 | tus Node.js in particular makes sense if you want to host a Node.js server or 42 | integrate it into your existing one. You can also run tus Node.js in all meta frameworks 43 | (such as Next.js, Nuxt, React Router, SvelteKit, etc.) and other Node.js-compatible runtime environments 44 | (AWS Lambda, Cloudflare, Bun, Deno Deploy, etc.). 45 | 46 | There are also other mature servers, like 47 | [tusd](https://github.com/tus/tusd), [tusdotnet](https://github.com/tusdotnet/tusdotnet), 48 | [rustus](https://github.com/s3rius/rustus), and 49 | [many others](https://tus.io/implementations.html). 50 | 51 | ## Quick start 52 | 53 | A standalone server which stores files on disk. 54 | 55 | > [!TIP] 56 | > Try it yourself in [StackBlitz](https://stackblitz.com/edit/stackblitz-starters-zg6mgnuf?file=index.js) 57 | 58 | ```js 59 | import { Server } from "@tus/server"; 60 | import { FileStore } from "@tus/file-store"; 61 | 62 | const host = "127.0.0.1"; 63 | const port = 1080; 64 | const server = new Server({ 65 | path: "/files", 66 | datastore: new FileStore({ directory: "./files" }), 67 | }); 68 | 69 | server.listen({ host, port }); 70 | ``` 71 | 72 | A tus server integrated into your existing Node.js server. `@tus/server` has no 73 | dependencies so it can be integrated in any server-side framework. More examples can be 74 | found in [`@tus/server`][].
75 | 76 | ```js 77 | import fastify from "fastify"; 78 | import { Server } from "@tus/server"; 79 | import { FileStore } from "@tus/file-store"; 80 | 81 | const app = fastify({ logger: true }); 82 | const tusServer = new Server({ 83 | path: "/files", 84 | datastore: new FileStore({ directory: "./files" }), 85 | }); 86 | 87 | app.addContentTypeParser( 88 | "application/offset+octet-stream", 89 | (request, payload, done) => done(null) 90 | ); 91 | app.all("/files", (req, res) => { 92 | tusServer.handle(req.raw, res.raw); 93 | }); 94 | app.all("/files/*", (req, res) => { 95 | tusServer.handle(req.raw, res.raw); 96 | }); 97 | app.listen({ port: 3000 }, (err) => { 98 | if (err) { 99 | app.log.error(err); 100 | process.exit(1); 101 | } 102 | }); 103 | ``` 104 | 105 | ## Packages 106 | 107 | - [`@tus/server`][]. The tus server. Standalone or integrate it into your Node.js server. 108 | - [`@tus/file-store`][]. Store files on disk. 109 | - [`@tus/s3-store`][]. Store files on AWS S3. 110 | - [`@tus/gcs-store`][]. Store files on Google Cloud Storage. 111 | - [`@tus/azure-store`][]. Store files on Azure. 112 | 113 | ## Extensions 114 | 115 | The tus protocol supports optional [extensions][]. Below is a table of the supported 116 | extensions. 117 | 118 | | Extension | [`file-store`][`@tus/file-store`] | [`s3-store`][`@tus/s3-store`] | [`gcs-store`][`@tus/gcs-store`] | [`azure-store`][`@tus/azure-store`] | 119 | | ------------------------ | --------------------------------- | ----------------------------- | ------------------------------- | ----------------------------------- | 120 | | [Creation][] | ✅ | ✅ | ✅ | ✅ | 121 | | [Creation With Upload][] | ✅ | ✅ | ✅ | ✅ | 122 | | [Expiration][] | ✅ | ✅ | ❌ | ❌ | 123 | | [Checksum][] | ❌ | ❌ | ❌ | ❌ | 124 | | [Termination][] | ✅ | ✅ | ❌ | ❌ | 125 | | [Concatenation][] | ❌ | ❌ | ❌ | ❌ | 126 | 127 | ## Types 128 | 129 | All packages are fully typed with TypeScript. 130 | 131 | ## Compatibility 132 | 133 | All packages require Node.js >=20.19.0. 134 | 135 | ## Contribute 136 | 137 | See 138 | [`contributing.md`](https://github.com/tus/tus-node-server/blob/main/.github/contributing.md).
139 | 140 | ## License 141 | 142 | [MIT](https://github.com/tus/tus-node-server/blob/main/LICENSE) © 143 | [tus](https://github.com/tus) 144 | 145 | [corepack]: https://nodejs.org/api/corepack.html 146 | [`@tus/server`]: https://github.com/tus/tus-node-server/tree/main/packages/server 147 | [`@tus/file-store`]: https://github.com/tus/tus-node-server/tree/main/packages/file-store 148 | [`@tus/s3-store`]: https://github.com/tus/tus-node-server/tree/main/packages/s3-store 149 | [`@tus/gcs-store`]: https://github.com/tus/tus-node-server/tree/main/packages/gcs-store 150 | [`@tus/azure-store`]: https://github.com/tus/tus-node-server/tree/main/packages/azure-store 151 | [extensions]: https://tus.io/protocols/resumable-upload.html#protocol-extensions 152 | [creation]: https://tus.io/protocols/resumable-upload.html#creation 153 | [creation with upload]: https://tus.io/protocols/resumable-upload.html#creation-with-upload 154 | [expiration]: https://tus.io/protocols/resumable-upload.html#expiration 155 | [checksum]: https://tus.io/protocols/resumable-upload.html#checksum 156 | [termination]: https://tus.io/protocols/resumable-upload.html#termination 157 | [concatenation]: https://tus.io/protocols/resumable-upload.html#concatenation 158 | -------------------------------------------------------------------------------- /biome.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://biomejs.dev/schemas/1.9.0/schema.json", 3 | "organizeImports": { 4 | "enabled": true 5 | }, 6 | "files": { 7 | "ignore": [".git", "node_modules", "./**/dist/**/*"] 8 | }, 9 | "linter": { 10 | "enabled": true, 11 | "rules": { 12 | "recommended": true, 13 | "style": { 14 | "noParameterAssign": "off" 15 | }, 16 | "performance": { 17 | "noDelete": "off" 18 | } 19 | } 20 | }, 21 | "formatter": { 22 | "enabled": true, 23 | "formatWithErrors": false, 24 | "indentStyle": "space", 25 | "indentWidth": 2, 26 | "lineEnding": "lf", 27 | "lineWidth": 90 28 | }, 29 | "json": { 30 | "linter": { 31 | "enabled": false 32 | }, 33 | "formatter": { 34 | "enabled": false 35 | } 36 | }, 37 | "javascript": { 38 | "formatter": { 39 | "trailingCommas": "es5", 40 | "semicolons": "asNeeded", 41 | "bracketSpacing": false, 42 | "quoteStyle": "single" 43 | } 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /keyfile.json.gpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tus/tus-node-server/b4029ab913cd3ff53665ae6aa38199553f610f81/keyfile.json.gpg -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://json.schemastore.org/package.json", 3 | "private": true, 4 | "type": "module", 5 | "workspaces": [ 6 | "packages/*", 7 | "test" 8 | ], 9 | "scripts": { 10 | "build": "tsc --build", 11 | "lint": "biome lint --write .", 12 | "format": "biome format --write .", 13 | "format:check": "biome format --error-on-warnings .", 14 | "pretest": "tsc --build", 15 | "test": "npm test -w ./packages", 16 | "version": "changeset version", 17 | "release": "gh workflow run release", 18 | "release:local": "npm run build && changeset publish" 19 | }, 20 | "devDependencies": { 21 | "@biomejs/biome": "1.9.4", 22 | "@changesets/changelog-github": "^0.5.0", 23 | "@changesets/cli": "^2.29.2", 24 | "typescript": "^5.8.2" 25 | } 26 | } 27 |
-------------------------------------------------------------------------------- /packages/azure-store/CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # @tus/azure-store 2 | 3 | ## 2.0.0 4 | 5 | ### Major Changes 6 | 7 | - 0f063d9: Change required Node.js version from 16 to 20.19.0 8 | - 7a5a60d: Make this package ESM-only instead of CommonJS. Since Node.js >= 20.19.0 you can `require(esm)` so you can consume this package even if you don't ESM yourself yet. 9 | 10 | ### Patch Changes 11 | 12 | - Updated dependencies [0f063d9] 13 | - Updated dependencies [f190875] 14 | - Updated dependencies [7a5a60d] 15 | - @tus/utils@0.6.0 16 | 17 | ## 0.1.3 18 | 19 | ### Patch Changes 20 | 21 | - f47f371: Fix error on saving metadata when it contains non-ASCII characters 22 | 23 | ## 0.1.2 24 | 25 | ### Patch Changes 26 | 27 | - 37dcd55: Correctly publish dist folder 28 | 29 | ## 0.1.1 30 | 31 | ### Patch Changes 32 | 33 | - Updated dependencies [8f19a53] 34 | - @tus/utils@0.5.0 35 | 36 | ## 0.1.0 37 | 38 | ### Minor Changes 39 | 40 | - 919cd85: Add basic store for Azure 41 | -------------------------------------------------------------------------------- /packages/azure-store/LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2015 tus - Resumable File Uploads 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | 23 | -------------------------------------------------------------------------------- /packages/azure-store/README.md: -------------------------------------------------------------------------------- 1 | # `@tus/azure-store` 2 | 3 | Azure store based on the Azure Blob [AppendBlobClient](https://learn.microsoft.com/en-us/rest/api/storageservices/append-block).
4 | 5 | ## Contents 6 | 7 | - [Install](#install) 8 | - [Use](#use) 9 | - [API](#api) 10 | - [`new AzureStore(options)`](#new-azurestoreoptions) 11 | - [Extensions](#extensions) 12 | - [Types](#types) 13 | - [Compatibility](#compatibility) 14 | - [Contribute](#contribute) 15 | - [License](#license) 16 | 17 | ## Install 18 | 19 | In Node.js >=20.19.0, install with npm: 20 | 21 | ```bash 22 | npm install @tus/azure-store 23 | ``` 24 | 25 | ## Use 26 | 27 | ```js 28 | import { Server } from "@tus/server"; 29 | import { AzureStore } from "@tus/azure-store"; 30 | 31 | const server = new Server({ 32 | path: "/files", 33 | datastore: new AzureStore({ 34 | account: process.env.AZURE_ACCOUNT_ID, 35 | accountKey: process.env.AZURE_ACCOUNT_KEY, 36 | containerName: process.env.AZURE_CONTAINER_NAME, 37 | }), 38 | }); 39 | // ... 40 | ``` 41 | 42 | ## API 43 | 44 | This package exports `AzureStore`. There is no default export. 45 | 46 | ### `new AzureStore(options)` 47 | 48 | Creates a new Azure store with options. 49 | 50 | #### `options.account` 51 | 52 | Azure account ID (`string`). 53 | 54 | #### `options.accountKey` 55 | 56 | Azure account key (`string`). 57 | 58 | #### `options.containerName` 59 | 60 | Azure storage container name (`string`). 61 | 62 | #### `options.cache` 63 | 64 | Provide your own cache solution for the metadata of uploads ([`KvStore`][]) to reduce calls to the storage server. 65 | The default is [`MemoryKvStore`][], which stores the data in memory (a usage sketch follows after the Contribute section). 66 | 67 | ## Extensions 68 | 69 | The tus protocol supports optional [extensions][]. Below is a table of the supported 70 | extensions in `@tus/azure-store`. More will be added in future releases. 71 | 72 | | Extension | `@tus/azure-store` | 73 | | ------------------------ | ------------------ | 74 | | [Creation][] | ✅ | 75 | | [Creation With Upload][] | ✅ | 76 | | [Expiration][] | ❌ | 77 | | [Checksum][] | ❌ | 78 | | [Termination][] | ❌ | 79 | | [Concatenation][] | ❌ | 80 | 81 | ## Types 82 | 83 | This package is fully typed with TypeScript. 84 | 85 | ## Compatibility 86 | 87 | This package requires Node.js >=20.19.0. 88 | 89 | ## Contribute 90 | 91 | See 92 | [`contributing.md`](https://github.com/tus/tus-node-server/blob/main/.github/contributing.md).
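For illustration, a minimal sketch of passing an explicit cache to the store. `MemoryKvStore` is the default and is exported by `@tus/utils`; swapping in a Redis-backed [`KvStore`][] implementation is how you would share upload metadata between multiple server instances:

```js
import { AzureStore } from "@tus/azure-store";
import { MemoryKvStore } from "@tus/utils";

const datastore = new AzureStore({
  account: process.env.AZURE_ACCOUNT_ID,
  accountKey: process.env.AZURE_ACCOUNT_KEY,
  containerName: process.env.AZURE_CONTAINER_NAME,
  // Any KvStore implementation works here; it caches upload metadata
  // so not every request has to fetch blob properties from Azure.
  cache: new MemoryKvStore(),
});
```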
93 | 94 | ## License 95 | 96 | [MIT](https://github.com/tus/tus-node-server/blob/main/LICENSE) © 97 | [tus](https://github.com/tus) 98 | 99 | [extensions]: https://tus.io/protocols/resumable-upload.html#protocol-extensions 100 | [creation]: https://tus.io/protocols/resumable-upload.html#creation 101 | [creation with upload]: https://tus.io/protocols/resumable-upload.html#creation-with-upload 102 | [expiration]: https://tus.io/protocols/resumable-upload.html#expiration 103 | [checksum]: https://tus.io/protocols/resumable-upload.html#checksum 104 | [termination]: https://tus.io/protocols/resumable-upload.html#termination 105 | [concatenation]: https://tus.io/protocols/resumable-upload.html#concatenation 106 | [`cleanUpExpiredUploads`]: https://github.com/tus/tus-node-server/tree/main/packages/server#cleanupexpireduploads 107 | [kvstores]: https://github.com/tus/tus-node-server/tree/main/packages/server#kvstores 108 | [`KvStore`]: https://github.com/tus/tus-node-server/blob/main/packages/utils/src/kvstores/Types.ts 109 | [`MemoryKvStore`]: https://github.com/tus/tus-node-server/blob/main/packages/utils/src/kvstores/MemoryKvStore.ts 110 | -------------------------------------------------------------------------------- /packages/azure-store/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://json.schemastore.org/package.json", 3 | "name": "@tus/azure-store", 4 | "version": "2.0.0", 5 | "description": "Azure blob storage for @tus/server", 6 | "main": "./dist/index.js", 7 | "exports": "./dist/index.js", 8 | "type": "module", 9 | "homepage": "https://github.com/tus/tus-node-server#readme", 10 | "bugs": "https://github.com/tus/tus-node-server/issues", 11 | "repository": "tus/tus-node-server", 12 | "files": [ 13 | "dist", 14 | "src", 15 | "!test*" 16 | ], 17 | "license": "MIT", 18 | "scripts": { 19 | "build": "tsc --build", 20 | "pretest": "tsc --build", 21 | "test": "mocha './dist/test/*.js' --exit" 22 | }, 23 | "dependencies": { 24 | "@tus/utils": "^0.6.0", 25 | "@azure/storage-blob": "^12.24.0", 26 | "debug": "^4.3.4" 27 | }, 28 | "devDependencies": { 29 | "@types/debug": "^4.1.12", 30 | "@types/mocha": "^10.0.6", 31 | "@types/node": "^22.13.7", 32 | "mocha": "^11.0.1", 33 | "should": "^13.2.3" 34 | }, 35 | "engines": { 36 | "node": ">=20.19.0" 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /packages/azure-store/src/index.ts: -------------------------------------------------------------------------------- 1 | import type stream from 'node:stream' 2 | import debug from 'debug' 3 | import { 4 | DataStore, 5 | Upload, 6 | ERRORS, 7 | type KvStore, 8 | MemoryKvStore, 9 | TUS_RESUMABLE, 10 | Metadata, 11 | } from '@tus/utils' 12 | import { 13 | type AppendBlobClient, 14 | type BlobGetPropertiesResponse, 15 | BlobServiceClient, 16 | type ContainerClient, 17 | StorageSharedKeyCredential, 18 | } from '@azure/storage-blob' 19 | 20 | type Options = { 21 | cache?: KvStore<Upload> 22 | account: string 23 | accountKey: string 24 | containerName: string 25 | } 26 | 27 | const log = debug('tus-node-server:stores:azurestore') 28 | 29 | /** 30 | * Store using the Azure Storage SDK 31 | * @author Bharath Battaje 32 | */ 33 | export class AzureStore extends DataStore { 34 | private cache: KvStore<Upload> 35 | private blobServiceClient: BlobServiceClient 36 | private containerClient: ContainerClient 37 | private containerName: string 38 | 39 | constructor(options: Options) { 40 | super() 41 | this.cache =
options.cache ?? new MemoryKvStore<Upload>() 42 | this.extensions = ['creation', 'creation-defer-length'] 43 | 44 | if (!options.account) { 45 | throw new Error('Azure store must have an account') 46 | } 47 | if (!options.accountKey) { 48 | throw new Error('Azure store must have an account key') 49 | } 50 | if (!options.containerName) { 51 | throw new Error('Azure store must have a container name') 52 | } 53 | 54 | const storageAccountBaseUrl = `https://${options.account}.blob.core.windows.net` 55 | const sharedKeyCredential = new StorageSharedKeyCredential( 56 | options.account, 57 | options.accountKey 58 | ) 59 | 60 | this.blobServiceClient = new BlobServiceClient( 61 | storageAccountBaseUrl, 62 | sharedKeyCredential 63 | ) 64 | this.containerClient = this.blobServiceClient.getContainerClient( 65 | options.containerName 66 | ) 67 | this.containerName = options.containerName 68 | } 69 | 70 | /** 71 | * Saves upload metadata to the blob's metadata. The upload metadata 72 | * also gets saved in the local cache to avoid calling the Azure server every time. 73 | */ 74 | private async saveMetadata(appendBlobClient: AppendBlobClient, upload: Upload) { 75 | log(`[${upload.id}] saving metadata`) 76 | 77 | await this.cache.set(appendBlobClient.url, upload) 78 | 79 | await appendBlobClient.setMetadata( 80 | { 81 | tus_version: TUS_RESUMABLE, 82 | upload: JSON.stringify({ 83 | ...upload, 84 | // Base64 encode the metadata to avoid errors for non-ASCII characters 85 | metadata: Metadata.stringify(upload.metadata ?? {}), 86 | }), 87 | }, 88 | {} 89 | ) 90 | 91 | log(`[${upload.id}] metadata saved`) 92 | } 93 | 94 | /** 95 | * Retrieves previously saved upload metadata. 96 | * It tries the local cache first, then falls back to the blob metadata. 97 | */ 98 | private async getMetadata(appendBlobClient: AppendBlobClient): Promise<Upload> { 99 | const cached = await this.cache.get(appendBlobClient.url) 100 | 101 | if (cached) { 102 | log(`[${cached.id}] metadata returned from cache`) 103 | return cached 104 | } 105 | 106 | let propertyData: BlobGetPropertiesResponse 107 | try { 108 | propertyData = await appendBlobClient.getProperties() 109 | } catch (error) { 110 | log('Error while fetching the metadata.', error) 111 | throw ERRORS.UNKNOWN_ERROR 112 | } 113 | 114 | if (!propertyData.metadata) { 115 | throw ERRORS.FILE_NOT_FOUND 116 | } 117 | const upload = JSON.parse(propertyData.metadata.upload) as Upload 118 | // Metadata is base64 encoded to avoid errors for non-ASCII characters 119 | // so we need to decode it separately 120 | upload.metadata = Metadata.parse(JSON.stringify(upload.metadata ?? {})) 121 | 122 | await this.cache.set(appendBlobClient.url, upload) 123 | 124 | log('metadata returned from blob get properties') 125 | 126 | return upload 127 | } 128 | 129 | /** 130 | * Provides a readable stream for the previously uploaded file. 131 | */ 132 | public async read(file_id: string) { 133 | const appendBlobClient = this.containerClient.getAppendBlobClient(file_id) 134 | const downloadResponse = await appendBlobClient.download() 135 | 136 | return downloadResponse.readableStreamBody 137 | } 138 | 139 | /** 140 | * Creates an empty append blob on Azure storage and attaches the metadata to it.
141 | */ 142 | public async create(upload: Upload) { 143 | log(`[${upload.id}] initializing azure storage file upload`) 144 | 145 | try { 146 | const appendBlobClient = this.containerClient.getAppendBlobClient(upload.id) 147 | await appendBlobClient.createIfNotExists() 148 | 149 | upload.storage = { 150 | type: 'AzureBlobStore', 151 | path: upload.id, 152 | bucket: this.containerName, 153 | } 154 | 155 | await this.saveMetadata(appendBlobClient, upload) 156 | 157 | return upload 158 | } catch (err) { 159 | throw ERRORS.UNKNOWN_ERROR 160 | } 161 | } 162 | 163 | /** 164 | * Gets the current file upload status. 165 | */ 166 | public async getUpload(id: string): Promise<Upload> { 167 | const appendBlobClient = this.containerClient.getAppendBlobClient(id) 168 | const upload = await this.getMetadata(appendBlobClient) 169 | 170 | if (!upload) { 171 | throw ERRORS.FILE_NOT_FOUND 172 | } 173 | 174 | return new Upload({ 175 | id: id, 176 | size: upload.size, 177 | metadata: upload.metadata, 178 | offset: upload.offset, 179 | storage: upload.storage, 180 | creation_date: upload.creation_date, 181 | }) 182 | } 183 | 184 | /** 185 | * Writes each chunk to Azure blob storage. Please note that the current official Azure Storage Node.js SDK has some limitations 186 | * when it comes to stream uploads. So here we concatenate all the chunks from a request into a block and then upload it 187 | * to Azure storage using appendBlock. This can be upgraded to a stream upload once the SDK supports it. 188 | */ 189 | public async write( 190 | stream: stream.Readable, 191 | id: string, 192 | offset: number 193 | ): Promise<number> { 194 | log(`started writing the file offset [${offset}]`) 195 | 196 | const appendBlobClient = this.containerClient.getAppendBlobClient(id) 197 | const upload = await this.getMetadata(appendBlobClient) 198 | 199 | // biome-ignore lint/suspicious/noAsyncPromiseExecutor: 200 | return new Promise<number>(async (resolve, reject) => { 201 | if (offset < upload.offset) { 202 | // duplicate request scenario, don't want to write the same data 203 | return resolve(upload.offset) 204 | } 205 | 206 | try { 207 | const bufs: Buffer[] = [] 208 | 209 | stream.on('data', async (chunk: Buffer) => { 210 | if (stream.destroyed) { 211 | return reject(ERRORS.ABORTED) 212 | } 213 | 214 | bufs.push(chunk) 215 | }) 216 | 217 | stream.on('end', async () => { 218 | const buf = Buffer.concat(bufs) 219 | 220 | if (buf.length > 0) { 221 | await appendBlobClient.appendBlock(buf, buf.length) 222 | } 223 | 224 | upload.offset = upload.offset + buf.length 225 | log(`saved offset is [${upload.offset}]`) 226 | 227 | await this.saveMetadata(appendBlobClient, upload) 228 | 229 | if (upload.offset === upload.size) { 230 | await this.cache.delete(appendBlobClient.url) 231 | log(`file upload completed successfully [${id}]`) 232 | } 233 | 234 | return resolve(upload.offset) 235 | }) 236 | 237 | stream.on('error', async () => { 238 | return reject(ERRORS.UNKNOWN_ERROR) 239 | }) 240 | } catch (err) { 241 | return reject('something went wrong while writing the file.') 242 | } 243 | }) 244 | } 245 | 246 | public async declareUploadLength(id: string, upload_length: number) { 247 | const appendBlobClient = this.containerClient.getAppendBlobClient(id) 248 | const upload = await this.getMetadata(appendBlobClient) 249 | 250 | if (!upload) { 251 | throw ERRORS.FILE_NOT_FOUND 252 | } 253 | 254 | upload.size = upload_length 255 | 256 | await this.saveMetadata(appendBlobClient, upload) 257 | } 258 | } 259 |
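As editorial context for `declareUploadLength` above: with the `creation-defer-length` extension a client may create an upload without knowing its final size, then declare the length later. A sketch of such an exchange per the tus 1.0 protocol (the upload URL and size are illustrative):

```
POST /files HTTP/1.1
Tus-Resumable: 1.0.0
Upload-Defer-Length: 1

HTTP/1.1 201 Created
Location: /files/24e533e0

PATCH /files/24e533e0 HTTP/1.1
Tus-Resumable: 1.0.0
Upload-Offset: 0
Upload-Length: 960244
Content-Type: application/offset+octet-stream
```

The `Upload-Length` header on that first `PATCH` request is what ultimately triggers `declareUploadLength` on this store.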
-------------------------------------------------------------------------------- /packages/azure-store/src/test/index.ts: -------------------------------------------------------------------------------- 1 | import 'should' 2 | import path from 'node:path' 3 | import {AzureStore} from '@tus/azure-store' 4 | import * as shared from '../../../utils/dist/test/stores.js' 5 | 6 | const fixturesPath = path.resolve('../', '../', 'test', 'fixtures') 7 | const storePath = path.resolve('../', '../', 'test', 'output', 'azure-store') 8 | 9 | describe('AzureStore', () => { 10 | before(function () { 11 | this.testFileSize = 960_244 12 | this.testFileName = 'test.mp4' 13 | this.storePath = storePath 14 | this.testFilePath = path.resolve(fixturesPath, this.testFileName) 15 | }) 16 | 17 | beforeEach(function () { 18 | this.datastore = new AzureStore({ 19 | account: process.env.AZURE_ACCOUNT_ID as string, 20 | accountKey: process.env.AZURE_ACCOUNT_KEY as string, 21 | containerName: process.env.AZURE_CONTAINER_NAME as string, 22 | }) 23 | }) 24 | 25 | shared.shouldHaveStoreMethods() 26 | shared.shouldCreateUploads() 27 | // shared.shouldRemoveUploads() // Not implemented yet 28 | // shared.shouldExpireUploads() // Not implemented yet 29 | shared.shouldWriteUploads() 30 | shared.shouldHandleOffset() 31 | shared.shouldDeclareUploadLength() // Creation-defer-length extension 32 | }) 33 | -------------------------------------------------------------------------------- /packages/azure-store/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://json.schemastore.org/tsconfig.json", 3 | "extends": "../../tsconfig.base.json", 4 | "references": [{ "path": "../utils/tsconfig.json" }], 5 | "include": ["src"], 6 | "compilerOptions": { 7 | "rootDir": "src", 8 | "outDir": "dist", 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /packages/file-store/CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # @tus/file-store 2 | 3 | ## 2.0.0 4 | 5 | ### Major Changes 6 | 7 | - 0f063d9: Change required Node.js version from 16 to 20.19.0 8 | - 7a5a60d: Make this package ESM-only instead of CommonJS. Since Node.js >= 20.19.0 you can `require(esm)` so you can consume this package even if you don't ESM yourself yet. 9 | 10 | ### Patch Changes 11 | 12 | - Updated dependencies [0f063d9] 13 | - Updated dependencies [f190875] 14 | - Updated dependencies [7a5a60d] 15 | - @tus/utils@0.6.0 16 | 17 | ## 1.5.1 18 | 19 | ### Patch Changes 20 | 21 | - Updated dependencies [8f19a53] 22 | - @tus/utils@0.5.0 23 | 24 | ## 1.5.0 25 | 26 | ### Minor Changes 27 | 28 | - de28c6e: Publish source maps and declaration maps 29 | 30 | ### Patch Changes 31 | 32 | - Updated dependencies [de28c6e] 33 | - @tus/utils@0.4.0 34 | 35 | ## 1.4.0 36 | 37 | ### Minor Changes 38 | 39 | - 117e1b2: Add basic storage information to the Upload model. You can now access 40 | `upload.storage` which has `type` (`file`, `s3`, `gcs`), `path`, and when applicable 41 | `bucket`. 
42 | 43 | ### Patch Changes 44 | 45 | - Updated dependencies [117e1b2] 46 | - @tus/utils@0.3.0 47 | 48 | ## 1.3.3 49 | 50 | ### Patch Changes 51 | 52 | - Updated dependencies [60698da] 53 | - @tus/utils@0.2.0 54 | 55 | ## 1.3.2 56 | 57 | ### Patch Changes 58 | 59 | - 54b7321: Fix dead links in README 60 | 61 | ## 1.3.1 62 | 63 | ### Patch Changes 64 | 65 | - 29a3644: Fix incorrectly published package 66 | 67 | ## 1.3.0 68 | 69 | ### Minor Changes 70 | 71 | - a896d25: Add new @tus/utils dependency to replace @tus/server peer dependency 72 | 73 | ### Patch Changes 74 | 75 | - Updated dependencies [a896d25] 76 | - @tus/utils@0.1.0 77 | -------------------------------------------------------------------------------- /packages/file-store/LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2015 tus - Resumable File Uploads 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | 23 | -------------------------------------------------------------------------------- /packages/file-store/README.md: -------------------------------------------------------------------------------- 1 | # `@tus/file-store` 2 | 3 | > 👉 **Note**: since 1.0.0 packages are split and published under the `@tus` scope. The 4 | > old package, `tus-node-server`, is considered unstable and will only receive security 5 | > fixes. Make sure to use the new packages. 6 | 7 | ## Contents 8 | 9 | - [Install](#install) 10 | - [Use](#use) 11 | - [API](#api) 12 | - [`new FileStore(options)`](#new-filestoreoptions) 13 | - [Extensions](#extensions) 14 | - [Examples](#examples) 15 | - [Example: creating your own config store](#example-creating-your-own-config-store) 16 | - [Types](#types) 17 | - [Compatibility](#compatibility) 18 | - [Contribute](#contribute) 19 | - [License](#license) 20 | 21 | ## Install 22 | 23 | In Node.js >=20.19.0, install with npm: 24 | 25 | ```bash 26 | npm install @tus/file-store 27 | ``` 28 | 29 | ## Use 30 | 31 | ```js 32 | import { Server } from "@tus/server"; 33 | import { FileStore } from "@tus/file-store"; 34 | 35 | const server = new Server({ 36 | path: "/files", 37 | datastore: new FileStore({ directory: "./some/path" }), 38 | }); 39 | // ... 40 | ``` 41 | 42 | ## API 43 | 44 | This package exports `FileStore`. There is no default export. 45 | 46 | ### `new FileStore(options)` 47 | 48 | Creates a new file store with options. 
49 | 50 | #### `options.directory` 51 | 52 | The directory to store the files on disk (`string`). 53 | 54 | #### `options.configstore` 55 | 56 | Provide your own storage solution for the metadata of uploads ([`KvStore`][]). 57 | 58 | The default is `FileKvStore`, which puts the metadata file next to the uploaded file. See the 59 | exported [KV stores][kvstores] from `@tus/server` for more information. 60 | 61 | #### `options.expirationPeriodInMilliseconds` 62 | 63 | The time before an _ongoing_ upload is considered expired (`number`). 64 | 65 | This is measured from the time of creation, not modification. Once an upload is considered 66 | expired, uploads can be removed with [`cleanUpExpiredUploads`][] (a usage sketch follows near the end of this README). 67 | 68 | ## Extensions 69 | 70 | The tus protocol supports optional [extensions][]. Below is a table of the supported 71 | extensions in `@tus/file-store`. 72 | 73 | | Extension | `@tus/file-store` | 74 | | ------------------------ | ----------------- | 75 | | [Creation][] | ✅ | 76 | | [Creation With Upload][] | ✅ | 77 | | [Expiration][] | ✅ | 78 | | [Checksum][] | ❌ | 79 | | [Termination][] | ✅ | 80 | | [Concatenation][] | ❌ | 81 | 82 | ## Examples 83 | 84 | ### Example: creating your own config store 85 | 86 | For demonstration purposes we will create a memory config store. Note that this is not a good 87 | idea in production, since all upload metadata is lost when the process restarts. It's written in TypeScript. 88 | 89 | ```ts 90 | import type { Upload } from "@tus/server"; 91 | 92 | export class MemoryConfigstore { 93 | data: Map<string, Upload> = new Map(); 94 | 95 | get(key: string): Upload | undefined { 96 | return this.data.get(key); 97 | } 98 | 99 | set(key: string, value: Upload) { 100 | this.data.set(key, value); 101 | } 102 | 103 | delete(key: string) { 104 | return this.data.delete(key); 105 | } 106 | 107 | get list(): Record<string, Upload> { 108 | return Object.fromEntries(this.data.entries()); 109 | } 110 | } 111 | ``` 112 | 113 | Then use it: 114 | 115 | ```js 116 | import {MemoryConfigstore} from './MemoryConfigstore' 117 | 118 | const store = new FileStore({directory: './some/path', configstore: new MemoryConfigstore()}) 119 | ``` 120 | 121 | ## Types 122 | 123 | This package is fully typed with TypeScript. 124 | 125 | ## Compatibility 126 | 127 | This package requires Node.js >=20.19.0. 128 | 129 | ## Contribute 130 | 131 | See 132 | [`contributing.md`](https://github.com/tus/tus-node-server/blob/main/.github/contributing.md).
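For illustration, a minimal sketch of combining `options.expirationPeriodInMilliseconds` with the server's [`cleanUpExpiredUploads`][]. The one-day period and hourly interval below are illustrative values, not recommendations:

```js
import { Server } from "@tus/server";
import { FileStore } from "@tus/file-store";

const server = new Server({
  path: "/files",
  datastore: new FileStore({
    directory: "./files",
    // Ongoing uploads created more than a day ago count as expired
    expirationPeriodInMilliseconds: 24 * 60 * 60 * 1000,
  }),
});

// Periodically remove expired uploads; resolves to the number of removed uploads.
setInterval(() => {
  server.cleanUpExpiredUploads().catch(console.error);
}, 60 * 60 * 1000);
```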
133 | 134 | ## License 135 | 136 | [MIT](https://github.com/tus/tus-node-server/blob/main/LICENSE) © 137 | [tus](https://github.com/tus) 138 | 139 | [extensions]: https://tus.io/protocols/resumable-upload.html#protocol-extensions 140 | [creation]: https://tus.io/protocols/resumable-upload.html#creation 141 | [creation with upload]: https://tus.io/protocols/resumable-upload.html#creation-with-upload 142 | [expiration]: https://tus.io/protocols/resumable-upload.html#expiration 143 | [checksum]: https://tus.io/protocols/resumable-upload.html#checksum 144 | [termination]: https://tus.io/protocols/resumable-upload.html#termination 145 | [concatenation]: https://tus.io/protocols/resumable-upload.html#concatenation 146 | [`cleanUpExpiredUploads`]: https://github.com/tus/tus-node-server/tree/main/packages/server#cleanupexpireduploads 147 | [kvstores]: https://github.com/tus/tus-node-server/tree/main/packages/server#kvstores 148 | [`KvStore`]: https://github.com/tus/tus-node-server/blob/main/packages/utils/src/kvstores/Types.ts 149 | -------------------------------------------------------------------------------- /packages/file-store/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://json.schemastore.org/package.json", 3 | "name": "@tus/file-store", 4 | "version": "2.0.0", 5 | "description": "Local file storage for @tus/server", 6 | "main": "./dist/index.js", 7 | "exports": "./dist/index.js", 8 | "type": "module", 9 | "homepage": "https://github.com/tus/tus-node-server#readme", 10 | "bugs": "https://github.com/tus/tus-node-server/issues", 11 | "repository": "tus/tus-node-server", 12 | "files": [ 13 | "dist", 14 | "src", 15 | "!test*" 16 | ], 17 | "license": "MIT", 18 | "scripts": { 19 | "build": "tsc --build", 20 | "pretest": "tsc --build", 21 | "test": "mocha './dist/test/*.js' --exit" 22 | }, 23 | "dependencies": { 24 | "@tus/utils": "^0.6.0", 25 | "debug": "^4.3.4" 26 | }, 27 | "devDependencies": { 28 | "@types/debug": "^4.1.12", 29 | "@types/mocha": "^10.0.6", 30 | "@types/node": "^22.13.7", 31 | "mocha": "^11.0.1", 32 | "should": "^13.2.3" 33 | }, 34 | "optionalDependencies": { 35 | "@redis/client": "^1.6.0" 36 | }, 37 | "engines": { 38 | "node": ">=20.19.0" 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /packages/file-store/src/configstores/index.ts: -------------------------------------------------------------------------------- 1 | export { 2 | FileKvStore as FileConfigstore, 3 | MemoryKvStore as MemoryConfigstore, 4 | RedisKvStore as RedisConfigstore, 5 | KvStore as Configstore, 6 | } from '@tus/utils' 7 | -------------------------------------------------------------------------------- /packages/file-store/src/index.ts: -------------------------------------------------------------------------------- 1 | // TODO: use /promises versions 2 | import fs from 'node:fs' 3 | import fsProm from 'node:fs/promises' 4 | import path from 'node:path' 5 | import stream from 'node:stream' 6 | import type http from 'node:http' 7 | 8 | import debug from 'debug' 9 | 10 | import {type Configstore, FileConfigstore} from './configstores/index.js' 11 | import {DataStore, Upload, ERRORS} from '@tus/utils' 12 | 13 | export * from './configstores/index.js' 14 | 15 | type Options = { 16 | directory: string 17 | configstore?: Configstore 18 | expirationPeriodInMilliseconds?: number 19 | } 20 | 21 | const MASK = '0777' 22 | const IGNORED_MKDIR_ERROR = 'EEXIST' 23 | const FILE_DOESNT_EXIST =
'ENOENT' 24 | const log = debug('tus-node-server:stores:filestore') 25 | 26 | export class FileStore extends DataStore { 27 | directory: string 28 | configstore: Configstore 29 | expirationPeriodInMilliseconds: number 30 | 31 | constructor({directory, configstore, expirationPeriodInMilliseconds}: Options) { 32 | super() 33 | this.directory = directory 34 | this.configstore = configstore ?? new FileConfigstore(directory) 35 | this.expirationPeriodInMilliseconds = expirationPeriodInMilliseconds ?? 0 36 | this.extensions = [ 37 | 'creation', 38 | 'creation-with-upload', 39 | 'creation-defer-length', 40 | 'termination', 41 | 'expiration', 42 | ] 43 | // TODO: this async call can not happen in the constructor 44 | this.checkOrCreateDirectory() 45 | } 46 | 47 | /** 48 | * Ensure the directory exists. 49 | */ 50 | private checkOrCreateDirectory() { 51 | fs.mkdir(this.directory, {mode: MASK, recursive: true}, (error) => { 52 | if (error && error.code !== IGNORED_MKDIR_ERROR) { 53 | throw error 54 | } 55 | }) 56 | } 57 | 58 | /** 59 | * Create an empty file. 60 | */ 61 | async create(file: Upload): Promise<Upload> { 62 | const dirs = file.id.split('/').slice(0, -1) 63 | const filePath = path.join(this.directory, file.id) 64 | 65 | await fsProm.mkdir(path.join(this.directory, ...dirs), {recursive: true}) 66 | await fsProm.writeFile(filePath, '') 67 | await this.configstore.set(file.id, file) 68 | 69 | file.storage = {type: 'file', path: filePath} 70 | 71 | return file 72 | } 73 | 74 | read(file_id: string) { 75 | return fs.createReadStream(path.join(this.directory, file_id)) 76 | } 77 | 78 | remove(file_id: string): Promise<void> { 79 | return new Promise((resolve, reject) => { 80 | fs.unlink(`${this.directory}/${file_id}`, (err) => { 81 | if (err) { 82 | log('[FileStore] delete: Error', err) 83 | reject(ERRORS.FILE_NOT_FOUND) 84 | return 85 | } 86 | 87 | try { 88 | resolve(this.configstore.delete(file_id)) 89 | } catch (error) { 90 | reject(error) 91 | } 92 | }) 93 | }) 94 | } 95 | 96 | write( 97 | readable: http.IncomingMessage | stream.Readable, 98 | file_id: string, 99 | offset: number 100 | ): Promise<number> { 101 | const file_path = path.join(this.directory, file_id) 102 | const writeable = fs.createWriteStream(file_path, { 103 | flags: 'r+', 104 | start: offset, 105 | }) 106 | 107 | let bytes_received = 0 108 | const transform = new stream.Transform({ 109 | transform(chunk, _, callback) { 110 | bytes_received += chunk.length 111 | callback(null, chunk) 112 | }, 113 | }) 114 | 115 | return new Promise((resolve, reject) => { 116 | stream.pipeline(readable, transform, writeable, (err) => { 117 | if (err) { 118 | log('[FileStore] write: Error', err) 119 | return reject(ERRORS.FILE_WRITE_ERROR) 120 | } 121 | 122 | log(`[FileStore] write: ${bytes_received} bytes written to ${file_path}`) 123 | offset += bytes_received 124 | log(`[FileStore] write: File is now ${offset} bytes`) 125 | 126 | return resolve(offset) 127 | }) 128 | }) 129 | } 130 | 131 | async getUpload(id: string): Promise<Upload> { 132 | const file = await this.configstore.get(id) 133 | 134 | if (!file) { 135 | throw ERRORS.FILE_NOT_FOUND 136 | } 137 | 138 | return new Promise((resolve, reject) => { 139 | const file_path = `${this.directory}/${id}` 140 | fs.stat(file_path, (error, stats) => { 141 | if (error && error.code === FILE_DOESNT_EXIST && file) { 142 | log( 143 | `[FileStore] getUpload: No file found at ${file_path} but db record exists`, 144 | file 145 | ) 146 | return reject(ERRORS.FILE_NO_LONGER_EXISTS) 147 | } 148 | 149 | if (error && error.code ===
FILE_DOESNT_EXIST) { 150 | log(`[FileStore] getUpload: No file found at ${file_path}`) 151 | return reject(ERRORS.FILE_NOT_FOUND) 152 | } 153 | 154 | if (error) { 155 | return reject(error) 156 | } 157 | 158 | if (stats.isDirectory()) { 159 | log(`[FileStore] getUpload: ${file_path} is a directory`) 160 | return reject(ERRORS.FILE_NOT_FOUND) 161 | } 162 | 163 | return resolve( 164 | new Upload({ 165 | id, 166 | size: file.size, 167 | offset: stats.size, 168 | metadata: file.metadata, 169 | creation_date: file.creation_date, 170 | storage: {type: 'file', path: file_path}, 171 | }) 172 | ) 173 | }) 174 | }) 175 | } 176 | 177 | async declareUploadLength(id: string, upload_length: number) { 178 | const file = await this.configstore.get(id) 179 | 180 | if (!file) { 181 | throw ERRORS.FILE_NOT_FOUND 182 | } 183 | 184 | file.size = upload_length 185 | 186 | await this.configstore.set(id, file) 187 | } 188 | 189 | async deleteExpired(): Promise { 190 | const now = new Date() 191 | const toDelete: Promise[] = [] 192 | 193 | if (!this.configstore.list) { 194 | throw ERRORS.UNSUPPORTED_EXPIRATION_EXTENSION 195 | } 196 | 197 | const uploadKeys = await this.configstore.list() 198 | for (const file_id of uploadKeys) { 199 | try { 200 | const info = await this.configstore.get(file_id) 201 | if ( 202 | info && 203 | 'creation_date' in info && 204 | this.getExpiration() > 0 && 205 | info.size !== info.offset && 206 | info.creation_date 207 | ) { 208 | const creation = new Date(info.creation_date) 209 | const expires = new Date(creation.getTime() + this.getExpiration()) 210 | if (now > expires) { 211 | toDelete.push(this.remove(file_id)) 212 | } 213 | } 214 | } catch (error) { 215 | if (error !== ERRORS.FILE_NO_LONGER_EXISTS) { 216 | throw error 217 | } 218 | } 219 | } 220 | 221 | await Promise.all(toDelete) 222 | return toDelete.length 223 | } 224 | 225 | getExpiration(): number { 226 | return this.expirationPeriodInMilliseconds 227 | } 228 | } 229 | -------------------------------------------------------------------------------- /packages/file-store/src/test/index.ts: -------------------------------------------------------------------------------- 1 | import 'should' 2 | 3 | import {strict as assert} from 'node:assert' 4 | import fs from 'node:fs' 5 | import fsProm from 'node:fs/promises' 6 | import path from 'node:path' 7 | 8 | import sinon from 'sinon' 9 | 10 | import {FileStore, FileConfigstore} from '@tus/file-store' 11 | import {Upload} from '@tus/utils' 12 | 13 | import * as shared from '../../../utils/dist/test/stores.js' 14 | 15 | const fixturesPath = path.resolve('../', '../', 'test', 'fixtures') 16 | const storePath = path.resolve('../', '../', 'test', 'output', 'file-store') 17 | 18 | async function cleanup() { 19 | if (fs.existsSync(storePath)) { 20 | await fsProm.rm(storePath, {recursive: true}) 21 | await fsProm.mkdir(storePath) 22 | } 23 | } 24 | 25 | describe('FileStore', function () { 26 | before(function () { 27 | this.testFileSize = 960_244 28 | this.testFileName = 'test.mp4' 29 | this.storePath = storePath 30 | this.testFilePath = path.resolve(fixturesPath, this.testFileName) 31 | this.filesDirectory = storePath 32 | }) 33 | 34 | beforeEach(function () { 35 | sinon.spy(fs, 'mkdir') 36 | this.datastore = new FileStore({ 37 | directory: this.storePath, 38 | }) 39 | }) 40 | 41 | this.afterEach(async () => { 42 | // @ts-expect-error ignore 43 | fs.mkdir.restore() 44 | await cleanup() 45 | }) 46 | 47 | it('should create a directory for the files', function (done) { 48 | // @ts-expect-error 
should 49 | assert(fs.mkdir.calledOnce) 50 | // @ts-expect-error should 51 | assert.equal(this.datastore.directory, fs.mkdir.getCall(0).args[0]) 52 | done() 53 | }) 54 | 55 | describe('create', () => { 56 | const file = new Upload({id: '1234', size: 1000, offset: 0}) 57 | 58 | it('should resolve when the directory exists', function () { 59 | return this.datastore.create(file).should.be.fulfilled() 60 | }) 61 | 62 | it('should create an empty file', async function () { 63 | // TODO: this test would pass even if `datastore.create` would not create any file 64 | // as the file probably already exists from other tests 65 | await this.datastore.create(file) 66 | const stats = fs.statSync(path.join(this.datastore.directory, file.id)) 67 | assert.equal(stats.size, 0) 68 | }) 69 | }) 70 | 71 | describe('write', function () { 72 | const file = new Upload({ 73 | id: '1234', 74 | // @ts-expect-error todo 75 | size: this.testFileSize, 76 | offset: 0, 77 | metadata: {filename: 'world_domination_plan.pdf', is_confidential: null}, 78 | }) 79 | 80 | it("created file's size should match 'upload_length'", async function () { 81 | await this.datastore.create(file) 82 | await this.datastore.write(fs.createReadStream(this.testFilePath), file.id, 0) 83 | const stats = fs.statSync(this.testFilePath) 84 | assert.equal(stats.size, this.testFileSize) 85 | }) 86 | }) 87 | 88 | describe('getUpload', () => { 89 | it('should reject directories', function () { 90 | return this.datastore.getUpload('').should.be.rejected() 91 | }) 92 | }) 93 | 94 | describe('FileConfigstore', () => { 95 | it('should ignore random files in directory when calling list()', async () => { 96 | const store = new FileConfigstore(storePath) 97 | const files = ['tus', 'tus.json', 'tu', 'tuss.json', 'random'] 98 | for (const file of files) { 99 | await fsProm.writeFile(path.resolve(storePath, file), '') 100 | } 101 | const list = await store.list() 102 | 103 | // list returns the amount of uploads. 104 | // One upload consists of the file and the JSON info file. 105 | // But from the list perspective that is only one upload. 106 | assert.strictEqual(list.length, 1) 107 | }) 108 | }) 109 | 110 | shared.shouldHaveStoreMethods() 111 | shared.shouldCreateUploads() 112 | shared.shouldRemoveUploads() // Termination extension 113 | shared.shouldExpireUploads() // Expiration extension 114 | shared.shouldWriteUploads() 115 | shared.shouldHandleOffset() 116 | shared.shouldDeclareUploadLength() // Creation-defer-length extension 117 | }) 118 | -------------------------------------------------------------------------------- /packages/file-store/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://json.schemastore.org/tsconfig.json", 3 | "extends": "../../tsconfig.base.json", 4 | "references": [{ "path": "../utils/tsconfig.json" }], 5 | "include": ["src"], 6 | "compilerOptions": { 7 | "rootDir": "src", 8 | "outDir": "dist", 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /packages/gcs-store/CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # @tus/gcs-store 2 | 3 | ## 2.0.0 4 | 5 | ### Major Changes 6 | 7 | - 4a0fe1c: @google-cloud/storage@7 is now required as peer dependency. 8 | - 0f063d9: Change required Node.js version from 16 to 20.19.0 9 | - 7a5a60d: Make this package ESM-only instead of CommonJS. 
Since Node.js >= 20.19.0 you can `require(esm)` so you can consume this package even if you don't ESM yourself yet. 10 | 11 | ### Patch Changes 12 | 13 | - Updated dependencies [0f063d9] 14 | - Updated dependencies [f190875] 15 | - Updated dependencies [7a5a60d] 16 | - @tus/utils@0.6.0 17 | 18 | ## 1.4.2 19 | 20 | ### Patch Changes 21 | 22 | - 8217f5e: Correctly pass the content type from upload.metadata to GCS. 23 | 24 | ## 1.4.1 25 | 26 | ### Patch Changes 27 | 28 | - Updated dependencies [8f19a53] 29 | - @tus/utils@0.5.0 30 | 31 | ## 1.4.0 32 | 33 | ### Minor Changes 34 | 35 | - de28c6e: Publish source maps and declaration maps 36 | 37 | ### Patch Changes 38 | 39 | - Updated dependencies [de28c6e] 40 | - @tus/utils@0.4.0 41 | 42 | ## 1.3.0 43 | 44 | ### Minor Changes 45 | 46 | - 117e1b2: Add basic storage information to the Upload model. You can now access 47 | `upload.storage` which has `type` (`file`, `s3`, `gcs`), `path`, and when applicable 48 | `bucket`. 49 | 50 | ### Patch Changes 51 | 52 | - Updated dependencies [117e1b2] 53 | - @tus/utils@0.3.0 54 | 55 | ## 1.2.2 56 | 57 | ### Patch Changes 58 | 59 | - 86b8b9f: Fix CRC32 error when writing offsetted data to store 60 | - Updated dependencies [60698da] 61 | - @tus/utils@0.2.0 62 | 63 | ## 1.2.1 64 | 65 | ### Patch Changes 66 | 67 | - 29a3644: Fix incorrectly published package 68 | 69 | ## 1.2.0 70 | 71 | ### Minor Changes 72 | 73 | - a896d25: Add new @tus/utils dependency to replace @tus/server peer dependency 74 | 75 | ### Patch Changes 76 | 77 | - Updated dependencies [a896d25] 78 | - @tus/utils@0.1.0 79 | -------------------------------------------------------------------------------- /packages/gcs-store/LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2015 tus - Resumable File Uploads 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | 23 | -------------------------------------------------------------------------------- /packages/gcs-store/README.md: -------------------------------------------------------------------------------- 1 | # `@tus/gcs-store` 2 | 3 | > 👉 **Note**: since 1.0.0 packages are split and published under the `@tus` scope. The 4 | > old package, `tus-node-server`, is considered unstable and will only receive security 5 | > fixes. Make sure to use the new packages. 
6 | 7 | ## Contents 8 | 9 | - [Install](#install) 10 | - [Use](#use) 11 | - [API](#api) 12 | - [`new GCSStore(options)`](#new-gcsstoreoptions) - [Extensions](#extensions) 13 | - [Types](#types) 14 | - [Compatibility](#compatibility) 15 | - [Contribute](#contribute) 16 | - [License](#license) 17 | 18 | ## Install 19 | 20 | In Node.js >=20.19.0, install with npm: 21 | 22 | ```bash 23 | npm install @tus/gcs-store 24 | ``` 25 | 26 | ## Use 27 | 28 | ```js 29 | import { Server } from "@tus/server"; 30 | import { GCSStore } from "@tus/gcs-store"; 31 | import { Storage } from "@google-cloud/storage"; 32 | 33 | const storage = new Storage({ keyFilename: "key.json" }); 34 | 35 | const server = new Server({ 36 | path: "/files", 37 | datastore: new GCSStore({ 38 | bucket: storage.bucket("tus-node-server-ci"), 39 | }), 40 | }); 41 | // ... 42 | ``` 43 | 44 | ## API 45 | 46 | This package exports `GCSStore`. There is no default export. 47 | 48 | ### `new GCSStore(options)` 49 | 50 | Creates a new Google Cloud Storage store by passing a GCS bucket instance. 51 | 52 | #### `options.bucket` 53 | 54 | The bucket instance. 55 | 56 | ## Extensions 57 | 58 | The tus protocol supports optional [extensions][]. Below is a table of the supported 59 | extensions in `@tus/gcs-store`. 60 | 61 | | Extension | `@tus/gcs-store` | 62 | | ------------------------ | ---------------- | 63 | | [Creation][] | ✅ | 64 | | [Creation With Upload][] | ✅ | 65 | | [Expiration][] | ❌ | 66 | | [Checksum][] | ❌ | 67 | | [Termination][] | ❌ | 68 | | [Concatenation][] | ❌ | 69 | 70 | ## Types 71 | 72 | This package is fully typed with TypeScript. 73 | 74 | ## Compatibility 75 | 76 | This package requires Node.js >=20.19.0. 77 | 78 | ## Contribute 79 | 80 | See 81 | [`contributing.md`](https://github.com/tus/tus-node-server/blob/main/.github/contributing.md). 
82 | 83 | ## License 84 | 85 | [MIT](https://github.com/tus/tus-node-server/blob/master/license) © 86 | [tus](https://github.com/tus) 87 | 88 | [extensions]: https://tus.io/protocols/resumable-upload.html#protocol-extensions 89 | [creation]: https://tus.io/protocols/resumable-upload.html#creation 90 | [creation with upload]: https://tus.io/protocols/resumable-upload.html#creation-with-upload 91 | [expiration]: https://tus.io/protocols/resumable-upload.html#expiration 92 | [checksum]: https://tus.io/protocols/resumable-upload.html#checksum 93 | [termination]: https://tus.io/protocols/resumable-upload.html#termination 94 | [concatenation]: https://tus.io/protocols/resumable-upload.html#concatenation 95 | -------------------------------------------------------------------------------- /packages/gcs-store/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://json.schemastore.org/package.json", 3 | "name": "@tus/gcs-store", 4 | "version": "2.0.0", 5 | "description": "Google Cloud Storage for @tus/server", 6 | "main": "./dist/index.js", 7 | "exports": "./dist/index.js", 8 | "type": "module", 9 | "homepage": "https://github.com/tus/tus-node-server#readme", 10 | "bugs": "https://github.com/tus/tus-node-server/issues", 11 | "repository": "tus/tus-node-server", 12 | "license": "MIT", 13 | "files": [ 14 | "dist", 15 | "src", 16 | "!test*" 17 | ], 18 | "scripts": { 19 | "build": "tsc --build", 20 | "pretest": "tsc --build", 21 | "test": "mocha './dist/test/*.js' --exit --timeout 30000" 22 | }, 23 | "dependencies": { 24 | "@tus/utils": "^0.6.0", 25 | "debug": "^4.3.4" 26 | }, 27 | "devDependencies": { 28 | "@google-cloud/storage": "^7.15.2", 29 | "@tus/server": "^2.0.0", 30 | "@types/debug": "^4.1.12", 31 | "@types/mocha": "^10.0.6", 32 | "@types/node": "^22.13.7", 33 | "mocha": "^11.0.1", 34 | "should": "^13.2.3" 35 | }, 36 | "peerDependencies": { 37 | "@google-cloud/storage": "^7.15.2" 38 | }, 39 | "engines": { 40 | "node": ">=20.19.0" 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /packages/gcs-store/src/index.ts: -------------------------------------------------------------------------------- 1 | import type {Bucket, CreateWriteStreamOptions} from '@google-cloud/storage' 2 | import stream from 'node:stream' 3 | import type http from 'node:http' 4 | import debug from 'debug' 5 | 6 | import {ERRORS, TUS_RESUMABLE, Upload, DataStore} from '@tus/utils' 7 | 8 | const log = debug('tus-node-server:stores:gcsstore') 9 | 10 | type Options = {bucket: Bucket} 11 | 12 | export class GCSStore extends DataStore { 13 | bucket: Bucket 14 | 15 | constructor(options: Options) { 16 | super() 17 | 18 | if (!options.bucket) { 19 | throw new Error('GCSDataStore must have a bucket') 20 | } 21 | 22 | this.bucket = options.bucket 23 | 24 | this.extensions = ['creation', 'creation-with-upload', 'creation-defer-length'] 25 | } 26 | 27 | create(file: Upload): Promise { 28 | return new Promise((resolve, reject) => { 29 | if (!file.id) { 30 | reject(ERRORS.FILE_NOT_FOUND) 31 | return 32 | } 33 | 34 | const gcs_file = this.bucket.file(file.id) 35 | 36 | file.storage = {type: 'gcs', path: file.id, bucket: this.bucket.name} 37 | 38 | const options: CreateWriteStreamOptions = { 39 | metadata: { 40 | metadata: { 41 | tus_version: TUS_RESUMABLE, 42 | ...this.#stringifyUploadKeys(file), 43 | }, 44 | }, 45 | } 46 | if (file.metadata?.contentType) { 47 | options.contentType = file.metadata.contentType 48 | } 49 | 
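// The PassThrough below is ended immediately, so this writes a zero-byte object whose only purpose is to persist the upload's tus metadata in GCS.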
const fake_stream = new stream.PassThrough() 50 | fake_stream.end() 51 | fake_stream 52 | .pipe(gcs_file.createWriteStream(options)) 53 | .on('error', reject) 54 | .on('finish', () => { 55 | resolve(file) 56 | }) 57 | }) 58 | } 59 | 60 | read(file_id: string) { 61 | return this.bucket.file(file_id).createReadStream() 62 | } 63 | 64 | /** 65 | * Get the file metatata from the object in GCS, then upload a new version 66 | * passing through the metadata to the new version. 67 | */ 68 | write( 69 | readable: http.IncomingMessage | stream.Readable, 70 | id: string, 71 | offset: number 72 | ): Promise { 73 | // GCS Doesn't persist metadata within versions, 74 | // get that metadata first 75 | return this.getUpload(id).then((upload) => { 76 | return new Promise((resolve, reject) => { 77 | const file = this.bucket.file(id) 78 | const destination = upload.offset === 0 ? file : this.bucket.file(`${id}_patch`) 79 | 80 | upload.offset = offset 81 | 82 | const options = { 83 | metadata: { 84 | metadata: { 85 | tus_version: TUS_RESUMABLE, 86 | ...this.#stringifyUploadKeys(upload), 87 | }, 88 | }, 89 | } 90 | const write_stream = destination.createWriteStream(options) 91 | if (!write_stream || readable.destroyed) { 92 | reject(ERRORS.FILE_WRITE_ERROR) 93 | return 94 | } 95 | 96 | let bytes_received = upload.offset 97 | readable.on('data', (buffer) => { 98 | bytes_received += buffer.length 99 | }) 100 | stream.pipeline(readable, write_stream, async (e) => { 101 | if (e) { 102 | log(e) 103 | try { 104 | await destination.delete({ignoreNotFound: true}) 105 | } finally { 106 | reject(ERRORS.FILE_WRITE_ERROR) 107 | } 108 | } else { 109 | log(`${bytes_received} bytes written`) 110 | try { 111 | if (file !== destination) { 112 | await this.bucket.combine([file, destination], file) 113 | await Promise.all([ 114 | file.setMetadata(options.metadata), 115 | destination.delete({ignoreNotFound: true}), 116 | ]) 117 | } 118 | 119 | resolve(bytes_received) 120 | } catch (error) { 121 | log(error) 122 | reject(ERRORS.FILE_WRITE_ERROR) 123 | } 124 | } 125 | }) 126 | }) 127 | }) 128 | } 129 | 130 | getUpload(id: string): Promise { 131 | return new Promise((resolve, reject) => { 132 | if (!id) { 133 | reject(ERRORS.FILE_NOT_FOUND) 134 | return 135 | } 136 | 137 | // biome-ignore lint/suspicious/noExplicitAny: todo 138 | this.bucket.file(id).getMetadata((error: any, metadata: any) => { 139 | if (error && error.code === 404) { 140 | return reject(ERRORS.FILE_NOT_FOUND) 141 | } 142 | 143 | if (error) { 144 | log('[GCSDataStore] getFileMetadata', error) 145 | return reject(error) 146 | } 147 | 148 | const {size, metadata: meta} = metadata.metadata 149 | return resolve( 150 | new Upload({ 151 | id, 152 | size: size ? Number.parseInt(size, 10) : undefined, 153 | offset: Number.parseInt(metadata.size, 10), // `size` is set by GCS 154 | metadata: meta ? JSON.parse(meta) : undefined, 155 | storage: {type: 'gcs', path: id, bucket: this.bucket.name}, 156 | }) 157 | ) 158 | }) 159 | }) 160 | } 161 | 162 | async declareUploadLength(id: string, upload_length: number) { 163 | const upload = await this.getUpload(id) 164 | 165 | upload.size = upload_length 166 | 167 | await this.bucket.file(id).setMetadata({metadata: this.#stringifyUploadKeys(upload)}) 168 | } 169 | /** 170 | * Convert the Upload object to a format that can be stored in GCS metadata. 171 | */ 172 | #stringifyUploadKeys(upload: Upload) { 173 | return { 174 | size: upload.size ?? 
null, 175 | sizeIsDeferred: `${upload.sizeIsDeferred}`, 176 | offset: upload.offset, 177 | metadata: JSON.stringify(upload.metadata), 178 | storage: JSON.stringify(upload.storage), 179 | } 180 | } 181 | } 182 | -------------------------------------------------------------------------------- /packages/gcs-store/src/test/index.ts: -------------------------------------------------------------------------------- 1 | import path from 'node:path' 2 | 3 | import {GCSStore} from '@tus/gcs-store' 4 | 5 | import * as shared from '../../../utils/dist/test/stores.js' 6 | 7 | import {Storage} from '@google-cloud/storage' 8 | 9 | const fixturesPath = path.resolve('../', '../', 'test', 'fixtures') 10 | const storePath = path.resolve('../', '../', 'test', 'output', 'gcs-store') 11 | 12 | describe('GCSStore', () => { 13 | before(function () { 14 | this.testFileSize = 960_244 15 | this.testFileName = 'test.mp4' 16 | this.storePath = storePath 17 | this.testFilePath = path.resolve(fixturesPath, this.testFileName) 18 | }) 19 | 20 | beforeEach(function () { 21 | const storage = new Storage({ 22 | projectId: 'tus-node-server', 23 | keyFilename: path.resolve('../', '../', 'keyfile.json'), 24 | }) 25 | 26 | this.datastore = new GCSStore({ 27 | bucket: storage.bucket('tus-node-server-ci'), 28 | }) 29 | }) 30 | 31 | shared.shouldHaveStoreMethods() 32 | shared.shouldCreateUploads() 33 | // Termination extension not implemented yet 34 | // shared.shouldRemoveUploads() 35 | shared.shouldWriteUploads() 36 | shared.shouldHandleOffset() 37 | shared.shouldDeclareUploadLength() // Creation-defer-length extension 38 | }) 39 | -------------------------------------------------------------------------------- /packages/gcs-store/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://json.schemastore.org/tsconfig.json", 3 | "extends": "../../tsconfig.base.json", 4 | "references": [{ "path": "../utils/tsconfig.json" }], 5 | "include": ["src"], 6 | "compilerOptions": { 7 | "rootDir": "src", 8 | "outDir": "dist", 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /packages/s3-store/CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # @tus/s3-store 2 | 3 | ## 2.0.0 4 | 5 | ### Major Changes 6 | 7 | - 0f063d9: Change required Node.js version from 16 to 20.19.0 8 | - 7a5a60d: Make this package ESM-only instead of CommonJS. Since Node.js >= 20.19.0 you can `require(esm)` so you can consume this package even if you don't ESM yourself yet. 9 | 10 | ### Patch Changes 11 | 12 | - Updated dependencies [0f063d9] 13 | - Updated dependencies [f190875] 14 | - Updated dependencies [7a5a60d] 15 | - @tus/utils@0.6.0 16 | 17 | ## 1.9.1 18 | 19 | ### Patch Changes 20 | 21 | - 274a0d1: Bump @aws-sdk/client-s3 from 3.717.0 to 3.758.0 22 | - 81eb03a: Add missing documentation for `maxMultipartParts` option added in #712 23 | - c8e78bd: Fix unhandled promise rejection when uploading a part fails, in which case we returned too early, leaving other parts running in the background. 24 | 25 | ## 1.9.0 26 | 27 | ### Minor Changes 28 | 29 | - 7db2f17: Add `maxMultipartParts` option. This can be used when using S3-compatible storage provider with different part number limitations. 30 | 31 | ## 1.8.0 32 | 33 | ### Minor Changes 34 | 35 | - 6351485: Add `minPartSize` option. 
This can be used alongside `partSize` to guarantee that all non-trailing parts are _exactly_ the same size, which is required for Cloudflare R2. 36 | 37 | ### Patch Changes 38 | 39 | - c970858: Fix zero byte files only storing a .info file. Now correctly stores an empty file. 40 | 41 | ## 1.7.0 42 | 43 | ### Minor Changes 44 | 45 | - b1c07bc: Change private modifier to protected 46 | 47 | ### Patch Changes 48 | 49 | - 8236c05: Bump @aws-sdk/client-s3 from 3.703.0 to 3.717.0 50 | - Updated dependencies [42c6267] 51 | - @tus/utils@0.5.1 52 | 53 | ## 1.6.2 54 | 55 | ### Patch Changes 56 | 57 | - 32d847d: Fix increment for part numbers 58 | - fdad8ff: Bump @aws-sdk/client-s3 from 3.701.0 to 3.703.0 59 | 60 | ## 1.6.1 61 | 62 | ### Patch Changes 63 | 64 | - Updated dependencies [8f19a53] 65 | - @tus/utils@0.5.0 66 | 67 | ## 1.6.0 68 | 69 | ### Minor Changes 70 | 71 | - de28c6e: Publish source maps and declaration maps 72 | 73 | ### Patch Changes 74 | 75 | - Updated dependencies [de28c6e] 76 | - @tus/utils@0.4.0 77 | 78 | ## 1.5.0 79 | 80 | ### Minor Changes 81 | 82 | - 117e1b2: Add basic storage information to the Upload model. You can now access 83 | `upload.storage` which has `type` (`file`, `s3`, `gcs`), `path`, and when applicable 84 | `bucket`. 85 | 86 | ### Patch Changes 87 | 88 | - Updated dependencies [117e1b2] 89 | - @tus/utils@0.3.0 90 | 91 | ## 1.4.3 92 | 93 | ### Patch Changes 94 | 95 | - Updated dependencies [60698da] 96 | - @tus/utils@0.2.0 97 | 98 | ## 1.4.2 99 | 100 | ### Patch Changes 101 | 102 | - 54b7321: Fix dead links in README 103 | 104 | ## 1.4.1 105 | 106 | ### Patch Changes 107 | 108 | - 29a3644: Fix incorrectly published package 109 | 110 | ## 1.4.0 111 | 112 | ### Minor Changes 113 | 114 | - 0393e75: Introduce backpressure to avoid writing more temporary files to disk than we 115 | can upload & fix offset calculation by downloading the incomplete part first 116 | - a896d25: Add new @tus/utils dependency to replace @tus/server peer dependency 117 | 118 | ### Patch Changes 119 | 120 | - Updated dependencies [a896d25] 121 | - @tus/utils@0.1.0 122 | -------------------------------------------------------------------------------- /packages/s3-store/LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2015 tus - Resumable File Uploads 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | 23 | -------------------------------------------------------------------------------- /packages/s3-store/README.md: -------------------------------------------------------------------------------- 1 | # `@tus/s3-store` 2 | 3 | > 👉 **Note**: since 1.0.0 packages are split and published under the `@tus` scope. The 4 | > old package, `tus-node-server`, is considered unstable and will only receive security 5 | > fixes. Make sure to use the new package. 6 | 7 | ## Contents 8 | 9 | - [Install](#install) 10 | - [Use](#use) 11 | - [API](#api) 12 | - [`new S3Store(options)`](#new-s3storeoptions) 13 | - [Extensions](#extensions) 14 | - [Examples](#examples) 15 | - [Example: using `credentials` to fetch credentials inside an AWS container](#example-using-credentials-to-fetch-credentials-inside-an-aws-container) 16 | - [Example: use with Cloudflare R2](#example-use-with-cloudflare-r2) 17 | - [Example: use with Scaleway Object Storage](#example-use-with-scaleway-object-storage) 18 | - [Types](#types) 19 | - [Compatibility](#compatibility) 20 | - [Contribute](#contribute) 21 | - [License](#license) 22 | 23 | ## Install 24 | 25 | In Node.js >=20.19.0, install with npm: 26 | 27 | ```bash 28 | npm install @tus/s3-store 29 | ``` 30 | 31 | ## Use 32 | 33 | ```js 34 | import { Server } from "@tus/server"; 35 | import { S3Store } from "@tus/s3-store"; 36 | 37 | const s3Store = new S3Store({ 38 | partSize: 8 * 1024 * 1024, // Each uploaded part will have ~8MiB, 39 | s3ClientConfig: { 40 | bucket: process.env.AWS_BUCKET, 41 | region: process.env.AWS_REGION, 42 | credentials: { 43 | accessKeyId: process.env.AWS_ACCESS_KEY_ID, 44 | secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY, 45 | }, 46 | }, 47 | }); 48 | const server = new Server({ path: "/files", datastore: s3Store }); 49 | // ... 50 | ``` 51 | 52 | ## API 53 | 54 | This package exports `S3Store`. There is no default export. 55 | 56 | ### `new S3Store(options)` 57 | 58 | Creates a new AWS S3 store with options. 59 | 60 | #### `options.bucket` 61 | 62 | The bucket name. 63 | 64 | #### `options.partSize` 65 | 66 | The **preferred** part size for parts sent to S3. Cannot be lower than 5MiB or more than 67 | 5GiB. The server calculates the optimal part size, which takes this size into account, but 68 | may increase it to not exceed the S3 10K parts limit. 69 | 70 | #### `options.minPartSize` 71 | 72 | The minimal part size. 73 | Can be used to ensure that all non-trailing parts are exactly the same size 74 | by setting `partSize` and `minPartSize` to the same value. 75 | Cannot be lower than 5MiB or more than 5GiB. 76 | 77 | The server calculates the optimal part size, which takes this size into account, but 78 | may increase it to not exceed the `options.maxMultipartParts` parts limit. 79 | 80 | #### `options.maxMultipartParts` 81 | 82 | The maximum number of parts allowed in a multipart upload. Defaults to 10,000. 83 | Some S3 providers have non-standard restrictions on the number of parts in a multipart 84 | upload. For example, AWS S3 has a limit of 10,000 parts, but some S3-compatible providers 85 | have a limit of 1,000 parts. 86 | 87 | #### `options.s3ClientConfig` 88 | 89 | Options to pass to the AWS S3 SDK. Check out the 90 | [`S3ClientConfig`](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/clients/client-s3/interfaces/s3clientconfig.html) 91 | docs for the supported options. You need to at least set the `region`, `bucket` name, and 92 | your preferred method of authentication. 
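For S3-compatible services other than AWS, the same object is also where fields such as `endpoint` and `forcePathStyle` go. A minimal sketch — the endpoint, bucket name, and environment variable names below are placeholders, not required values:

```js
import { S3Store } from "@tus/s3-store";

const s3Store = new S3Store({
  s3ClientConfig: {
    bucket: "my-bucket", // placeholder bucket name
    region: "us-east-1",
    endpoint: "https://s3.example.com", // placeholder S3-compatible endpoint
    forcePathStyle: true, // many S3-compatible providers expect path-style URLs
    credentials: {
      accessKeyId: process.env.S3_ACCESS_KEY_ID,
      secretAccessKey: process.env.S3_SECRET_ACCESS_KEY,
    },
  },
});
```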
93 | 94 | #### `options.expirationPeriodInMilliseconds` 95 | 96 | Enables the expiration extension and sets the expiration period of an upload URL in 97 | milliseconds. Once the expiration period has passed, the upload URL will return a 410 Gone 98 | status code. 99 | 100 | #### `options.useTags` 101 | 102 | Some S3 providers don't support tagging objects. If you are using certain features like 103 | the expiration extension and your provider doesn't support tagging, you can set this 104 | option to `false` to disable tagging. 105 | 106 | #### `options.cache` 107 | 108 | An optional cache implementation ([`KvStore`][]). 109 | 110 | By default, an in-memory cache (`MemoryKvStore`) is used. When running multiple instances of the 111 | server, you need to provide a cache implementation that is shared between all instances, 112 | like the `RedisKvStore`. 113 | 114 | See the exported [KV stores][kvstores] from `@tus/server` for more information. 115 | 116 | #### `options.maxConcurrentPartUploads` 117 | 118 | This setting determines the maximum number of simultaneous part uploads to an S3 storage 119 | service. The default value is 60. This default is chosen in conjunction with the typical 120 | `partSize` of 8MiB, aiming for an effective transfer rate of 3.84Gbit/s. 121 | 122 | **Considerations:** The ideal value for `maxConcurrentPartUploads` varies based on your 123 | `partSize` and the upload bandwidth to your S3 bucket. A larger `partSize` means less 124 | overall upload bandwidth available for other concurrent uploads. 125 | 126 | - **Lowering the Value**: Reducing `maxConcurrentPartUploads` decreases the number of 127 | simultaneous upload requests to S3. This can be beneficial for conserving memory, CPU, 128 | and disk I/O resources, especially in environments with limited system resources or 129 | where the upload speed is low or the part size is large. 130 | 131 | - **Increasing the Value**: A higher value potentially enhances the data transfer rate to 132 | the server, but at the cost of increased resource usage (memory, CPU, and disk I/O). 133 | This can be advantageous when the goal is to maximize throughput, and sufficient system 134 | resources are available. 135 | 136 | - **Bandwidth Considerations**: It's important to note that if your upload bandwidth to S3 137 | is a limiting factor, increasing `maxConcurrentPartUploads` won’t lead to higher 138 | throughput. Instead, it will result in additional resource consumption without 139 | proportional gains in transfer speed. 140 | 141 | ## Extensions 142 | 143 | The tus protocol supports optional [extensions][]. Below is a table of the supported 144 | extensions in `@tus/s3-store`. 145 | 146 | | Extension | `@tus/s3-store` | 147 | | ------------------------ | --------------- | 148 | | [Creation][] | ✅ | 149 | | [Creation With Upload][] | ✅ | 150 | | [Expiration][] | ✅ | 151 | | [Checksum][] | ❌ | 152 | | [Termination][] | ✅ | 153 | | [Concatenation][] | ❌ | 154 | 155 | ### Termination 156 | 157 | After a multipart upload is aborted, no additional parts can be uploaded using that upload 158 | ID. The storage consumed by any previously uploaded parts will be freed. However, if any 159 | part uploads are currently in progress, those part uploads might or might not succeed. As 160 | a result, it might be necessary to set an 161 | [S3 Lifecycle configuration](https://docs.aws.amazon.com/AmazonS3/latest/userguide/mpu-abort-incomplete-mpu-lifecycle-config.html) 162 | to abort incomplete multipart uploads. 
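For reference, a lifecycle rule along the following lines aborts incomplete multipart uploads automatically. This is an illustrative sketch — the rule ID and the seven-day window are arbitrary choices, not values required by `@tus/s3-store`:

```json
{
  "Rules": [
    {
      "ID": "abort-incomplete-multipart-uploads",
      "Status": "Enabled",
      "Filter": {},
      "AbortIncompleteMultipartUpload": {
        "DaysAfterInitiation": 7
      }
    }
  ]
}
```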
163 | 164 | ### Expiration 165 | 166 | Unlike other stores, the expiration extension on the S3 store does not need to call 167 | [`server.cleanUpExpiredUploads()`][cleanExpiredUploads]. The store creates a 168 | `Tus-Completed` tag for all objects, including `.part` and `.info` files, to indicate 169 | whether an upload is finished. This means you could set up a [lifecycle][lifecyle] policy to 170 | automatically clean them up without a CRON job. 171 | 172 | ```json 173 | { 174 | "Rules": [ 175 | { 176 | "Filter": { 177 | "Tag": { 178 | "Key": "Tus-Completed", 179 | "Value": "false" 180 | } 181 | }, 182 | "Expiration": { 183 | "Days": 2 184 | } 185 | } 186 | ] 187 | } 188 | ``` 189 | 190 | If you want more granularity, it is still possible to configure a CRON job to call 191 | [`server.cleanUpExpiredUploads()`][cleanExpiredUploads] yourself. 192 | 193 | ## Examples 194 | 195 | ### Example: using `credentials` to fetch credentials inside an AWS container 196 | 197 | The `credentials` config is directly passed into the AWS SDK so you can refer to the AWS 198 | docs for the supported values of 199 | [credentials](https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/Credentials.html#constructor-property). 200 | 201 | ```js 202 | import aws from "aws-sdk"; 203 | import { Server } from "@tus/server"; 204 | import { S3Store } from "@tus/s3-store"; 205 | 206 | const s3Store = new S3Store({ 207 | partSize: 8 * 1024 * 1024, 208 | s3ClientConfig: { 209 | bucket: process.env.AWS_BUCKET, 210 | region: process.env.AWS_REGION, 211 | credentials: new aws.ECSCredentials({ 212 | httpOptions: { timeout: 5000 }, 213 | maxRetries: 10, 214 | }), 215 | }, 216 | }); 217 | const server = new Server({ path: "/files", datastore: s3Store }); 218 | // ... 219 | ``` 220 | 221 | ### Example: use with Cloudflare R2 222 | 223 | `@tus/s3-store` can be used with all S3-compatible storage solutions, including Cloudflare R2. 224 | However, R2 requires that all non-trailing parts are _exactly_ the same size. 225 | This can be achieved by setting `partSize` and `minPartSize` to the same value. 226 | 227 | ```ts 228 | // ... 229 | 230 | const s3Store = new S3Store({ 231 | partSize: 8 * 1024 * 1024, 232 | minPartSize: 8 * 1024 * 1024, 233 | // ... 234 | }); 235 | ``` 236 | 237 | ### Example: use with Scaleway Object Storage 238 | 239 | `@tus/s3-store` can be used with Scaleway Object Storage but with some additional configuration. Scaleway Object Storage has a limit of 1,000 parts in a multipart upload. 240 | 241 | ```ts 242 | const s3Store = new S3Store({ 243 | maxMultipartParts: 1000, 244 | // ... 245 | }); 246 | ``` 247 | 248 | ## Types 249 | 250 | This package is fully typed with TypeScript. 251 | 252 | ## Compatibility 253 | 254 | This package requires Node.js >=20.19.0. 255 | 256 | ## Contribute 257 | 258 | See 259 | [`contributing.md`](https://github.com/tus/tus-node-server/blob/main/.github/contributing.md). 
260 | 261 | ## License 262 | 263 | [MIT](https://github.com/tus/tus-node-server/blob/master/license) © 264 | [tus](https://github.com/tus) 265 | 266 | [extensions]: https://tus.io/protocols/resumable-upload.html#protocol-extensions 267 | [creation]: https://tus.io/protocols/resumable-upload.html#creation 268 | [creation with upload]: https://tus.io/protocols/resumable-upload.html#creation-with-upload 269 | [expiration]: https://tus.io/protocols/resumable-upload.html#expiration 270 | [checksum]: https://tus.io/protocols/resumable-upload.html#checksum 271 | [termination]: https://tus.io/protocols/resumable-upload.html#termination 272 | [concatenation]: https://tus.io/protocols/resumable-upload.html#concatenation 273 | [cleanExpiredUploads]: https://github.com/tus/tus-node-server/tree/main/packages/server#servercleanupexpireduploads 274 | [lifecyle]: https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-lifecycle-mgmt.html 275 | [kvstores]: https://github.com/tus/tus-node-server/tree/main/packages/server#kvstores 276 | [`KvStore`]: https://github.com/tus/tus-node-server/blob/main/packages/utils/src/kvstores/Types.ts 277 | -------------------------------------------------------------------------------- /packages/s3-store/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://json.schemastore.org/package.json", 3 | "name": "@tus/s3-store", 4 | "version": "2.0.0", 5 | "description": "AWS S3 store for @tus/server", 6 | "main": "./dist/index.js", 7 | "exports": "./dist/index.js", 8 | "type": "module", 9 | "homepage": "https://github.com/tus/tus-node-server#readme", 10 | "bugs": "https://github.com/tus/tus-node-server/issues", 11 | "repository": "tus/tus-node-server", 12 | "license": "MIT", 13 | "files": [ 14 | "dist", 15 | "src", 16 | "!test*" 17 | ], 18 | "scripts": { 19 | "build": "tsc --build", 20 | "pretest": "tsc --build", 21 | "test": "mocha './dist/test/*.js' --exit --timeout 40000" 22 | }, 23 | "dependencies": { 24 | "@aws-sdk/client-s3": "^3.758.0", 25 | "@shopify/semaphore": "^3.1.0", 26 | "@tus/utils": "^0.6.0", 27 | "debug": "^4.3.4", 28 | "multistream": "^4.1.0" 29 | }, 30 | "devDependencies": { 31 | "@types/debug": "^4.1.12", 32 | "@types/mocha": "^10.0.6", 33 | "@types/multistream": "^4.1.3", 34 | "@types/node": "^22.13.7", 35 | "mocha": "^11.0.1", 36 | "should": "^13.2.3" 37 | }, 38 | "engines": { 39 | "node": ">=20.19.0" 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /packages/s3-store/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://json.schemastore.org/tsconfig.json", 3 | "extends": "../../tsconfig.base.json", 4 | "references": [{ "path": "../utils/tsconfig.json" }], 5 | "include": ["src"], 6 | "compilerOptions": { 7 | "rootDir": "src", 8 | "outDir": "dist", 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /packages/server/CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # @tus/server 2 | 3 | ## 2.2.0 4 | 5 | ### Minor Changes 6 | 7 | - 1768b06: Introduce `exposedHeaders` option for custom Access-Control-Expose-Headers 8 | 9 | ## 2.1.0 10 | 11 | ### Minor Changes 12 | 13 | - a128e97: Use srvx for convert Node.js req/res to Request/Response. This also comes with a performance boost. 
When using `server.handle()` in a Node.js environment, you can now access the original req/res via `req.node.req`/`req.node.res`. 14 | 15 | ### Patch Changes 16 | 17 | - aa1e221: Fix unhandled promise rejection when converting a web stream to a Node.js stream when a client disconnects 18 | - b163a62: Correctly return upload-offset header as a string instead of number 19 | 20 | ## 2.0.0 21 | 22 | ### Major Changes 23 | 24 | - 0f063d9: Change required Node.js version from 16 to 20.19.0 25 | - 51419da: - Introduce `handleWeb(req: Request)` to integrate into meta frameworks 26 | (such as Next.js, Nuxt, React Router, SvelteKit, etc) and other Node.js compatible runtime environments. 27 | - All events and hooks now emit `Request`/`Response` instead of `http.IncomingMessage`/`http.ServerResponse`. 28 | - The function versions of the options `maxSize`, `generateUrl`, `getFileIdFromRequest`, `namingFunction`, `locker` 29 | also now use `Request`/`Response`. 30 | - Your `onUploadCreate` and `onUploadFinish` hooks no longer need to return the response object. 31 | - If you want to change the metadata in `onUploadCreate` you can return `Promise<{ metadata: Record }>`. 32 | This will internally merge the existing metadata with the new metadata. 33 | - `onUploadFinish` can return `Promise<{ status_code?: number headers?: Record body?: string }>` 34 | - f190875: - `POST_RECEIVE_V2` has been renamed to `POST_RECEIVE`. The deprecated version of `POST_RECEIVE` has been removed. 35 | - 7a5a60d: Make this package ESM-only instead of CommonJS. Since Node.js >= 20.19.0 you can `require(esm)` so you can consume this package even if you don't ESM yourself yet. 36 | 37 | ### Patch Changes 38 | 39 | - Updated dependencies [0f063d9] 40 | - Updated dependencies [f190875] 41 | - Updated dependencies [7a5a60d] 42 | - @tus/utils@0.6.0 43 | 44 | ## 1.10.2 45 | 46 | ### Patch Changes 47 | 48 | - 06954ac: Don't use AbortSignal.any to fix memory leak in older Node.js versions and to not break version support. 49 | 50 | ## 1.10.1 51 | 52 | ### Patch Changes 53 | 54 | - 42c6267: Consistent cancellation across streams and locks, fixing lock on file never being unlocked when the request ends prematurely. 55 | - Updated dependencies [42c6267] 56 | - @tus/utils@0.5.1 57 | 58 | ## 1.10.0 59 | 60 | ### Minor Changes 61 | 62 | - 8f19a53: Add ioredis as optional dependency 63 | 64 | ### Patch Changes 65 | 66 | - f465a0f: Send Tus-Version header in OPTIONS 67 | - Updated dependencies [8f19a53] 68 | - @tus/utils@0.5.0 69 | 70 | ## 1.9.0 71 | 72 | ### Minor Changes 73 | 74 | - a3c3a99: add Content-Type and Content-Disposition headers on GetHandler.send response 75 | 76 | ## 1.8.0 77 | 78 | ### Minor Changes 79 | 80 | - de28c6e: Publish source maps and declaration maps 81 | - ca03351: - Add `allowedCredentials` option for the Access-Control-Allow-Credentials header 82 | - Add `allowedOrigins` option for setting domains in Access-Control-Allow-Origin 83 | 84 | ### Patch Changes 85 | 86 | - Updated dependencies [de28c6e] 87 | - @tus/utils@0.4.0 88 | 89 | ## 1.7.0 90 | 91 | ### Minor Changes 92 | 93 | - ea2bf07: Add `lastPath` argument to `getFileIdFromRequest` to simplify a common use 94 | case. 
95 | 96 | ### Patch Changes 97 | 98 | - Updated dependencies [117e1b2] 99 | - @tus/utils@0.3.0 100 | 101 | ## 1.6.0 102 | 103 | ### Minor Changes 104 | 105 | - 60698da: Introduce POST_RECEIVE_V2 event, which correctly fires during the stream write 106 | rather than after it is finished 107 | - 0f90980: Allow onUploadFinish hook to override response data 108 | 109 | ### Patch Changes 110 | 111 | - Updated dependencies [60698da] 112 | - @tus/utils@0.2.0 113 | 114 | ## 1.5.0 115 | 116 | ### Minor Changes 117 | 118 | - 9967900: Add `lockDrainTimeout` option 119 | - 9967900: Allow onUploadCreate hook to override metadata 120 | 121 | ## 1.4.2 122 | 123 | ### Patch Changes 124 | 125 | - 54b7321: Document `locker` option and fix dead links in README 126 | 127 | ## 1.4.1 128 | 129 | ### Patch Changes 130 | 131 | - 29a3644: Fix incorrectly published package 132 | 133 | ## 1.4.0 134 | 135 | ### Minor Changes 136 | 137 | - 1a4339a: Support async `namingFunction` 138 | - a896d25: Add new @tus/utils dependency to replace @tus/server peer dependency 139 | 140 | ### Patch Changes 141 | 142 | - Updated dependencies [a896d25] 143 | - @tus/utils@0.1.0 144 | -------------------------------------------------------------------------------- /packages/server/LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2015 tus - Resumable File Uploads 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | 23 | -------------------------------------------------------------------------------- /packages/server/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://json.schemastore.org/package.json", 3 | "name": "@tus/server", 4 | "version": "2.2.0", 5 | "description": "Tus resumable upload protocol in Node.js", 6 | "main": "./dist/index.js", 7 | "exports": "./dist/index.js", 8 | "type": "module", 9 | "homepage": "https://github.com/tus/tus-node-server#readme", 10 | "bugs": "https://github.com/tus/tus-node-server/issues", 11 | "repository": "tus/tus-node-server", 12 | "license": "MIT", 13 | "files": [ 14 | "dist", 15 | "src", 16 | "!test*" 17 | ], 18 | "scripts": { 19 | "build": "tsc --build", 20 | "pretest": "tsc --build", 21 | "test": "mocha './dist/test/*.js' --exit --timeout 30000" 22 | }, 23 | "dependencies": { 24 | "@tus/utils": "^0.6.0", 25 | "debug": "^4.3.4", 26 | "lodash.throttle": "^4.1.1", 27 | "set-cookie-parser": "^2.7.1", 28 | "srvx": "^0.2.8" 29 | }, 30 | "devDependencies": { 31 | "@types/debug": "^4.1.12", 32 | "@types/lodash.throttle": "^4.1.9", 33 | "@types/mocha": "^10.0.6", 34 | "@types/node": "^22.13.7", 35 | "@types/set-cookie-parser": "^2.4.10", 36 | "@types/sinon": "^17.0.3", 37 | "@types/supertest": "^2.0.16", 38 | "mocha": "^11.0.1", 39 | "node-mocks-http": "^1.16.1", 40 | "should": "^13.2.3", 41 | "sinon": "^20.0.0", 42 | "supertest": "^6.3.4" 43 | }, 44 | "optionalDependencies": { 45 | "@redis/client": "^1.6.0", 46 | "ioredis": "^5.4.1" 47 | }, 48 | "engines": { 49 | "node": ">=20.19.0" 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /packages/server/src/handlers/BaseHandler.ts: -------------------------------------------------------------------------------- 1 | import EventEmitter from 'node:events' 2 | 3 | import type {ServerOptions} from '../types.js' 4 | import type {DataStore, CancellationContext} from '@tus/utils' 5 | import {ERRORS, type Upload, StreamLimiter, EVENTS} from '@tus/utils' 6 | import throttle from 'lodash.throttle' 7 | import stream from 'node:stream/promises' 8 | import {PassThrough, Readable} from 'node:stream' 9 | 10 | const reExtractFileID = /([^/]+)\/?$/ 11 | const reForwardedHost = /host="?([^";]+)/ 12 | const reForwardedProto = /proto=(https?)/ 13 | 14 | export class BaseHandler extends EventEmitter { 15 | options: ServerOptions 16 | store: DataStore 17 | 18 | constructor(store: DataStore, options: ServerOptions) { 19 | super() 20 | if (!store) { 21 | throw new Error('Store must be defined') 22 | } 23 | 24 | this.store = store 25 | this.options = options 26 | } 27 | 28 | write(status: number, headers = {}, body?: string) { 29 | const res = new Response(status === 204 ? null : body, {headers, status}) 30 | if (status !== 204 && body) { 31 | res.headers.set('Content-Length', Buffer.byteLength(body, 'utf8').toString()) 32 | } 33 | return res 34 | } 35 | 36 | generateUrl(req: Request, id: string) { 37 | const path = this.options.path === '/' ? 
'' : this.options.path 38 | 39 | if (this.options.generateUrl) { 40 | // user-defined generateUrl function 41 | const {proto, host} = BaseHandler.extractHostAndProto( 42 | req.headers, 43 | this.options.respectForwardedHeaders 44 | ) 45 | 46 | return this.options.generateUrl(req, { 47 | proto, 48 | host, 49 | path: path, 50 | id, 51 | }) 52 | } 53 | 54 | // Default implementation 55 | if (this.options.relativeLocation) { 56 | return `${path}/${id}` 57 | } 58 | 59 | const {proto, host} = BaseHandler.extractHostAndProto( 60 | req.headers, 61 | this.options.respectForwardedHeaders 62 | ) 63 | 64 | return `${proto}://${host}${path}/${id}` 65 | } 66 | 67 | getFileIdFromRequest(req: Request) { 68 | const match = reExtractFileID.exec(req.url as string) 69 | 70 | if (this.options.getFileIdFromRequest) { 71 | const lastPath = match ? decodeURIComponent(match[1]) : undefined 72 | return this.options.getFileIdFromRequest(req, lastPath) 73 | } 74 | 75 | if (!match || this.options.path.includes(match[1])) { 76 | return 77 | } 78 | 79 | return decodeURIComponent(match[1]) 80 | } 81 | 82 | static extractHostAndProto(headers: Headers, respectForwardedHeaders?: boolean) { 83 | let proto: string | undefined 84 | let host: string | undefined 85 | 86 | if (respectForwardedHeaders) { 87 | const forwarded = headers.get('forwarded') 88 | if (forwarded) { 89 | host ??= reForwardedHost.exec(forwarded)?.[1] 90 | proto ??= reForwardedProto.exec(forwarded)?.[1] 91 | } 92 | 93 | const forwardHost = headers.get('x-forwarded-host') 94 | const forwardProto = headers.get('x-forwarded-proto') 95 | 96 | // @ts-expect-error we can pass undefined 97 | if (['http', 'https'].includes(forwardProto)) { 98 | proto ??= forwardProto as string 99 | } 100 | 101 | host ??= forwardHost as string 102 | } 103 | 104 | host ??= headers.get('host') as string 105 | proto ??= 'http' 106 | 107 | return {host, proto} 108 | } 109 | 110 | protected async getLocker(req: Request) { 111 | if (typeof this.options.locker === 'function') { 112 | return this.options.locker(req) 113 | } 114 | return this.options.locker 115 | } 116 | 117 | protected async acquireLock(req: Request, id: string, context: CancellationContext) { 118 | const locker = await this.getLocker(req) 119 | 120 | const lock = locker.newLock(id) 121 | 122 | await lock.lock(context.signal, () => { 123 | context.cancel() 124 | }) 125 | 126 | return lock 127 | } 128 | 129 | protected writeToStore( 130 | webStream: ReadableStream | null, 131 | upload: Upload, 132 | maxFileSize: number, 133 | context: CancellationContext 134 | ) { 135 | // biome-ignore lint/suspicious/noAsyncPromiseExecutor: 136 | return new Promise(async (resolve, reject) => { 137 | // Abort early if the operation has been cancelled. 138 | if (context.signal.aborted) { 139 | reject(ERRORS.ABORTED) 140 | return 141 | } 142 | 143 | // Create a PassThrough stream as a proxy to manage the request stream. 144 | // This allows for aborting the write process without affecting the incoming request stream. 145 | const proxy = new PassThrough() 146 | const nodeStream = webStream ? Readable.fromWeb(webStream) : Readable.from([]) 147 | 148 | // Ignore errors on the data stream to prevent crashes from client disconnections 149 | // We handle errors on the proxy stream instead. 
150 | nodeStream.on('error', (err) => { 151 | /* do nothing */ 152 | }) 153 | 154 | // gracefully terminate the proxy stream when the request is aborted 155 | const onAbort = () => { 156 | nodeStream.unpipe(proxy) 157 | 158 | if (!proxy.closed) { 159 | proxy.end() 160 | } 161 | } 162 | context.signal.addEventListener('abort', onAbort, {once: true}) 163 | 164 | proxy.on('error', (err) => { 165 | nodeStream.unpipe(proxy) 166 | reject(err.name === 'AbortError' ? ERRORS.ABORTED : err) 167 | }) 168 | 169 | const postReceive = throttle( 170 | (offset: number) => { 171 | this.emit(EVENTS.POST_RECEIVE, nodeStream, {...upload, offset}) 172 | }, 173 | this.options.postReceiveInterval, 174 | {leading: false} 175 | ) 176 | 177 | let tempOffset = upload.offset 178 | proxy.on('data', (chunk: Buffer) => { 179 | tempOffset += chunk.byteLength 180 | postReceive(tempOffset) 181 | }) 182 | 183 | // Pipe the request stream through the proxy. We use the proxy instead of the request stream directly 184 | // to ensure that errors in the pipeline do not cause the request stream to be destroyed, 185 | // which would result in a socket hangup error for the client. 186 | stream 187 | .pipeline( 188 | nodeStream.pipe(proxy), 189 | new StreamLimiter(maxFileSize), 190 | async (stream) => { 191 | return this.store.write(stream as StreamLimiter, upload.id, upload.offset) 192 | } 193 | ) 194 | .then(resolve) 195 | .catch(reject) 196 | .finally(() => { 197 | context.signal.removeEventListener('abort', onAbort) 198 | }) 199 | }) 200 | } 201 | 202 | getConfiguredMaxSize(req: Request, id: string | null) { 203 | if (typeof this.options.maxSize === 'function') { 204 | return this.options.maxSize(req, id) 205 | } 206 | return this.options.maxSize ?? 0 207 | } 208 | 209 | /** 210 | * Calculates the maximum allowed size for the body of an upload request. 211 | * This function considers both the server's configured maximum size and 212 | * the specifics of the upload, such as whether the size is deferred or fixed. 213 | */ 214 | async calculateMaxBodySize(req: Request, file: Upload, configuredMaxSize?: number) { 215 | // Use the server-configured maximum size if it's not explicitly provided. 216 | configuredMaxSize ??= await this.getConfiguredMaxSize(req, file.id) 217 | 218 | // Parse the Content-Length header from the request (default to 0 if not set). 219 | const length = Number.parseInt(req.headers.get('content-length') || '0', 10) 220 | const offset = file.offset 221 | 222 | const hasContentLengthSet = req.headers.get('content-length') !== null 223 | const hasConfiguredMaxSizeSet = configuredMaxSize > 0 224 | 225 | if (file.sizeIsDeferred) { 226 | // For deferred size uploads, if it's not a chunked transfer, check against the configured maximum size. 227 | if ( 228 | hasContentLengthSet && 229 | hasConfiguredMaxSizeSet && 230 | offset + length > configuredMaxSize 231 | ) { 232 | throw ERRORS.ERR_SIZE_EXCEEDED 233 | } 234 | 235 | if (hasConfiguredMaxSizeSet) { 236 | return configuredMaxSize - offset 237 | } 238 | return Number.MAX_SAFE_INTEGER 239 | } 240 | 241 | // Check if the upload fits into the file's size when the size is not deferred. 
242 | if (offset + length > (file.size || 0)) { 243 | throw ERRORS.ERR_SIZE_EXCEEDED 244 | } 245 | 246 | if (hasContentLengthSet) { 247 | return length 248 | } 249 | 250 | return (file.size || 0) - offset 251 | } 252 | } 253 | -------------------------------------------------------------------------------- /packages/server/src/handlers/DeleteHandler.ts: -------------------------------------------------------------------------------- 1 | import {BaseHandler} from './BaseHandler.js' 2 | import {ERRORS, EVENTS, type CancellationContext} from '@tus/utils' 3 | 4 | export class DeleteHandler extends BaseHandler { 5 | async send(req: Request, context: CancellationContext, headers = new Headers()) { 6 | const id = this.getFileIdFromRequest(req) 7 | if (!id) { 8 | throw ERRORS.FILE_NOT_FOUND 9 | } 10 | 11 | if (this.options.onIncomingRequest) { 12 | await this.options.onIncomingRequest(req, id) 13 | } 14 | 15 | const lock = await this.acquireLock(req, id, context) 16 | try { 17 | if (this.options.disableTerminationForFinishedUploads) { 18 | const upload = await this.store.getUpload(id) 19 | if (upload.offset === upload.size) { 20 | throw ERRORS.INVALID_TERMINATION 21 | } 22 | } 23 | 24 | await this.store.remove(id) 25 | } finally { 26 | await lock.unlock() 27 | } 28 | const writtenRes = this.write(204, headers) 29 | this.emit(EVENTS.POST_TERMINATE, req, writtenRes, id) 30 | return writtenRes 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /packages/server/src/handlers/GetHandler.ts: -------------------------------------------------------------------------------- 1 | import {BaseHandler} from './BaseHandler.js' 2 | import {type CancellationContext, ERRORS, type Upload} from '@tus/utils' 3 | 4 | import type {RouteHandler} from '../types.js' 5 | 6 | export class GetHandler extends BaseHandler { 7 | paths: Map = new Map() 8 | 9 | /** 10 | * reMimeType is a RegExp for check mime-type form compliance with RFC1341 11 | * for support mime-type and extra parameters, for example: 12 | * 13 | * ``` 14 | * text/plain; charset=utf-8 15 | * ``` 16 | * 17 | * See: https://datatracker.ietf.org/doc/html/rfc1341 (Page 6) 18 | */ 19 | reMimeType = 20 | // biome-ignore lint/suspicious/noControlCharactersInRegex: it's fine 21 | /^(?:application|audio|example|font|haptics|image|message|model|multipart|text|video|x-(?:[0-9A-Za-z!#$%&'*+.^_`|~-]+))\/([0-9A-Za-z!#$%&'*+.^_`|~-]+)((?:[ ]*;[ ]*[0-9A-Za-z!#$%&'*+.^_`|~-]+=(?:[0-9A-Za-z!#$%&'*+.^_`|~-]+|"(?:[^"\\]|\.)*"))*)$/ 22 | 23 | /** 24 | * mimeInlineBrowserWhitelist is a set containing MIME types which should be 25 | * allowed to be rendered by browser inline, instead of being forced to be 26 | * downloaded. For example, HTML or SVG files are not allowed, since they may 27 | * contain malicious JavaScript. In a similar fashion PDF is not on this list 28 | * as their parsers commonly contain vulnerabilities which can be exploited. 29 | */ 30 | mimeInlineBrowserWhitelist = new Set([ 31 | 'text/plain', 32 | 33 | 'image/png', 34 | 'image/jpeg', 35 | 'image/gif', 36 | 'image/bmp', 37 | 'image/webp', 38 | 39 | 'audio/wave', 40 | 'audio/wav', 41 | 'audio/x-wav', 42 | 'audio/x-pn-wav', 43 | 'audio/webm', 44 | 'audio/ogg', 45 | 46 | 'video/mp4', 47 | 'video/webm', 48 | 'video/ogg', 49 | 50 | 'application/ogg', 51 | ]) 52 | 53 | registerPath(path: string, handler: RouteHandler): void { 54 | this.paths.set(path, handler) 55 | } 56 | 57 | /** 58 | * Read data from the DataStore and send the stream. 
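* A registered path handler takes precedence; otherwise the upload must be fully written before its content is served.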
59 | */ 60 | async send( 61 | req: Request, 62 | context: CancellationContext, 63 | headers = new Headers() 64 | ): Promise { 65 | const path = new URL(req.url).pathname 66 | const handler = this.paths.get(path) 67 | 68 | if (handler) { 69 | return handler(req) 70 | } 71 | 72 | if (!('read' in this.store)) { 73 | throw ERRORS.FILE_NOT_FOUND 74 | } 75 | 76 | const id = this.getFileIdFromRequest(req) 77 | if (!id) { 78 | throw ERRORS.FILE_NOT_FOUND 79 | } 80 | 81 | if (this.options.onIncomingRequest) { 82 | await this.options.onIncomingRequest(req, id) 83 | } 84 | 85 | const stats = await this.store.getUpload(id) 86 | 87 | if (!stats || stats.offset !== stats.size) { 88 | throw ERRORS.FILE_NOT_FOUND 89 | } 90 | 91 | const {contentType, contentDisposition} = this.filterContentType(stats) 92 | 93 | const lock = await this.acquireLock(req, id, context) 94 | try { 95 | // @ts-expect-error exists if supported 96 | const fileStream = await this.store.read(id) 97 | headers.set('Content-Length', stats.offset.toString()) 98 | headers.set('Content-Type', contentType) 99 | headers.set('Content-Disposition', contentDisposition) 100 | return new Response(fileStream, {headers, status: 200}) 101 | } finally { 102 | await lock.unlock() 103 | } 104 | } 105 | 106 | /** 107 | * filterContentType returns the values for the Content-Type and 108 | * Content-Disposition headers for a given upload. These values should be used 109 | * in responses for GET requests to ensure that only non-malicious file types 110 | * are shown directly in the browser. It will extract the file name and type 111 | * from the "filename" and "filetype". 112 | * See https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Disposition 113 | */ 114 | filterContentType(stats: Upload): { 115 | contentType: string 116 | contentDisposition: string 117 | } { 118 | let contentType: string 119 | let contentDisposition: string 120 | 121 | const {filetype, filename} = stats.metadata ?? {} 122 | 123 | if (filetype && this.reMimeType.test(filetype)) { 124 | // If the filetype from metadata is well formed, we forward use this 125 | // for the Content-Type header. 
However, only whitelisted mime types 126 | // will be allowed to be shown inline in the browser 127 | contentType = filetype 128 | 129 | if (this.mimeInlineBrowserWhitelist.has(filetype)) { 130 | contentDisposition = 'inline' 131 | } else { 132 | contentDisposition = 'attachment' 133 | } 134 | } else { 135 | // If the filetype from the metadata is not well formed, we use a 136 | // default type and force the browser to download the content 137 | contentType = 'application/octet-stream' 138 | contentDisposition = 'attachment' 139 | } 140 | 141 | // Add a filename to Content-Disposition if one is available in the metadata 142 | if (filename) { 143 | contentDisposition += `; filename=${this.quote(filename)}` 144 | } 145 | 146 | return { 147 | contentType, 148 | contentDisposition, 149 | } 150 | } 151 | 152 | /** 153 | * Convert string to quoted string literals 154 | */ 155 | quote(value: string) { 156 | return `"${value.replace(/"/g, '\\"')}"` 157 | } 158 | } 159 | -------------------------------------------------------------------------------- /packages/server/src/handlers/HeadHandler.ts: -------------------------------------------------------------------------------- 1 | import {BaseHandler} from './BaseHandler.js' 2 | 3 | import {ERRORS, Metadata, type Upload, type CancellationContext} from '@tus/utils' 4 | 5 | export class HeadHandler extends BaseHandler { 6 | async send(req: Request, context: CancellationContext, headers = new Headers()) { 7 | const id = this.getFileIdFromRequest(req) 8 | if (!id) { 9 | throw ERRORS.FILE_NOT_FOUND 10 | } 11 | 12 | if (this.options.onIncomingRequest) { 13 | await this.options.onIncomingRequest(req, id) 14 | } 15 | 16 | const lock = await this.acquireLock(req, id, context) 17 | 18 | let file: Upload 19 | try { 20 | file = await this.store.getUpload(id) 21 | } finally { 22 | await lock.unlock() 23 | } 24 | 25 | // If a Client does attempt to resume an upload which has since 26 | // been removed by the Server, the Server SHOULD respond with the 27 | // with the 404 Not Found or 410 Gone status. The latter one SHOULD 28 | // be used if the Server is keeping track of expired uploads. 29 | const now = new Date() 30 | if ( 31 | this.store.hasExtension('expiration') && 32 | this.store.getExpiration() > 0 && 33 | file.creation_date && 34 | now > new Date(new Date(file.creation_date).getTime() + this.store.getExpiration()) 35 | ) { 36 | throw ERRORS.FILE_NO_LONGER_EXISTS 37 | } 38 | 39 | const res = new Response('', {status: 200, headers}) 40 | 41 | // The Server MUST prevent the client and/or proxies from 42 | // caching the response by adding the Cache-Control: no-store 43 | // header to the response. 44 | res.headers.set('Cache-Control', 'no-store') 45 | // The Server MUST always include the Upload-Offset header in 46 | // the response for a HEAD request, even if the offset is 0 47 | res.headers.set('Upload-Offset', file.offset.toString()) 48 | 49 | if (file.sizeIsDeferred) { 50 | // As long as the length of the upload is not known, the Server 51 | // MUST set Upload-Defer-Length: 1 in all responses to HEAD requests. 52 | res.headers.set('Upload-Defer-Length', '1') 53 | } else { 54 | // If the size of the upload is known, the Server MUST include 55 | // the Upload-Length header in the response. 
56 | res.headers.set('Upload-Length', (file.size as number).toString()) 57 | } 58 | 59 | if (file.metadata !== undefined) { 60 | // If an upload contains additional metadata, responses to HEAD 61 | // requests MUST include the Upload-Metadata header and its value 62 | // as specified by the Client during the creation. 63 | res.headers.set('Upload-Metadata', Metadata.stringify(file.metadata) as string) 64 | } 65 | 66 | return res 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /packages/server/src/handlers/OptionsHandler.ts: -------------------------------------------------------------------------------- 1 | import {BaseHandler} from './BaseHandler.js' 2 | import {ALLOWED_METHODS, MAX_AGE, HEADERS, type CancellationContext} from '@tus/utils' 3 | 4 | // A successful response indicated by the 204 No Content status MUST contain 5 | // the Tus-Version header. It MAY include the Tus-Extension and Tus-Max-Size headers. 6 | export class OptionsHandler extends BaseHandler { 7 | async send(req: Request, context: CancellationContext, headers = new Headers()) { 8 | const maxSize = await this.getConfiguredMaxSize(req, null) 9 | 10 | headers.set('Tus-Version', '1.0.0') 11 | if (this.store.extensions.length > 0) { 12 | headers.set('Tus-Extension', this.store.extensions.join(',')) 13 | } 14 | if (maxSize) { 15 | headers.set('Tus-Max-Size', maxSize.toString()) 16 | } 17 | 18 | const allowedHeaders = [...HEADERS, ...(this.options.allowedHeaders ?? [])] 19 | headers.set('Access-Control-Allow-Methods', ALLOWED_METHODS) 20 | headers.set('Access-Control-Allow-Headers', allowedHeaders.join(', ')) 21 | headers.set('Access-Control-Max-Age', MAX_AGE.toString()) 22 | 23 | return this.write(204, headers) 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /packages/server/src/handlers/PatchHandler.ts: -------------------------------------------------------------------------------- 1 | import debug from 'debug' 2 | import {Readable} from 'node:stream' 3 | 4 | import {BaseHandler} from './BaseHandler.js' 5 | 6 | import {ERRORS, EVENTS, type CancellationContext, type Upload} from '@tus/utils' 7 | 8 | const log = debug('tus-node-server:handlers:patch') 9 | 10 | export class PatchHandler extends BaseHandler { 11 | /** 12 | * Write data to the DataStore and return the new offset. 
13 | */ 14 | async send(req: Request, context: CancellationContext, headers = new Headers()) { 15 | try { 16 | const id = this.getFileIdFromRequest(req) 17 | if (!id) { 18 | throw ERRORS.FILE_NOT_FOUND 19 | } 20 | 21 | // The request MUST include a Upload-Offset header 22 | if (req.headers.get('upload-offset') === null) { 23 | throw ERRORS.MISSING_OFFSET 24 | } 25 | 26 | const offset = Number.parseInt(req.headers.get('upload-offset') as string, 10) 27 | 28 | // The request MUST include a Content-Type header 29 | const content_type = req.headers.get('content-type') 30 | if (content_type === null) { 31 | throw ERRORS.INVALID_CONTENT_TYPE 32 | } 33 | 34 | if (this.options.onIncomingRequest) { 35 | await this.options.onIncomingRequest(req, id) 36 | } 37 | 38 | const maxFileSize = await this.getConfiguredMaxSize(req, id) 39 | 40 | const lock = await this.acquireLock(req, id, context) 41 | 42 | let upload: Upload 43 | let newOffset: number 44 | try { 45 | upload = await this.store.getUpload(id) 46 | 47 | // If a Client does attempt to resume an upload which has since 48 | // been removed by the Server, the Server SHOULD respond with the 49 | // with the 404 Not Found or 410 Gone status. The latter one SHOULD 50 | // be used if the Server is keeping track of expired uploads. 51 | const now = Date.now() 52 | const creation = upload.creation_date 53 | ? new Date(upload.creation_date).getTime() 54 | : now 55 | const expiration = creation + this.store.getExpiration() 56 | if ( 57 | this.store.hasExtension('expiration') && 58 | this.store.getExpiration() > 0 && 59 | now > expiration 60 | ) { 61 | throw ERRORS.FILE_NO_LONGER_EXISTS 62 | } 63 | 64 | if (upload.offset !== offset) { 65 | // If the offsets do not match, the Server MUST respond with the 409 Conflict status without modifying the upload resource. 66 | log( 67 | `[PatchHandler] send: Incorrect offset - ${offset} sent but file is ${upload.offset}` 68 | ) 69 | throw ERRORS.INVALID_OFFSET 70 | } 71 | 72 | // The request MUST validate upload-length related headers 73 | const upload_length = req.headers.get('upload-length') 74 | if (upload_length !== null) { 75 | const size = Number.parseInt(upload_length, 10) 76 | // Throw error if extension is not supported 77 | if (!this.store.hasExtension('creation-defer-length')) { 78 | throw ERRORS.UNSUPPORTED_CREATION_DEFER_LENGTH_EXTENSION 79 | } 80 | 81 | // Throw error if upload-length is already set. 
82 | if (upload.size !== undefined) { 83 | throw ERRORS.INVALID_LENGTH 84 | } 85 | 86 | if (size < upload.offset) { 87 | throw ERRORS.INVALID_LENGTH 88 | } 89 | 90 | if (maxFileSize > 0 && size > maxFileSize) { 91 | throw ERRORS.ERR_MAX_SIZE_EXCEEDED 92 | } 93 | 94 | await this.store.declareUploadLength(id, size) 95 | upload.size = size 96 | } 97 | 98 | const maxBodySize = await this.calculateMaxBodySize(req, upload, maxFileSize) 99 | newOffset = await this.writeToStore(req.body, upload, maxBodySize, context) 100 | } finally { 101 | await lock.unlock() 102 | } 103 | 104 | upload.offset = newOffset 105 | 106 | //Recommended response defaults 107 | const responseData = { 108 | status: 204, 109 | headers: { 110 | ...Object.fromEntries(headers.entries()), 111 | 'Upload-Offset': newOffset.toString(), 112 | } as Record, 113 | body: '', 114 | } 115 | 116 | if (newOffset === upload.size && this.options.onUploadFinish) { 117 | try { 118 | const hookResponse = await this.options.onUploadFinish(req, upload) 119 | if (hookResponse) { 120 | const {status_code, body, headers} = hookResponse 121 | if (status_code) responseData.status = status_code 122 | if (body) responseData.body = body 123 | if (headers) 124 | responseData.headers = Object.assign(responseData.headers, headers) 125 | } 126 | } catch (error) { 127 | log(`onUploadFinish: ${error.body}`) 128 | throw error 129 | } 130 | } 131 | 132 | if ( 133 | this.store.hasExtension('expiration') && 134 | this.store.getExpiration() > 0 && 135 | upload.creation_date && 136 | (upload.size === undefined || newOffset < upload.size) 137 | ) { 138 | const creation = new Date(upload.creation_date) 139 | // Value MUST be in RFC 7231 datetime format 140 | const dateString = new Date( 141 | creation.getTime() + this.store.getExpiration() 142 | ).toUTCString() 143 | responseData.headers['Upload-Expires'] = dateString 144 | } 145 | 146 | // The Server MUST acknowledge successful PATCH requests with the 204 147 | const writtenRes = this.write( 148 | responseData.status, 149 | responseData.headers, 150 | responseData.body 151 | ) 152 | 153 | if (newOffset === upload.size) { 154 | this.emit(EVENTS.POST_FINISH, req, writtenRes, upload) 155 | } 156 | 157 | return writtenRes 158 | } catch (e) { 159 | // Only abort the context if it wasn't already aborted 160 | if (!context.signal.aborted) { 161 | context.abort() 162 | } 163 | throw e 164 | } 165 | } 166 | } 167 | -------------------------------------------------------------------------------- /packages/server/src/handlers/PostHandler.ts: -------------------------------------------------------------------------------- 1 | import debug from 'debug' 2 | import {Readable} from 'node:stream' 3 | 4 | import {BaseHandler} from './BaseHandler.js' 5 | import { 6 | Upload, 7 | Uid, 8 | Metadata, 9 | EVENTS, 10 | ERRORS, 11 | type DataStore, 12 | type CancellationContext, 13 | } from '@tus/utils' 14 | import {validateHeader} from '../validators/HeaderValidator.js' 15 | 16 | import type {ServerOptions, WithRequired} from '../types.js' 17 | 18 | const log = debug('tus-node-server:handlers:post') 19 | 20 | export class PostHandler extends BaseHandler { 21 | // Overriding the `BaseHandler` type. We always set `namingFunction` in the constructor. 
22 | declare options: WithRequired 23 | 24 | constructor(store: DataStore, options: ServerOptions) { 25 | if (options.namingFunction && typeof options.namingFunction !== 'function') { 26 | throw new Error("'namingFunction' must be a function") 27 | } 28 | 29 | if (!options.namingFunction) { 30 | options.namingFunction = Uid.rand 31 | } 32 | 33 | super(store, options) 34 | } 35 | 36 | /** 37 | * Create a file in the DataStore. 38 | */ 39 | async send(req: Request, context: CancellationContext, headers = new Headers()) { 40 | if (req.headers.get('upload-concat') && !this.store.hasExtension('concatentation')) { 41 | throw ERRORS.UNSUPPORTED_CONCATENATION_EXTENSION 42 | } 43 | 44 | const upload_length = req.headers.get('upload-length') 45 | const upload_defer_length = req.headers.get('upload-defer-length') 46 | const upload_metadata = req.headers.get('upload-metadata') 47 | 48 | if ( 49 | upload_defer_length !== null && // Throw error if extension is not supported 50 | !this.store.hasExtension('creation-defer-length') 51 | ) { 52 | throw ERRORS.UNSUPPORTED_CREATION_DEFER_LENGTH_EXTENSION 53 | } 54 | 55 | if ((upload_length === null) === (upload_defer_length === null)) { 56 | throw ERRORS.INVALID_LENGTH 57 | } 58 | 59 | let metadata: ReturnType<(typeof Metadata)['parse']> | undefined 60 | if (upload_metadata) { 61 | try { 62 | metadata = Metadata.parse(upload_metadata ?? undefined) 63 | } catch { 64 | throw ERRORS.INVALID_METADATA 65 | } 66 | } 67 | 68 | let id: string 69 | try { 70 | id = await this.options.namingFunction(req, metadata) 71 | } catch (error) { 72 | log('create: check your `namingFunction`. Error', error) 73 | throw error 74 | } 75 | 76 | const maxFileSize = await this.getConfiguredMaxSize(req, id) 77 | 78 | if ( 79 | upload_length && 80 | maxFileSize > 0 && 81 | Number.parseInt(upload_length, 10) > maxFileSize 82 | ) { 83 | throw ERRORS.ERR_MAX_SIZE_EXCEEDED 84 | } 85 | 86 | if (this.options.onIncomingRequest) { 87 | await this.options.onIncomingRequest(req, id) 88 | } 89 | 90 | const upload = new Upload({ 91 | id, 92 | size: upload_length ? 
Number.parseInt(upload_length, 10) : undefined, 93 | offset: 0, 94 | metadata, 95 | }) 96 | 97 | if (this.options.onUploadCreate) { 98 | try { 99 | const patch = await this.options.onUploadCreate(req, upload) 100 | if (patch.metadata) { 101 | upload.metadata = patch.metadata 102 | } 103 | } catch (error) { 104 | log(`onUploadCreate error: ${error.body}`) 105 | throw error 106 | } 107 | } 108 | 109 | const lock = await this.acquireLock(req, id, context) 110 | 111 | let isFinal: boolean 112 | let url: string 113 | 114 | //Recommended response defaults 115 | const responseData = { 116 | status: 201, 117 | headers: Object.fromEntries(headers.entries()), 118 | body: '', 119 | } 120 | 121 | try { 122 | await this.store.create(upload) 123 | url = this.generateUrl(req, upload.id) 124 | 125 | this.emit(EVENTS.POST_CREATE, req, upload, url) 126 | 127 | isFinal = upload.size === 0 && !upload.sizeIsDeferred 128 | 129 | // The request MIGHT include a Content-Type header when using creation-with-upload extension 130 | if (validateHeader('content-type', req.headers.get('content-type'))) { 131 | const bodyMaxSize = await this.calculateMaxBodySize(req, upload, maxFileSize) 132 | const newOffset = await this.writeToStore(req.body, upload, bodyMaxSize, context) 133 | 134 | responseData.headers['Upload-Offset'] = newOffset.toString() 135 | isFinal = newOffset === Number.parseInt(upload_length as string, 10) 136 | upload.offset = newOffset 137 | } 138 | } catch (e) { 139 | context.abort() 140 | throw e 141 | } finally { 142 | await lock.unlock() 143 | } 144 | 145 | if (isFinal && this.options.onUploadFinish) { 146 | try { 147 | const patch = await this.options.onUploadFinish(req, upload) 148 | if (patch.status_code) responseData.status = patch.status_code 149 | if (patch.body) responseData.body = patch.body 150 | if (patch.headers) 151 | responseData.headers = Object.assign(patch.headers, responseData.headers) 152 | } catch (error) { 153 | log(`onUploadFinish: ${error.body}`) 154 | throw error 155 | } 156 | } 157 | 158 | // The Upload-Expires response header indicates the time after which the unfinished upload expires. 
159 | // If expiration is known at creation time, Upload-Expires header MUST be included in the response 160 | if ( 161 | this.store.hasExtension('expiration') && 162 | this.store.getExpiration() > 0 && 163 | upload.creation_date 164 | ) { 165 | const created = await this.store.getUpload(upload.id) 166 | 167 | if (created.offset !== Number.parseInt(upload_length as string, 10)) { 168 | const creation = new Date(upload.creation_date) 169 | // Value MUST be in RFC 7231 datetime format 170 | responseData.headers['Upload-Expires'] = new Date( 171 | creation.getTime() + this.store.getExpiration() 172 | ).toUTCString() 173 | } 174 | } 175 | 176 | //Only append Location header if its valid for the final http status (201 or 3xx) 177 | if ( 178 | responseData.status === 201 || 179 | (responseData.status >= 300 && responseData.status < 400) 180 | ) { 181 | responseData.headers.Location = url 182 | } 183 | 184 | const writtenRes = this.write( 185 | responseData.status, 186 | responseData.headers, 187 | responseData.body 188 | ) 189 | 190 | if (isFinal) { 191 | this.emit(EVENTS.POST_FINISH, req, writtenRes, upload) 192 | } 193 | 194 | return writtenRes 195 | } 196 | } 197 | -------------------------------------------------------------------------------- /packages/server/src/index.ts: -------------------------------------------------------------------------------- 1 | export {Server} from './server.js' 2 | export * from './types.js' 3 | export * from './lockers/index.js' 4 | export * from '@tus/utils' 5 | -------------------------------------------------------------------------------- /packages/server/src/lockers/MemoryLocker.ts: -------------------------------------------------------------------------------- 1 | import {ERRORS, type Lock, type Locker, type RequestRelease} from '@tus/utils' 2 | 3 | /** 4 | * MemoryLocker is an implementation of the Locker interface that manages locks in memory. 5 | * This class is designed for exclusive access control over resources, often used in scenarios like upload management. 6 | * 7 | * Key Features: 8 | * - Ensures exclusive resource access by using a memory-based map to track locks. 9 | * - Implements timeout for lock acquisition, mitigating deadlock situations. 10 | * - Facilitates both immediate and graceful release of locks through different mechanisms. 11 | * 12 | * Locking Behavior: 13 | * - When the `lock` method is invoked for an already locked resource, the `cancelReq` callback is called. 14 | * This signals to the current lock holder that another process is requesting the lock, encouraging them to release it as soon as possible. 15 | * - The lock attempt continues until the specified timeout is reached. If the timeout expires and the lock is still not 16 | * available, an error is thrown to indicate lock acquisition failure. 17 | * 18 | * Lock Acquisition and Release: 19 | * - The `lock` method implements a wait mechanism, allowing a lock request to either succeed when the lock becomes available, 20 | * or fail after the timeout period. 21 | * - The `unlock` method releases a lock, making the resource available for other requests. 22 | */ 23 | 24 | export interface MemoryLockerOptions { 25 | acquireLockTimeout: number 26 | } 27 | 28 | interface LockEntry { 29 | requestRelease: RequestRelease 30 | } 31 | 32 | export class MemoryLocker implements Locker { 33 | timeout: number 34 | locks = new Map() 35 | 36 | constructor(options?: MemoryLockerOptions) { 37 | this.timeout = options?.acquireLockTimeout ?? 
1000 * 30 38 | } 39 | 40 | newLock(id: string) { 41 | return new MemoryLock(id, this, this.timeout) 42 | } 43 | } 44 | 45 | class MemoryLock implements Lock { 46 | constructor( 47 | private id: string, 48 | private locker: MemoryLocker, 49 | private timeout: number = 1000 * 30 50 | ) {} 51 | 52 | async lock(stopSignal: AbortSignal, requestRelease: RequestRelease): Promise { 53 | const abortController = new AbortController() 54 | const onAbort = () => { 55 | abortController.abort() 56 | } 57 | stopSignal.addEventListener('abort', onAbort) 58 | 59 | try { 60 | const lock = await Promise.race([ 61 | this.waitTimeout(abortController.signal), 62 | this.acquireLock(this.id, requestRelease, abortController.signal), 63 | ]) 64 | 65 | if (!lock) { 66 | throw ERRORS.ERR_LOCK_TIMEOUT 67 | } 68 | } finally { 69 | stopSignal.removeEventListener('abort', onAbort) 70 | abortController.abort() 71 | } 72 | } 73 | 74 | protected async acquireLock( 75 | id: string, 76 | requestRelease: RequestRelease, 77 | signal: AbortSignal 78 | ): Promise { 79 | const lock = this.locker.locks.get(id) 80 | 81 | if (signal.aborted) { 82 | return typeof lock !== 'undefined' 83 | } 84 | 85 | if (!lock) { 86 | const lock = { 87 | requestRelease, 88 | } 89 | this.locker.locks.set(id, lock) 90 | return true 91 | } 92 | 93 | await lock.requestRelease?.() 94 | 95 | return await new Promise((resolve, reject) => { 96 | // Using setImmediate to: 97 | // 1. Prevent stack overflow by deferring recursive calls to the next event loop iteration. 98 | // 2. Allow event loop to process other pending events, maintaining server responsiveness. 99 | // 3. Ensure fairness in lock acquisition by giving other requests a chance to acquire the lock. 100 | setImmediate(() => { 101 | this.acquireLock(id, requestRelease, signal).then(resolve).catch(reject) 102 | }) 103 | }) 104 | } 105 | 106 | async unlock(): Promise { 107 | const lock = this.locker.locks.get(this.id) 108 | if (!lock) { 109 | throw new Error('Releasing an unlocked lock!') 110 | } 111 | 112 | this.locker.locks.delete(this.id) 113 | } 114 | 115 | protected waitTimeout(signal: AbortSignal) { 116 | return new Promise((resolve) => { 117 | const timeout = setTimeout(() => { 118 | resolve(false) 119 | }, this.timeout) 120 | 121 | const abortListener = () => { 122 | clearTimeout(timeout) 123 | signal.removeEventListener('abort', abortListener) 124 | resolve(false) 125 | } 126 | signal.addEventListener('abort', abortListener) 127 | }) 128 | } 129 | } 130 | -------------------------------------------------------------------------------- /packages/server/src/lockers/index.ts: -------------------------------------------------------------------------------- 1 | export * from './MemoryLocker.js' 2 | -------------------------------------------------------------------------------- /packages/server/src/test/BaseHandler.test.ts: -------------------------------------------------------------------------------- 1 | import {strict as assert} from 'node:assert' 2 | 3 | import {BaseHandler} from '../handlers/BaseHandler.js' 4 | import {DataStore} from '@tus/utils' 5 | import {MemoryLocker} from '@tus/server' 6 | 7 | describe('BaseHandler', () => { 8 | const store = new DataStore() 9 | const handler = new BaseHandler(store, { 10 | path: '/test/output', 11 | locker: new MemoryLocker(), 12 | }) 13 | 14 | it('constructor must require a DataStore', (done) => { 15 | assert.throws(() => { 16 | // @ts-expect-error TS(2554): Expected 2 arguments, but got 0. 
17 | new BaseHandler() 18 | }, Error) 19 | done() 20 | }) 21 | 22 | it('write() should end the response and set status code', (done) => { 23 | const res = handler.write(200, {}) 24 | assert.equal(res.status, 200) 25 | done() 26 | }) 27 | 28 | it('write() should set headers', (done) => { 29 | const header = 'Access-Control-Allow-Methods' 30 | const headers = {[header]: 'GET, OPTIONS'} 31 | const res = handler.write(200, headers) 32 | assert.equal(res.headers.get(header), headers[header]) 33 | done() 34 | }) 35 | 36 | it('write() should write the body', async () => { 37 | const body = 'Hello tus!' 38 | const res = handler.write(200, {}, body) 39 | assert.equal(await res.text(), body) 40 | }) 41 | 42 | it('should get ID correctly from nested URL', () => { 43 | const req = new Request('https://example.com/some/path/yeah/1234') 44 | const id = handler.getFileIdFromRequest(req) 45 | assert.equal(id, '1234') 46 | }) 47 | 48 | it('should handle URL-encoded ID', () => { 49 | const req = new Request('https://example.com/some/path/yeah/1234%205%23') 50 | const id = handler.getFileIdFromRequest(req) 51 | assert.equal(id, '1234 5#') 52 | }) 53 | 54 | it('should allow to to generate a url with a custom function', () => { 55 | const handler = new BaseHandler(store, { 56 | path: '/path', 57 | locker: new MemoryLocker(), 58 | generateUrl: (_, info) => { 59 | const {proto, host, path, id} = info 60 | return `${proto}://${host}${path}/${id}?customParam=1` 61 | }, 62 | }) 63 | 64 | const req = new Request('http://example.com/upload/123', { 65 | headers: { 66 | host: 'example.com', 67 | }, 68 | }) 69 | const id = '123' 70 | const url = handler.generateUrl(req, id) 71 | assert.equal(url, 'http://example.com/path/123?customParam=1') 72 | }) 73 | 74 | it('should allow extracting the request id with a custom function', () => { 75 | const handler = new BaseHandler(store, { 76 | path: '/path', 77 | locker: new MemoryLocker(), 78 | getFileIdFromRequest: (req: Request) => { 79 | return `${new URL(req.url).pathname.split('/').pop()}-custom` 80 | }, 81 | }) 82 | 83 | const req = new Request('http://example.com/upload/1234') 84 | const url = handler.getFileIdFromRequest(req) 85 | assert.equal(url, '1234-custom') 86 | }) 87 | }) 88 | -------------------------------------------------------------------------------- /packages/server/src/test/DataStore.test.ts: -------------------------------------------------------------------------------- 1 | import 'should' 2 | import {strict as assert} from 'node:assert' 3 | 4 | import {DataStore} from '@tus/utils' 5 | 6 | describe('DataStore', () => { 7 | const datastore = new DataStore() 8 | 9 | it('should provide extensions', (done) => { 10 | datastore.should.have.property('extensions') 11 | assert.equal(Array.isArray(datastore.extensions), true) 12 | assert.equal(datastore.extensions.length, 0) 13 | datastore.extensions = ['creation', 'expiration'] 14 | assert.deepStrictEqual(datastore.extensions, ['creation', 'expiration']) 15 | done() 16 | }) 17 | 18 | it('should check for an extension', (done) => { 19 | datastore.extensions = ['creation', 'expiration'] 20 | assert.equal(datastore.hasExtension('creation'), true) 21 | assert.equal(datastore.hasExtension('expiration'), true) 22 | assert.equal(datastore.hasExtension('concatentation'), false) 23 | assert.equal(datastore.hasExtension('CREATION'), false) // Test case sensitivity 24 | done() 25 | }) 26 | 27 | it('must have a create method', (done) => { 28 | datastore.should.have.property('create') 29 | 
datastore.create.should.be.type('function') 30 | done() 31 | }) 32 | 33 | it('must have a remove method', (done) => { 34 | datastore.should.have.property('remove') 35 | done() 36 | }) 37 | 38 | it('must have a write method', (done) => { 39 | datastore.should.have.property('write') 40 | datastore.write.should.be.type('function') 41 | done() 42 | }) 43 | 44 | it('must have a getUpload method', (done) => { 45 | datastore.should.have.property('getUpload') 46 | datastore.getUpload.should.be.type('function') 47 | done() 48 | }) 49 | }) 50 | -------------------------------------------------------------------------------- /packages/server/src/test/DeleteHandler.test.ts: -------------------------------------------------------------------------------- 1 | import 'should' 2 | 3 | import {strict as assert} from 'node:assert' 4 | 5 | import sinon from 'sinon' 6 | 7 | import {ERRORS, EVENTS, DataStore, type CancellationContext} from '@tus/utils' 8 | import {DeleteHandler} from '../handlers/DeleteHandler.js' 9 | import {MemoryLocker} from '@tus/server' 10 | 11 | describe('DeleteHandler', () => { 12 | const path = '/test/output' 13 | const fake_store = sinon.createStubInstance(DataStore) 14 | let handler: InstanceType 15 | let req: Request 16 | let context: CancellationContext 17 | 18 | beforeEach(() => { 19 | fake_store.remove.resetHistory() 20 | handler = new DeleteHandler(fake_store, { 21 | relativeLocation: true, 22 | path, 23 | locker: new MemoryLocker(), 24 | }) 25 | req = new Request(`http://example.com/${path}/1234`, {method: 'DELETE'}) 26 | const abortController = new AbortController() 27 | context = { 28 | signal: abortController.signal, 29 | cancel: () => abortController.abort(), 30 | abort: () => abortController.abort(), 31 | } 32 | }) 33 | 34 | it('should 404 if no file id match', () => { 35 | fake_store.remove.rejects(ERRORS.FILE_NOT_FOUND) 36 | return assert.rejects(() => handler.send(req, context), {status_code: 404}) 37 | }) 38 | 39 | it('should 404 if no file ID', async () => { 40 | sinon.stub(handler, 'getFileIdFromRequest').returns(undefined) 41 | await assert.rejects(() => handler.send(req, context), {status_code: 404}) 42 | assert.equal(fake_store.remove.callCount, 0) 43 | }) 44 | 45 | it('must acknowledge successful DELETE requests with the 204', async () => { 46 | fake_store.remove.resolves() 47 | const res = await handler.send(req, context) 48 | assert.equal(res.status, 204) 49 | }) 50 | 51 | it(`must fire the ${EVENTS.POST_TERMINATE} event`, (done) => { 52 | fake_store.remove.resolves() 53 | handler.on(EVENTS.POST_TERMINATE, (request, _, id) => { 54 | assert.deepStrictEqual(req, request) 55 | assert.equal(id, '1234') 56 | done() 57 | }) 58 | handler.send(req, context) 59 | }) 60 | 61 | it('must not allow terminating an upload if already completed', async () => { 62 | const handler = new DeleteHandler(fake_store, { 63 | relativeLocation: true, 64 | disableTerminationForFinishedUploads: true, 65 | path, 66 | locker: new MemoryLocker(), 67 | }) 68 | 69 | fake_store.getUpload.resolves({ 70 | id: 'abc', 71 | metadata: undefined, 72 | get sizeIsDeferred(): boolean { 73 | return false 74 | }, 75 | creation_date: undefined, 76 | offset: 1000, 77 | size: 1000, 78 | storage: {type: 'test', path: `${path}/abc`}, 79 | }) 80 | await assert.rejects(() => handler.send(req, context), {status_code: 400}) 81 | }) 82 | }) 83 | -------------------------------------------------------------------------------- /packages/server/src/test/HeadHandler.test.ts: 
-------------------------------------------------------------------------------- 1 | import {strict as assert} from 'node:assert' 2 | 3 | import sinon from 'sinon' 4 | 5 | import {ERRORS, DataStore, Upload, type CancellationContext} from '@tus/utils' 6 | import {HeadHandler} from '../handlers/HeadHandler.js' 7 | import {MemoryLocker} from '@tus/server' 8 | 9 | describe('HeadHandler', () => { 10 | const path = '/test/output' 11 | const url = `https://example.com${path}` 12 | const fake_store = sinon.createStubInstance(DataStore) 13 | const handler = new HeadHandler(fake_store, { 14 | relativeLocation: true, 15 | path, 16 | locker: new MemoryLocker(), 17 | }) 18 | let req: Request 19 | let context: CancellationContext 20 | 21 | beforeEach(() => { 22 | req = new Request(`${url}/1234`, { 23 | method: 'HEAD', 24 | }) 25 | const abortController = new AbortController() 26 | context = { 27 | cancel: () => abortController.abort(), 28 | abort: () => abortController.abort(), 29 | signal: abortController.signal, 30 | } 31 | }) 32 | 33 | it('should 404 if no file id match', () => { 34 | fake_store.getUpload.rejects(ERRORS.FILE_NOT_FOUND) 35 | return assert.rejects(() => handler.send(req, context), {status_code: 404}) 36 | }) 37 | 38 | it('should 404 if no file ID', () => { 39 | req = new Request(`${url}/`, { 40 | method: 'HEAD', 41 | }) 42 | return assert.rejects(() => handler.send(req, context), {status_code: 404}) 43 | }) 44 | 45 | it('should resolve with the offset and cache-control', async () => { 46 | fake_store.getUpload.resolves(new Upload({id: '1234', offset: 0})) 47 | const res = await handler.send(req, context) 48 | assert.equal(res.headers.get('Upload-Offset'), '0') 49 | assert.equal(res.headers.get('Cache-Control'), 'no-store') 50 | assert.equal(res.status, 200) 51 | }) 52 | 53 | it('should resolve with upload-length', async () => { 54 | const file = new Upload({ 55 | id: '1234', 56 | offset: 0, 57 | size: 512, 58 | }) 59 | fake_store.getUpload.resolves(file) 60 | const res = await handler.send(req, context) 61 | assert.equal(res.headers.get('Upload-Length'), '512') 62 | assert.equal(res.headers.has('Upload-Defer-Length'), false) 63 | }) 64 | 65 | it('should resolve with upload-defer-length', async () => { 66 | const file = new Upload({ 67 | id: '1234', 68 | offset: 0, 69 | }) 70 | fake_store.getUpload.resolves(file) 71 | const res = await handler.send(req, context) 72 | assert.equal(res.headers.get('Upload-Defer-Length'), '1') 73 | assert.equal(res.headers.has('Upload-Length'), false) 74 | }) 75 | 76 | it('should resolve with metadata', async () => { 77 | const file = new Upload({ 78 | id: '1234', 79 | offset: 0, 80 | metadata: {is_confidential: null, foo: 'bar'}, 81 | }) 82 | fake_store.getUpload.resolves(file) 83 | const res = await handler.send(req, context) 84 | assert.equal(res.headers.get('Upload-Metadata'), 'is_confidential,foo YmFy') 85 | }) 86 | 87 | it('should resolve without metadata', async () => { 88 | const file = new Upload({ 89 | id: '1234', 90 | offset: 0, 91 | }) 92 | fake_store.getUpload.resolves(file) 93 | const res = await handler.send(req, context) 94 | assert.equal(res.headers.has('Upload-Metadata'), false) 95 | }) 96 | }) 97 | -------------------------------------------------------------------------------- /packages/server/src/test/HeaderValidator.test.ts: -------------------------------------------------------------------------------- 1 | import {strict as assert} from 'node:assert' 2 | 3 | import {validateHeader} from '../validators/HeaderValidator.js' 4 | import 
{TUS_RESUMABLE} from '@tus/utils' 5 | 6 | describe('HeaderValidator', () => { 7 | describe('upload-offset', () => { 8 | it('should validate a number', (done) => { 9 | const value = '1234' 10 | assert.equal(validateHeader('upload-offset', value), true) 11 | done() 12 | }) 13 | 14 | it('should invalidate a negative number', (done) => { 15 | const value = '-4' 16 | assert.equal(validateHeader('upload-offset', value), false) 17 | done() 18 | }) 19 | 20 | it('should invalidate a non number', (done) => { 21 | assert.equal(validateHeader('upload-length', 'hello'), false) 22 | assert.equal(validateHeader('upload-length', '0100'), false) 23 | assert.equal(validateHeader('upload-length', '0asd100'), false) 24 | assert.equal(validateHeader('upload-length', '1asd100'), false) 25 | done() 26 | }) 27 | }) 28 | 29 | describe('upload-length', () => { 30 | it('should validate a number', (done) => { 31 | const value = '1234' 32 | assert.equal(validateHeader('upload-length', value), true) 33 | done() 34 | }) 35 | 36 | it('should invalidate a number < 0', (done) => { 37 | assert.equal(validateHeader('upload-length', '-1'), false) 38 | done() 39 | }) 40 | 41 | it('should invalidate a non number', (done) => { 42 | assert.equal(validateHeader('upload-length', 'hello'), false) 43 | assert.equal(validateHeader('upload-length', '0100'), false) 44 | assert.equal(validateHeader('upload-length', '0asd100'), false) 45 | assert.equal(validateHeader('upload-length', '1asd100'), false) 46 | assert.equal(validateHeader('upload-length', '1.3'), false) 47 | assert.equal(validateHeader('upload-length', '-0'), false) 48 | assert.equal(validateHeader('upload-length', '+0'), false) 49 | assert.equal(validateHeader('upload-length', 'NaN'), false) 50 | assert.equal(validateHeader('upload-length', '+Infinity'), false) 51 | done() 52 | }) 53 | }) 54 | 55 | describe('upload-defer-length', () => { 56 | it('should validate 1', (done) => { 57 | const value = '1' 58 | assert.equal(validateHeader('upload-defer-length', value), true) 59 | done() 60 | }) 61 | 62 | it('should invalidate a number !== 1', (done) => { 63 | assert.equal(validateHeader('upload-defer-length', '0'), false) 64 | assert.equal(validateHeader('upload-defer-length', '1234'), false) 65 | assert.equal(validateHeader('upload-defer-length', '-1'), false) 66 | assert.equal(validateHeader('upload-defer-length', '+1'), false) 67 | assert.equal(validateHeader('upload-defer-length', ' 1 '), false) // test leading and trailing whitespaces 68 | done() 69 | }) 70 | 71 | it('should invalidate a non number', (done) => { 72 | const value = 'hello' 73 | assert.equal(validateHeader('upload-defer-length', value), false) 74 | done() 75 | }) 76 | }) 77 | 78 | describe('upload-metadata', () => { 79 | it('should validate a comma separated list', (done) => { 80 | const value = 81 | 'file/name dGVzdC5tcDQ=,size OTYwMjQ0,type! 
dmlkZW8vbXA0,video,withWhitespace' 82 | assert.equal(validateHeader('upload-metadata', value), true) 83 | done() 84 | }) 85 | 86 | it('should validate keys without a value', (done) => { 87 | assert.equal(validateHeader('upload-metadata', 'is_confidential'), true) 88 | done() 89 | }) 90 | 91 | it('should fail on non comma separated list', (done) => { 92 | assert.equal(validateHeader('upload-metadata', 'too-many spaces'), false) 93 | assert.equal(validateHeader('upload-metadata', ''), false) 94 | assert.equal(validateHeader('upload-metadata', '\t\n'), false) 95 | done() 96 | }) 97 | }) 98 | 99 | describe('upload-concat', () => { 100 | it('should validate partial and final', (done) => { 101 | assert.equal(validateHeader('upload-concat', 'partial'), true) 102 | assert.equal(validateHeader('upload-concat', 'final;/files/a /files/b'), true) 103 | done() 104 | }) 105 | 106 | it('should invalidate everything else', (done) => { 107 | assert.equal(validateHeader('upload-concat', ''), false) 108 | assert.equal(validateHeader('upload-concat', 'PARTIAL'), false) 109 | assert.equal(validateHeader('upload-concat', 'invalid-value'), false) 110 | done() 111 | }) 112 | }) 113 | 114 | describe('x-requested-with', () => { 115 | it('always validate ', (done) => { 116 | assert.equal(validateHeader('x-requested-with'), true) 117 | done() 118 | }) 119 | }) 120 | 121 | describe('tus-version', () => { 122 | it('should validate tus version', (done) => { 123 | assert.equal(validateHeader('tus-version', TUS_RESUMABLE), true) 124 | done() 125 | }) 126 | 127 | it('should invalidate tus version', (done) => { 128 | assert.equal(validateHeader('tus-version', '0.0.0'), false) 129 | assert.equal(validateHeader('tus-version', '0.1.0'), false) 130 | done() 131 | }) 132 | }) 133 | 134 | describe('tus-resumable', () => { 135 | it('should validate tus version', (done) => { 136 | assert.equal(validateHeader('tus-resumable', TUS_RESUMABLE), true) 137 | done() 138 | }) 139 | 140 | it('should invalidate tus version', (done) => { 141 | assert.equal(validateHeader('tus-resumable', '0.0.0'), false) 142 | assert.equal(validateHeader('tus-resumable', '0.1.0'), false) 143 | done() 144 | }) 145 | }) 146 | 147 | describe('tus-extension', () => { 148 | it('always validate ', (done) => { 149 | assert.equal(validateHeader('tus-extension'), true) 150 | done() 151 | }) 152 | }) 153 | 154 | describe('tus-max-size', () => { 155 | it('always validate ', (done) => { 156 | assert.equal(validateHeader('tus-max-size'), true) 157 | done() 158 | }) 159 | }) 160 | 161 | describe('content-type', () => { 162 | it('should validate octet-stream', (done) => { 163 | assert.equal( 164 | validateHeader('content-type', 'application/offset+octet-stream'), 165 | true 166 | ) 167 | done() 168 | }) 169 | 170 | it('should invalidate everything except octet-stream', (done) => { 171 | assert.equal(validateHeader('content-type', 'video/mp4'), false) 172 | assert.equal(validateHeader('content-type', 'application/json'), false) 173 | done() 174 | }) 175 | }) 176 | }) 177 | -------------------------------------------------------------------------------- /packages/server/src/test/Locker.test.ts: -------------------------------------------------------------------------------- 1 | import assert from 'node:assert' 2 | import sinon from 'sinon' 3 | import {ERRORS, MemoryLocker} from '@tus/server' 4 | 5 | describe('MemoryLocker', () => { 6 | it('will acquire a lock by notifying another to release it', async () => { 7 | const locker = new MemoryLocker() 8 | const lockId = 
'upload-id-1' 9 | const abortController = new AbortController() 10 | 11 | const cancel = sinon.spy() 12 | const cancel2 = sinon.spy() 13 | 14 | const lock1 = locker.newLock(lockId) 15 | const lock2 = locker.newLock(lockId) 16 | 17 | await lock1.lock(abortController.signal, async () => { 18 | await lock1.unlock() 19 | cancel() 20 | }) 21 | 22 | await lock2.lock(abortController.signal, async () => { 23 | cancel2() 24 | }) 25 | 26 | await lock2.unlock() 27 | 28 | assert(cancel.callCount === 1, `calls count dont match ${cancel.callCount} !== 1`) 29 | assert(cancel2.callCount === 0, `calls count dont match ${cancel.callCount} !== 1`) 30 | }) 31 | 32 | it('will return a lock timeout error', async () => { 33 | const locker = new MemoryLocker({ 34 | acquireLockTimeout: 500, 35 | }) 36 | const abortController = new AbortController() 37 | 38 | const lockId = 'upload-id-1' 39 | const lock = locker.newLock(lockId) 40 | 41 | const cancel = sinon.spy() 42 | 43 | await lock.lock(abortController.signal, async () => { 44 | cancel() 45 | // We note that the function has been called, but do not 46 | // release the lock 47 | }) 48 | 49 | try { 50 | await lock.lock(abortController.signal, async () => { 51 | throw new Error('panic should not be called') 52 | }) 53 | } catch (e) { 54 | assert(!(e instanceof Error), `error returned is not correct ${e.message}`) 55 | assert('body' in e, 'body is not present in the error') 56 | assert(e.body === ERRORS.ERR_LOCK_TIMEOUT.body) 57 | } 58 | }) 59 | 60 | it('request lock and unlock', async () => { 61 | const locker = new MemoryLocker() 62 | const lockId = 'upload-id-1' 63 | const abortController = new AbortController() 64 | 65 | const lock = locker.newLock(lockId) 66 | const lock2 = locker.newLock(lockId) 67 | 68 | const cancel = sinon.spy() 69 | await lock.lock(abortController.signal, () => { 70 | cancel() 71 | setTimeout(async () => { 72 | await lock.unlock() 73 | }, 50) 74 | }) 75 | 76 | await lock2.lock(abortController.signal, () => { 77 | throw new Error('should not be called') 78 | }) 79 | 80 | await lock2.unlock() 81 | 82 | assert( 83 | cancel.callCount > 0, 84 | `request released called more times than expected - ${cancel.callCount}` 85 | ) 86 | }) 87 | 88 | it('will stop trying to acquire the lock if the abort signal is aborted', async () => { 89 | const locker = new MemoryLocker() 90 | const lockId = 'upload-id-1' 91 | const abortController = new AbortController() 92 | 93 | const cancel = sinon.spy() 94 | const cancel2 = sinon.spy() 95 | 96 | const lock1 = locker.newLock(lockId) 97 | const lock2 = locker.newLock(lockId) 98 | 99 | await lock1.lock(abortController.signal, async () => { 100 | // do not unlock when requested 101 | cancel() 102 | }) 103 | 104 | // Abort signal is aborted after lock2 tries to acquire the lock 105 | setTimeout(() => { 106 | abortController.abort() 107 | }, 100) 108 | 109 | try { 110 | await lock2.lock(abortController.signal, async () => { 111 | cancel2() 112 | }) 113 | assert(false, 'lock2 should not have been acquired') 114 | } catch (e) { 115 | assert(e === ERRORS.ERR_LOCK_TIMEOUT, `error returned is not correct ${e}`) 116 | } 117 | 118 | assert(cancel.callCount > 1, `calls count dont match ${cancel.callCount} !== 1`) 119 | assert(cancel2.callCount === 0, `calls count dont match ${cancel.callCount} !== 1`) 120 | }) 121 | }) 122 | -------------------------------------------------------------------------------- /packages/server/src/test/OptionsHandler.test.ts: 
-------------------------------------------------------------------------------- 1 | import 'should' 2 | 3 | import {strict as assert} from 'node:assert' 4 | 5 | import {OptionsHandler} from '../handlers/OptionsHandler.js' 6 | import { 7 | DataStore, 8 | ALLOWED_METHODS, 9 | ALLOWED_HEADERS, 10 | MAX_AGE, 11 | type CancellationContext, 12 | } from '@tus/utils' 13 | import {MemoryLocker, type ServerOptions} from '@tus/server' 14 | 15 | describe('OptionsHandler', () => { 16 | const options: ServerOptions = { 17 | path: '/test/output', 18 | locker: new MemoryLocker(), 19 | maxSize: 1024, 20 | } 21 | const store = new DataStore() 22 | const handler = new OptionsHandler(store, options) 23 | 24 | let context: CancellationContext 25 | let req: Request 26 | 27 | beforeEach(() => { 28 | const abortController = new AbortController() 29 | context = { 30 | cancel: () => abortController.abort(), 31 | abort: () => abortController.abort(), 32 | signal: abortController.signal, 33 | } 34 | req = new Request(`https://example.com${options.path}/1234`, {method: 'OPTIONS'}) 35 | }) 36 | 37 | it('send() should set headers and 204', async () => { 38 | const headers = { 39 | 'Access-Control-Allow-Methods': ALLOWED_METHODS, 40 | 'Access-Control-Allow-Headers': ALLOWED_HEADERS, 41 | 'Access-Control-Max-Age': MAX_AGE.toString(), 42 | 'Tus-Version': '1.0.0', 43 | 'Tus-Max-Size': '1024', 44 | } 45 | const res = await handler.send(req, context) 46 | for (const header in headers) { 47 | assert.equal( 48 | res.headers.get(header), 49 | headers[header as keyof typeof headers], 50 | `${header} not equal` 51 | ) 52 | } 53 | 54 | assert.equal(res.status, 204) 55 | }) 56 | 57 | it('send() should set extensions header if they exist', async () => { 58 | const headers = {'Tus-Extension': 'creation,expiration'} 59 | store.extensions = ['creation', 'expiration'] 60 | const handler = new OptionsHandler(store, options) 61 | const res = await handler.send(req, context) 62 | // eslint-disable-next-line guard-for-in 63 | for (const header in headers) { 64 | assert.equal(res.headers.get(header), headers[header as keyof typeof headers]) 65 | } 66 | }) 67 | }) 68 | -------------------------------------------------------------------------------- /packages/server/src/types.ts: -------------------------------------------------------------------------------- 1 | import type {ServerRequest as Request} from 'srvx/types' 2 | import type {Locker, Upload} from '@tus/utils' 3 | 4 | /** 5 | * Represents the configuration options for a server. 6 | */ 7 | export type ServerOptions = { 8 | /** 9 | * The route to accept requests. 10 | */ 11 | path: string 12 | 13 | /** 14 | * Max file size allowed when uploading 15 | */ 16 | maxSize?: number | ((req: Request, uploadId: string | null) => Promise | number) 17 | 18 | /** 19 | * Return a relative URL as the `Location` header. 20 | */ 21 | relativeLocation?: boolean 22 | 23 | /** 24 | * Allow `Forwarded`, `X-Forwarded-Proto`, and `X-Forwarded-Host` headers 25 | * to override the `Location` header returned by the server. 26 | */ 27 | respectForwardedHeaders?: boolean 28 | 29 | /** 30 | * Additional headers sent in `Access-Control-Allow-Headers`. 31 | */ 32 | allowedHeaders?: string[] 33 | 34 | /** 35 | * Additional headers sent in `Access-Control-Expose-Headers`. 36 | */ 37 | exposedHeaders?: string[] 38 | 39 | /** 40 | * Set `Access-Control-Allow-Credentials` to true or false (the default) 41 | */ 42 | allowedCredentials?: boolean 43 | 44 | /** 45 | * Add trusted origins to `Access-Control-Allow-Origin`. 
46 | */ 47 | allowedOrigins?: string[] 48 | 49 | /** 50 | * Interval in milliseconds for sending progress of an upload over `EVENTS.POST_RECEIVE` 51 | */ 52 | postReceiveInterval?: number 53 | 54 | /** 55 | * Control how the upload URL is generated. 56 | * @param req - The incoming HTTP request. 57 | * @param options - Options for generating the URL. 58 | */ 59 | generateUrl?: ( 60 | req: Request, 61 | options: {proto: string; host: string; path: string; id: string} 62 | ) => string 63 | 64 | /** 65 | * Control how the Upload-ID is extracted from the request. 66 | * @param req - The incoming HTTP request. 67 | */ 68 | getFileIdFromRequest?: (req: Request, lastPath?: string) => string | undefined 69 | 70 | /** 71 | * Control how you want to name files. 72 | * It is important to make these unique to prevent data loss. 73 | * Only use it if you really need to. 74 | * Default uses `crypto.randomBytes(16).toString('hex')`. 75 | * @param req - The incoming HTTP request. 76 | */ 77 | namingFunction?: ( 78 | req: Request, 79 | metadata?: Record 80 | ) => string | Promise 81 | 82 | /** 83 | * The Lock interface defines methods for implementing a locking mechanism. 84 | * It is primarily used to ensure exclusive access to resources, such as uploads and their metadata. 85 | */ 86 | locker: Locker | Promise | ((req: Request) => Locker | Promise) 87 | 88 | /** 89 | * This timeout controls how long the server will wait a cancelled lock to do its cleanup. 90 | */ 91 | lockDrainTimeout?: number 92 | 93 | /** 94 | * Disallow termination for finished uploads. 95 | */ 96 | disableTerminationForFinishedUploads?: boolean 97 | 98 | /** 99 | * `onUploadCreate` will be invoked before a new upload is created. 100 | * If the function returns the (modified) response, the upload will be created. 101 | * If an error is thrown, the HTTP request will be aborted, and the provided `body` and `status_code` 102 | * (or their fallbacks) will be sent to the client. This can be used to implement validation of upload 103 | * metadata or add headers. 104 | * @param req - The incoming HTTP request. 105 | * @param upload - The Upload object. 106 | */ 107 | onUploadCreate?: ( 108 | req: Request, 109 | upload: Upload 110 | ) => Promise<{metadata?: Upload['metadata']}> 111 | 112 | /** 113 | * `onUploadFinish` will be invoked after an upload is completed but before a response is returned to the client. 114 | * You can optionally return `status_code`, `headers` and `body` to modify the response. 115 | * Note that the tus specification does not allow sending response body nor status code other than 204, but most clients support it. 116 | * If an error is thrown, the HTTP request will be aborted, and the provided `body` and `status_code` 117 | * (or their fallbacks) will be sent to the client. This can be used to implement post-processing validation. 118 | * @param req - The incoming HTTP request. 119 | * @param res - The HTTP response. 120 | * @param upload - The Upload object. 121 | */ 122 | onUploadFinish?: ( 123 | req: Request, 124 | upload: Upload 125 | ) => Promise<{ 126 | status_code?: number 127 | headers?: Record 128 | body?: string 129 | }> 130 | 131 | /** 132 | * `onIncomingRequest` will be invoked when an incoming request is received. 133 | * @param req - The incoming HTTP request. 134 | * @param res - The HTTP response. 135 | * @param uploadId - The ID of the upload. 
136 | */ 137 | onIncomingRequest?: (req: Request, uploadId: string) => Promise 138 | 139 | /** 140 | * `onResponseError` will be invoked when an error response is about to be sent by the server. 141 | * Use this function to map custom errors to tus errors or for custom observability. 142 | * @param req - The incoming HTTP request. 143 | * @param res - The HTTP response. 144 | * @param err - The error object or response. 145 | */ 146 | onResponseError?: ( 147 | req: Request, 148 | err: Error | {status_code: number; body: string} 149 | ) => 150 | | Promise<{status_code: number; body: string} | undefined> 151 | | {status_code: number; body: string} 152 | | undefined 153 | } 154 | 155 | export type RouteHandler = (req: Request) => Response | Promise 156 | 157 | export type WithOptional = Omit & {[P in K]+?: T[P]} 158 | 159 | export type WithRequired = T & {[P in K]-?: T[P]} 160 | -------------------------------------------------------------------------------- /packages/server/src/validators/HeaderValidator.ts: -------------------------------------------------------------------------------- 1 | import {TUS_VERSION, TUS_RESUMABLE, Metadata} from '@tus/utils' 2 | 3 | type validator = (value?: string) => boolean 4 | 5 | export const validators = new Map([ 6 | [ 7 | // The Upload-Offset request and response header indicates a byte offset within a resource. 8 | // The value MUST be a non-negative integer. 9 | 'upload-offset', 10 | (value) => { 11 | const n = Number(value) 12 | return Number.isInteger(n) && String(n) === value && n >= 0 13 | }, 14 | ], 15 | [ 16 | // The Upload-Length request and response header indicates the size of the entire upload in bytes. 17 | // The value MUST be a non-negative integer. 18 | 'upload-length', 19 | (value) => { 20 | const n = Number(value) 21 | return Number.isInteger(n) && String(n) === value && n >= 0 22 | }, 23 | ], 24 | [ 25 | // The Upload-Defer-Length request and response header indicates that the size of the upload 26 | // is not known currently and will be transferred later. 27 | // Its value MUST be 1. If the length of an upload is not deferred, this header MUST be omitted. 28 | 'upload-defer-length', 29 | (value) => value === '1', 30 | ], 31 | [ 32 | 'upload-metadata', 33 | // The Upload-Metadata request and response header MUST consist of one 34 | // or more comma-separated key-value pairs. The key and value MUST be 35 | // separated by a space. The key MUST NOT contain spaces and commas and 36 | // MUST NOT be empty. The key SHOULD be ASCII encoded and the value MUST 37 | // be Base64 encoded. All keys MUST be unique. 38 | (value) => { 39 | try { 40 | Metadata.parse(value) 41 | return true 42 | } catch { 43 | return false 44 | } 45 | }, 46 | ], 47 | [ 48 | 'x-forwarded-proto', 49 | (value) => { 50 | if (value === 'http' || value === 'https') { 51 | return true 52 | } 53 | return false 54 | }, 55 | ], 56 | [ 57 | // The Tus-Version response header MUST be a comma-separated list of protocol versions supported by the Server. 58 | // The list MUST be sorted by Server's preference where the first one is the most preferred one. 59 | 'tus-version', 60 | (value) => { 61 | // @ts-expect-error we can compare a literal 62 | return TUS_VERSION.includes(value) 63 | }, 64 | ], 65 | [ 66 | // The Tus-Resumable header MUST be included in every request and response except for OPTIONS requests. 67 | // The value MUST be the version of the protocol used by the Client or the Server. 
68 | // If the version specified by the Client is not supported by the Server, 69 | // it MUST respond with the 412 Precondition Failed status and MUST include the Tus-Version header into the response. 70 | // In addition, the Server MUST NOT process the request. 71 | 'tus-resumable', 72 | (value) => value === TUS_RESUMABLE, 73 | ], 74 | ['content-type', (value) => value === 'application/offset+octet-stream'], 75 | [ 76 | // The Upload-Concat request and response header MUST be set in both partial and final upload creation requests. 77 | // It indicates whether the upload is either a partial or final upload. 78 | // If the upload is a partial one, the header value MUST be partial. 79 | // In the case of a final upload, its value MUST be final followed by a semicolon and a space-separated list 80 | // of partial upload URLs that will be concatenated. 81 | // The partial uploads URLs MAY be absolute or relative and MUST NOT contain spaces as defined in RFC 3986. 82 | 'upload-concat', 83 | (value) => { 84 | if (!value) return false 85 | const valid_partial = value === 'partial' 86 | const valid_final = value.startsWith('final;') 87 | return valid_partial || valid_final 88 | }, 89 | ], 90 | ]) 91 | 92 | export function validateHeader(name: string, value?: string | null): boolean { 93 | const lowercaseName = name.toLowerCase() 94 | if (!validators.has(lowercaseName)) { 95 | return true 96 | } 97 | // @ts-expect-error if already guards 98 | return validators.get(lowercaseName)(value) 99 | } 100 | -------------------------------------------------------------------------------- /packages/server/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://json.schemastore.org/tsconfig.json", 3 | "extends": "../../tsconfig.base.json", 4 | "references": [ 5 | { "path": "../utils/tsconfig.json" }, 6 | { "path": "../file-store/tsconfig.json" } 7 | ], 8 | "include": ["src"], 9 | "compilerOptions": { 10 | "rootDir": "src", 11 | "outDir": "dist", 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /packages/utils/CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # @tus/utils 2 | 3 | ## 0.6.0 4 | 5 | ### Minor Changes 6 | 7 | - 0f063d9: Change required Node.js version from 16 to 20.19.0 8 | - f190875: - `POST_RECEIVE_V2` has been renamed to `POST_RECEIVE`. The deprecated version of `POST_RECEIVE` has been removed. 9 | - 7a5a60d: Make this package ESM-only instead of CommonJS. Since Node.js >= 20.19.0 you can `require(esm)` so you can consume this package even if you don't ESM yourself yet. 10 | 11 | ## 0.5.1 12 | 13 | ### Patch Changes 14 | 15 | - 42c6267: Consistent cancellation across streams and locks, fixing lock on file never being unlocked when the request ends prematurely. 16 | 17 | ## 0.5.0 18 | 19 | ### Minor Changes 20 | 21 | - 8f19a53: Add IoRedisKvStore & use redis.scan instead of discouraged redis.keys 22 | 23 | ## 0.4.0 24 | 25 | ### Minor Changes 26 | 27 | - de28c6e: Publish source maps and declaration maps 28 | 29 | ## 0.3.0 30 | 31 | ### Minor Changes 32 | 33 | - 117e1b2: Add basic storage information to the Upload model. You can now access 34 | `upload.storage` which has `type` (`file`, `s3`, `gcs`), `path`, and when applicable 35 | `bucket`. 
36 | 37 | ## 0.2.0 38 | 39 | ### Minor Changes 40 | 41 | - 60698da: Introduce POST_RECEIVE_V2 event, which correctly fires during the stream write 42 | rather than after it is finished 43 | 44 | ## 0.1.0 45 | 46 | ### Minor Changes 47 | 48 | - a896d25: Introduce @tus/utils for code sharing between packages 49 | -------------------------------------------------------------------------------- /packages/utils/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://json.schemastore.org/package.json", 3 | "name": "@tus/utils", 4 | "version": "0.6.0", 5 | "description": "Internal utils for tus Node.js server and stores", 6 | "main": "./dist/index.js", 7 | "exports": "./dist/index.js", 8 | "type": "module", 9 | "homepage": "https://github.com/tus/tus-node-server#readme", 10 | "bugs": "https://github.com/tus/tus-node-server/issues", 11 | "repository": "tus/tus-node-server", 12 | "license": "MIT", 13 | "files": [ 14 | "dist", 15 | "src", 16 | "!test*" 17 | ], 18 | "scripts": { 19 | "build": "tsc --build", 20 | "pretest": "tsc --build", 21 | "test": "mocha './dist/test/*.js' --exit" 22 | }, 23 | "devDependencies": { 24 | "@types/debug": "^4.1.12", 25 | "@types/mocha": "^10.0.6", 26 | "@types/node": "^22.13.7", 27 | "ioredis": "^5.4.1", 28 | "mocha": "^11.0.1", 29 | "should": "^13.2.3" 30 | }, 31 | "engines": { 32 | "node": ">=20.19.0" 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /packages/utils/src/constants.ts: -------------------------------------------------------------------------------- 1 | export const REQUEST_METHODS = ['POST', 'HEAD', 'PATCH', 'OPTIONS', 'DELETE'] as const 2 | 3 | export const HEADERS = [ 4 | 'Authorization', 5 | 'Content-Type', 6 | 'Location', 7 | 'Tus-Extension', 8 | 'Tus-Max-Size', 9 | 'Tus-Resumable', 10 | 'Tus-Version', 11 | 'Upload-Concat', 12 | 'Upload-Defer-Length', 13 | 'Upload-Length', 14 | 'Upload-Metadata', 15 | 'Upload-Offset', 16 | 'X-HTTP-Method-Override', 17 | 'X-Requested-With', 18 | 'X-Forwarded-Host', 19 | 'X-Forwarded-Proto', 20 | 'Forwarded', 21 | ] as const 22 | 23 | export const HEADERS_LOWERCASE = HEADERS.map((header) => { 24 | return header.toLowerCase() 25 | }) as Array<Lowercase<(typeof HEADERS)[number]>> 26 | 27 | export const ALLOWED_HEADERS = HEADERS.join(', ') 28 | export const ALLOWED_METHODS = REQUEST_METHODS.join(', ') 29 | export const EXPOSED_HEADERS = HEADERS.join(', ') 30 | 31 | export const ERRORS = { 32 | MISSING_OFFSET: { 33 | status_code: 403, 34 | body: 'Upload-Offset header required\n', 35 | }, 36 | ABORTED: { 37 | status_code: 400, 38 | body: 'Request aborted due to lock acquired', 39 | }, 40 | INVALID_TERMINATION: { 41 | status_code: 400, 42 | body: 'Cannot terminate an already completed upload', 43 | }, 44 | ERR_LOCK_TIMEOUT: { 45 | status_code: 500, 46 | body: 'failed to acquire lock before timeout', 47 | }, 48 | INVALID_CONTENT_TYPE: { 49 | status_code: 403, 50 | body: 'Content-Type header required\n', 51 | }, 52 | FILE_NOT_FOUND: { 53 | status_code: 404, 54 | body: 'The file for this url was not found\n', 55 | }, 56 | INVALID_OFFSET: { 57 | status_code: 409, 58 | body: 'Upload-Offset conflict\n', 59 | }, 60 | FILE_NO_LONGER_EXISTS: { 61 | status_code: 410, 62 | body: 'The file for this url no longer exists\n', 63 | }, 64 | ERR_SIZE_EXCEEDED: { 65 | status_code: 413, 66 | body: "upload's size exceeded\n", 67 | }, 68 | ERR_MAX_SIZE_EXCEEDED: { 69 | status_code: 413, 70 | body: 'Maximum size exceeded\n', 71 | }, 72 | INVALID_LENGTH: { 73 |
status_code: 400, 74 | body: 'Upload-Length or Upload-Defer-Length header required\n', 75 | }, 76 | INVALID_METADATA: { 77 | status_code: 400, 78 | body: 'Upload-Metadata is invalid. It MUST consist of one or more comma-separated key-value pairs. The key and value MUST be separated by a space. The key MUST NOT contain spaces and commas and MUST NOT be empty. The key SHOULD be ASCII encoded and the value MUST be Base64 encoded. All keys MUST be unique', 79 | }, 80 | UNKNOWN_ERROR: { 81 | status_code: 500, 82 | body: 'Something went wrong with that request\n', 83 | }, 84 | FILE_WRITE_ERROR: { 85 | status_code: 500, 86 | body: 'Something went wrong receiving the file\n', 87 | }, 88 | UNSUPPORTED_CONCATENATION_EXTENSION: { 89 | status_code: 501, 90 | body: 'Concatenation extension is not (yet) supported. Disable parallel uploads in the tus client.\n', 91 | }, 92 | UNSUPPORTED_CREATION_DEFER_LENGTH_EXTENSION: { 93 | status_code: 501, 94 | body: 'creation-defer-length extension is not (yet) supported.\n', 95 | }, 96 | UNSUPPORTED_EXPIRATION_EXTENSION: { 97 | status_code: 501, 98 | body: 'expiration extension is not (yet) supported.\n', 99 | }, 100 | } as const 101 | 102 | export const POST_CREATE = 'POST_CREATE' as const 103 | export const POST_RECEIVE = 'POST_RECEIVE' as const 104 | export const POST_FINISH = 'POST_FINISH' as const 105 | export const POST_TERMINATE = 'POST_TERMINATE' as const 106 | export const EVENTS = { 107 | POST_CREATE, 108 | POST_RECEIVE, 109 | POST_FINISH, 110 | POST_TERMINATE, 111 | } as const 112 | 113 | export const MAX_AGE = 86_400 as const 114 | export const TUS_RESUMABLE = '1.0.0' as const 115 | export const TUS_VERSION = ['1.0.0'] as const 116 | -------------------------------------------------------------------------------- /packages/utils/src/index.ts: -------------------------------------------------------------------------------- 1 | export * from './models/index.js' 2 | export * from './constants.js' 3 | export * from './kvstores/index.js' 4 | -------------------------------------------------------------------------------- /packages/utils/src/kvstores/FileKvStore.ts: -------------------------------------------------------------------------------- 1 | import fs from 'node:fs/promises' 2 | import path from 'node:path' 3 | 4 | import type {KvStore} from './Types.js' 5 | import type {Upload} from '../models/index.js' 6 | 7 | /** 8 | * FileKvStore writes the `Upload` JSON metadata to disk next to the uploaded file itself. 9 | * It does not serialize operations; callers are expected to guard concurrent access (e.g. with a Locker).
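 * A minimal usage sketch (assuming an existing `./uploads` directory and an
 * `upload` of type `Upload`):
 *
 *     const store = new FileKvStore<Upload>('./uploads')
 *     await store.set(upload.id, upload) // writes ./uploads/<id>.json
 *     const found = await store.get(upload.id)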
10 | */ 11 | export class FileKvStore<T = Upload> implements KvStore<T> { 12 | directory: string 13 | 14 | constructor(path: string) { 15 | this.directory = path 16 | } 17 | 18 | async get(key: string): Promise<T | undefined> { 19 | try { 20 | const buffer = await fs.readFile(this.resolve(key), 'utf8') 21 | return JSON.parse(buffer as string) 22 | } catch { 23 | return undefined 24 | } 25 | } 26 | 27 | async set(key: string, value: T): Promise<void> { 28 | await fs.writeFile(this.resolve(key), JSON.stringify(value)) 29 | } 30 | 31 | async delete(key: string): Promise<void> { 32 | await fs.rm(this.resolve(key)) 33 | } 34 | 35 | async list(): Promise<Array<string>> { 36 | const files = await fs.readdir(this.directory) 37 | const sorted = files.sort((a, b) => a.localeCompare(b)) 38 | const name = (file: string) => path.basename(file, '.json') 39 | // To only return tus file IDs we check if the file has a corresponding JSON info file 40 | return sorted.filter( 41 | (file, idx) => idx < sorted.length - 1 && name(file) === name(sorted[idx + 1]) 42 | ) 43 | } 44 | 45 | private resolve(key: string): string { 46 | return path.resolve(this.directory, `${key}.json`) 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /packages/utils/src/kvstores/IoRedisKvStore.ts: -------------------------------------------------------------------------------- 1 | import type {Redis as IoRedis} from 'ioredis' 2 | import type {KvStore} from './Types.js' 3 | import type {Upload} from '../models/index.js' 4 | 5 | export class IoRedisKvStore<T = Upload> implements KvStore<T> { 6 | constructor( 7 | private redis: IoRedis, 8 | private prefix = '' 9 | ) { 10 | this.redis = redis 11 | this.prefix = prefix 12 | } 13 | 14 | private prefixed(key: string): string { 15 | return `${this.prefix}${key}` 16 | } 17 | 18 | async get(key: string): Promise<T | undefined> { 19 | return this.deserializeValue(await this.redis.get(this.prefixed(key))) 20 | } 21 | 22 | async set(key: string, value: T): Promise<void> { 23 | await this.redis.set(this.prefixed(key), this.serializeValue(value)) 24 | } 25 | 26 | async delete(key: string): Promise<void> { 27 | await this.redis.del(this.prefixed(key)) 28 | } 29 | 30 | async list(): Promise<Array<string>> { 31 | const keys = new Set<string>() 32 | let cursor = '0' 33 | do { 34 | const [next, batch] = await this.redis.scan( 35 | cursor, 36 | 'MATCH', 37 | this.prefixed('*'), 38 | 'COUNT', 39 | '20' 40 | ) 41 | cursor = next 42 | for (const key of batch) keys.add(key) 43 | } while (cursor !== '0') 44 | return Array.from(keys) 45 | } 46 | 47 | private serializeValue(value: T): string { 48 | return JSON.stringify(value) 49 | } 50 | 51 | private deserializeValue(buffer: string | null): T | undefined { 52 | return buffer ? JSON.parse(buffer) : undefined 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /packages/utils/src/kvstores/MemoryKvStore.ts: -------------------------------------------------------------------------------- 1 | import type {Upload} from '../models/index.js' 2 | import type {KvStore} from './Types.js' 3 | 4 | /** 5 | * Memory based configstore. 6 | * Used mostly for unit tests.
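 * For example, it can serve as the configstore of `@tus/file-store` in tests
 * (a sketch; the `configstore` option name is taken from that package's README
 * and may differ between versions):
 *
 *     const kv = new MemoryKvStore<Upload>()
 *     const datastore = new FileStore({directory: './files', configstore: kv})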
7 | */ 8 | export class MemoryKvStore<T = Upload> implements KvStore<T> { 9 | data: Map<string, T> = new Map() 10 | 11 | async get(key: string): Promise<T | undefined> { 12 | return this.data.get(key) 13 | } 14 | 15 | async set(key: string, value: T): Promise<void> { 16 | this.data.set(key, value) 17 | } 18 | 19 | async delete(key: string): Promise<void> { 20 | this.data.delete(key) 21 | } 22 | 23 | async list(): Promise<Array<string>> { 24 | return [...this.data.keys()] 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /packages/utils/src/kvstores/RedisKvStore.ts: -------------------------------------------------------------------------------- 1 | import type {RedisClientType} from '@redis/client' 2 | import type {KvStore} from './Types.js' 3 | import type {Upload} from '../models/index.js' 4 | 5 | /** 6 | * Redis based configstore. 7 | * 8 | * @author Mitja Puzigaća 9 | */ 10 | export class RedisKvStore<T = Upload> implements KvStore<T> { 11 | constructor( 12 | private redis: RedisClientType, 13 | private prefix = '' 14 | ) { 15 | this.redis = redis 16 | this.prefix = prefix 17 | } 18 | 19 | async get(key: string): Promise<T | undefined> { 20 | return this.deserializeValue(await this.redis.get(this.prefix + key)) 21 | } 22 | 23 | async set(key: string, value: T): Promise<void> { 24 | await this.redis.set(this.prefix + key, this.serializeValue(value)) 25 | } 26 | 27 | async delete(key: string): Promise<void> { 28 | await this.redis.del(this.prefix + key) 29 | } 30 | 31 | async list(): Promise<Array<string>> { 32 | const keys = new Set<string>() 33 | let cursor = 0 34 | do { 35 | const result = await this.redis.scan(cursor, {MATCH: `${this.prefix}*`, COUNT: 20}) 36 | cursor = result.cursor 37 | for (const key of result.keys) keys.add(key) 38 | } while (cursor !== 0) 39 | return Array.from(keys) 40 | } 41 | 42 | private serializeValue(value: T): string { 43 | return JSON.stringify(value) 44 | } 45 | 46 | private deserializeValue(buffer: string | null): T | undefined { 47 | return buffer ? JSON.parse(buffer) : undefined 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /packages/utils/src/kvstores/Types.ts: -------------------------------------------------------------------------------- 1 | import type {Upload} from '../models/index.js' 2 | 3 | export interface KvStore<T = Upload> { 4 | get(key: string): Promise<T | undefined> 5 | set(key: string, value: T): Promise<void> 6 | delete(key: string): Promise<void> 7 | 8 | list?(): Promise<Array<string>> 9 | } 10 | -------------------------------------------------------------------------------- /packages/utils/src/kvstores/index.ts: -------------------------------------------------------------------------------- 1 | export {FileKvStore} from './FileKvStore.js' 2 | export {MemoryKvStore} from './MemoryKvStore.js' 3 | export {RedisKvStore} from './RedisKvStore.js' 4 | export {IoRedisKvStore} from './IoRedisKvStore.js' 5 | export {KvStore} from './Types.js' 6 | -------------------------------------------------------------------------------- /packages/utils/src/models/Context.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * The CancellationContext interface provides mechanisms to manage the termination of a request. 3 | * It is designed to handle two types of request terminations: immediate abortion and graceful cancellation. 4 | * 5 | * Properties: 6 | * - signal: An instance of AbortSignal. It allows external entities to listen for cancellation requests, 7 | * making it possible to react accordingly.
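 *   For example, implementers can react to the signal directly (illustrative;
 *   `cleanup` is a placeholder for your own teardown logic):
 *
 *     context.signal.addEventListener('abort', () => cleanup())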
8 | * 9 | * Methods: 10 | * - abort(): This function should be called to immediately terminate the request. It is intended for scenarios 11 | * where the request cannot continue and needs to be stopped as soon as possible, such as due to upload errors 12 | * or invalid conditions. Implementers should ensure that invoking this method leads to the swift cessation of all 13 | * request-related operations to save resources. 14 | * 15 | * - cancel(): This function is used for more controlled termination of the request. It signals that the request should 16 | * be concluded, but allows for a short period of time to finalize operations gracefully. This could involve 17 | * completing current transactions or cleaning up resources. The exact behavior and the time allowed for cancellation 18 | * completion are determined by the implementation, but the goal is to try to end the request without abrupt interruption, 19 | * ensuring orderly shutdown of ongoing processes. 20 | */ 21 | export interface CancellationContext { 22 | signal: AbortSignal 23 | abort: () => void 24 | cancel: () => void 25 | } 26 | -------------------------------------------------------------------------------- /packages/utils/src/models/DataStore.ts: -------------------------------------------------------------------------------- 1 | import EventEmitter from 'node:events' 2 | import stream from 'node:stream' 3 | 4 | import {Upload} from './Upload.js' 5 | 6 | export class DataStore extends EventEmitter { 7 | extensions: string[] = [] 8 | 9 | hasExtension(extension: string) { 10 | return this.extensions?.includes(extension) 11 | } 12 | 13 | /** 14 | * Called in POST requests. This method just creates a 15 | * file, implementing the creation extension. 16 | * 17 | * http://tus.io/protocols/resumable-upload.html#creation 18 | */ 19 | async create(file: Upload) { 20 | return file 21 | } 22 | 23 | /** 24 | * Called in DELETE requests. This method just deletes the file from the store. 25 | * http://tus.io/protocols/resumable-upload.html#termination 26 | */ 27 | async remove(id: string) {} 28 | 29 | /** 30 | * Called in PATCH requests. This method should write data 31 | * to the DataStore file, and possibly implement the 32 | * concatenation extension. 33 | * 34 | * http://tus.io/protocols/resumable-upload.html#concatenation 35 | */ 36 | async write(stream: stream.Readable, id: string, offset: number) { 37 | return 0 38 | } 39 | 40 | /** 41 | * Called in HEAD requests. This method should return the bytes 42 | * written to the DataStore, for the client to know where to resume 43 | * the upload. 44 | */ 45 | async getUpload(id: string): Promise<Upload> { 46 | return new Upload({ 47 | id, 48 | size: 0, 49 | offset: 0, 50 | storage: {type: 'datastore', path: ''}, 51 | }) 52 | } 53 | 54 | /** 55 | * Called in PATCH requests when upload length is known after being deferred. 56 | */ 57 | async declareUploadLength(id: string, upload_length: number) {} 58 | 59 | /** 60 | * Returns number of expired uploads that were deleted. 61 | */ 62 | async deleteExpired(): Promise<number> { 63 | return 0 64 | } 65 | 66 | getExpiration(): number { 67 | return 0 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /packages/utils/src/models/Locker.ts: -------------------------------------------------------------------------------- 1 | export type RequestRelease = () => Promise<void> | void 2 | 3 | /** 4 | * The Locker interface creates a Lock instance for a given resource identifier.
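 *
 * A rough usage sketch (names are illustrative):
 *
 *     const lock = locker.newLock(uploadId)
 *     await lock.lock(context.signal, async () => {
 *       // another request wants this resource: wrap up and release
 *       await lock.unlock()
 *     })
 *     // ... exclusive work on the upload ...
 *     await lock.unlock()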
5 | */ 6 | export interface Locker { 7 | newLock(id: string): Lock 8 | } 9 | 10 | /** 11 | * The Lock interface defines methods for implementing a locking mechanism. 12 | * It is primarily used to ensure exclusive access to resources, such as uploads and their metadata. 13 | * 14 | * The interface adheres to TUS protocol recommendations, emphasizing the need to prevent prolonged lock retention. 15 | * This approach helps manage resources efficiently and avoids issues with half-open TCP connections. 16 | * 17 | * Methods: 18 | * - lock(signal, cancelReq): Acquires a lock on the resource identified by the 'id' given to newLock(). If the lock is already held by another request, 19 | * the 'cancelReq' callback is provided to signal the current lock holder to release the lock. 20 | * The 'cancelReq' callback should be invoked when there's an attempt by another request to acquire a previously locked resource. 21 | * This mechanism ensures that locks are held only as long as necessary and are promptly released for other requests. 22 | * 23 | * - unlock(): Releases the lock held on the resource. This should be called by the lock holder 24 | * after completing their operation or upon receiving a signal through the 'cancelReq' callback from a subsequent request 25 | * attempting to acquire the lock. 26 | * 27 | */ 28 | export interface Lock { 29 | lock(signal: AbortSignal, cancelReq: RequestRelease): Promise<void> 30 | unlock(): Promise<void> 31 | } 32 | -------------------------------------------------------------------------------- /packages/utils/src/models/Metadata.ts: -------------------------------------------------------------------------------- 1 | import type {Upload} from './Upload.js' 2 | 3 | const ASCII_SPACE = ' '.codePointAt(0) 4 | const ASCII_COMMA = ','.codePointAt(0) 5 | const BASE64_REGEX = /^[\d+/A-Za-z]*={0,2}$/ 6 | 7 | export function validateKey(key: string) { 8 | if (key.length === 0) { 9 | return false 10 | } 11 | 12 | for (let i = 0; i < key.length; ++i) { 13 | const charCodePoint = key.codePointAt(i) as number 14 | if ( 15 | charCodePoint > 127 || 16 | charCodePoint === ASCII_SPACE || 17 | charCodePoint === ASCII_COMMA 18 | ) { 19 | return false 20 | } 21 | } 22 | 23 | return true 24 | } 25 | 26 | export function validateValue(value: string) { 27 | if (value.length % 4 !== 0) { 28 | return false 29 | } 30 | 31 | return BASE64_REGEX.test(value) 32 | } 33 | 34 | export function parse(str?: string) { 35 | const meta: Record<string, string | null> = {} 36 | 37 | if (!str || str.trim().length === 0) { 38 | throw new Error('Metadata string is not valid') 39 | } 40 | 41 | for (const pair of str.split(',')) { 42 | const tokens = pair.split(' ') 43 | const [key, value] = tokens 44 | if ( 45 | ((tokens.length === 1 && validateKey(key)) || 46 | (tokens.length === 2 && validateKey(key) && validateValue(value))) && 47 | !(key in meta) 48 | ) { 49 | const decodedValue = value ?
Buffer.from(value, 'base64').toString('utf8') : null 50 | meta[key] = decodedValue 51 | } else { 52 | throw new Error('Metadata string is not valid') 53 | } 54 | } 55 | 56 | return meta 57 | } 58 | 59 | export function stringify(metadata: NonNullable<Upload['metadata']>): string { 60 | return Object.entries(metadata) 61 | .map(([key, value]) => { 62 | if (value === null) { 63 | return key 64 | } 65 | 66 | const encodedValue = Buffer.from(value, 'utf8').toString('base64') 67 | return `${key} ${encodedValue}` 68 | }) 69 | .join(',') 70 | } 71 | -------------------------------------------------------------------------------- /packages/utils/src/models/StreamLimiter.ts: -------------------------------------------------------------------------------- 1 | import {Transform, type TransformCallback} from 'node:stream' 2 | import {ERRORS} from '../constants.js' 3 | 4 | // TODO: create HttpError and use it everywhere instead of throwing objects 5 | export class MaxFileExceededError extends Error { 6 | status_code: number 7 | body: string 8 | 9 | constructor() { 10 | super(ERRORS.ERR_MAX_SIZE_EXCEEDED.body) 11 | this.status_code = ERRORS.ERR_MAX_SIZE_EXCEEDED.status_code 12 | this.body = ERRORS.ERR_MAX_SIZE_EXCEEDED.body 13 | Object.setPrototypeOf(this, MaxFileExceededError.prototype) 14 | } 15 | } 16 | 17 | export class StreamLimiter extends Transform { 18 | private maxSize: number 19 | private currentSize = 0 20 | 21 | constructor(maxSize: number) { 22 | super() 23 | this.maxSize = maxSize 24 | } 25 | 26 | _transform(chunk: Buffer, encoding: BufferEncoding, callback: TransformCallback): void { 27 | this.currentSize += chunk.length 28 | if (this.currentSize > this.maxSize) { 29 | callback(new MaxFileExceededError()) 30 | } else { 31 | callback(null, chunk) 32 | } 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /packages/utils/src/models/StreamSplitter.ts: -------------------------------------------------------------------------------- 1 | /* global BufferEncoding */ 2 | import crypto from 'node:crypto' 3 | import fs from 'node:fs/promises' 4 | import path from 'node:path' 5 | import stream from 'node:stream' 6 | 7 | function randomString(size: number) { 8 | return crypto.randomBytes(size).toString('base64url').slice(0, size) 9 | } 10 | 11 | type Options = { 12 | chunkSize: number 13 | directory: string 14 | } 15 | 16 | type Callback = (error: Error | null) => void 17 | 18 | export class StreamSplitter extends stream.Writable { 19 | directory: Options['directory'] 20 | currentChunkPath: string | null 21 | currentChunkSize: number 22 | fileHandle: fs.FileHandle | null 23 | filenameTemplate: string 24 | chunkSize: Options['chunkSize'] 25 | part: number 26 | 27 | constructor({chunkSize, directory}: Options, options?: stream.WritableOptions) { 28 | super(options) 29 | this.chunkSize = chunkSize 30 | this.currentChunkPath = null 31 | this.currentChunkSize = 0 32 | this.fileHandle = null 33 | this.directory = directory 34 | this.filenameTemplate = randomString(10) 35 | this.part = 0 36 | 37 | this.on('error', this._handleError.bind(this)) 38 | } 39 | 40 | async _write(chunk: Buffer, _: BufferEncoding, callback: Callback) { 41 | try { 42 | // In order to start writing a chunk, we must first create 43 | // a file system reference for it 44 | if (this.fileHandle === null) { 45 | await this._newChunk() 46 | } 47 | 48 | let overflow = this.currentChunkSize + chunk.length - this.chunkSize 49 | 50 | // The current chunk will be more than our defined part size if we would 51 |
// write all of it to disk. 52 | while (overflow > 0) { 53 | // Only write up to our defined part size to disk. 54 | await this._writeChunk(chunk.subarray(0, chunk.length - overflow)) 55 | await this._finishChunk() 56 | 57 | // We still have some overflow left, so we write it to a new chunk. 58 | await this._newChunk() 59 | chunk = chunk.subarray(chunk.length - overflow, chunk.length) 60 | overflow = this.currentChunkSize + chunk.length - this.chunkSize 61 | } 62 | 63 | // The chunk is smaller than our defined part size so we can just write it to disk. 64 | await this._writeChunk(chunk) 65 | callback(null) 66 | } catch (error) { 67 | callback(error) 68 | } 69 | } 70 | 71 | async _final(callback: Callback) { 72 | if (this.fileHandle === null) { 73 | callback(null) 74 | return 75 | } 76 | 77 | try { 78 | await this._finishChunk() 79 | callback(null) 80 | } catch (error) { 81 | callback(error) 82 | } 83 | } 84 | 85 | async _writeChunk(chunk: Buffer): Promise<void> { 86 | await fs.appendFile(this.fileHandle as fs.FileHandle, chunk) 87 | this.currentChunkSize += chunk.length 88 | } 89 | 90 | async _handleError() { 91 | await this.emitEvent('chunkError', this.currentChunkPath) 92 | // If there was an error, we stop allowing writes to disk, as we cannot advance further. 93 | // At this point the chunk might be incomplete; advancing further might cause data loss. 94 | // Some scenarios where this might happen: the disk is full or the stream was aborted midway. 95 | if (this.fileHandle === null) { 96 | return 97 | } 98 | 99 | await this.fileHandle.close() 100 | this.currentChunkPath = null 101 | this.fileHandle = null 102 | } 103 | 104 | async _finishChunk(): Promise<void> { 105 | if (this.fileHandle === null) { 106 | return 107 | } 108 | 109 | await this.fileHandle.close() 110 | 111 | await this.emitEvent('chunkFinished', { 112 | path: this.currentChunkPath, 113 | size: this.currentChunkSize, 114 | }) 115 | 116 | this.currentChunkPath = null 117 | this.fileHandle = null 118 | this.currentChunkSize = 0 119 | this.part += 1 120 | } 121 | 122 | async emitEvent<T>(name: string, payload: T) { 123 | const listeners = this.listeners(name) 124 | for (const listener of listeners) { 125 | await listener(payload) 126 | } 127 | } 128 | 129 | async _newChunk(): Promise<void> { 130 | const currentChunkPath = path.join( 131 | this.directory, 132 | `${this.filenameTemplate}-${this.part}` 133 | ) 134 | await this.emitEvent('beforeChunkStarted', currentChunkPath) 135 | this.currentChunkPath = currentChunkPath 136 | 137 | const fileHandle = await fs.open(this.currentChunkPath, 'w') 138 | await this.emitEvent('chunkStarted', this.currentChunkPath) 139 | this.currentChunkSize = 0 140 | this.fileHandle = fileHandle 141 | } 142 | } 143 | -------------------------------------------------------------------------------- /packages/utils/src/models/Uid.ts: -------------------------------------------------------------------------------- 1 | import crypto from 'node:crypto' 2 | 3 | export const Uid = { 4 | rand() { 5 | return crypto.randomBytes(16).toString('hex') 6 | }, 7 | } 8 | -------------------------------------------------------------------------------- /packages/utils/src/models/Upload.ts: -------------------------------------------------------------------------------- 1 | type TUpload = { 2 | id: string 3 | size?: number 4 | offset: number 5 | metadata?: Record<string, string | null> 6 | storage?: { 7 | type: string 8 | path: string 9 | bucket?: string 10 | } 11 | creation_date?: string 12 | } 13 | 14 | export class Upload { 15 | id: TUpload['id']
16 | metadata: TUpload['metadata'] 17 | size: TUpload['size'] 18 | offset: TUpload['offset'] 19 | creation_date: TUpload['creation_date'] 20 | storage: TUpload['storage'] 21 | 22 | constructor(upload: TUpload) { 23 | if (!upload.id) { 24 | throw new Error('[File] constructor must be given an ID') 25 | } 26 | 27 | this.id = upload.id 28 | this.size = upload.size 29 | this.offset = upload.offset 30 | this.metadata = upload.metadata 31 | this.storage = upload.storage 32 | 33 | this.creation_date = upload.creation_date ?? new Date().toISOString() 34 | } 35 | 36 | get sizeIsDeferred(): boolean { 37 | return this.size === undefined 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /packages/utils/src/models/index.ts: -------------------------------------------------------------------------------- 1 | export {DataStore} from './DataStore.js' 2 | export * as Metadata from './Metadata.js' 3 | export {StreamSplitter} from './StreamSplitter.js' 4 | export {StreamLimiter} from './StreamLimiter.js' 5 | export {Uid} from './Uid.js' 6 | export {Upload} from './Upload.js' 7 | export {Locker, Lock, RequestRelease} from './Locker.js' 8 | export {CancellationContext} from './Context.js' 9 | -------------------------------------------------------------------------------- /packages/utils/src/test/Metadata.test.ts: -------------------------------------------------------------------------------- 1 | import {strict as assert} from 'node:assert' 2 | import {parse, stringify} from '../models/Metadata.js' 3 | 4 | describe('Metadata', () => { 5 | it('parse valid metadata string', () => { 6 | const str = 7 | 'file/name dGVzdC5tcDQ=,size OTYwMjQ0,type! dmlkZW8vbXA0,video,withWhitespace ' 8 | const obj = { 9 | 'file/name': 'test.mp4', 10 | size: '960244', 11 | 'type!': 'video/mp4', 12 | video: null, 13 | withWhitespace: null, 14 | } 15 | const decoded = parse(str) 16 | assert.deepStrictEqual(decoded, obj) 17 | }) 18 | 19 | it('check length of metadata string', () => { 20 | const obj = { 21 | filename: 'test.mp4', 22 | size: '960244', 23 | type: 'video/mp4', 24 | video: null, 25 | withWhitespace: null, 26 | } 27 | const encoded = stringify(obj) 28 | 29 | assert.strictEqual(encoded.split(',').length, Object.entries(obj).length) 30 | }) 31 | 32 | it('verify metadata stringification', () => { 33 | assert.strictEqual(stringify({filename: 'test.mp4'}), 'filename dGVzdC5tcDQ=') 34 | assert.strictEqual(stringify({size: '960244'}), 'size OTYwMjQ0') 35 | assert.strictEqual(stringify({type: 'video/mp4'}), 'type dmlkZW8vbXA0') 36 | // Multiple valid options 37 | assert.notStrictEqual(['video', 'video '].indexOf(stringify({video: null})), -1) 38 | assert.notStrictEqual( 39 | ['withWhitespace', 'withWhitespace '].indexOf(stringify({withWhitespace: null})), 40 | -1 41 | ) 42 | }) 43 | 44 | it('verify metadata parsing', () => { 45 | assert.deepStrictEqual(parse('filename dGVzdC5tcDQ='), { 46 | filename: 'test.mp4', 47 | }) 48 | assert.deepStrictEqual(parse('size OTYwMjQ0'), {size: '960244'}) 49 | assert.deepStrictEqual(parse('type dmlkZW8vbXA0'), { 50 | type: 'video/mp4', 51 | }) 52 | assert.deepStrictEqual(parse('video'), {video: null}) 53 | assert.deepStrictEqual(parse('video '), {video: null}) 54 | assert.deepStrictEqual(parse('withWhitespace'), { 55 | withWhitespace: null, 56 | }) 57 | assert.deepStrictEqual(parse('withWhitespace '), { 58 | withWhitespace: null, 59 | }) 60 | }) 61 | 62 | it('cyclic test', () => { 63 | const obj = { 64 | filename: 'world_domination_plan.pdf', 65 | 
is_confidential: null, 66 | } 67 | // Object -> string -> object 68 | assert.deepStrictEqual(parse(stringify(obj)), obj) 69 | }) 70 | 71 | describe('verify invalid metadata string', () => { 72 | it('duplicate keys', () => { 73 | assert.throws(() => { 74 | parse('filename dGVzdC5tcDQ=, filename cGFja2FnZS5qc29u') 75 | }) 76 | assert.throws(() => { 77 | parse('video ,video dHJ1ZQ==') 78 | }) 79 | assert.throws(() => { 80 | parse('size,size ') 81 | }) 82 | assert.throws(() => { 83 | parse('') 84 | }) 85 | assert.throws(() => { 86 | parse('\t\n') 87 | }) 88 | }) 89 | 90 | it('invalid key', () => { 91 | assert.throws(() => { 92 | parse('🦁 ZW1vamk=') 93 | }) 94 | assert.throws(() => { 95 | parse('€¢ß') 96 | }) 97 | assert.throws(() => { 98 | parse('test, te st ') 99 | }) 100 | assert.throws(() => { 101 | parse('test,,test') 102 | }) 103 | }) 104 | 105 | it('invalid base64 value', () => { 106 | assert.throws(() => { 107 | parse('key ZW1vamk') 108 | }) // Value is not a multiple of 4 characters 109 | assert.throws(() => { 110 | parse('key invalid-base64==') 111 | }) 112 | assert.throws(() => { 113 | parse('key =ZW1vamk') 114 | }) // Padding can not be at the beginning 115 | assert.throws(() => { 116 | parse('key ') 117 | }) // Only single whitespace is allowed 118 | }) 119 | }) 120 | }) 121 | -------------------------------------------------------------------------------- /packages/utils/src/test/StreamSplitter.test.ts: -------------------------------------------------------------------------------- 1 | import os from 'node:os' 2 | import fs from 'node:fs' 3 | import stream from 'node:stream/promises' 4 | import {strict as assert} from 'node:assert' 5 | 6 | import {StreamSplitter} from '../models/index.js' 7 | import {Readable} from 'node:stream' 8 | 9 | const fileSize = 20_971_520 10 | 11 | describe('StreamSplitter', () => { 12 | it('should buffer chunks until optimal part size', async () => { 13 | const readStream = fs.createReadStream('../../test/fixtures/test.pdf') 14 | const optimalChunkSize = 8 * 1024 * 1024 15 | const parts = [optimalChunkSize, optimalChunkSize, fileSize - optimalChunkSize * 2] 16 | let offset = 0 17 | let index = 0 18 | const splitterStream = new StreamSplitter({ 19 | chunkSize: optimalChunkSize, 20 | directory: os.tmpdir(), 21 | }).on('chunkFinished', ({size}) => { 22 | offset += size 23 | assert.equal(parts[index], size) 24 | index++ 25 | }) 26 | await stream.pipeline(readStream, splitterStream) 27 | assert.equal(offset, fileSize) 28 | }) 29 | 30 | it('should split to multiple chunks when single buffer exceeds chunk size', async () => { 31 | const optimalChunkSize = 1024 32 | const expectedChunks = 7 33 | 34 | const readStream = Readable.from([Buffer.alloc(expectedChunks * optimalChunkSize)]) 35 | 36 | let chunksStarted = 0 37 | let chunksFinished = 0 38 | const splitterStream = new StreamSplitter({ 39 | chunkSize: optimalChunkSize, 40 | directory: os.tmpdir(), 41 | }) 42 | .on('chunkStarted', () => { 43 | chunksStarted++ 44 | }) 45 | .on('chunkFinished', () => { 46 | chunksFinished++ 47 | }) 48 | 49 | await stream.pipeline(readStream, splitterStream) 50 | 51 | assert.equal(chunksStarted, expectedChunks) 52 | assert.equal(chunksFinished, expectedChunks) 53 | }) 54 | }) 55 | -------------------------------------------------------------------------------- /packages/utils/src/test/Uid.test.ts: -------------------------------------------------------------------------------- 1 | import {strict as assert} from 'node:assert' 2 | 3 | import {Uid} from '../models/index.js' 4 | 5 
| describe('Uid', () => { 6 | it('returns a 32 char string', (done) => { 7 | const id = Uid.rand() 8 | assert.equal(typeof id, 'string') 9 | assert.equal(id.length, 32) 10 | done() 11 | }) 12 | 13 | it('returns a different string every time', (done) => { 14 | const ids: Record<string, boolean> = {} 15 | for (let i = 0; i < 16; i++) { 16 | const id = Uid.rand() 17 | assert(!ids[id], 'id was encountered multiple times') 18 | ids[id] = true 19 | } 20 | 21 | done() 22 | }) 23 | }) 24 | -------------------------------------------------------------------------------- /packages/utils/src/test/Upload.test.ts: -------------------------------------------------------------------------------- 1 | import 'should' 2 | import {strict as assert} from 'node:assert' 3 | 4 | import {Upload} from '../models/Upload.js' 5 | import {Uid} from '../models/Uid.js' 6 | 7 | describe('Upload', () => { 8 | describe('constructor', () => { 9 | it('must require an id', () => { 10 | assert.throws(() => { 11 | // @ts-expect-error TS(2554): Expected 4 arguments, but got 0. 12 | new Upload() 13 | }, Error) 14 | }) 15 | 16 | it('should set properties given', () => { 17 | const id = Uid.rand() 18 | const size = 1234 19 | const offset = 0 20 | const metadata = {foo: 'bar'} 21 | const upload = new Upload({id, size, offset, metadata}) 22 | assert.equal(upload.id, id) 23 | assert.equal(upload.size, size) 24 | assert.equal(upload.offset, offset) 25 | assert.equal(upload.sizeIsDeferred, false) 26 | assert.equal(upload.metadata, metadata) 27 | }) 28 | }) 29 | }) 30 | -------------------------------------------------------------------------------- /packages/utils/src/test/stores.ts: -------------------------------------------------------------------------------- 1 | import 'should' 2 | import {strict as assert} from 'node:assert' 3 | import fs from 'node:fs' 4 | import stream from 'node:stream' 5 | import {setTimeout as promSetTimeout} from 'node:timers/promises' 6 | 7 | import {Upload, Uid} from '@tus/utils' 8 | 9 | export function testId(id: string) { 10 | return `${id}-${Uid.rand()}` 11 | } 12 | 13 | export const shouldHaveStoreMethods = () => { 14 | describe('the class', () => { 15 | it('must have a write method', function (done) { 16 | this.datastore.should.have.property('write') 17 | done() 18 | }) 19 | 20 | it('must have a getUpload method', function (done) { 21 | this.datastore.should.have.property('getUpload') 22 | done() 23 | }) 24 | }) 25 | } 26 | 27 | export const shouldCreateUploads = () => { 28 | describe('create', () => { 29 | const file = new Upload({ 30 | id: testId('create-test'), 31 | size: 1000, 32 | offset: 0, 33 | metadata: {filename: 'world_domination_plan.pdf', is_confidential: null}, 34 | }) 35 | const file_defered = new Upload({ 36 | id: testId('create-test-deferred'), 37 | offset: 0, 38 | }) 39 | 40 | it('should resolve to file', async function () { 41 | const newFile = await this.datastore.create(file) 42 | assert.ok(newFile.storage.path) 43 | assert.ok(newFile.storage.type) 44 | assert.equal(newFile instanceof Upload, true) 45 | }) 46 | 47 | it("should report 'creation' extension", function () { 48 | assert.equal(this.datastore.hasExtension('creation'), true) 49 | }) 50 | 51 | it('should create new upload resource', async function () { 52 | await this.datastore.create(file) 53 | const upload = await this.datastore.getUpload(file.id) 54 | assert.equal(upload.offset, 0) 55 | }) 56 | 57 | it('should store `upload_length` when creating new resource', async function () { 58 | await this.datastore.create(file) 59 | const
upload = await this.datastore.getUpload(file.id) 60 | assert.strictEqual(upload.size, file.size) 61 | }) 62 | 63 | it('should store `upload_defer_length` when creating new resource', async function () { 64 | await this.datastore.create(file_defered) 65 | const upload = await this.datastore.getUpload(file_defered.id) 66 | assert.strictEqual(upload.sizeIsDeferred, file_defered.sizeIsDeferred) 67 | }) 68 | 69 | it('should store `upload_metadata` when creating new resource', async function () { 70 | await this.datastore.create(file) 71 | const upload = await this.datastore.getUpload(file.id) 72 | assert.deepStrictEqual(upload.metadata, file.metadata) 73 | }) 74 | 75 | it('should store `upload_metadata` with non-ASCII characters', async function () { 76 | const file = new Upload({ 77 | id: testId('create-test-non-ascii'), 78 | size: 1000, 79 | offset: 0, 80 | metadata: {filename: '世界_domination_plan.pdf', is_confidential: null}, 81 | }) 82 | await this.datastore.create(file) 83 | const upload = await this.datastore.getUpload(file.id) 84 | assert.deepStrictEqual(upload.metadata, file.metadata) 85 | }) 86 | }) 87 | } 88 | 89 | export const shouldExpireUploads = () => { 90 | describe('expiration extension', () => { 91 | it("should report 'expiration' extension", function () { 92 | assert.equal(this.datastore.hasExtension('expiration'), true) 93 | }) 94 | 95 | it('should expire upload', async function () { 96 | const file = new Upload({ 97 | id: testId('expiration-test'), 98 | size: this.testFileSize, 99 | offset: 0, 100 | metadata: {filename: 'world_domination_plan.pdf', is_confidential: null}, 101 | }) 102 | this.datastore.expirationPeriodInMilliseconds = 100 103 | await this.datastore.create(file) 104 | const readable = fs.createReadStream(this.testFilePath) 105 | const offset = await this.datastore.write(readable, file.id, 0) 106 | await promSetTimeout(100) 107 | const n = await this.datastore.deleteExpired() 108 | assert.equal(offset, this.testFileSize) 109 | assert.equal(n, 1) 110 | }) 111 | }) 112 | } 113 | 114 | export const shouldRemoveUploads = () => { 115 | const file = new Upload({id: testId('remove-test'), size: 1000, offset: 0}) 116 | 117 | describe('remove (termination extension)', () => { 118 | it("should report 'termination' extension", function () { 119 | assert.equal(this.datastore.hasExtension('termination'), true) 120 | }) 121 | 122 | it('should reject when the file does not exist', function () { 123 | return this.datastore.remove('doesnt_exist').should.be.rejected() 124 | }) 125 | 126 | it('should delete the file when it does exist', async function () { 127 | await this.datastore.create(file) 128 | return this.datastore.remove(file.id) 129 | }) 130 | 131 | it('should delete the file during upload', async function () { 132 | const file = new Upload({ 133 | id: testId('termination-test'), 134 | size: this.testFileSize, 135 | offset: 0, 136 | metadata: {filename: 'terminate_during_upload.pdf', is_confidential: null}, 137 | }) 138 | await this.datastore.create(file) 139 | 140 | const readable = fs.createReadStream(this.testFilePath, { 141 | highWaterMark: 100 * 1024, 142 | }) 143 | // Pause between chunk reads to make sure the file is still uploading when the terminate function is invoked 144 | readable.on('data', () => { 145 | readable.pause() 146 | setTimeout(() => readable.resume(), 1000) 147 | }) 148 | 149 | await Promise.allSettled([ 150 | this.datastore.write(readable, file.id, 0), 151 | this.datastore.remove(file.id), 152 | ]) 153 | 154 | try { 155 | await
this.datastore.getUpload(file.id) 156 | assert.fail('getUpload should have thrown an error') 157 | } catch (error) { 158 | assert.equal([404, 410].includes(error?.status_code), true) 159 | } 160 | 161 | readable.destroy() 162 | }) 163 | }) 164 | } 165 | 166 | export const shouldWriteUploads = () => { 167 | describe('write', () => { 168 | it('should reject write streams that cannot be opened', async function () { 169 | const stream = fs.createReadStream(this.testFilePath) 170 | return this.datastore.write(stream, 'doesnt_exist', 0).should.be.rejected() 171 | }) 172 | 173 | it('should reject when readable stream has an error', async function () { 174 | const stream = fs.createReadStream(this.testFilePath) 175 | return this.datastore.write(stream, 'doesnt_exist', 0).should.be.rejected() 176 | }) 177 | 178 | it('should write a stream and resolve the new offset', async function () { 179 | const file = new Upload({ 180 | id: testId('write-test'), 181 | size: this.testFileSize, 182 | offset: 0, 183 | metadata: {filename: 'world_domination_plan.pdf', is_confidential: null}, 184 | }) 185 | await this.datastore.create(file) 186 | const readable = fs.createReadStream(this.testFilePath) 187 | const offset = await this.datastore.write(readable, file.id, 0) 188 | assert.equal(offset, this.testFileSize) 189 | }) 190 | 191 | it('should reject when stream is destroyed', async function () { 192 | const file = new Upload({ 193 | id: testId('write-test-reject'), 194 | size: this.testFileSize, 195 | offset: 0, 196 | metadata: {filename: 'world_domination_plan.pdf', is_confidential: null}, 197 | }) 198 | await this.datastore.create(file) 199 | const readable = new stream.Readable({ 200 | read() { 201 | this.push('some data') 202 | this.destroy() 203 | }, 204 | }) 205 | const offset = this.datastore.write(readable, file.id, 0) 206 | return offset.should.be.rejected() 207 | }) 208 | }) 209 | } 210 | 211 | export const shouldHandleOffset = () => { 212 | describe('getUpload', () => { 213 | it('should reject non-existent files', function () { 214 | return this.datastore.getUpload('doesnt_exist').should.be.rejected() 215 | }) 216 | 217 | it('should resolve the stats for existing files', async function () { 218 | const file = new Upload({ 219 | id: testId('offset-test'), 220 | size: this.testFileSize, 221 | offset: 0, 222 | metadata: {filename: 'world_domination_plan.pdf', is_confidential: null}, 223 | }) 224 | 225 | await this.datastore.create(file) 226 | const offset = await this.datastore.write( 227 | fs.createReadStream(this.testFilePath), 228 | file.id, 229 | file.offset 230 | ) 231 | const upload = await this.datastore.getUpload(file.id) 232 | assert.equal(upload.offset, offset) 233 | }) 234 | }) 235 | } 236 | 237 | export const shouldDeclareUploadLength = () => { 238 | describe('declareUploadLength', () => { 239 | it('should reject non-existent files', function () { 240 | return this.datastore.declareUploadLength('doesnt_exist', '10').should.be.rejected() 241 | }) 242 | 243 | it('should update upload_length after declaring upload length', async function () { 244 | const file = new Upload({ 245 | id: testId('declare-length-test'), 246 | offset: 0, 247 | metadata: {filename: 'world_domination_plan.pdf', is_confidential: null}, 248 | }) 249 | 250 | await this.datastore.create(file) 251 | let upload = await this.datastore.getUpload(file.id) 252 | assert.equal(upload.size, undefined) 253 | assert.equal(upload.sizeIsDeferred, true) 254 | await this.datastore.declareUploadLength(file.id, 10) 255 | upload = await
this.datastore.getUpload(file.id) 256 | assert.equal(upload.size, 10) 257 | assert.equal(upload.sizeIsDeferred, false) 258 | }) 259 | }) 260 | } 261 | -------------------------------------------------------------------------------- /packages/utils/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://json.schemastore.org/tsconfig.json", 3 | "extends": "../../tsconfig.base.json", 4 | "include": ["src"], 5 | "compilerOptions": { 6 | "rootDir": "src", 7 | "outDir": "dist", 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /test/fixtures/test.mp4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tus/tus-node-server/b4029ab913cd3ff53665ae6aa38199553f610f81/test/fixtures/test.mp4 -------------------------------------------------------------------------------- /test/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://json.schemastore.org/package.json", 3 | "name": "test", 4 | "private": true, 5 | "type": "module", 6 | "scripts": { 7 | "build": "tsc --build", 8 | "test": "mocha dist/e2e.test.js dist/s3.e2e.js --timeout 40000 --exit" 9 | }, 10 | "exports": { 11 | "./stores.test": "./dist/stores.test.js" 12 | }, 13 | "dependencies": { 14 | "@tus/file-store": "^2.0.0", 15 | "@tus/gcs-store": "^2.0.0", 16 | "@tus/s3-store": "^2.0.0", 17 | "@tus/server": "^2.2.0" 18 | }, 19 | "devDependencies": { 20 | "@types/mocha": "^10.0.6", 21 | "@types/node": "^22.13.7", 22 | "@types/rimraf": "^3.0.2", 23 | "@types/sinon": "^17.0.3", 24 | "@types/supertest": "^2.0.16", 25 | "@types/throttle": "^1.0.4", 26 | "mocha": "^11.0.1", 27 | "rimraf": "^3.0.2", 28 | "should": "^13.2.3", 29 | "sinon": "^20.0.0", 30 | "supertest": "^6.3.4", 31 | "throttle": "^1.0.3" 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /test/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://json.schemastore.org/tsconfig.json", 3 | "references": [ 4 | { "path": "../packages/azure-store/tsconfig.json" }, 5 | { "path": "../packages/file-store/tsconfig.json" }, 6 | { "path": "../packages/gcs-store/tsconfig.json" }, 7 | { "path": "../packages/s3-store/tsconfig.json" }, 8 | { "path": "../packages/server/tsconfig.json" } 9 | ], 10 | "extends": "../tsconfig.base.json", 11 | "include": ["src"], 12 | "compilerOptions": { 13 | "rootDir": "src", 14 | "outDir": "dist" 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /tsconfig.base.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://json.schemastore.org/tsconfig.json", 3 | "compilerOptions": { 4 | "composite": true, 5 | "lib": ["es2022"], 6 | "module": "NodeNext", 7 | "target": "es2022", 8 | "strict": true, 9 | "declaration": true, 10 | "declarationMap": true, 11 | "sourceMap": true, 12 | "useUnknownInCatchVariables": false, 13 | "skipLibCheck": true 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://json.schemastore.org/tsconfig.json", 3 | "files": [], 4 | "references": [ 5 | { "path": "packages/azure-store/tsconfig.json" }, 6 | { "path": 
"packages/file-store/tsconfig.json" }, 7 | { "path": "packages/gcs-store/tsconfig.json" }, 8 | { "path": "packages/s3-store/tsconfig.json" }, 9 | { "path": "packages/server/tsconfig.json" }, 10 | { "path": "packages/utils/tsconfig.json" }, 11 | { "path": "test/tsconfig.json" } 12 | ] 13 | } 14 | --------------------------------------------------------------------------------