├── .editorconfig ├── .github ├── CONTRIBUTING.md └── workflows │ ├── checks.yml │ ├── labels.yml │ ├── release.yml │ └── stale.yml ├── .gitignore ├── .npmrc ├── .prettierignore ├── LICENSE.md ├── README.md ├── bin └── test.ts ├── drivers ├── fs │ ├── debug.ts │ ├── driver.ts │ ├── main.ts │ └── types.ts ├── gcs │ ├── debug.ts │ ├── driver.ts │ ├── main.ts │ └── types.ts └── s3 │ ├── debug.ts │ ├── driver.ts │ ├── main.ts │ └── types.ts ├── eslint.config.js ├── index.ts ├── package.json ├── src ├── debug.ts ├── disk.ts ├── drive_directory.ts ├── drive_manager.ts ├── driver_file.ts ├── errors.ts ├── fake_disk.ts ├── key_normalizer.ts └── types.ts ├── tests ├── core │ ├── disk.spec.ts │ ├── drive_manager.spec.ts │ └── key_normalizer.spec.ts ├── drivers │ ├── fs │ │ ├── copy.spec.ts │ │ ├── delete.spec.ts │ │ ├── exists.spec.ts │ │ ├── get.spec.ts │ │ ├── get_metadata.spec.ts │ │ ├── list_all.spec.ts │ │ ├── move.spec.ts │ │ ├── put.spec.ts │ │ ├── url_generation.spec.ts │ │ └── visibility.spec.ts │ ├── gcs │ │ ├── copy.spec.ts │ │ ├── delete.spec.ts │ │ ├── disk.spec.ts │ │ ├── env.ts │ │ ├── exists.spec.ts │ │ ├── get.spec.ts │ │ ├── get_metadata.spec.ts │ │ ├── list_all.spec.ts │ │ ├── move.spec.ts │ │ ├── put.spec.ts │ │ ├── url_generation.spec.ts │ │ └── visibility.spec.ts │ └── s3 │ │ ├── copy.spec.ts │ │ ├── delete.spec.ts │ │ ├── env.ts │ │ ├── exists.spec.ts │ │ ├── get.spec.ts │ │ ├── get_metadata.spec.ts │ │ ├── list_all.spec.ts │ │ ├── move.spec.ts │ │ ├── put.spec.ts │ │ ├── url_generation.spec.ts │ │ └── visibility.spec.ts └── helpers.ts ├── tsconfig.json └── tsnode.esm.js /.editorconfig: -------------------------------------------------------------------------------- 1 | # http://editorconfig.org 2 | 3 | [*] 4 | indent_style = space 5 | indent_size = 2 6 | end_of_line = lf 7 | charset = utf-8 8 | trim_trailing_whitespace = true 9 | insert_final_newline = true 10 | 11 | [*.json] 12 | insert_final_newline = ignore 13 | 14 | [**.min.js] 15 | indent_style = ignore 16 | insert_final_newline = ignore 17 | 18 | [MakeFile] 19 | indent_style = space 20 | 21 | [*.md] 22 | trim_trailing_whitespace = false 23 | -------------------------------------------------------------------------------- /.github/CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | This is a general contribution guide for all of the [FlyDrive](https://github.com/flydrive-js) repos. Please read this guide thoroughly before contributing to any of the repos 🙏 4 | 5 | Code is not the only way to contribute. Following are also some ways to contribute and become part of the community. 6 | 7 | - Fixing typos in the documentation 8 | - Improving existing docs 9 | - Writing cookbooks or blog posts to educate others in the community 10 | - Triaging issues 11 | - Sharing your opinion on existing issues 12 | - Help the community in [discord](https://discord.gg/vDcEjq6) by answering their questions. 13 | 14 | ## Reporting bugs 15 | 16 | Many issues reported on open source projects are usually questions or misconfiguration at the reporter's end. Therefore, we highly recommend you properly troubleshoot your issues before reporting them. 17 | 18 | If you're reporting a bug, include as much information as possible with the code samples you have written. The scale of good to bad issues looks as follows. 19 | 20 | - **PERFECT ISSUE**: You isolate the underlying bug. Create a failing test in the repo and open a Github issue around it. 
21 | - **GOOD ISSUE**: You isolate the underlying bug and provide a minimal reproduction of it as a Github repo. Antfu has written a great article on [Why Reproductions are Required](https://antfu.me/posts/why-reproductions-are-required). 22 | - **OKAYISH ISSUE**: You correctly state your issue. Share the code that produces the issue in the first place. Also, include the related configuration files and the package version you use. 23 | 24 | Last but not least is to format every code block properly by following the [Github markdown syntax guide](https://docs.github.com/en/get-started/writing-on-github/getting-started-with-writing-and-formatting-on-github/basic-writing-and-formatting-syntax). 25 | 26 | - **POOR ISSUE**: You dump the question you have with the hope that the other person will ask the relevant questions and help you. These kinds of issues are closed automatically without any explanation. 27 | 28 | ## Having a discussion 29 | 30 | You often want to discuss a topic or maybe share some ideas. In that case, create an issue and prefix it with the **Idea** keyword. For example: `Idea - Should we add support for X`. 31 | 32 | ## Creating pull requests 33 | 34 | It is never a good experience to have your pull request declined after investing a lot of time and effort in writing the code. Therefore, we highly recommend you to [kick off a discussion](https://github.com/flydrive-js/core/issues/new?title=Discussion%20for%20a%20new%20feature%20-%20%3CYOUR%20FEATURE%20NAME%3E) before starting any new work on your side. 35 | 36 | Just start a discussion and explain what are you planning to contribute? 37 | 38 | - **Are you trying to create a PR to fix a bug**: PRs for bugs are mostly accepted once the bug has been confirmed. 39 | - **Are you planning to add a new feature**: Please thoroughly explain why this feature is required and share links to the learning material we can read to educate ourselves. 40 | 41 | > Note: You should also be available to open additional PRs for documenting the contributed feature or improvement. 42 | 43 | ## Repository setup 44 | 45 | 1. Start by cloning the repo on your local machine. 46 | 47 | ```sh 48 | git clone 49 | ``` 50 | 51 | 2. Install dependencies on your local. Please do not update any dependencies along with a feature request. If you find stale dependencies, create a separate PR to update them. 52 | 53 | We use `npm` for managing dependencies, therefore do not use `yarn` or any other tool. 54 | 55 | ```sh 56 | npm install 57 | ``` 58 | 59 | 3. Run tests by executing the following command. 60 | 61 | ```sh 62 | npm test 63 | ``` 64 | 65 | ## Tools in use 66 | 67 | Following is the list of tools in use. 68 | 69 | | Tool | Usage | 70 | | ---------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 71 | | TypeScript | All of the repos are authored in TypeScript. The compiled JavaScript and Type-definitions are published on npm. | 72 | | TS Node | We use [ts-node](https://typestrong.org/ts-node/) to run tests or scripts without compiling TypeScript. The main goal of ts-node is to have a faster feedback loop during development | 73 | | SWC | [SWC](https://swc.rs/) is a Rust based TypeScript compiler. TS Node ships with first-class support for using SWC over the TypeScript official compiler. 
The main reason for using SWC is the speed gain. | 74 | | NP | We use [np](https://github.com/sindresorhus/np) to publish our packages on npm. Np does all the heavy lifting of creating a release and publishes it on npm and Github. The np config is defined within the `package.json` file. | 75 | | ESLint | ESLint helps us enforce a consistent coding style across all the repos with multiple contributors. All our ESLint rules are published under the [eslint-plugin-adonis](https://github.com/adonisjs-community/eslint-plugin-adonis) package. | 76 | | Prettier | We use prettier to format the codebase for consistent visual output. If you are confused about why we are using ESLint and Prettier both, then please read [Prettier vs. Linters](https://prettier.io/docs/en/comparison.html) doc on the Prettier website. | 77 | | EditorConfig | The `.editorconfig` file in the root of every project configures your Code editor to use a set of rules for indentation and whitespace management. Again, Prettier is used for post formatting your code, and Editorconfig is used to configure the editor in advance. | 78 | | Conventional Changelog | All of the commits across all the repos uses [commitlint](https://github.com/conventional-changelog/commitlint/#what-is-commitlint) to enforce consistent commit messages. | 79 | | Husky | We use [husky](https://typicode.github.io/husky/#/) to enforce commit conventions when committing the code. Husky is a git hooks system written in Node | 80 | 81 | ## Commands 82 | 83 | | Command | Description | 84 | | --------------------- | ---------------------------------------------------------------------------------------------------------------------- | 85 | | `npm run test` | Run project tests using `ts-node` | 86 | | `npm run compile` | Compile the TypeScript project to JavaScript. The compiled output is written inside the `build` directory | 87 | | `npm run release` | Start the release process using `np` | 88 | | `npm run lint` | Lint the codebase using ESlint | 89 | | `npm run format` | Format the codebase using Prettier | 90 | | `npm run sync-labels` | Sync the labels defined inside the `.github/labels.json` file with Github. This command is for the project admin only. | 91 | 92 | ## Coding style 93 | 94 | All of my (Harminder Virk) projects are written in TypeScript. Also, slowly, I am also moving everything to pure ESM. 95 | 96 | - You can learn more about [my coding style here](https://github.com/thetutlage/meta/discussions/3) 97 | - Check out the setup I follow for [ESM and TypeScript here](https://github.com/thetutlage/meta/discussions/2) 98 | 99 | Also, make sure to run the following commands before pushing the code. 100 | 101 | ```sh 102 | # Formats using prettier 103 | npm run format 104 | 105 | # Lints using Eslint 106 | npm run lint 107 | ``` 108 | 109 | ## Getting recognized as a contributor 110 | 111 | We rely on Github to list all the repo contributors in the right-side panel of the repo. Following is an example of the same. 112 | 113 | Also, we use the [auto generate release notes](https://docs.github.com/en/repositories/releasing-projects-on-github/automatically-generated-release-notes#about-automatically-generated-release-notes) feature of Github, which adds a reference to the contributor profile within the release notes. 
114 | -------------------------------------------------------------------------------- /.github/workflows/checks.yml: -------------------------------------------------------------------------------- 1 | name: test 2 | 3 | on: 4 | - push 5 | - pull_request 6 | - workflow_call 7 | 8 | jobs: 9 | lint: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - uses: actions/checkout@v4 13 | - name: Install 14 | run: npm install 15 | - name: Run lint 16 | run: npm run lint 17 | 18 | typecheck: 19 | runs-on: ubuntu-latest 20 | steps: 21 | - uses: actions/checkout@v4 22 | - name: Install 23 | run: npm install 24 | - name: Run typecheck 25 | run: npm run typecheck 26 | 27 | tests-main: 28 | runs-on: ${{ matrix.os }} 29 | strategy: 30 | matrix: 31 | os: [ubuntu-latest, windows-latest] 32 | node-version: 33 | - 20.10.0 34 | - 21.x 35 | steps: 36 | - uses: actions/checkout@v4 37 | - name: Use Node.js ${{ matrix.node-version }} 38 | uses: actions/setup-node@v4 39 | with: 40 | node-version: ${{ matrix.node-version }} 41 | - name: Install 42 | run: npm install 43 | - name: Run tests 44 | run: npm test main 45 | env: 46 | GCS_KEY: ${{ secrets.GCS_KEY }} 47 | GCS_BUCKET: drive-gcs 48 | GCS_FINE_GRAINED_ACL_BUCKET: drive-gcs-no-uniform-acl 49 | tests-gcs: 50 | runs-on: ${{ matrix.os }} 51 | concurrency: 52 | group: gcs 53 | strategy: 54 | matrix: 55 | os: [ubuntu-latest] 56 | node-version: 57 | - 21.x 58 | steps: 59 | - uses: actions/checkout@v4 60 | - name: Use Node.js ${{ matrix.node-version }} 61 | uses: actions/setup-node@v4 62 | with: 63 | node-version: ${{ matrix.node-version }} 64 | - name: Install 65 | run: npm install 66 | - name: Run tests 67 | run: npm test gcs 68 | env: 69 | GCS_KEY: ${{ secrets.GCS_KEY }} 70 | GCS_BUCKET: drive-gcs 71 | GCS_FINE_GRAINED_ACL_BUCKET: drive-gcs-no-uniform-acl 72 | tests-s3: 73 | runs-on: ${{ matrix.os }} 74 | concurrency: 75 | group: s3 76 | strategy: 77 | matrix: 78 | os: [ubuntu-latest] 79 | node-version: 80 | - 21.x 81 | steps: 82 | - uses: actions/checkout@v4 83 | - name: Use Node.js ${{ matrix.node-version }} 84 | uses: actions/setup-node@v4 85 | with: 86 | node-version: ${{ matrix.node-version }} 87 | - name: Install 88 | run: npm install 89 | - name: Run tests 90 | run: npm test s3 91 | env: 92 | S3_SERVICE: do 93 | S3_BUCKET: testing-flydrive 94 | S3_ACCESS_KEY: ${{ secrets.DO_ACCESS_KEY }} 95 | S3_ACCESS_SECRET: ${{ secrets.DO_ACCESS_SECRET }} 96 | S3_ENDPOINT: https://sgp1.digitaloceanspaces.com 97 | S3_REGION: sgp1 98 | S3_CDN_URL: https://testing-flydrive.sgp1.cdn.digitaloceanspaces.com 99 | tests-r2: 100 | runs-on: ${{ matrix.os }} 101 | concurrency: 102 | group: r2 103 | strategy: 104 | matrix: 105 | os: [ubuntu-latest] 106 | node-version: 107 | - 21.x 108 | steps: 109 | - uses: actions/checkout@v4 110 | - name: Use Node.js ${{ matrix.node-version }} 111 | uses: actions/setup-node@v4 112 | with: 113 | node-version: ${{ matrix.node-version }} 114 | - name: Install 115 | run: npm install 116 | - name: Run tests 117 | run: npm test s3 118 | env: 119 | S3_SERVICE: r2 120 | S3_BUCKET: testing-flydrive 121 | S3_ACCESS_KEY: ${{ secrets.R2_ACCESS_KEY }} 122 | S3_ACCESS_SECRET: ${{ secrets.R2_ACCESS_SECRET }} 123 | S3_ENDPOINT: https://b7d56a259a224b185a70dd6e6f77d9c3.r2.cloudflarestorage.com 124 | S3_CDN_URL: https://pub-7bacaefbafa643faa5799c5bf17a5b3d.r2.dev 125 | S3_REGION: auto 126 | -------------------------------------------------------------------------------- /.github/workflows/labels.yml: 
-------------------------------------------------------------------------------- 1 | name: Sync labels 2 | on: 3 | workflow_dispatch: 4 | permissions: 5 | issues: write 6 | jobs: 7 | labels: 8 | runs-on: ubuntu-latest 9 | steps: 10 | - uses: actions/checkout@v4 11 | - uses: EndBug/label-sync@v2 12 | with: 13 | config-file: 'https://raw.githubusercontent.com/thetutlage/static/main/labels.yml' 14 | delete-other-labels: true 15 | token: ${{ secrets.GITHUB_TOKEN }} 16 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: release 2 | on: workflow_dispatch 3 | permissions: 4 | contents: write 5 | id-token: write 6 | jobs: 7 | checks: 8 | uses: ./.github/workflows/checks.yml 9 | secrets: inherit 10 | release: 11 | needs: checks 12 | runs-on: ubuntu-latest 13 | steps: 14 | - uses: actions/checkout@v4 15 | with: 16 | fetch-depth: 0 17 | - uses: actions/setup-node@v4 18 | with: 19 | node-version: 20 20 | - name: git config 21 | run: | 22 | git config user.name "${GITHUB_ACTOR}" 23 | git config user.email "${GITHUB_ACTOR}@users.noreply.github.com" 24 | - name: Init npm config 25 | run: npm config set //registry.npmjs.org/:_authToken $NPM_TOKEN 26 | env: 27 | NPM_TOKEN: ${{ secrets.NPM_TOKEN }} 28 | - run: npm install 29 | - run: npm run release -- --ci 30 | env: 31 | NPM_TOKEN: ${{ secrets.NPM_TOKEN }} 32 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 33 | NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} 34 | -------------------------------------------------------------------------------- /.github/workflows/stale.yml: -------------------------------------------------------------------------------- 1 | name: 'Close stale issues and PRs' 2 | on: 3 | schedule: 4 | - cron: '30 0 * * *' 5 | 6 | jobs: 7 | stale: 8 | runs-on: ubuntu-latest 9 | steps: 10 | - uses: actions/stale@v9 11 | with: 12 | stale-issue-message: 'This issue has been marked as stale because it has been inactive for more than 21 days. Please reopen if you still need help on this issue' 13 | stale-pr-message: 'This pull request has been marked as stale because it has been inactive for more than 21 days. Please reopen if you still intend to submit this pull request' 14 | close-issue-message: 'This issue has been automatically closed because it has been inactive for more than 4 weeks. Please reopen if you still need help on this issue' 15 | close-pr-message: 'This pull request has been automatically closed because it has been inactive for more than 4 weeks. 
Please reopen if you still intend to submit this pull request' 16 | days-before-stale: 21 17 | days-before-close: 5 18 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | coverage 3 | .DS_STORE 4 | .nyc_output 5 | .idea 6 | .vscode/ 7 | *.sublime-project 8 | *.sublime-workspace 9 | *.log 10 | build 11 | dist 12 | yarn.lock 13 | shrinkwrap.yaml 14 | package-lock.json 15 | gkey.json 16 | examples.ts 17 | .env 18 | -------------------------------------------------------------------------------- /.npmrc: -------------------------------------------------------------------------------- 1 | package-lock=false 2 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | build 2 | docs 3 | coverage 4 | *.html 5 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | # The MIT License 2 | 3 | Copyright 2022 FlyDrive, contributors 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 6 | 7 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 8 | 9 | THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 10 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # flydrive 2 | 3 |
Multi-driver storage library for Node.js
9 | 10 | FlyDrive is a file storage library for Node.js. It provides a unified API to interact with the local file system and cloud storage solutions like **S3**, **R2**, and **GCS**. 11 | 12 |
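
A minimal sketch of that unified API using the `fs` driver from this repo. The upload directory and the `'public'` visibility value are illustrative assumptions, not prescribed configuration:

```ts
import { Disk } from 'flydrive'
import { FSDriver } from 'flydrive/drivers/fs'

// Point the driver-agnostic Disk API at the local filesystem driver
const disk = new Disk(
  new FSDriver({
    location: new URL('./uploads', import.meta.url),
    visibility: 'public',
  })
)

// Write and read a file through the unified API
await disk.put('hello.txt', 'Hello world')
console.log(await disk.get('hello.txt'))
```

Swapping `FSDriver` for the `S3Driver` or `GCSDriver` exported from this package leaves the calling code unchanged.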

[![gh-workflow-image]][gh-workflow-url] [![npm-image]][npm-url] ![][typescript-image] [![license-image]][license-url]

Documentation | Contributing

Built with ❤︎ by Harminder Virk
42 | 43 | ![](https://github.com/thetutlage/static/blob/main/sponsorkit/sponsors.png?raw=true) 44 | 45 | [gh-workflow-image]: https://img.shields.io/github/actions/workflow/status/flydrive-js/core/checks.yml?style=for-the-badge 46 | [gh-workflow-url]: https://github.com/flydrive-js/core/actions/workflows/checks.yml 'Github action' 47 | [npm-image]: https://img.shields.io/npm/v/flydrive/latest.svg?style=for-the-badge&logo=npm 48 | [npm-url]: https://www.npmjs.com/package/flydrive/v/latest 'npm' 49 | [typescript-image]: https://img.shields.io/badge/Typescript-294E80.svg?style=for-the-badge&logo=typescript 50 | [license-url]: LICENSE.md 51 | [license-image]: https://img.shields.io/github/license/flydrive-js/core?style=for-the-badge 52 | -------------------------------------------------------------------------------- /bin/test.ts: -------------------------------------------------------------------------------- 1 | import { assert } from '@japa/assert' 2 | import { fileSystem } from '@japa/file-system' 3 | import { configure, processCLIArgs, run } from '@japa/runner' 4 | 5 | processCLIArgs(process.argv.splice(2)) 6 | 7 | configure({ 8 | suites: [ 9 | { 10 | name: 'main', 11 | files: ['tests/core/*.spec.ts', 'tests/drivers/fs/*.spec.ts'], 12 | }, 13 | { 14 | name: 'gcs', 15 | files: ['tests/drivers/gcs/*.spec.ts'], 16 | }, 17 | { 18 | name: 's3', 19 | files: ['tests/drivers/s3/*.spec.ts'], 20 | }, 21 | ], 22 | plugins: [assert(), fileSystem()], 23 | }) 24 | 25 | run() 26 | -------------------------------------------------------------------------------- /drivers/fs/debug.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 8 | */ 9 | 10 | import { debuglog } from 'node:util' 11 | 12 | export default debuglog('flydrive:fs') 13 | -------------------------------------------------------------------------------- /drivers/fs/driver.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 8 | */ 9 | 10 | import etag from 'etag' 11 | import mimeTypes from 'mime-types' 12 | import { Readable } from 'node:stream' 13 | import { slash } from '@poppinss/utils' 14 | import * as fsp from 'node:fs/promises' 15 | import { fileURLToPath } from 'node:url' 16 | import { Retrier } from '@humanwhocodes/retry' 17 | import { RuntimeException } from '@poppinss/utils' 18 | import { dirname, join, relative } from 'node:path' 19 | import { existsSync, rmSync, createReadStream, Dirent } from 'node:fs' 20 | 21 | import debug from './debug.js' 22 | import type { FSDriverOptions } from './types.js' 23 | import { DriveFile } from '../../src/driver_file.js' 24 | import { DriveDirectory } from '../../src/drive_directory.js' 25 | import type { 26 | WriteOptions, 27 | ObjectMetaData, 28 | DriverContract, 29 | ObjectVisibility, 30 | SignedURLOptions, 31 | } from '../../src/types.js' 32 | 33 | /** 34 | * The error codes on which we want to retry fs 35 | * operations 36 | */ 37 | const RETRY_ERROR_CODES = new Set(['ENFILE', 'EMFILE']) 38 | 39 | /** 40 | * Implementation of FlyDrive driver that uses the local filesystem 41 | * to persist and read files. 
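 *
 * @example
 * // Illustrative construction; the visibility value and the upload
 * // directory are assumptions, not values taken from this repository:
 * const driver = new FSDriver({
 *   location: new URL('./uploads', import.meta.url),
 *   visibility: 'public',
 * })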
42 | */ 43 | export class FSDriver implements DriverContract { 44 | /** 45 | * The root directory for the driver 46 | */ 47 | #rootUrl: string 48 | 49 | /** 50 | * Retrier is used to retry file system operations 51 | * when certain errors are raised. 52 | */ 53 | #retrier = new Retrier( 54 | (error: NodeJS.ErrnoException) => error.code && RETRY_ERROR_CODES.has(error.code) 55 | ) 56 | 57 | constructor(public options: FSDriverOptions) { 58 | this.#rootUrl = 59 | typeof options.location === 'string' ? options.location : fileURLToPath(options.location) 60 | 61 | debug('driver config %O', options) 62 | } 63 | 64 | /** 65 | * Reads the file for the provided path 66 | */ 67 | #read(key: string): Promise { 68 | const location = join(this.#rootUrl, key) 69 | return this.#retrier.retry(() => fsp.readFile(location)) 70 | } 71 | 72 | /** 73 | * Reads dir and ignores non-existing errors 74 | */ 75 | async #readDir(location: string, recursive: boolean): Promise { 76 | try { 77 | return await fsp.readdir(location, { 78 | recursive, 79 | withFileTypes: true, 80 | }) 81 | } catch (error) { 82 | if (error.code !== 'ENOENT') { 83 | throw error 84 | } 85 | return [] 86 | } 87 | } 88 | 89 | /** 90 | * Generic implementation to write a file 91 | */ 92 | #write( 93 | key: string, 94 | contents: string | Readable | Uint8Array, 95 | options?: { signal?: AbortSignal } 96 | ) { 97 | const location = join(this.#rootUrl, key) 98 | return this.#retrier.retry(async () => { 99 | await fsp.mkdir(dirname(location), { recursive: true }) 100 | await fsp.writeFile(location, contents, options) 101 | }) 102 | } 103 | 104 | /** 105 | * Synchronously check if a file exists 106 | */ 107 | existsSync(key: string): boolean { 108 | debug('checking if file exists %s:%s', this.#rootUrl, key) 109 | const location = join(this.#rootUrl, key) 110 | return existsSync(location) 111 | } 112 | 113 | /** 114 | * Returns a boolean indicating if the file exists or not. 115 | */ 116 | async exists(key: string): Promise { 117 | debug('checking if file exists %s:%s', this.#rootUrl, key) 118 | const location = join(this.#rootUrl, key) 119 | try { 120 | const object = await fsp.stat(location) 121 | return object.isFile() 122 | } catch (error) { 123 | if (error.code === 'ENOENT') { 124 | return false 125 | } 126 | throw error 127 | } 128 | } 129 | 130 | /** 131 | * Returns the contents of the file as a UTF-8 string. An 132 | * exception is thrown when the file is missing. 133 | */ 134 | async get(key: string): Promise { 135 | debug('reading file contents %s:%s', this.#rootUrl, key) 136 | return this.#read(key).then((value) => value.toString('utf-8')) 137 | } 138 | 139 | /** 140 | * Returns the contents of the file as a stream. An 141 | * exception is thrown when the file is missing. 142 | */ 143 | async getStream(key: string): Promise { 144 | debug('reading file contents as a stream %s:%s', this.#rootUrl, key) 145 | const location = join(this.#rootUrl, key) 146 | return createReadStream(location) 147 | } 148 | 149 | /** 150 | * Returns the contents of the file as an Uint8Array. An 151 | * exception is thrown when the file is missing. 152 | */ 153 | async getBytes(key: string): Promise { 154 | debug('reading file contents as array buffer %s:%s', this.#rootUrl, key) 155 | return this.#read(key).then((value) => new Uint8Array(value.buffer)) 156 | } 157 | 158 | /** 159 | * Returns the metadata of a file. 
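 * The metadata includes the content length, the content type derived from
 * the key's extension, an etag and the last modified date. An exception is
 * raised when the key points to a directory.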
160 | */ 161 | async getMetaData(key: string): Promise { 162 | debug('fetching file metadata %s:%s', this.#rootUrl, key) 163 | const location = join(this.#rootUrl, key) 164 | const stats = await fsp.stat(location) 165 | 166 | if (stats.isDirectory()) { 167 | throw new RuntimeException(`Cannot get metadata of a directory "${key}"`) 168 | } 169 | 170 | return { 171 | contentLength: stats.size, 172 | contentType: mimeTypes.lookup(key) || undefined, 173 | etag: etag(stats), 174 | lastModified: stats.mtime, 175 | } 176 | } 177 | 178 | /** 179 | * Returns the file visibility from the pre-defined config 180 | * value 181 | */ 182 | async getVisibility(_: string): Promise { 183 | return this.options.visibility 184 | } 185 | 186 | /** 187 | * Returns the public URL of the file. This method does not check 188 | * if the file exists or not. 189 | */ 190 | async getUrl(key: string): Promise { 191 | const location = join(this.#rootUrl, key) 192 | const generateURL = this.options.urlBuilder?.generateURL 193 | if (generateURL) { 194 | debug('generating public URL %s:%s', this.#rootUrl, key) 195 | return generateURL(key, location) 196 | } 197 | 198 | throw new RuntimeException('Cannot generate URL. The "fs" driver does not support it') 199 | } 200 | 201 | /** 202 | * Returns the signed/temporary URL of the file. By default, the signed URLs 203 | * expire in 30mins, but a custom expiry can be defined using 204 | * "options.expiresIn" property. 205 | */ 206 | async getSignedUrl(key: string, options?: SignedURLOptions): Promise { 207 | const location = join(this.#rootUrl, key) 208 | const normalizedOptions = Object.assign( 209 | { 210 | expiresIn: '30 mins', 211 | }, 212 | options 213 | ) 214 | 215 | /** 216 | * Use custom implementation when exists. 217 | */ 218 | const generateSignedURL = this.options.urlBuilder?.generateSignedURL 219 | if (generateSignedURL) { 220 | debug('generating signed URL %s:%s', this.#rootUrl, key) 221 | return generateSignedURL(key, location, normalizedOptions) 222 | } 223 | 224 | throw new RuntimeException('Cannot generate signed URL. The "fs" driver does not support it') 225 | } 226 | 227 | /** 228 | * Results in noop, since the local filesystem cannot have per 229 | * object visibility. 230 | */ 231 | async setVisibility(_: string, __: ObjectVisibility): Promise {} 232 | 233 | /** 234 | * Writes a file to the destination with the provided contents. 235 | * 236 | * - Missing directories will be created recursively. 237 | * - Existing file will be overwritten. 238 | */ 239 | put(key: string, contents: string | Uint8Array, options?: WriteOptions): Promise { 240 | debug('creating/updating file %s:%s', this.#rootUrl, key) 241 | return this.#write(key, contents, { signal: options?.signal }) 242 | } 243 | 244 | /** 245 | * Writes a file to the destination with the provided contents 246 | * as a readable stream. 247 | * 248 | * - Missing directories will be created recursively. 249 | * - Existing file will be overwritten. 250 | */ 251 | putStream(key: string, contents: Readable, options?: WriteOptions): Promise { 252 | debug('creating/updating file using readable stream %s:%s', this.#rootUrl, key) 253 | return new Promise((resolve, reject) => { 254 | contents.once('error', (error) => reject(error)) 255 | return this.#write(key, contents, { signal: options?.signal }).then(resolve).catch(reject) 256 | }) 257 | } 258 | 259 | /** 260 | * Copies the source file to the destination. Both paths must 261 | * be within the root location. 
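 * Missing directories for the destination are created recursively and an
 * existing destination file is overwritten. For example (keys shown for
 * illustration only), copying "invoices/march.pdf" to "archive/march.pdf"
 * duplicates the object under the new key.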
262 | */ 263 | copy(source: string, destination: string): Promise { 264 | debug('copying file from %s to %s', source, destination) 265 | const sourceLocation = join(this.#rootUrl, source) 266 | const destinationLocation = join(this.#rootUrl, destination) 267 | 268 | return this.#retrier.retry(async () => { 269 | await fsp.mkdir(dirname(destinationLocation), { recursive: true }) 270 | await fsp.copyFile(sourceLocation, destinationLocation) 271 | }) 272 | } 273 | 274 | /** 275 | * Moves the source file to the destination. Both paths must 276 | * be within the root location. 277 | */ 278 | move(source: string, destination: string): Promise { 279 | debug('moving file from %s to %s', source, destination) 280 | const sourceLocation = join(this.#rootUrl, source) 281 | const destinationLocation = join(this.#rootUrl, destination) 282 | 283 | return this.#retrier.retry(async () => { 284 | await fsp.mkdir(dirname(destinationLocation), { recursive: true }) 285 | await fsp.copyFile(sourceLocation, destinationLocation) 286 | await fsp.unlink(sourceLocation) 287 | }) 288 | } 289 | 290 | /** 291 | * Deletes a file within the root location of the filesystem. 292 | * Attempting to delete a non-existing file will result in 293 | * a noop. 294 | */ 295 | delete(key: string): Promise { 296 | debug('deleting file %s:%s', this.#rootUrl, key) 297 | const location = join(this.#rootUrl, key) 298 | 299 | return this.#retrier.retry(async () => { 300 | try { 301 | await fsp.unlink(location) 302 | } catch (error) { 303 | if (error.code !== 'ENOENT') { 304 | throw error 305 | } 306 | } 307 | }) 308 | } 309 | 310 | /** 311 | * Deletes the files and directories matching the provided 312 | * prefix. The method is same as running "rm -rf" unix 313 | * command 314 | */ 315 | deleteAll(prefix: string): Promise { 316 | debug('deleting all files in folder %s:%s', this.#rootUrl, prefix) 317 | const location = join(this.#rootUrl, prefix) 318 | 319 | return this.#retrier.retry(async () => { 320 | return fsp.rm(location, { recursive: true, force: true }) 321 | }) 322 | } 323 | 324 | /** 325 | * Synchronously delete all files from the root location 326 | */ 327 | clearSync() { 328 | rmSync(this.#rootUrl, { recursive: true, force: true }) 329 | } 330 | 331 | /** 332 | * Returns a list of files. The pagination properties are ignored 333 | * by the fs driver, since it does not support pagination. 334 | */ 335 | async listAll( 336 | prefix: string, 337 | options?: { 338 | recursive?: boolean 339 | paginationToken?: string 340 | } 341 | ): Promise<{ 342 | paginationToken?: string 343 | objects: Iterable 344 | }> { 345 | const self = this 346 | const location = join(this.#rootUrl, prefix) 347 | const { recursive } = Object.assign({ recursive: false }, options) 348 | debug('listing files from folder %s:%s %O', this.#rootUrl, prefix, options) 349 | 350 | /** 351 | * Reading files with their types. 
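 * The directory is read with "withFileTypes" enabled, and a missing
 * directory resolves to an empty list (see #readDir above).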
352 | */ 353 | const files = await this.#readDir(location, recursive) 354 | 355 | /** 356 | * The generator is used to lazily iterate over files and 357 | * convert them into DriveFile or DriveDirectory instances 358 | */ 359 | function* filesGenerator(): Iterator< 360 | DriveFile | { isFile: false; isDirectory: true; prefix: string; name: string } 361 | > { 362 | for (const file of files) { 363 | const relativeName = slash( 364 | relative(self.#rootUrl, join(file.parentPath || file.path, file.name)) 365 | ) 366 | if (file.isFile()) { 367 | yield new DriveFile(relativeName, self) 368 | } else if (!recursive) { 369 | yield new DriveDirectory(relativeName) 370 | } 371 | } 372 | } 373 | 374 | return { 375 | paginationToken: undefined, 376 | objects: { 377 | [Symbol.iterator]: filesGenerator, 378 | }, 379 | } 380 | } 381 | } 382 | -------------------------------------------------------------------------------- /drivers/fs/main.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 8 | */ 9 | 10 | export { FSDriver } from './driver.js' 11 | -------------------------------------------------------------------------------- /drivers/fs/types.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 8 | */ 9 | 10 | import { ObjectVisibility, SignedURLOptions } from '../../src/types.js' 11 | 12 | /** 13 | * The options accepted by the FSDriver 14 | */ 15 | export type FSDriverOptions = { 16 | /** 17 | * Root location of the filesystem. The files will be 18 | * read and persisted to this location 19 | */ 20 | location: URL | string 21 | 22 | /** 23 | * The default visibility of all the files. The FSDriver 24 | * does not use visbility to implement any logic, instead 25 | * it returns the value as it is via the "getMetaData" 26 | * method 27 | */ 28 | visibility: ObjectVisibility 29 | 30 | /** 31 | * Configure a custom URL builder for creating public and 32 | * temporary URLs 33 | */ 34 | urlBuilder?: { 35 | /** 36 | * Custom implementation for creating public URLs 37 | */ 38 | generateURL?(key: string, filePath: string): Promise 39 | 40 | /** 41 | * Custom implementation for creating signed/temporary URLs 42 | */ 43 | generateSignedURL?(key: string, filePath: string, options: SignedURLOptions): Promise 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /drivers/gcs/debug.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 8 | */ 9 | 10 | import { debuglog } from 'node:util' 11 | 12 | export default debuglog('flydrive:gcs') 13 | -------------------------------------------------------------------------------- /drivers/gcs/main.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 
8 | */ 9 | 10 | export { GCSDriver } from './driver.js' 11 | -------------------------------------------------------------------------------- /drivers/gcs/types.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 8 | */ 9 | 10 | import type { GetSignedUrlConfig, Storage, StorageOptions } from '@google-cloud/storage' 11 | import type { ObjectVisibility } from '../../src/types.js' 12 | 13 | /** 14 | * The base set of options that are always needed to 15 | * create GCS driver instance 16 | */ 17 | type GCSDriverBaseOptions = { 18 | /** 19 | * The bucket from which to read and write files 20 | */ 21 | bucket: string 22 | 23 | /** 24 | * The default visibility of all the objects within the 25 | * bucket. The property is only considered when the 26 | * bucket is not using uniformACL. 27 | */ 28 | visibility: ObjectVisibility 29 | 30 | /** 31 | * Is bucket using uniform ACL? Defaults to "true". 32 | * 33 | * When set to "true", the visibility setting of FlyDrive 34 | * will have no impact. 35 | */ 36 | usingUniformAcl?: boolean 37 | 38 | /** 39 | * Configure a custom URL builder for creating public and 40 | * temporary URLs 41 | */ 42 | urlBuilder?: { 43 | /** 44 | * Custom implementation for creating public URLs 45 | */ 46 | generateURL?(key: string, bucket: string, storage: Storage): Promise 47 | 48 | /** 49 | * Custom implementation for creating signed/temporary URLs 50 | */ 51 | generateSignedURL?( 52 | key: string, 53 | bucket: string, 54 | config: GetSignedUrlConfig, 55 | storage: Storage 56 | ): Promise 57 | } 58 | } 59 | 60 | /** 61 | * Configuration options accepted by the GCS driver 62 | */ 63 | export type GCSDriverOptions = 64 | | ({ 65 | /** 66 | * An instance of the GCS storage class. If not provided, 67 | * one must provide other options to establish a connection 68 | */ 69 | storage: Storage 70 | } & GCSDriverBaseOptions) 71 | | (StorageOptions & GCSDriverBaseOptions) 72 | -------------------------------------------------------------------------------- /drivers/s3/debug.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 8 | */ 9 | 10 | import { debuglog } from 'node:util' 11 | 12 | export default debuglog('flydrive:s3') 13 | -------------------------------------------------------------------------------- /drivers/s3/main.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 8 | */ 9 | 10 | export { S3Driver } from './driver.js' 11 | -------------------------------------------------------------------------------- /drivers/s3/types.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 
8 | */ 9 | 10 | import type { 11 | GetObjectAclCommandInput, 12 | S3Client, 13 | S3ClientConfig, 14 | ServerSideEncryption, 15 | } from '@aws-sdk/client-s3' 16 | import type { ObjectVisibility } from '../../src/types.js' 17 | 18 | /** 19 | * The base set of options accepted by the S3 driver 20 | */ 21 | type S3DriverBaseOptions = { 22 | /** 23 | * The bucket from which to read and write files 24 | */ 25 | bucket: string 26 | 27 | /** 28 | * The default visibility of all the objects within the 29 | * bucket. 30 | */ 31 | visibility: ObjectVisibility 32 | 33 | /** 34 | * Does service supports ACL? 35 | * 36 | * When set to "false", the ACL related commands uses visibility 37 | * defined within the config without any API call. 38 | * 39 | * Defaults to "true". However, when you are using Cloudflare R2, you 40 | * must set it to "false". 41 | */ 42 | supportsACL?: boolean 43 | 44 | /** 45 | * An optional CDN URL to use for public URLs. Otherwise the endpoint 46 | * will be used 47 | */ 48 | cdnUrl?: string 49 | 50 | /** 51 | * Configure a custom URL builder for creating public and 52 | * temporary URLs 53 | */ 54 | urlBuilder?: { 55 | /** 56 | * Custom implementation for creating public URLs 57 | */ 58 | generateURL?(key: string, bucket: string, client: S3Client): Promise 59 | 60 | /** 61 | * Custom implementation for creating signed/temporary URLs 62 | */ 63 | generateSignedURL?( 64 | key: string, 65 | options: GetObjectAclCommandInput, 66 | client: S3Client 67 | ): Promise 68 | } 69 | 70 | /** 71 | * Encryption to use when uploading files to S3. 72 | */ 73 | encryption?: ServerSideEncryption 74 | } 75 | 76 | /** 77 | * The configuration options accepted by the S3 driver 78 | */ 79 | export type S3DriverOptions = 80 | | (S3ClientConfig & S3DriverBaseOptions) 81 | | ({ 82 | client: S3Client 83 | } & S3DriverBaseOptions) 84 | -------------------------------------------------------------------------------- /eslint.config.js: -------------------------------------------------------------------------------- 1 | import { configPkg } from '@adonisjs/eslint-config' 2 | export default configPkg({ 3 | ignores: ['coverage'], 4 | }) 5 | -------------------------------------------------------------------------------- /index.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 
8 | */ 9 | 10 | export { Disk } from './src/disk.js' 11 | export * as errors from './src/errors.js' 12 | export { DriveFile } from './src/driver_file.js' 13 | export { DriveManager } from './src/drive_manager.js' 14 | export { KeyNormalizer } from './src/key_normalizer.js' 15 | export { DriveDirectory } from './src/drive_directory.js' 16 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "flydrive", 3 | "description": "File storage library with unified API to manage files across multiple cloud storage providers like S3, GCS, R2 and so on", 4 | "version": "1.2.0", 5 | "engines": { 6 | "node": ">=20.6.0" 7 | }, 8 | "type": "module", 9 | "files": [ 10 | "build", 11 | "!build/bin", 12 | "!build/tests" 13 | ], 14 | "main": "build/index.js", 15 | "exports": { 16 | ".": "./build/index.js", 17 | "./types": "./build/src/types.js", 18 | "./drivers/fs": "./build/drivers/fs/main.js", 19 | "./drivers/fs/types": "./build/drivers/fs/types.js", 20 | "./drivers/gcs": "./build/drivers/gcs/main.js", 21 | "./drivers/gcs/types": "./build/drivers/gcs/types.js", 22 | "./drivers/s3": "./build/drivers/s3/main.js", 23 | "./drivers/s3/types": "./build/drivers/s3/types.js" 24 | }, 25 | "scripts": { 26 | "pretest": "npm run lint", 27 | "test": "c8 npm run quick:test", 28 | "lint": "eslint .", 29 | "format": "prettier --write .", 30 | "clean": "del-cli build", 31 | "typecheck": "tsc --noEmit", 32 | "precompile": "npm run lint && npm run clean", 33 | "compile": "tsup-node && tsc --emitDeclarationOnly --declaration", 34 | "build": "npm run compile", 35 | "prepublishOnly": "npm run build", 36 | "release": "release-it", 37 | "quick:test": "node --import=ts-node-maintained/register/esm --enable-source-maps bin/test.ts" 38 | }, 39 | "devDependencies": { 40 | "@adonisjs/env": "^6.2.0", 41 | "@adonisjs/eslint-config": "^2.0.0", 42 | "@adonisjs/prettier-config": "^1.4.4", 43 | "@adonisjs/tsconfig": "^1.4.0", 44 | "@aws-sdk/client-s3": "^3.758.0", 45 | "@aws-sdk/s3-request-presigner": "^3.758.0", 46 | "@google-cloud/storage": "^7.15.2", 47 | "@japa/assert": "^4.0.1", 48 | "@japa/file-system": "^2.3.2", 49 | "@japa/runner": "^4.2.0", 50 | "@release-it/conventional-changelog": "^10.0.0", 51 | "@swc/core": "1.10.7", 52 | "@types/etag": "^1.8.3", 53 | "@types/mime-types": "^2.1.4", 54 | "@types/node": "^22.13.10", 55 | "@types/sinon": "^17.0.4", 56 | "c8": "^10.1.3", 57 | "copyfiles": "^2.4.1", 58 | "del-cli": "^6.0.0", 59 | "eslint": "^9.22.0", 60 | "get-stream": "^9.0.1", 61 | "got": "^14.4.6", 62 | "prettier": "^3.5.3", 63 | "release-it": "^18.1.2", 64 | "sinon": "^19.0.4", 65 | "ts-node-maintained": "^10.9.5", 66 | "tsup": "^8.4.0", 67 | "typescript": "^5.8.2" 68 | }, 69 | "dependencies": { 70 | "@humanwhocodes/retry": "^0.4.2", 71 | "@poppinss/utils": "^6.9.2", 72 | "etag": "^1.8.1", 73 | "mime-types": "^2.1.35" 74 | }, 75 | "peerDependencies": { 76 | "@aws-sdk/client-s3": "^3.577.0", 77 | "@aws-sdk/s3-request-presigner": "^3.577.0", 78 | "@google-cloud/storage": "^7.10.2" 79 | }, 80 | "peerDependenciesMeta": { 81 | "@aws-sdk/client-s3": { 82 | "optional": true 83 | }, 84 | "@aws-sdk/s3-request-presigner": { 85 | "optional": true 86 | }, 87 | "@google-cloud/storage": { 88 | "optional": true 89 | } 90 | }, 91 | "homepage": "https://github.com/flydrive-js/core#readme", 92 | "repository": { 93 | "type": "git", 94 | "url": "git+https://github.com/flydrive-js/core.git" 95 | }, 96 | "bugs": 
{ 97 | "url": "https://github.com/flydrive-js/core/issues" 98 | }, 99 | "keywords": [ 100 | "filesystem", 101 | "flydrive", 102 | "s3", 103 | "gcs", 104 | "r2" 105 | ], 106 | "author": "virk,flydrive", 107 | "license": "MIT", 108 | "publishConfig": { 109 | "access": "public", 110 | "provenance": true 111 | }, 112 | "tsup": { 113 | "entry": [ 114 | "./index.ts", 115 | "./src/types.ts", 116 | "./drivers/fs/main.ts", 117 | "./drivers/fs/types.ts", 118 | "./drivers/gcs/main.ts", 119 | "./drivers/gcs/types.ts", 120 | "./drivers/s3/main.ts", 121 | "./drivers/s3/types.ts" 122 | ], 123 | "outDir": "./build", 124 | "clean": true, 125 | "format": "esm", 126 | "dts": false, 127 | "sourcemap": false, 128 | "target": "esnext" 129 | }, 130 | "release-it": { 131 | "git": { 132 | "requireCleanWorkingDir": true, 133 | "requireUpstream": true, 134 | "commitMessage": "chore(release): ${version}", 135 | "tagAnnotation": "v${version}", 136 | "push": true, 137 | "tagName": "v${version}" 138 | }, 139 | "github": { 140 | "release": true 141 | }, 142 | "npm": { 143 | "publish": true, 144 | "skipChecks": true 145 | }, 146 | "plugins": { 147 | "@release-it/conventional-changelog": { 148 | "preset": { 149 | "name": "angular" 150 | } 151 | } 152 | } 153 | }, 154 | "c8": { 155 | "reporter": [ 156 | "text", 157 | "html" 158 | ], 159 | "exclude": [ 160 | "tests/**" 161 | ] 162 | }, 163 | "prettier": "@adonisjs/prettier-config" 164 | } 165 | -------------------------------------------------------------------------------- /src/debug.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 8 | */ 9 | 10 | import { debuglog } from 'node:util' 11 | 12 | export default debuglog('flydrive:core') 13 | -------------------------------------------------------------------------------- /src/disk.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 8 | */ 9 | 10 | import { unlink } from 'node:fs/promises' 11 | import { createReadStream } from 'node:fs' 12 | import type { Readable } from 'node:stream' 13 | 14 | import * as errors from './errors.js' 15 | import { DriveFile } from './driver_file.js' 16 | import { KeyNormalizer } from './key_normalizer.js' 17 | import { DriveDirectory } from './drive_directory.js' 18 | import type { 19 | WriteOptions, 20 | FileSnapshot, 21 | ObjectMetaData, 22 | DriverContract, 23 | ObjectVisibility, 24 | SignedURLOptions, 25 | } from './types.js' 26 | 27 | /** 28 | * Disk offers a unified API for working with different drivers 29 | */ 30 | export class Disk { 31 | /** 32 | * The normalizer is used to normalize and validate keys 33 | */ 34 | #normalizer = new KeyNormalizer() 35 | 36 | constructor(public driver: DriverContract) {} 37 | 38 | /** 39 | * Creates a new instance of the DriveFile. It can be used 40 | * to lazily fetch file contents or convert it into a 41 | * snapshot for persistence 42 | */ 43 | file(key: string): DriveFile { 44 | return new DriveFile(key, this.driver) 45 | } 46 | 47 | /** 48 | * Creates a new instance of the DriveFile from the snapshot. 
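 * The snapshot is usually produced by "DriveFile.toSnapshot" and persisted
 * inside a database. Re-creating the file from it avoids another metadata
 * lookup, since the known metadata is handed to the DriveFile instance.
 *
 * @example
 * // Illustrative round-trip; the key is a made-up example:
 * const snapshot = await disk.file('users/1/avatar.png').toSnapshot()
 * const file = disk.fromSnapshot(snapshot)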
49 | */ 50 | fromSnapshot(snapshot: FileSnapshot): DriveFile { 51 | return new DriveFile(snapshot.key, this.driver, { 52 | contentLength: snapshot.contentLength, 53 | etag: snapshot.etag, 54 | lastModified: new Date(snapshot.lastModified), 55 | contentType: snapshot.contentType, 56 | }) 57 | } 58 | 59 | /** 60 | * Check if the file exists. This method cannot check existence 61 | * of directories. 62 | */ 63 | exists(key: string): Promise { 64 | return this.file(key).exists() 65 | } 66 | 67 | /** 68 | * Returns file contents as a UTF-8 string. Use "getArrayBuffer" method 69 | * if you need more control over the file contents decoding. 70 | */ 71 | get(key: string): Promise { 72 | return this.file(key).get() 73 | } 74 | 75 | /** 76 | * Returns file contents as a Readable stream. 77 | */ 78 | getStream(key: string): Promise { 79 | return this.file(key).getStream() 80 | } 81 | 82 | /** 83 | * Returns file contents as a Uint8Array. 84 | */ 85 | getBytes(key: string): Promise { 86 | return this.file(key).getBytes() 87 | } 88 | 89 | /** 90 | * @deprecated 91 | * @see {@link Disk.getBytes} 92 | */ 93 | getArrayBuffer(key: string): Promise { 94 | return this.file(key).getArrayBuffer() 95 | } 96 | 97 | /** 98 | * Returns metadata of the given file. 99 | */ 100 | getMetaData(key: string): Promise { 101 | return this.file(key).getMetaData() 102 | } 103 | 104 | /** 105 | * Returns the visibility of the file 106 | */ 107 | getVisibility(key: string): Promise { 108 | return this.file(key).getVisibility() 109 | } 110 | 111 | /** 112 | * Returns the public URL of the file 113 | */ 114 | getUrl(key: string): Promise { 115 | return this.file(key).getUrl() 116 | } 117 | 118 | /** 119 | * Returns a signed/temporary URL of the file 120 | */ 121 | getSignedUrl(key: string, options?: SignedURLOptions): Promise { 122 | return this.file(key).getSignedUrl(options) 123 | } 124 | 125 | /** 126 | * Update the visibility of the file 127 | */ 128 | async setVisibility(key: string, visibility: ObjectVisibility): Promise { 129 | key = this.#normalizer.normalize(key) 130 | try { 131 | return await this.driver.setVisibility(key, visibility) 132 | } catch (error) { 133 | throw new errors.E_CANNOT_SET_VISIBILITY([key], { cause: error }) 134 | } 135 | } 136 | 137 | /** 138 | * Create new file or update an existing file. In case of an error, 139 | * the "E_CANNOT_WRITE_FILE" exception is thrown 140 | */ 141 | async put(key: string, contents: string | Uint8Array, options?: WriteOptions): Promise { 142 | key = this.#normalizer.normalize(key) 143 | try { 144 | return await this.driver.put(key, contents, options) 145 | } catch (error) { 146 | throw new errors.E_CANNOT_WRITE_FILE([key], { cause: error }) 147 | } 148 | } 149 | 150 | /** 151 | * Create new file or update an existing file using a Readable Stream 152 | * In case of an error, the "E_CANNOT_WRITE_FILE" exception is thrown 153 | */ 154 | async putStream(key: string, contents: Readable, options?: WriteOptions) { 155 | key = this.#normalizer.normalize(key) 156 | try { 157 | return await this.driver.putStream(key, contents, options) 158 | } catch (error) { 159 | throw new errors.E_CANNOT_WRITE_FILE([key], { cause: error }) 160 | } 161 | } 162 | 163 | /** 164 | * Copies file from the "source" to the "destination" within the 165 | * same bucket or the root location of local filesystem. 
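 * Both keys are normalized before being handed to the driver, and any
 * driver error is wrapped inside the "E_CANNOT_COPY_FILE" exception.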
166 | * 167 | * Use "copyFromFs" method to copy files from local filesystem to 168 | * a cloud provider 169 | */ 170 | async copy(source: string, destination: string, options?: WriteOptions): Promise { 171 | source = this.#normalizer.normalize(source) 172 | destination = this.#normalizer.normalize(destination) 173 | try { 174 | return await this.driver.copy(source, destination, options) 175 | } catch (error) { 176 | throw new errors.E_CANNOT_COPY_FILE([source, destination], { cause: error }) 177 | } 178 | } 179 | 180 | /** 181 | * Copies file from the local filesystem to the cloud provider. 182 | */ 183 | copyFromFs(source: string | URL, destination: string, options?: WriteOptions) { 184 | return this.putStream(destination, createReadStream(source), options) 185 | } 186 | 187 | /** 188 | * Moves file from the "source" to the "destination" within the 189 | * same bucket or the root location of local filesystem. 190 | * 191 | * Use "moveFromFs" method to move files from local filesystem to 192 | * a cloud provider 193 | */ 194 | async move(source: string, destination: string, options?: WriteOptions): Promise { 195 | source = this.#normalizer.normalize(source) 196 | destination = this.#normalizer.normalize(destination) 197 | try { 198 | return await this.driver.move(source, destination, options) 199 | } catch (error) { 200 | throw new errors.E_CANNOT_MOVE_FILE([source, destination], { cause: error }) 201 | } 202 | } 203 | 204 | /** 205 | * Moves file from the local filesystem to the cloud provider. 206 | */ 207 | async moveFromFs(source: string | URL, destination: string, options?: WriteOptions) { 208 | await this.putStream(destination, createReadStream(source), options) 209 | await unlink(source) 210 | } 211 | 212 | /** 213 | * Deletes a file for the given key. Use "deleteAll" method to delete 214 | * files for a matching folder prefix. 215 | */ 216 | async delete(key: string): Promise { 217 | key = this.#normalizer.normalize(key) 218 | try { 219 | return await this.driver.delete(key) 220 | } catch (error) { 221 | throw new errors.E_CANNOT_DELETE_FILE([key], { cause: error }) 222 | } 223 | } 224 | 225 | /** 226 | * Delete all files matching the given prefix. In case of "fs" driver, 227 | * the mentioned folder will be deleted. 228 | */ 229 | async deleteAll(prefix?: string): Promise { 230 | prefix = prefix && prefix !== '/' ? this.#normalizer.normalize(prefix) : '/' 231 | try { 232 | return await this.driver.deleteAll(prefix) 233 | } catch (error) { 234 | throw new errors.E_CANNOT_DELETE_DIRECTORY([prefix], { cause: error }) 235 | } 236 | } 237 | 238 | /** 239 | * Returns a list of objects which includes and files and directories. 240 | * In case of "recursive" listing, no directories are returned. 241 | */ 242 | listAll( 243 | prefix?: string, 244 | options?: { 245 | recursive?: boolean 246 | paginationToken?: string 247 | } 248 | ): Promise<{ 249 | paginationToken?: string 250 | objects: Iterable 251 | }> { 252 | prefix = prefix && prefix !== '/' ? this.#normalizer.normalize(prefix) : '/' 253 | return this.driver.listAll(prefix, options) 254 | } 255 | } 256 | -------------------------------------------------------------------------------- /src/drive_directory.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 
8 | */ 9 | 10 | import { basename } from 'node:path' 11 | 12 | /** 13 | * Representation of a directory in the listing 14 | * of objects. 15 | */ 16 | export class DriveDirectory { 17 | isFile: false = false 18 | isDirectory: true = true 19 | name: string 20 | constructor(public prefix: string) { 21 | this.name = basename(this.prefix) 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /src/drive_manager.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 8 | */ 9 | 10 | import { RuntimeException } from '@poppinss/utils' 11 | 12 | import debug from './debug.js' 13 | import { Disk } from './disk.js' 14 | import { FakeDisk } from './fake_disk.js' 15 | import { DriveManagerOptions, DriverContract } from './types.js' 16 | 17 | /** 18 | * Drive manager exposes the API to manage Disk instances for multiple 19 | * services. Also, it offers a fakes API for testing. 20 | */ 21 | export class DriveManager DriverContract>> { 22 | /** 23 | * Registered config 24 | */ 25 | #config: DriveManagerOptions 26 | 27 | /** 28 | * A collection of cached service. We re-use disk instances for a 29 | * service, since there isn't any need to reconstruct them 30 | * everytime. 31 | */ 32 | #cachedServices: Map = new Map() 33 | 34 | /** 35 | * A collection of fakes created for the services. 36 | */ 37 | #fakes: Map = new Map() 38 | 39 | constructor(config: DriveManagerOptions) { 40 | this.#config = config 41 | debug('driver manager config %O', config) 42 | } 43 | 44 | /** 45 | * Returns an instance of a Disk for the given service. By default 46 | * use the "default" service from config 47 | */ 48 | use(service?: K): Disk { 49 | const serviceToUse = service || this.#config.default 50 | 51 | /** 52 | * Return fake when exists 53 | */ 54 | const fake = this.#fakes.get(serviceToUse) 55 | if (fake) { 56 | debug('returning fake for service %s', serviceToUse) 57 | return fake 58 | } 59 | 60 | /** 61 | * Return from cache 62 | */ 63 | const cachedDisk = this.#cachedServices.get(serviceToUse) 64 | if (cachedDisk) { 65 | debug('use cached disk instance for service %s', serviceToUse) 66 | return cachedDisk 67 | } 68 | 69 | /** 70 | * Create disk and cache it 71 | */ 72 | const disk = new Disk(this.#config.services[serviceToUse]()) 73 | debug('creating disk instance for service %s', serviceToUse) 74 | this.#cachedServices.set(serviceToUse, disk) 75 | return disk 76 | } 77 | 78 | /** 79 | * Deploy fake for a given service. The "use" method for the same service 80 | * will now return an instance of the "FakeDisk" class and not the 81 | * real implementation. 82 | */ 83 | fake(service?: K): FakeDisk { 84 | const serviceToUse = service || this.#config.default 85 | 86 | /** 87 | * Ensure fakes config has been defined 88 | */ 89 | if (!this.#config.fakes) { 90 | throw new RuntimeException( 91 | 'Cannot use "drive.fake". 
--------------------------------------------------------------------------------
/src/drive_manager.ts:
--------------------------------------------------------------------------------
1 | /*
2 |  * flydrive
3 |  *
4 |  * (c) FlyDrive
5 |  *
6 |  * For the full copyright and license information, please view the LICENSE
7 |  * file that was distributed with this source code.
8 |  */
9 |
10 | import { RuntimeException } from '@poppinss/utils'
11 |
12 | import debug from './debug.js'
13 | import { Disk } from './disk.js'
14 | import { FakeDisk } from './fake_disk.js'
15 | import { DriveManagerOptions, DriverContract } from './types.js'
16 |
17 | /**
18 |  * Drive manager exposes the API to manage Disk instances for multiple
19 |  * services. Also, it offers a fakes API for testing.
20 |  */
21 | export class DriveManager<Services extends Record<string, () => DriverContract>> {
22 |   /**
23 |    * Registered config
24 |    */
25 |   #config: DriveManagerOptions<Services>
26 |
27 |   /**
28 |    * A collection of cached services. We re-use disk instances for a
29 |    * service, since there isn't any need to reconstruct them
30 |    * every time.
31 |    */
32 |   #cachedServices: Map<keyof Services, Disk> = new Map()
33 |
34 |   /**
35 |    * A collection of fakes created for the services.
36 |    */
37 |   #fakes: Map<keyof Services, FakeDisk> = new Map()
38 |
39 |   constructor(config: DriveManagerOptions<Services>) {
40 |     this.#config = config
41 |     debug('driver manager config %O', config)
42 |   }
43 |
44 |   /**
45 |    * Returns an instance of a Disk for the given service. By default,
46 |    * the "default" service from the config is used.
47 |    */
48 |   use<K extends keyof Services>(service?: K): Disk {
49 |     const serviceToUse = service || this.#config.default
50 |
51 |     /**
52 |      * Return fake when exists
53 |      */
54 |     const fake = this.#fakes.get(serviceToUse)
55 |     if (fake) {
56 |       debug('returning fake for service %s', serviceToUse)
57 |       return fake
58 |     }
59 |
60 |     /**
61 |      * Return from cache
62 |      */
63 |     const cachedDisk = this.#cachedServices.get(serviceToUse)
64 |     if (cachedDisk) {
65 |       debug('use cached disk instance for service %s', serviceToUse)
66 |       return cachedDisk
67 |     }
68 |
69 |     /**
70 |      * Create disk and cache it
71 |      */
72 |     const disk = new Disk(this.#config.services[serviceToUse]())
73 |     debug('creating disk instance for service %s', serviceToUse)
74 |     this.#cachedServices.set(serviceToUse, disk)
75 |     return disk
76 |   }
77 |
78 |   /**
79 |    * Deploy a fake for a given service. The "use" method for the same service
80 |    * will now return an instance of the "FakeDisk" class and not the
81 |    * real implementation.
82 |    */
83 |   fake<K extends keyof Services>(service?: K): FakeDisk {
84 |     const serviceToUse = service || this.#config.default
85 |
86 |     /**
87 |      * Ensure fakes config has been defined
88 |      */
89 |     if (!this.#config.fakes) {
90 |       throw new RuntimeException(
91 |         'Cannot use "drive.fake". Make sure to define fakes configuration when creating DriveManager instance'
92 |       )
93 |     }
94 |
95 |     /**
96 |      * Remove existing fake
97 |      */
98 |     this.restore(serviceToUse)
99 |     debug('creating fake for service %s', serviceToUse)
100 |
101 |     /**
102 |      * Create new fake
103 |      */
104 |     const fake = new FakeDisk(serviceToUse as string, this.#config.fakes)
105 |     this.#fakes.set(serviceToUse, fake)
106 |     return fake
107 |   }
108 |
109 |   /**
110 |    * Restore the fake for a given service
111 |    */
112 |   restore<K extends keyof Services>(service?: K): void {
113 |     const serviceToUse = service || this.#config.default
114 |     const fake = this.#fakes.get(serviceToUse)
115 |
116 |     if (fake) {
117 |       debug('restoring fake for service %s', serviceToUse)
118 |       fake.clear()
119 |       this.#fakes.delete(serviceToUse)
120 |     }
121 |   }
122 | }
123 |
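A minimal usage sketch of the manager and its fakes API. The service name, driver configuration, and file keys below are hypothetical, and FSDriver is assumed to be imported from the fs driver.

  const drive = new DriveManager({
    default: 'local',
    services: {
      local: () =>
        new FSDriver({ location: new URL('./storage/', import.meta.url), visibility: 'public' }),
    },
    fakes: {
      location: new URL('./tmp/', import.meta.url),
    },
  })

  await drive.use().put('hello.txt', 'Hello world')

  // In tests: swap the service for a FakeDisk, assert, then restore
  const fake = drive.fake('local')
  await drive.use('local').put('hello.txt', 'Hello world')
  fake.assertExists('hello.txt')
  drive.restore('local')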
--------------------------------------------------------------------------------
/src/driver_file.ts:
--------------------------------------------------------------------------------
1 | /*
2 |  * flydrive
3 |  *
4 |  * (c) FlyDrive
5 |  *
6 |  * For the full copyright and license information, please view the LICENSE
7 |  * file that was distributed with this source code.
8 |  */
9 |
10 | import { basename } from 'node:path'
11 | import { Readable } from 'node:stream'
12 |
13 | import * as errors from './errors.js'
14 | import { KeyNormalizer } from './key_normalizer.js'
15 | import type {
16 |   DriverContract,
17 |   FileSnapshot,
18 |   ObjectMetaData,
19 |   ObjectVisibility,
20 |   SignedURLOptions,
21 | } from './types.js'
22 |
23 | /**
24 |  * DriveFile is a pointer to a given object. It can be used to lazily
25 |  * read the file contents and metadata, and it can also be converted
26 |  * to a snapshot and persisted inside a database.
27 |  */
28 | export class DriveFile {
29 |   /**
30 |    * The driver to use for performing read-only operations
31 |    */
32 |   #driver: DriverContract
33 |
34 |   /**
35 |    * Known metadata from the snapshot or from the files listing
36 |    * API
37 |    */
38 |   #metaData?: ObjectMetaData
39 |
40 |   /**
41 |    * The normalizer is used to normalize and validate keys
42 |    */
43 |   #normalizer = new KeyNormalizer()
44 |
45 |   /**
46 |    * Reference to the normalized file key
47 |    */
48 |   key: string
49 |
50 |   /**
51 |    * The basename of the file. Extracted from the key
52 |    */
53 |   name: string
54 |
55 |   /**
56 |    * Flags to know if the object is a file or a directory
57 |    */
58 |   isFile: true = true
59 |   isDirectory: false = false
60 |
61 |   constructor(key: string, driver: DriverContract, metaData?: ObjectMetaData) {
62 |     this.#driver = driver
63 |     this.#metaData = metaData
64 |     this.key = this.#normalizer.normalize(key)
65 |     this.name = basename(this.key)
66 |   }
67 |
68 |   /**
69 |    * Check if the file exists. This method cannot check existence
70 |    * of directories.
71 |    */
72 |   async exists() {
73 |     try {
74 |       return await this.#driver.exists(this.key)
75 |     } catch (error) {
76 |       throw new errors.E_CANNOT_CHECK_FILE_EXISTENCE([this.key], { cause: error })
77 |     }
78 |   }
79 |
80 |   /**
81 |    * Returns file contents as a UTF-8 string. Use the "getBytes" method
82 |    * if you need more control over decoding the file contents.
83 |    */
84 |   async get(): Promise<string> {
85 |     try {
86 |       return await this.#driver.get(this.key)
87 |     } catch (error) {
88 |       throw new errors.E_CANNOT_READ_FILE([this.key], { cause: error })
89 |     }
90 |   }
91 |
92 |   /**
93 |    * Returns file contents as a Readable stream.
94 |    */
95 |   async getStream(): Promise<Readable> {
96 |     try {
97 |       return await this.#driver.getStream(this.key)
98 |     } catch (error) {
99 |       throw new errors.E_CANNOT_READ_FILE([this.key], { cause: error })
100 |     }
101 |   }
102 |
103 |   /**
104 |    * Returns file contents as a Uint8Array.
105 |    */
106 |   async getBytes(): Promise<Uint8Array> {
107 |     try {
108 |       return await this.#driver.getBytes(this.key)
109 |     } catch (error) {
110 |       throw new errors.E_CANNOT_READ_FILE([this.key], { cause: error })
111 |     }
112 |   }
113 |
114 |   /**
115 |    * @deprecated
116 |    * @see {@link DriveFile.getBytes}
117 |    */
118 |   async getArrayBuffer(): Promise<Uint8Array> {
119 |     process.emitWarning(
120 |       'getArrayBuffer() method has been deprecated. Instead use "getBytes"',
121 |       'DeprecationWarning'
122 |     )
123 |     return this.getBytes()
124 |   }
125 |
126 |   /**
127 |    * Returns metadata of the given file.
128 |    */
129 |   async getMetaData(): Promise<ObjectMetaData> {
130 |     if (this.#metaData) {
131 |       return this.#metaData
132 |     }
133 |
134 |     try {
135 |       return await this.#driver.getMetaData(this.key)
136 |     } catch (error) {
137 |       throw new errors.E_CANNOT_GET_METADATA([this.key], { cause: error })
138 |     }
139 |   }
140 |
141 |   /**
142 |    * Returns the visibility of the file
143 |    */
144 |   async getVisibility(): Promise<ObjectVisibility> {
145 |     try {
146 |       return await this.#driver.getVisibility(this.key)
147 |     } catch (error) {
148 |       throw new errors.E_CANNOT_GET_METADATA([this.key], { cause: error })
149 |     }
150 |   }
151 |
152 |   /**
153 |    * Returns the public URL of the file
154 |    */
155 |   async getUrl() {
156 |     try {
157 |       return await this.#driver.getUrl(this.key)
158 |     } catch (error) {
159 |       throw new errors.E_CANNOT_GENERATE_URL([this.key], { cause: error })
160 |     }
161 |   }
162 |
163 |   /**
164 |    * Returns a signed/temporary URL of the file
165 |    */
166 |   async getSignedUrl(options?: SignedURLOptions) {
167 |     try {
168 |       return await this.#driver.getSignedUrl(this.key, options)
169 |     } catch (error) {
170 |       throw new errors.E_CANNOT_GENERATE_URL([this.key], { cause: error })
171 |     }
172 |   }
173 |
174 |   /**
175 |    * Returns a snapshot of the file. The snapshot can be persisted
176 |    * within any database storage, and you can later create a file
177 |    * instance from it using the "disk.fromSnapshot" method.
178 |    */
179 |   async toSnapshot(): Promise<FileSnapshot> {
180 |     const metaData = await this.getMetaData()
181 |
182 |     return {
183 |       key: this.key,
184 |       name: this.name,
185 |       contentLength: metaData.contentLength,
186 |       lastModified: metaData.lastModified.toString(),
187 |       etag: metaData.etag,
188 |       contentType: metaData.contentType,
189 |     }
190 |   }
191 | }
192 |
--------------------------------------------------------------------------------
/src/errors.ts:
--------------------------------------------------------------------------------
1 | /*
2 |  * flydrive
3 |  *
4 |  * (c) FlyDrive
5 |  *
6 |  * For the full copyright and license information, please view the LICENSE
7 |  * file that was distributed with this source code.
8 | */ 9 | 10 | import { createError } from '@poppinss/utils' 11 | 12 | /** 13 | * Unable to write file to the destination 14 | */ 15 | export const E_CANNOT_WRITE_FILE = createError<[key: string]>( 16 | 'Cannot write file at location "%s"', 17 | 'E_CANNOT_WRITE_FILE' 18 | ) 19 | 20 | /** 21 | * Unable to read file 22 | */ 23 | export const E_CANNOT_READ_FILE = createError<[key: string]>( 24 | 'Cannot read file from location "%s"', 25 | 'E_CANNOT_READ_FILE' 26 | ) 27 | 28 | /** 29 | * Unable to delete file 30 | */ 31 | export const E_CANNOT_DELETE_FILE = createError<[key: string]>( 32 | 'Cannot delete file at location "%s"', 33 | 'E_CANNOT_DELETE_FILE' 34 | ) 35 | 36 | /** 37 | * Unable to delete directory 38 | */ 39 | export const E_CANNOT_DELETE_DIRECTORY = createError<[key: string]>( 40 | 'Cannot delete directory at location "%s"', 41 | 'E_CANNOT_DELETE_DIRECTORY' 42 | ) 43 | 44 | /** 45 | * Unable to copy file 46 | */ 47 | export const E_CANNOT_COPY_FILE = createError<[source: string, destination: string]>( 48 | 'Cannot copy file from "%s" to "%s"', 49 | 'E_CANNOT_COPY_FILE' 50 | ) 51 | 52 | /** 53 | * Unable to move file 54 | */ 55 | export const E_CANNOT_MOVE_FILE = createError<[source: string, destination: string]>( 56 | 'Cannot move file from "%s" to "%s"', 57 | 'E_CANNOT_MOVE_FILE' 58 | ) 59 | 60 | /** 61 | * Unable to check the location of the file 62 | */ 63 | export const E_CANNOT_CHECK_FILE_EXISTENCE = createError<[key: string]>( 64 | 'Unable to check existence for file at location "%s"', 65 | 'E_CANNOT_CHECK_FILE_EXISTENCE' 66 | ) 67 | 68 | /** 69 | * Unable to get file metadata 70 | */ 71 | export const E_CANNOT_GET_METADATA = createError<[key: string]>( 72 | 'Unable to retrieve metadata of file at location "%s"', 73 | 'E_CANNOT_GET_METADATA' 74 | ) 75 | 76 | /** 77 | * Unable to set file visibility 78 | */ 79 | export const E_CANNOT_SET_VISIBILITY = createError<[key: string]>( 80 | 'Unable to set visibility for file at location "%s"', 81 | 'E_CANNOT_SET_VISIBILITY' 82 | ) 83 | 84 | /** 85 | * Unable to generate URL for a file 86 | */ 87 | export const E_CANNOT_GENERATE_URL = createError<[key: string]>( 88 | 'Cannot generate URL for file at location "%s"', 89 | 'E_CANNOT_GENERATE_URL' 90 | ) 91 | 92 | /** 93 | * The file key has unallowed set of characters 94 | */ 95 | export const E_UNALLOWED_CHARACTERS = createError<[key: string]>( 96 | 'The key "%s" has unallowed characters', 97 | 'E_UNALLOWED_CHARACTERS' 98 | ) 99 | 100 | /** 101 | * Key post normalization leads to an empty string 102 | */ 103 | export const E_INVALID_KEY = createError<[key: string]>( 104 | 'Invalid key "%s". After normalization results in an empty string', 105 | 'E_INVALID_KEY' 106 | ) 107 | 108 | /** 109 | * The file key has unallowed set of characters 110 | */ 111 | export const E_PATH_TRAVERSAL_DETECTED = createError<[key: string]>( 112 | 'Path traversal segment detected in key "%s"', 113 | 'E_PATH_TRAVERSAL_DETECTED' 114 | ) 115 | -------------------------------------------------------------------------------- /src/fake_disk.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 
8 |  */
9 |
10 | import { join } from 'node:path'
11 | import { AssertionError } from 'node:assert'
12 |
13 | import { Disk } from './disk.js'
14 | import { FSDriver } from '../drivers/fs/driver.js'
15 | import type { DriveManagerOptions } from './types.js'
16 |
17 | /**
18 |  * FakeDisk extends the Disk class with additional capabilities to
19 |  * write assertions.
20 |  */
21 | export class FakeDisk extends Disk {
22 |   declare driver: FSDriver
23 |
24 |   constructor(
25 |     public disk: string,
26 |     fakesConfig: Exclude<DriveManagerOptions<any>['fakes'], undefined>
27 |   ) {
28 |     super(
29 |       new FSDriver({
30 |         location:
31 |           typeof fakesConfig.location === 'string'
32 |             ? join(fakesConfig.location, disk)
33 |             : new URL(disk, fakesConfig.location),
34 |         visibility: 'public',
35 |         urlBuilder: fakesConfig.urlBuilder,
36 |       })
37 |     )
38 |   }
39 |
40 |   /**
41 |    * Assert the expected file(s) exist. Otherwise, an assertion
42 |    * error is thrown
43 |    */
44 |   assertExists(paths: string | string[]) {
45 |     const pathsToVerify = Array.isArray(paths) ? paths : [paths]
46 |     for (let filePath of pathsToVerify) {
47 |       if (!this.driver.existsSync(filePath)) {
48 |         throw new AssertionError({
49 |           message: `Expected "${filePath}" to exist, but file not found.`,
50 |         })
51 |       }
52 |     }
53 |   }
54 |
55 |   /**
56 |    * Assert the expected file(s) do not exist. Otherwise, an assertion
57 |    * error is thrown
58 |    */
59 |   assertMissing(paths: string | string[]) {
60 |     const pathsToVerify = Array.isArray(paths) ? paths : [paths]
61 |     for (let filePath of pathsToVerify) {
62 |       if (this.driver.existsSync(filePath)) {
63 |         throw new AssertionError({
64 |           message: `Expected "${filePath}" to be missing, but file exists`,
65 |         })
66 |       }
67 |     }
68 |   }
69 |
70 |   /**
71 |    * Clear storage
72 |    */
73 |   clear() {
74 |     this.driver.clearSync()
75 |   }
76 | }
77 |
--------------------------------------------------------------------------------
/src/key_normalizer.ts:
--------------------------------------------------------------------------------
1 | /*
2 |  * flydrive
3 |  *
4 |  * (c) FlyDrive
5 |  *
6 |  * For the full copyright and license information, please view the LICENSE
7 |  * file that was distributed with this source code.
8 |  */
9 |
10 | import { slash } from '@poppinss/utils'
11 | import { normalize } from 'node:path/posix'
12 | import string from '@poppinss/utils/string'
13 |
14 | import * as errors from './errors.js'
15 |
16 | /**
17 |  * Key normalizer normalizes the key for writing and reading files. It
18 |  * removes unsafe characters from a string that are either not allowed
19 |  * by cloud providers, or can conflict with a URL.
20 |  *
21 |  * The keys are also scanned and protected from path traversal.
22 |  */
23 | export class KeyNormalizer {
24 |   /**
25 |    * The set of allowed characters. Feel free to re-assign a new
26 |    * value
27 |    */
28 |   static allowedCharacterSet = /^[A-Za-z0-9-_!\/\.\s]*$/
29 |
30 |   /**
31 |    * Normalizes the key by condensing whitespaces, using unix
32 |    * slashes, and replacing consecutive slashes with one
33 |    * slash ("/").
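 *
 * For illustration, the inputs below are hypothetical; the pre-normalization
 * step alone behaves roughly as follows (traversal segments are rejected later):
 *
 *   'foo\bar'        -> 'foo/bar'
 *   'foo//bar//baz'  -> 'foo/bar/baz'
 *   'docs/..../file' -> 'docs/../file'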
34 | */ 35 | #preNormalize(key: string): string { 36 | /** 37 | * Condense whitespaces into one 38 | */ 39 | let normalizedKey = string.condenseWhitespace(key) 40 | 41 | /** 42 | * - Normalize slashes to unix style 43 | * - Remove consecutive '/' 44 | * - Remove more than two dots + slash "..../" to "../" 45 | */ 46 | return slash(normalizedKey) 47 | .replace(/\/{2,}/g, '/') 48 | .replace(/\.{3,}\//g, '../') 49 | } 50 | 51 | /** 52 | * Validates the key to check for unallowed characters 53 | */ 54 | #validateCharacterSet(key: string, originalKey: string) { 55 | if (!KeyNormalizer.allowedCharacterSet.test(key)) { 56 | throw new errors.E_UNALLOWED_CHARACTERS([originalKey]) 57 | } 58 | } 59 | 60 | /** 61 | * Checks for path traversel in key 62 | */ 63 | #checkForPathTraversal(key: string, originalKey: string) { 64 | const tokens = key.split('/') 65 | for (let token of tokens) { 66 | if (token === '..') { 67 | throw new errors.E_PATH_TRAVERSAL_DETECTED([originalKey]) 68 | } 69 | } 70 | } 71 | 72 | /** 73 | * Further normalizing the key after validating it. Here we remove 74 | * starting and ending path expressions like "." and "/" from 75 | * the key. 76 | */ 77 | #postNormalize(key: string) { 78 | /** 79 | * Normalize key by removing consecutive path expressions. For example 80 | * 81 | * - "dir/." will convert to "dir" 82 | * - "dir/./" will convert to "dir/" 83 | * 84 | * Note 85 | * Do not call this method before validating for path traversal 86 | */ 87 | let normalizedKey = normalize(key) 88 | 89 | /** 90 | * Remove leading and ending '/' 91 | * Remove leading and ending "." 92 | */ 93 | return normalizedKey.replace(/^\/|\/$/g, '').replace(/^\.|\.$/g, '') 94 | } 95 | 96 | /** 97 | * Normalize the key 98 | */ 99 | normalize(key: string) { 100 | let normalizedKey = this.#preNormalize(key) 101 | 102 | /** 103 | * Validating the key after pre-processing it with 104 | * some rules 105 | */ 106 | this.#validateCharacterSet(normalizedKey, key) 107 | this.#checkForPathTraversal(normalizedKey, key) 108 | 109 | /** 110 | * Performing post normalization after the key passes 111 | * the validations 112 | */ 113 | normalizedKey = this.#postNormalize(normalizedKey) 114 | 115 | /** 116 | * Post normalization sometimes can lead to an empty string 117 | */ 118 | if (normalizedKey.trim() === '') { 119 | throw new errors.E_INVALID_KEY([key]) 120 | } 121 | 122 | return normalizedKey 123 | } 124 | } 125 | -------------------------------------------------------------------------------- /src/types.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 8 | */ 9 | 10 | import { Readable } from 'node:stream' 11 | import { DriveFile } from './driver_file.js' 12 | import { DriveDirectory } from './drive_directory.js' 13 | 14 | /** 15 | * The visibility of the object. 16 | */ 17 | export type ObjectVisibility = 'public' | 'private' 18 | 19 | /** 20 | * The metadata of an object that can be fetched 21 | * using the "getMetaData" method. 22 | */ 23 | export type ObjectMetaData = { 24 | contentType?: string 25 | contentLength: number 26 | etag: string 27 | lastModified: Date 28 | } 29 | 30 | /** 31 | * Options accepted by the write operations. 
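 *
 * A hypothetical example of passing these options along with a "put" call
 * (the disk instance, key, and contents shown here are assumptions):
 *
 *   await disk.put('invoices/2024/01.pdf', contents, {
 *     visibility: 'private',
 *     contentType: 'application/pdf',
 *     cacheControl: 'max-age=3600',
 *   })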
32 |  */
33 | export type WriteOptions = {
34 |   visibility?: ObjectVisibility
35 |   contentType?: string
36 |   contentLanguage?: string
37 |   contentEncoding?: string
38 |   contentDisposition?: string
39 |   cacheControl?: string
40 |   contentLength?: number
41 | } & {
42 |   [key: string]: any
43 | }
44 |
45 | /**
46 |  * Options accepted during the creation of a signed URL.
47 |  */
48 | export type SignedURLOptions = {
49 |   expiresIn?: string | number
50 |   contentType?: string
51 |   contentDisposition?: string
52 | } & {
53 |   [key: string]: any
54 | }
55 |
56 | /**
57 |  * Representation of a file snapshot. It can be persisted
58 |  * inside any database storage.
59 |  */
60 | export type FileSnapshot = {
61 |   key: string
62 |   name: string
63 |   contentLength: number
64 |   lastModified: string
65 |   etag: string
66 |   contentType?: string
67 | }
68 |
69 | /**
70 |  * The interface every driver must implement.
71 |  */
72 | export interface DriverContract {
73 |   /**
74 |    * Return a boolean indicating if the file exists
75 |    */
76 |   exists(key: string): Promise<boolean>
77 |
78 |   /**
79 |    * Return the contents of an object for the given key as a UTF-8 string.
80 |    * Should throw "E_CANNOT_READ_FILE" error when the file
81 |    * does not exist.
82 |    */
83 |   get(key: string): Promise<string>
84 |
85 |   /**
86 |    * Return the contents of an object for the given key as a Readable stream.
87 |    * Should throw "E_CANNOT_READ_FILE" error when the file
88 |    * does not exist.
89 |    */
90 |   getStream(key: string): Promise<Readable>
91 |
92 |   /**
93 |    * Return the contents of an object for the given key as a Uint8Array.
94 |    * Should throw "E_CANNOT_READ_FILE" error when the file
95 |    * does not exist.
96 |    */
97 |   getBytes(key: string): Promise<Uint8Array>
98 |
99 |   /**
100 |    * Return metadata of an object for the given key.
101 |    */
102 |   getMetaData(key: string): Promise<ObjectMetaData>
103 |
104 |   /**
105 |    * Return the visibility of the file
106 |    */
107 |   getVisibility(key: string): Promise<ObjectVisibility>
108 |
109 |   /**
110 |    * Return the public URL to access the file
111 |    */
112 |   getUrl(key: string): Promise<string>
113 |
114 |   /**
115 |    * Return the signed/temporary URL to access the file
116 |    */
117 |   getSignedUrl(key: string, options?: SignedURLOptions): Promise<string>
118 |
119 |   /**
120 |    * Update the visibility of the file
121 |    */
122 |   setVisibility(key: string, visibility: ObjectVisibility): Promise<void>
123 |
124 |   /**
125 |    * Write object to the destination with the provided
126 |    * contents.
127 |    */
128 |   put(key: string, contents: string | Uint8Array, options?: WriteOptions): Promise<void>
129 |
130 |   /**
131 |    * Write object to the destination with the provided
132 |    * contents as a readable stream
133 |    */
134 |   putStream(key: string, contents: Readable, options?: WriteOptions): Promise<void>
135 |
136 |   /**
137 |    * Copy the file from within the disk root location. Both
138 |    * the "source" and "destination" will be the key names
139 |    * and not absolute paths.
140 |    */
141 |   copy(source: string, destination: string, options?: WriteOptions): Promise<void>
142 |
143 |   /**
144 |    * Move the file from within the disk root location. Both
145 |    * the "source" and "destination" will be the key names
146 |    * and not absolute paths.
147 |    */
148 |   move(source: string, destination: string, options?: WriteOptions): Promise<void>
149 |
150 |   /**
151 |    * Delete the file for the given key. Should not throw
152 |    * an error when the file does not exist in the first place
153 |    */
154 |   delete(key: string): Promise<void>
155 |
156 |   /**
157 |    * Delete the files and directories matching the provided prefix.
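 *
 * For example (the keys are illustrative, and "driver" is assumed to be an
 * instance of this contract), the call below removes only keys under the
 * "blog/images/" prefix:
 *
 *   await driver.put('blog/images/cover.png', contents)
 *   await driver.put('blog/posts/intro.md', contents)
 *   await driver.deleteAll('blog/images')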
158 |    */
159 |   deleteAll(prefix: string): Promise<void>
160 |
161 |   /**
162 |    * The list all method must return an iterable of objects with
163 |    * the ability to paginate results (if supported).
164 |    */
165 |   listAll(
166 |     prefix: string,
167 |     options?: {
168 |       recursive?: boolean
169 |       paginationToken?: string
170 |     }
171 |   ): Promise<{
172 |     paginationToken?: string
173 |     objects: Iterable<DriveFile | DriveDirectory>
174 |   }>
175 | }
176 |
177 | /**
178 |  * Configuration accepted by DriveManager
179 |  */
180 | export interface DriveManagerOptions<Services extends Record<string, () => DriverContract>> {
181 |   /**
182 |    * The default service to use for file system operations
183 |    */
184 |   default: keyof Services
185 |
186 |   /**
187 |    * Configured services
188 |    */
189 |   services: Services
190 |
191 |   /**
192 |    * Fakes configuration. Only needed when using fakes from the
193 |    * DriveManager
194 |    */
195 |   fakes?: {
196 |     /**
197 |      * The location for persisting files during fake mode
198 |      */
199 |     location: URL | string
200 |
201 |     /**
202 |      * Configure a custom URL builder for creating public and
203 |      * temporary URLs in fake mode
204 |      */
205 |     urlBuilder?: {
206 |       /**
207 |        * Custom implementation for creating public URLs
208 |        */
209 |       generateURL?(key: string, filePath: string): Promise<string>
210 |
211 |       /**
212 |        * Custom implementation for creating signed/temporary URLs
213 |        */
214 |       generateSignedURL?(key: string, filePath: string, options: SignedURLOptions): Promise<string>
215 |     }
216 |   }
217 | }
218 |
--------------------------------------------------------------------------------
/tests/core/drive_manager.spec.ts:
--------------------------------------------------------------------------------
1 | /*
2 |  * flydrive
3 |  *
4 |  * (c) FlyDrive
5 |  *
6 |  * For the full copyright and license information, please view the LICENSE
7 |  * file that was distributed with this source code.
8 | */ 9 | 10 | import { test } from '@japa/runner' 11 | 12 | import { Disk } from '../../src/disk.js' 13 | import { FakeDisk } from '../../src/fake_disk.js' 14 | import { FSDriver } from '../../drivers/fs/driver.js' 15 | import { GCSDriver } from '../../drivers/gcs/driver.js' 16 | import { DriveManager } from '../../src/drive_manager.js' 17 | import { GCS_BUCKET, GCS_KEY } from '../drivers/gcs/env.js' 18 | 19 | test.group('Drive Manager', () => { 20 | test('create disk instances for configured services', ({ fs, assert }) => { 21 | const drive = new DriveManager({ 22 | default: 'fs', 23 | services: { 24 | fs: () => new FSDriver({ location: fs.baseUrl, visibility: 'public' }), 25 | gcs: () => 26 | new GCSDriver({ 27 | visibility: 'public', 28 | bucket: GCS_BUCKET, 29 | credentials: GCS_KEY, 30 | usingUniformAcl: true, 31 | }), 32 | }, 33 | }) 34 | 35 | assert.instanceOf(drive.use(), Disk) 36 | assert.instanceOf(drive.use('gcs'), Disk) 37 | assert.instanceOf(drive.use('gcs').driver, GCSDriver) 38 | 39 | assert.instanceOf(drive.use('fs'), Disk) 40 | assert.instanceOf(drive.use('fs').driver, FSDriver) 41 | }) 42 | 43 | test('cache disk instances', ({ fs, assert }) => { 44 | const drive = new DriveManager({ 45 | default: 'fs', 46 | services: { 47 | fs: () => new FSDriver({ location: fs.baseUrl, visibility: 'public' }), 48 | gcs: () => 49 | new GCSDriver({ 50 | visibility: 'public', 51 | bucket: GCS_BUCKET, 52 | credentials: GCS_KEY, 53 | usingUniformAcl: true, 54 | }), 55 | }, 56 | }) 57 | 58 | assert.instanceOf(drive.use(), Disk) 59 | assert.strictEqual(drive.use('gcs'), drive.use('gcs')) 60 | }) 61 | 62 | test('throw error when trying to create a fake without fakes config', ({ fs, assert }) => { 63 | const drive = new DriveManager({ 64 | default: 'fs', 65 | services: { 66 | fs: () => new FSDriver({ location: fs.baseUrl, visibility: 'public' }), 67 | gcs: () => 68 | new GCSDriver({ 69 | visibility: 'public', 70 | bucket: GCS_BUCKET, 71 | credentials: GCS_KEY, 72 | usingUniformAcl: true, 73 | }), 74 | }, 75 | }) 76 | 77 | assert.throws( 78 | () => drive.fake('gcs'), 79 | 'Cannot use "drive.fake". 
Make sure to define fakes configuration when creating DriveManager instance' 80 | ) 81 | }) 82 | 83 | test('create fake for a service', ({ fs, assert }) => { 84 | const drive = new DriveManager({ 85 | default: 'fs', 86 | services: { 87 | fs: () => new FSDriver({ location: fs.baseUrl, visibility: 'public' }), 88 | gcs: () => 89 | new GCSDriver({ 90 | visibility: 'public', 91 | bucket: GCS_BUCKET, 92 | credentials: GCS_KEY, 93 | usingUniformAcl: true, 94 | }), 95 | }, 96 | fakes: { 97 | location: fs.baseUrl, 98 | }, 99 | }) 100 | 101 | const fake = drive.fake('gcs') 102 | assert.instanceOf(fake, FakeDisk) 103 | assert.strictEqual(drive.use('gcs'), fake) 104 | assert.notStrictEqual(drive.use('fs'), fake) 105 | }) 106 | 107 | test('write files to disk when using fakes', async ({ fs, assert }) => { 108 | const drive = new DriveManager({ 109 | default: 'fs', 110 | services: { 111 | fs: () => new FSDriver({ location: fs.baseUrl, visibility: 'public' }), 112 | gcs: () => 113 | new GCSDriver({ 114 | visibility: 'public', 115 | bucket: GCS_BUCKET, 116 | credentials: GCS_KEY, 117 | usingUniformAcl: true, 118 | }), 119 | }, 120 | fakes: { 121 | location: fs.baseUrl, 122 | }, 123 | }) 124 | 125 | const fake = drive.fake('gcs') 126 | await drive.use('gcs').put('hello.txt', 'Hello world') 127 | 128 | fake.assertExists('hello.txt') 129 | await assert.fileExists('gcs/hello.txt') 130 | }) 131 | 132 | test('clear files on restore', async ({ fs, assert }) => { 133 | const drive = new DriveManager({ 134 | default: 'fs', 135 | services: { 136 | fs: () => new FSDriver({ location: fs.baseUrl, visibility: 'public' }), 137 | gcs: () => 138 | new GCSDriver({ 139 | visibility: 'public', 140 | bucket: GCS_BUCKET, 141 | credentials: GCS_KEY, 142 | usingUniformAcl: true, 143 | }), 144 | }, 145 | fakes: { 146 | location: fs.baseUrl, 147 | }, 148 | }) 149 | 150 | const fake = drive.fake('gcs') 151 | await drive.use('gcs').put('hello.txt', 'Hello world') 152 | drive.restore('gcs') 153 | 154 | fake.assertMissing('hello.txt') 155 | await assert.fileNotExists('gcs/hello.txt') 156 | }) 157 | 158 | test('create and restore fakes of the default service', async ({ fs, assert }) => { 159 | const drive = new DriveManager({ 160 | default: 'fs', 161 | services: { 162 | fs: () => new FSDriver({ location: fs.baseUrl, visibility: 'public' }), 163 | gcs: () => 164 | new GCSDriver({ 165 | visibility: 'public', 166 | bucket: GCS_BUCKET, 167 | credentials: GCS_KEY, 168 | usingUniformAcl: true, 169 | }), 170 | }, 171 | fakes: { 172 | location: fs.baseUrl, 173 | }, 174 | }) 175 | 176 | const fake = drive.fake() 177 | assert.strictEqual(fake, drive.use()) 178 | 179 | drive.restore() 180 | assert.notStrictEqual(fake, drive.use()) 181 | }) 182 | 183 | test('use fakes assertions', async ({ fs, assert }) => { 184 | const drive = new DriveManager({ 185 | default: 'fs', 186 | services: { 187 | fs: () => new FSDriver({ location: fs.baseUrl, visibility: 'public' }), 188 | gcs: () => 189 | new GCSDriver({ 190 | visibility: 'public', 191 | bucket: GCS_BUCKET, 192 | credentials: GCS_KEY, 193 | usingUniformAcl: true, 194 | }), 195 | }, 196 | fakes: { 197 | location: fs.baseUrl, 198 | }, 199 | }) 200 | 201 | const fake = drive.fake('gcs') 202 | await drive.use('gcs').put('hello.txt', 'Hello world') 203 | 204 | fake.assertExists('hello.txt') 205 | fake.assertMissing('foo.txt') 206 | 207 | assert.throws( 208 | () => fake.assertExists('foo.txt'), 209 | 'Expected "foo.txt" to exist, but file not found' 210 | ) 211 | 212 | assert.throws( 213 | () => 
fake.assertMissing('hello.txt'), 214 | 'Expected "hello.txt" to be missing, but file exists' 215 | ) 216 | }) 217 | }) 218 | -------------------------------------------------------------------------------- /tests/core/key_normalizer.spec.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 8 | */ 9 | 10 | import { test } from '@japa/runner' 11 | import { KeyNormalizer } from '../../src/key_normalizer.js' 12 | 13 | test.group('Key normalizer | Pre normalization', () => { 14 | test('perform pre-normalization on key "{key}"') 15 | .with([ 16 | { 17 | key: 'hello world', 18 | output: 'hello world', 19 | }, 20 | { 21 | key: 'foo\\bar', 22 | output: 'foo/bar', 23 | }, 24 | { 25 | key: 'foo//bar//baz', 26 | output: 'foo/bar/baz', 27 | }, 28 | ]) 29 | .run(({ assert }, { key, output }) => { 30 | assert.equal(new KeyNormalizer().normalize(key), output) 31 | }) 32 | }) 33 | 34 | test.group('Key normalizer | Unallowed characters', () => { 35 | test('throw error when key has unallowed characters "{key}"') 36 | .with([ 37 | { 38 | key: 'foo$', 39 | }, 40 | { 41 | key: '^foo', 42 | }, 43 | { 44 | key: '>foo', 45 | }, 46 | { 47 | key: 'bar<', 48 | }, 49 | { 50 | key: '{bar}', 51 | }, 52 | { 53 | key: '#bar', 54 | }, 55 | { 56 | key: 'bar%baz', 57 | }, 58 | { 59 | key: '~virk', 60 | }, 61 | { 62 | key: 'foo|bar', 63 | }, 64 | { 65 | key: 'foo"bar\'', 66 | }, 67 | { 68 | key: 'foo@bar', 69 | }, 70 | { 71 | key: 'foo&bar', 72 | }, 73 | { 74 | key: 'foo+bar', 75 | }, 76 | { 77 | key: 'helloworld;', 78 | }, 79 | { 80 | key: 'some\0/path.txt', 81 | }, 82 | { 83 | key: 's\x09i.js"', 84 | }, 85 | ]) 86 | .run(({ assert }, { key }) => { 87 | assert.throws( 88 | () => new KeyNormalizer().normalize(key), 89 | `The key "${key}" has unallowed characters` 90 | ) 91 | }) 92 | }) 93 | 94 | test.group('Key normalizer | Path traversal', () => { 95 | test('throw error when key leads to path traversal "{key}"') 96 | .with([ 97 | { 98 | key: 'something/../../../hehe', 99 | }, 100 | { 101 | key: '/something/../../..', 102 | }, 103 | { 104 | key: '..', 105 | }, 106 | { 107 | key: 'something\\..\\..', 108 | }, 109 | { 110 | key: '\\something\\..\\..\\dirname', 111 | }, 112 | { 113 | key: '../foo', 114 | }, 115 | { 116 | key: 'foo/../back', 117 | }, 118 | { 119 | key: 'beyond/root/../.././..', 120 | }, 121 | { 122 | key: '/beyond/../..', 123 | }, 124 | { 125 | key: '/./../some/dir', 126 | }, 127 | { 128 | key: '.../foo/bar', 129 | }, 130 | { 131 | key: '\\something\\...\\...\\dirname', 132 | }, 133 | { 134 | key: 'beyond/root/.../', 135 | }, 136 | ]) 137 | .run(({ assert }, { key }) => { 138 | assert.throws( 139 | () => new KeyNormalizer().normalize(key), 140 | `Path traversal segment detected in key "${key}"` 141 | ) 142 | }) 143 | }) 144 | 145 | test.group('Key normalizer | Post normalization', () => { 146 | test('perform post-normalization on key "{key}"') 147 | .with([ 148 | { 149 | key: '/path/to/dir/.', 150 | output: 'path/to/dir', 151 | }, 152 | { 153 | key: '/dirname/', 154 | output: 'dirname', 155 | }, 156 | { 157 | key: 'dirname./', 158 | output: 'dirname', 159 | }, 160 | { 161 | key: 'dirname/./', 162 | output: 'dirname', 163 | }, 164 | { 165 | key: 'dirname/..txt', 166 | output: 'dirname/..txt', 167 | }, 168 | { 169 | key: 'dirname/.', 170 | output: 'dirname', 171 | }, 172 | { 173 | key: 'dirname!./', 
174 | output: 'dirname!', 175 | }, 176 | { 177 | key: '00004869/files/other/10-75..stl', 178 | output: '00004869/files/other/10-75..stl', 179 | }, 180 | { 181 | key: '/dirname//subdir///subsubdir', 182 | output: 'dirname/subdir/subsubdir', 183 | }, 184 | { 185 | key: '\\\\someshared\\\\drive', 186 | output: 'someshared/drive', 187 | }, 188 | { 189 | key: 'C\\dirname\\\\subdir\\\\\\subsubdir', 190 | output: 'C/dirname/subdir/subsubdir', 191 | }, 192 | { 193 | key: '...hello-world', 194 | output: '..hello-world', 195 | }, 196 | ]) 197 | .run(({ assert }, { key, output }) => { 198 | assert.equal(new KeyNormalizer().normalize(key), output) 199 | }) 200 | }) 201 | 202 | test.group('Key normalizer | Empty strings', () => { 203 | test('throw error when key leads to an empty string "{key}"') 204 | .with([ 205 | { 206 | key: '.', 207 | }, 208 | { 209 | key: './', 210 | }, 211 | { 212 | key: '. .', 213 | }, 214 | { 215 | key: ' ', 216 | }, 217 | { 218 | key: '. /.', 219 | }, 220 | { 221 | key: '. ./', 222 | }, 223 | { 224 | key: '. /./', 225 | }, 226 | { 227 | key: '. ././', 228 | }, 229 | ]) 230 | .run(({ assert }, { key }) => { 231 | assert.throws( 232 | () => new KeyNormalizer().normalize(key), 233 | `Invalid key "${key}". After normalization results in an empty string` 234 | ) 235 | }) 236 | }) 237 | -------------------------------------------------------------------------------- /tests/drivers/fs/copy.spec.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 8 | */ 9 | 10 | import { test } from '@japa/runner' 11 | import { FSDriver } from '../../../drivers/fs/driver.js' 12 | 13 | test.group('FS Driver | copy', () => { 14 | test('copy file from source to the destination', async ({ fs, assert }) => { 15 | const source = 'hello.txt' 16 | const destination = 'hi.txt' 17 | const contents = 'Hello world' 18 | 19 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 20 | await fdfs.put(source, contents) 21 | await fdfs.copy(source, destination) 22 | 23 | assert.equal(await fdfs.get(destination), contents) 24 | }) 25 | 26 | test('copy file from source to a nested directory', async ({ fs, assert }) => { 27 | const source = 'hello.txt' 28 | const destination = 'foo/bar/baz/hi.txt' 29 | const contents = 'Hello world' 30 | 31 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 32 | await fdfs.put(source, contents) 33 | await fdfs.copy(source, destination) 34 | 35 | assert.equal(await fdfs.get(destination), contents) 36 | }) 37 | 38 | test('return error when source file does not exist', async ({ fs, assert }) => { 39 | const source = 'hello.txt' 40 | const destination = 'hi.txt' 41 | 42 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 43 | await assert.rejects(async () => { 44 | await fdfs.copy(source, destination) 45 | }, /ENOENT: no such file or directory/) 46 | }) 47 | 48 | test('return error when source is a directory', async ({ fs, assert }) => { 49 | const source = 'foo/hello.txt' 50 | const destination = 'bar' 51 | 52 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 53 | await fdfs.put(source, 'hello world') 54 | 55 | await assert.rejects(async () => { 56 | await fdfs.copy('foo', destination) 57 | }, /ENOTSUP: operation not supported|EISDIR: illegal operation on a|EPERM: operation not 
permitted/) 58 | }) 59 | }) 60 | -------------------------------------------------------------------------------- /tests/drivers/fs/delete.spec.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 8 | */ 9 | 10 | import { test } from '@japa/runner' 11 | 12 | import { FSDriver } from '../../../drivers/fs/driver.js' 13 | 14 | test.group('FS Driver | delete', () => { 15 | test('delete file', async ({ fs, assert }) => { 16 | const key = 'hello.txt' 17 | const contents = 'Hello world' 18 | 19 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 20 | await fdfs.put(key, contents) 21 | await fdfs.delete(key) 22 | 23 | await assert.fileNotExists(key) 24 | }) 25 | 26 | test('delete file at nested path', async ({ fs, assert }) => { 27 | const key = 'foo/bar/hello.txt' 28 | const contents = 'Hello world' 29 | 30 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 31 | await fdfs.put(key, contents) 32 | await fdfs.delete(key) 33 | 34 | await assert.fileNotExists(key) 35 | }) 36 | 37 | test('noop when trying to delete a non-existing file', async ({ fs, assert }) => { 38 | const key = 'foo/bar/hello.txt' 39 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 40 | await fdfs.delete(key) 41 | 42 | await assert.fileNotExists(key) 43 | }) 44 | 45 | test('throw error when trying to delete a directory', async ({ fs, assert }) => { 46 | const key = 'foo/bar/hello.txt' 47 | const contents = 'Hello world' 48 | 49 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 50 | await fdfs.put(key, contents) 51 | await assert.rejects(async () => { 52 | await fdfs.delete('foo') 53 | }, /EPERM: operation not permitted|EISDIR: illegal operation on a direct/) 54 | 55 | await assert.fileExists(key) 56 | }) 57 | }) 58 | 59 | test.group('FS Driver | deleteAll', () => { 60 | test('delete all files matching the prefix', async ({ fs, assert }) => { 61 | const key = 'foo/hello.txt' 62 | const contents = 'Hello world' 63 | 64 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 65 | await fdfs.put(key, contents) 66 | await fdfs.deleteAll('foo') 67 | 68 | await assert.fileNotExists(key) 69 | await assert.dirIsEmpty() 70 | }) 71 | 72 | test('delete empty folders', async ({ fs, assert }) => { 73 | const key = 'foo/hello.txt' 74 | const anotherKey = 'foo/bar/hello.txt' 75 | const contents = 'Hello world' 76 | 77 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 78 | await fdfs.put(key, contents) 79 | await fdfs.put(anotherKey, contents) 80 | 81 | await fdfs.delete(anotherKey) 82 | await assert.dirIsEmpty('foo/bar') 83 | 84 | await fdfs.deleteAll('foo') 85 | 86 | await assert.fileNotExists(key) 87 | await assert.dirIsEmpty() 88 | }) 89 | 90 | test('noop when trying to delete empty directories', async ({ fs, assert }) => { 91 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 92 | await fdfs.deleteAll('foo') 93 | await assert.dirIsEmpty() 94 | }) 95 | }) 96 | -------------------------------------------------------------------------------- /tests/drivers/fs/exists.spec.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, 
please view the LICENSE 7 | * file that was distributed with this source code. 8 | */ 9 | 10 | import { test } from '@japa/runner' 11 | import { FSDriver } from '../../../drivers/fs/driver.js' 12 | 13 | test.group('FS Driver | exists', () => { 14 | test('return true when file exists', async ({ fs, assert }) => { 15 | const key = 'hello.txt' 16 | const contents = 'Hello world' 17 | 18 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 19 | await fdfs.put(key, contents) 20 | 21 | assert.isTrue(await fdfs.exists(key)) 22 | }) 23 | 24 | test('return false when file does not exist', async ({ fs, assert }) => { 25 | const key = 'hello.txt' 26 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 27 | 28 | assert.isFalse(await fdfs.exists(key)) 29 | }) 30 | 31 | test('return false when object is a folder', async ({ fs, assert }) => { 32 | const key = 'foo/hello.txt' 33 | const contents = 'Hello world' 34 | 35 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 36 | await fdfs.put(key, contents) 37 | 38 | assert.isFalse(await fdfs.exists('foo')) 39 | }) 40 | }) 41 | -------------------------------------------------------------------------------- /tests/drivers/fs/get.spec.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 8 | */ 9 | 10 | import getStream from 'get-stream' 11 | import { test } from '@japa/runner' 12 | 13 | import { FSDriver } from '../../../drivers/fs/driver.js' 14 | 15 | test.group('FS Driver | get', () => { 16 | test('get file contents from the destination', async ({ fs, assert }) => { 17 | const key = 'hello.txt' 18 | const contents = 'Hello world' 19 | 20 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 21 | await fdfs.put(key, contents) 22 | 23 | assert.equal(await fdfs.get(key), contents) 24 | }) 25 | 26 | test('return error when file does not exist', async ({ fs, assert }) => { 27 | const key = 'hello.txt' 28 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 29 | await assert.rejects(async () => { 30 | await fdfs.get(key) 31 | }, /ENOENT: no such file or directory/) 32 | }) 33 | 34 | test('return error when trying to read contents of a folder', async ({ fs, assert }) => { 35 | const key = 'foo/hello.txt' 36 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 37 | await fdfs.put(key, 'hello world') 38 | await assert.rejects(async () => { 39 | await fdfs.get('foo') 40 | }, /EPERM: operation not permitted|EISDIR: illegal operation on a direct/) 41 | }) 42 | }) 43 | 44 | test.group('FS Driver | getStream', () => { 45 | test('get file contents as a stream', async ({ fs, assert }) => { 46 | const key = 'hello.txt' 47 | const contents = 'Hello world' 48 | 49 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 50 | await fdfs.put(key, contents) 51 | 52 | assert.equal(await getStream(await fdfs.getStream(key)), contents) 53 | }) 54 | 55 | test('return error when file does not exist', async ({ fs, assert }) => { 56 | const key = 'hello.txt' 57 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 58 | 59 | await assert.rejects(async () => { 60 | await getStream(await fdfs.getStream(key)) 61 | }, /ENOENT: no such file or directory/) 62 | }) 63 | }) 64 | 65 | test.group('FS Driver 
| getBytes', () => { 66 | test('get file contents as array buffer', async ({ fs, assert }) => { 67 | const key = 'hello.txt' 68 | const contents = 'Hello world' 69 | 70 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 71 | await fdfs.put(key, contents) 72 | 73 | assert.equal(new TextDecoder().decode(await fdfs.getBytes(key)), contents) 74 | }) 75 | 76 | test('return error when file does not exist', async ({ fs, assert }) => { 77 | const key = 'hello.txt' 78 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 79 | 80 | await assert.rejects(async () => { 81 | await fdfs.getBytes(key) 82 | }, /ENOENT: no such file or directory/) 83 | }) 84 | }) 85 | -------------------------------------------------------------------------------- /tests/drivers/fs/get_metadata.spec.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 8 | */ 9 | 10 | import { test } from '@japa/runner' 11 | import { FSDriver } from '../../../drivers/fs/driver.js' 12 | 13 | test.group('FS Driver | getMetaData', () => { 14 | test('get metaData of a file', async ({ fs, assert }) => { 15 | const key = 'hello.txt' 16 | const contents = 'Hello world' 17 | 18 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 19 | await fdfs.put(key, contents) 20 | const metaData = await fdfs.getMetaData(key) 21 | 22 | assert.match(metaData.etag, /W/) 23 | assert.isTrue(metaData.lastModified instanceof Date) 24 | assert.containsSubset(metaData, { 25 | contentLength: 11, 26 | contentType: 'text/plain', 27 | }) 28 | }) 29 | 30 | test('return error when file does not exists', async ({ fs, assert }) => { 31 | const key = 'hello.txt' 32 | 33 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 34 | await assert.rejects(async () => { 35 | await fdfs.getMetaData(key) 36 | }, /ENOENT: no such file or directory/) 37 | }) 38 | 39 | test('return error when trying to get metadata of a directory', async ({ fs, assert }) => { 40 | const key = 'foo/hello.txt' 41 | 42 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 43 | await fdfs.put(key, 'hello world') 44 | 45 | await assert.rejects(async () => { 46 | await fdfs.getMetaData('foo') 47 | }, /Cannot get metadata of a directory/) 48 | }) 49 | }) 50 | -------------------------------------------------------------------------------- /tests/drivers/fs/list_all.spec.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 
8 | */ 9 | 10 | import { test } from '@japa/runner' 11 | import { FSDriver } from '../../../drivers/fs/driver.js' 12 | 13 | test.group('FS Driver | listAll | root dir', () => { 14 | test('list all files and top-level directories of the matching prefix', async ({ 15 | fs, 16 | assert, 17 | }) => { 18 | const keys = ['hello.txt', 'foo/bar/hello.txt', 'baz/hello.txt'] 19 | const contents = 'Hello world' 20 | 21 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 22 | for (const key of keys) { 23 | await fdfs.put(key, contents) 24 | } 25 | 26 | const { objects } = await fdfs.listAll('/') 27 | assert.deepEqual(Array.from(objects), [ 28 | { 29 | isDirectory: true, 30 | isFile: false, 31 | name: 'baz', 32 | prefix: 'baz', 33 | }, 34 | { 35 | isDirectory: true, 36 | isFile: false, 37 | name: 'foo', 38 | prefix: 'foo', 39 | }, 40 | { 41 | isDirectory: false, 42 | isFile: true, 43 | name: 'hello.txt', 44 | key: 'hello.txt', 45 | }, 46 | ]) 47 | }) 48 | 49 | test('list all files recursively', async ({ fs, assert }) => { 50 | const keys = ['hello.txt', 'foo/bar/hello.txt', 'baz/hello.txt'] 51 | const contents = 'Hello world' 52 | 53 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 54 | for (const key of keys) { 55 | await fdfs.put(key, contents) 56 | } 57 | 58 | const { objects } = await fdfs.listAll('/', { recursive: true }) 59 | assert.deepEqual(Array.from(objects), [ 60 | { 61 | isDirectory: false, 62 | isFile: true, 63 | name: 'hello.txt', 64 | key: 'hello.txt', 65 | }, 66 | { 67 | isDirectory: false, 68 | isFile: true, 69 | name: 'hello.txt', 70 | key: 'foo/bar/hello.txt', 71 | }, 72 | { 73 | isDirectory: false, 74 | isFile: true, 75 | name: 'hello.txt', 76 | key: 'baz/hello.txt', 77 | }, 78 | ]) 79 | }) 80 | }) 81 | 82 | test.group('FS Driver | listAll | nested dir', () => { 83 | test('list all files and top-level directories of the matching prefix', async ({ 84 | fs, 85 | assert, 86 | }) => { 87 | const keys = ['hello.txt', 'foo/bar/hello.txt', 'baz/hello.txt'] 88 | const contents = 'Hello world' 89 | 90 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 91 | for (const key of keys) { 92 | await fdfs.put(key, contents) 93 | } 94 | 95 | const { objects } = await fdfs.listAll('foo') 96 | assert.deepEqual(Array.from(objects), [ 97 | { 98 | isDirectory: true, 99 | isFile: false, 100 | name: 'bar', 101 | prefix: 'foo/bar', 102 | }, 103 | ]) 104 | }) 105 | 106 | test('list all files recursively', async ({ fs, assert }) => { 107 | const keys = ['hello.txt', 'foo/bar/hello.txt', 'baz/hello.txt'] 108 | const contents = 'Hello world' 109 | 110 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 111 | for (const key of keys) { 112 | await fdfs.put(key, contents) 113 | } 114 | 115 | const { objects } = await fdfs.listAll('foo', { recursive: true }) 116 | assert.deepEqual(Array.from(objects), [ 117 | { 118 | isDirectory: false, 119 | isFile: true, 120 | name: 'hello.txt', 121 | key: 'foo/bar/hello.txt', 122 | }, 123 | ]) 124 | }) 125 | 126 | test('do not throw error when listing files of a non-existing directory', async ({ 127 | fs, 128 | assert, 129 | }) => { 130 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 131 | 132 | const { objects } = await fdfs.listAll('foo', { recursive: true }) 133 | assert.deepEqual(Array.from(objects), []) 134 | }) 135 | }) 136 | -------------------------------------------------------------------------------- /tests/drivers/fs/move.spec.ts: 
-------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 8 | */ 9 | 10 | import { test } from '@japa/runner' 11 | import { FSDriver } from '../../../drivers/fs/driver.js' 12 | 13 | test.group('FS Driver | move', () => { 14 | test('move file from source to the destination', async ({ fs, assert }) => { 15 | const source = 'hello.txt' 16 | const destination = 'hi.txt' 17 | const contents = 'Hello world' 18 | 19 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 20 | await fdfs.put(source, contents) 21 | await fdfs.move(source, destination) 22 | 23 | assert.equal(await fdfs.get(destination), contents) 24 | await assert.fileNotExists(source) 25 | }) 26 | 27 | test('move file from source to a nested directory', async ({ fs, assert }) => { 28 | const source = 'hello.txt' 29 | const destination = 'foo/bar/baz/hi.txt' 30 | const contents = 'Hello world' 31 | 32 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 33 | await fdfs.put(source, contents) 34 | await fdfs.move(source, destination) 35 | 36 | assert.equal(await fdfs.get(destination), contents) 37 | await assert.fileNotExists(source) 38 | }) 39 | 40 | test('overwrite destination when one already exists', async ({ fs, assert }) => { 41 | const source = 'hello.txt' 42 | const destination = 'foo/bar/baz/hi.txt' 43 | const contents = 'Hello world' 44 | 45 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 46 | await fdfs.put(source, contents) 47 | await fdfs.put(destination, 'Hi world') 48 | assert.equal(await fdfs.get(destination), 'Hi world') 49 | 50 | await fdfs.move(source, destination) 51 | 52 | assert.equal(await fdfs.get(destination), contents) 53 | await assert.fileNotExists(source) 54 | }) 55 | 56 | test('return error when source file does not exist', async ({ fs, assert }) => { 57 | const source = 'hello.txt' 58 | const destination = 'hi.txt' 59 | 60 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 61 | await assert.rejects(async () => { 62 | await fdfs.move(source, destination) 63 | }, /ENOENT: no such file or directory/) 64 | }) 65 | 66 | test('return error when source is a directory', async ({ fs, assert }) => { 67 | const source = 'foo/hello.txt' 68 | const destination = 'bar' 69 | 70 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 71 | await fdfs.put(source, 'hello world') 72 | 73 | await assert.rejects(async () => { 74 | await fdfs.move('foo', destination) 75 | }, /ENOTSUP: operation not supported|EISDIR: illegal operation on a|EPERM: operation not permitted/) 76 | }) 77 | }) 78 | -------------------------------------------------------------------------------- /tests/drivers/fs/put.spec.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 
8 | */ 9 | 10 | import { join } from 'node:path' 11 | import { test } from '@japa/runner' 12 | import { Readable } from 'node:stream' 13 | import { createReadStream } from 'node:fs' 14 | 15 | import { FSDriver } from '../../../drivers/fs/driver.js' 16 | 17 | test.group('FS Driver | put', () => { 18 | test('create file at the destination', async ({ fs, assert }) => { 19 | const key = 'hello.txt' 20 | const contents = 'Hello world' 21 | 22 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 23 | await fdfs.put(key, contents) 24 | 25 | await assert.fileExists(key) 26 | await assert.fileEquals(key, contents) 27 | }) 28 | 29 | test('overwrite contents of existing file', async ({ fs, assert }) => { 30 | const key = 'hello.txt' 31 | const contents = 'Hello world' 32 | const newContents = 'Hi world' 33 | 34 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 35 | await fdfs.put(key, contents) 36 | await fdfs.put(key, newContents) 37 | 38 | await assert.fileExists(key) 39 | await assert.fileEquals(key, newContents) 40 | }) 41 | 42 | test('create files at a nested destination', async ({ fs, assert }) => { 43 | const key = 'users/1/hello.txt' 44 | const contents = 'Hello world' 45 | 46 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 47 | await fdfs.put(key, contents) 48 | 49 | await assert.fileExists(key) 50 | await assert.fileEquals(key, contents) 51 | }) 52 | }) 53 | 54 | test.group('FS Driver | putStream', () => { 55 | test('create file from readable stream', async ({ fs, assert }) => { 56 | const key = 'hello.txt' 57 | const contents = 'Hello world' 58 | 59 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 60 | await fdfs.putStream(key, Readable.from([contents])) 61 | 62 | await assert.fileExists(key) 63 | await assert.fileEquals(key, contents) 64 | }) 65 | 66 | test('create file from fs stream', async ({ fs, assert }) => { 67 | const key = 'hello.txt' 68 | const contents = JSON.stringify({ greeting: 'hello world' }) 69 | await fs.create('foo.json', contents) 70 | 71 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 72 | await fdfs.putStream(key, createReadStream(join(fs.basePath, 'foo.json'))) 73 | 74 | await assert.fileExists(key) 75 | await assert.fileEquals(key, contents) 76 | }) 77 | 78 | test('throw error when readable stream returns error', async ({ fs, assert }) => { 79 | const key = 'hello.txt' 80 | 81 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 82 | await assert.rejects(async () => { 83 | await fdfs.putStream(key, createReadStream(join(fs.basePath, 'foo.json'))) 84 | }, /ENOENT: no such file or directory/) 85 | }) 86 | }) 87 | -------------------------------------------------------------------------------- /tests/drivers/fs/url_generation.spec.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 
8 | */ 9 | 10 | import { test } from '@japa/runner' 11 | import { FSDriver } from '../../../drivers/fs/driver.js' 12 | 13 | test.group('FS Driver | getUrl', () => { 14 | test('throw error when trying to generate a URL', async ({ fs, assert }) => { 15 | const key = 'hello.txt' 16 | 17 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 18 | await assert.rejects( 19 | () => fdfs.getUrl(key), 20 | 'Cannot generate URL. The "fs" driver does not support it' 21 | ) 22 | }) 23 | 24 | test('use custom implementation to generate a URL', async ({ fs, assert }) => { 25 | const key = 'hello.txt' 26 | 27 | const fdfs = new FSDriver({ 28 | location: fs.baseUrl, 29 | visibility: 'public', 30 | urlBuilder: { 31 | async generateURL(fileKey) { 32 | return `/assets/${fileKey}` 33 | }, 34 | }, 35 | }) 36 | 37 | assert.equal(await fdfs.getUrl(key), '/assets/hello.txt') 38 | }) 39 | 40 | test('throw error when trying to generate a signed URL', async ({ fs, assert }) => { 41 | const key = 'hello.txt' 42 | 43 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 44 | await assert.rejects( 45 | () => fdfs.getSignedUrl(key), 46 | 'Cannot generate signed URL. The "fs" driver does not support it' 47 | ) 48 | }) 49 | 50 | test('use custom implementation to generate a signed URL', async ({ fs, assert }) => { 51 | const key = 'hello.txt' 52 | 53 | const fdfs = new FSDriver({ 54 | location: fs.baseUrl, 55 | visibility: 'public', 56 | urlBuilder: { 57 | async generateSignedURL(fileKey) { 58 | return `/assets/${fileKey}?signature=foo` 59 | }, 60 | }, 61 | }) 62 | 63 | assert.equal(await fdfs.getSignedUrl(key), '/assets/hello.txt?signature=foo') 64 | }) 65 | }) 66 | -------------------------------------------------------------------------------- /tests/drivers/fs/visibility.spec.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 8 | */ 9 | 10 | import { test } from '@japa/runner' 11 | import { FSDriver } from '../../../drivers/fs/driver.js' 12 | 13 | test.group('FS Driver | visibility', () => { 14 | test('get visibility of a file', async ({ fs, assert }) => { 15 | const key = 'hello.txt' 16 | 17 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 18 | const visibility = await fdfs.getVisibility(key) 19 | assert.equal(visibility, 'public') 20 | }) 21 | 22 | test('noop when trying to set visibility of a file', async ({ fs, assert }) => { 23 | const key = 'hello.txt' 24 | 25 | const fdfs = new FSDriver({ location: fs.baseUrl, visibility: 'public' }) 26 | await fdfs.setVisibility(key, 'private') 27 | 28 | const visibility = await fdfs.getVisibility(key) 29 | assert.equal(visibility, 'public') 30 | }) 31 | }) 32 | -------------------------------------------------------------------------------- /tests/drivers/gcs/copy.spec.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 
8 | */ 9 | 10 | import { test } from '@japa/runner' 11 | import string from '@poppinss/utils/string' 12 | import { Storage } from '@google-cloud/storage' 13 | import { GCSDriver } from '../../../drivers/gcs/driver.js' 14 | import { GCS_BUCKET, GCS_FINE_GRAINED_ACL_BUCKET, GCS_KEY } from './env.js' 15 | 16 | /** 17 | * Direct access to Google cloud storage bucket 18 | * via their SDK 19 | */ 20 | const bucket = new Storage({ 21 | credentials: GCS_KEY, 22 | }).bucket(GCS_BUCKET) 23 | const noUniformedAclBucket = new Storage({ 24 | credentials: GCS_KEY, 25 | }).bucket(GCS_FINE_GRAINED_ACL_BUCKET) 26 | 27 | test.group('GCS Driver | copy', (group) => { 28 | group.each.setup(() => { 29 | return async () => { 30 | await bucket.deleteFiles() 31 | await noUniformedAclBucket.deleteFiles() 32 | } 33 | }) 34 | group.each.timeout(10_000) 35 | 36 | test('copy file from source to the destination', async ({ assert }) => { 37 | const source = `${string.random(6)}.txt` 38 | const destination = `${string.random(6)}.txt` 39 | const contents = 'Hello world' 40 | 41 | const fdgcs = new GCSDriver({ 42 | visibility: 'public', 43 | bucket: GCS_BUCKET, 44 | credentials: GCS_KEY, 45 | usingUniformAcl: true, 46 | }) 47 | await fdgcs.put(source, contents) 48 | await fdgcs.copy(source, destination) 49 | 50 | assert.equal(await fdgcs.get(destination), contents) 51 | }) 52 | 53 | test('copy file from source to a nested directory', async ({ assert }) => { 54 | const source = `${string.random(6)}.txt` 55 | const destination = `foo/bar/baz/${string.random(6)}.txt` 56 | const contents = 'Hello world' 57 | 58 | const fdgcs = new GCSDriver({ 59 | visibility: 'public', 60 | bucket: GCS_BUCKET, 61 | credentials: GCS_KEY, 62 | usingUniformAcl: true, 63 | }) 64 | await fdgcs.put(source, contents) 65 | await fdgcs.copy(source, destination) 66 | 67 | assert.equal(await fdgcs.get(destination), contents) 68 | }) 69 | 70 | test('return error when source file does not exist', async ({ assert }) => { 71 | const source = `${string.random(6)}.txt` 72 | const destination = `${string.random(6)}.txt` 73 | 74 | const fdgcs = new GCSDriver({ 75 | visibility: 'public', 76 | bucket: GCS_BUCKET, 77 | credentials: GCS_KEY, 78 | usingUniformAcl: true, 79 | }) 80 | await assert.rejects(async () => { 81 | await fdgcs.copy(source, destination) 82 | }, /No such object:/) 83 | }) 84 | 85 | test('retain source file visibility and metadata during copy', async ({ assert }) => { 86 | const source = `${string.random(10)}.txt` 87 | const destination = `${string.random(10)}.txt` 88 | const contents = 'Hello world' 89 | 90 | const fdgcs = new GCSDriver({ 91 | visibility: 'public', 92 | bucket: GCS_FINE_GRAINED_ACL_BUCKET, 93 | credentials: GCS_KEY, 94 | usingUniformAcl: false, 95 | }) 96 | 97 | await fdgcs.put(source, contents, { 98 | contentType: 'image/png', 99 | visibility: 'private', 100 | }) 101 | 102 | await fdgcs.copy(source, destination) 103 | const metaData = await fdgcs.getMetaData(destination) 104 | const visibility = await fdgcs.getVisibility(destination) 105 | 106 | assert.equal(visibility, 'private') 107 | assert.equal(metaData.contentType, 'image/png') 108 | 109 | const existsResponse = await noUniformedAclBucket.file(source).exists() 110 | assert.isTrue(existsResponse[0]) 111 | }) 112 | }) 113 | -------------------------------------------------------------------------------- /tests/drivers/gcs/delete.spec.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * 
For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 8 | */ 9 | 10 | import { test } from '@japa/runner' 11 | import string from '@poppinss/utils/string' 12 | import { Storage } from '@google-cloud/storage' 13 | import { GCS_BUCKET, GCS_KEY } from './env.js' 14 | import { GCSDriver } from '../../../drivers/gcs/driver.js' 15 | 16 | /** 17 | * Direct access to Google cloud storage bucket 18 | * via their SDK 19 | */ 20 | const bucket = new Storage({ 21 | credentials: GCS_KEY, 22 | }).bucket(GCS_BUCKET) 23 | 24 | test.group('GCS Driver | delete', (group) => { 25 | group.each.setup(() => { 26 | return async () => { 27 | await bucket.deleteFiles() 28 | } 29 | }) 30 | group.each.timeout(10_000) 31 | 32 | test('delete file', async ({ assert }) => { 33 | const key = `${string.random(6)}.txt` 34 | const contents = 'Hello world' 35 | 36 | const fdgcs = new GCSDriver({ 37 | visibility: 'public', 38 | bucket: GCS_BUCKET, 39 | credentials: GCS_KEY, 40 | }) 41 | await fdgcs.put(key, contents) 42 | await fdgcs.delete(key) 43 | 44 | const existsResponse = await bucket.file(key).exists() 45 | assert.isFalse(existsResponse[0]) 46 | }) 47 | 48 | test('delete file at nested path', async ({ assert }) => { 49 | const key = `foo/bar/${string.random(6)}.txt` 50 | const contents = 'Hello world' 51 | 52 | const fdgcs = new GCSDriver({ 53 | visibility: 'public', 54 | bucket: GCS_BUCKET, 55 | credentials: GCS_KEY, 56 | }) 57 | await fdgcs.put(key, contents) 58 | await fdgcs.delete(key) 59 | 60 | const existsResponse = await bucket.file(key).exists() 61 | assert.isFalse(existsResponse[0]) 62 | }) 63 | 64 | test('noop when trying to delete a non-existing file', async ({ assert }) => { 65 | const key = `foo/bar/${string.random(6)}.txt` 66 | const fdgcs = new GCSDriver({ 67 | visibility: 'public', 68 | bucket: GCS_BUCKET, 69 | credentials: GCS_KEY, 70 | }) 71 | await fdgcs.delete(key) 72 | 73 | const existsResponse = await bucket.file(key).exists() 74 | assert.isFalse(existsResponse[0]) 75 | }) 76 | 77 | test('noop when trying to delete a directory', async ({ assert }) => { 78 | const key = `foo/bar/${string.random(6)}.txt` 79 | const contents = 'Hello world' 80 | 81 | const fdgcs = new GCSDriver({ 82 | visibility: 'public', 83 | bucket: GCS_BUCKET, 84 | credentials: GCS_KEY, 85 | }) 86 | 87 | await fdgcs.put(key, contents) 88 | 89 | /** 90 | * GCS considers it a 404 call and hence no error is raised 91 | */ 92 | await fdgcs.delete('foo/') 93 | 94 | const existsResponse = await bucket.file(key).exists() 95 | assert.isTrue(existsResponse[0]) 96 | }) 97 | }) 98 | 99 | test.group('GCS Driver | deleteAll', (group) => { 100 | group.each.setup(() => { 101 | return async () => { 102 | await bucket.deleteFiles() 103 | } 104 | }) 105 | group.each.timeout(10_000) 106 | 107 | test('delete all files matching the prefix', async ({ assert }) => { 108 | const key = `foo/${string.random(6)}.txt` 109 | const anotherKey = `${string.random(6)}.txt` 110 | const contents = 'Hello world' 111 | 112 | const fdgcs = new GCSDriver({ 113 | visibility: 'public', 114 | bucket: GCS_BUCKET, 115 | credentials: GCS_KEY, 116 | }) 117 | 118 | await fdgcs.put(key, contents) 119 | await fdgcs.put(anotherKey, contents) 120 | 121 | await fdgcs.deleteAll('foo') 122 | assert.deepEqual(await bucket.file(key).exists(), [false]) 123 | assert.deepEqual(await bucket.file(anotherKey).exists(), [true]) 124 | }) 125 | 126 | test('delete empty folders', async ({ assert }) => { 127 | const key =
`foo/${string.random(6)}.txt` 128 | const anotherKey = `foo/bar/${string.random(6)}.txt` 129 | const contents = 'Hello world' 130 | 131 | const fdgcs = new GCSDriver({ 132 | visibility: 'public', 133 | bucket: GCS_BUCKET, 134 | credentials: GCS_KEY, 135 | }) 136 | await fdgcs.put(key, contents) 137 | await fdgcs.put(anotherKey, contents) 138 | await fdgcs.delete(anotherKey) 139 | 140 | /** 141 | * Since we have deleted the only file under "foo/bar", the 142 | * "bar" directory will return an empty array of files 143 | */ 144 | const files = await bucket.getFiles({ prefix: 'foo/bar/' }) 145 | assert.lengthOf(files[0], 0) 146 | 147 | /** 148 | * Now we delete all the files within the bucket. 149 | */ 150 | await fdgcs.deleteAll('foo') 151 | const allFiles = await bucket.getFiles() 152 | assert.lengthOf(allFiles[0], 0) 153 | }) 154 | 155 | test('noop when trying to delete non-existing prefixes', async () => { 156 | const fdgcs = new GCSDriver({ 157 | visibility: 'public', 158 | bucket: GCS_BUCKET, 159 | credentials: GCS_KEY, 160 | }) 161 | await fdgcs.deleteAll('foo') 162 | }) 163 | }) 164 | -------------------------------------------------------------------------------- /tests/drivers/gcs/disk.spec.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 8 | */ 9 | 10 | import { join } from 'node:path' 11 | import { test } from '@japa/runner' 12 | import string from '@poppinss/utils/string' 13 | import { Storage } from '@google-cloud/storage' 14 | 15 | import { Disk } from '../../../src/disk.js' 16 | import * as errors from '../../../src/errors.js' 17 | import { GCSDriver } from '../../../drivers/gcs/driver.js' 18 | import { GCS_BUCKET, GCS_FINE_GRAINED_ACL_BUCKET, GCS_KEY } from './env.js' 19 | 20 | /** 21 | * Direct access to Google cloud storage bucket 22 | * via their SDK 23 | */ 24 | const bucket = new Storage({ 25 | credentials: GCS_KEY, 26 | }).bucket(GCS_BUCKET) 27 | const noUniformedAclBucket = new Storage({ 28 | credentials: GCS_KEY, 29 | }).bucket(GCS_FINE_GRAINED_ACL_BUCKET) 30 | 31 | test.group('Disk | GCS | copyFromFs', (group) => { 32 | group.each.setup(() => { 33 | return async () => { 34 | await bucket.deleteFiles() 35 | await noUniformedAclBucket.deleteFiles() 36 | } 37 | }) 38 | group.each.timeout(10_000) 39 | 40 | test('copy file from the local filesystem to GCS', async ({ fs, assert }) => { 41 | const source = `${string.random(6)}.txt` 42 | const destination = `${string.random(6)}.txt` 43 | const contents = 'Hello world' 44 | 45 | await fs.create(source, contents) 46 | 47 | const fdgcs = new GCSDriver({ 48 | visibility: 'public', 49 | bucket: GCS_BUCKET, 50 | credentials: GCS_KEY, 51 | usingUniformAcl: true, 52 | }) 53 | 54 | const disk = new Disk(fdgcs) 55 | await disk.copyFromFs(join(fs.basePath, source), destination) 56 | 57 | assert.equal(await disk.get(destination), contents) 58 | await assert.fileExists(source) 59 | }) 60 | 61 | test('throw error when source file does not exist', async ({ fs, assert }) => { 62 | const source = `${string.random(6)}.txt` 63 | const destination = `${string.random(6)}.txt` 64 | 65 | const fdgcs = new GCSDriver({ 66 | visibility: 'public', 67 | bucket: GCS_BUCKET, 68 | credentials: GCS_KEY, 69 | usingUniformAcl: true, 70 | }) 71 | 72 | const disk = new Disk(fdgcs) 73 | try { 74 | await disk.copyFromFs(join(fs.basePath,
source), destination) 75 | } catch (error) { 76 | assert.instanceOf(error, errors.E_CANNOT_WRITE_FILE) 77 | assert.equal(error.message, `Cannot write file at location "${destination}"`) 78 | assert.match(error.cause.message, /ENOENT: no such file or directory/) 79 | } 80 | }) 81 | }) 82 | 83 | test.group('Disk | GCS | moveFromFs', (group) => { 84 | group.each.setup(() => { 85 | return async () => { 86 | await bucket.deleteFiles() 87 | await noUniformedAclBucket.deleteFiles() 88 | } 89 | }) 90 | group.each.timeout(10_000) 91 | 92 | test('move file from the local filesystem to GCS', async ({ fs, assert }) => { 93 | const source = `${string.random(6)}.txt` 94 | const destination = `${string.random(6)}.txt` 95 | const contents = 'Hello world' 96 | 97 | await fs.create(source, contents) 98 | 99 | const fdgcs = new GCSDriver({ 100 | visibility: 'public', 101 | bucket: GCS_BUCKET, 102 | credentials: GCS_KEY, 103 | usingUniformAcl: true, 104 | }) 105 | 106 | const disk = new Disk(fdgcs) 107 | await disk.moveFromFs(join(fs.basePath, source), destination) 108 | 109 | assert.equal(await disk.get(destination), contents) 110 | await assert.fileNotExists(source) 111 | }) 112 | 113 | test('throw error when source file does not exist', async ({ fs, assert }) => { 114 | const source = `${string.random(6)}.txt` 115 | const destination = `${string.random(6)}.txt` 116 | 117 | const fdgcs = new GCSDriver({ 118 | visibility: 'public', 119 | bucket: GCS_BUCKET, 120 | credentials: GCS_KEY, 121 | usingUniformAcl: true, 122 | }) 123 | 124 | const disk = new Disk(fdgcs) 125 | try { 126 | await disk.moveFromFs(join(fs.basePath, source), destination) 127 | } catch (error) { 128 | assert.instanceOf(error, errors.E_CANNOT_WRITE_FILE) 129 | assert.equal(error.message, `Cannot write file at location "${destination}"`) 130 | assert.match(error.cause.message, /ENOENT: no such file or directory/) 131 | } 132 | }) 133 | }) 134 | 135 | test.group('Disk | setVisibility', (group) => { 136 | group.each.setup(() => { 137 | return async () => { 138 | await bucket.deleteFiles() 139 | await noUniformedAclBucket.deleteFiles() 140 | } 141 | }) 142 | group.each.timeout(10_000) 143 | 144 | test('set file visibility', async ({ assert }) => { 145 | const key = `${string.random(6)}.txt` 146 | const contents = 'Hello world' 147 | 148 | const fdgcs = new GCSDriver({ 149 | visibility: 'public', 150 | bucket: GCS_FINE_GRAINED_ACL_BUCKET, 151 | credentials: GCS_KEY, 152 | usingUniformAcl: true, 153 | }) 154 | await fdgcs.put(key, contents) 155 | 156 | const disk = new Disk(fdgcs) 157 | await disk.setVisibility(key, 'private') 158 | const visibility = await disk.getVisibility(key) 159 | assert.equal(visibility, 'private') 160 | }) 161 | 162 | test('wrap driver errors inside a generic error', async ({ assert }) => { 163 | const key = `${string.random(6)}.txt` 164 | const contents = 'Hello world' 165 | 166 | const fdgcs = new GCSDriver({ 167 | visibility: 'public', 168 | bucket: GCS_BUCKET, 169 | credentials: GCS_KEY, 170 | usingUniformAcl: true, 171 | }) 172 | await fdgcs.put(key, contents) 173 | 174 | const disk = new Disk(fdgcs) 175 | try { 176 | await disk.setVisibility(key, 'private') 177 | } catch (error) { 178 | assert.instanceOf(error, errors.E_CANNOT_SET_VISIBILITY) 179 | assert.equal(error.message, `Unable to set visibility for file at location "${key}"`) 180 | assert.match(error.cause.message, /Cannot update access control for an object when uniform/) 181 | } 182 | }) 183 | }) 184 |
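
The Disk tests above assert that failures from the underlying driver are wrapped into typed errors (errors.E_CANNOT_WRITE_FILE, errors.E_CANNOT_SET_VISIBILITY) with the original driver error attached as `cause`. Below is a minimal consumer-side sketch of that pattern, based only on the API exercised in these tests; the helper name and the logging are illustrative assumptions, not part of the library.

```ts
import { Disk } from '../../../src/disk.js'
import * as errors from '../../../src/errors.js'

/**
 * Illustrative helper (not part of the library): copy a local file into
 * the disk's storage and surface typed write failures, mirroring the
 * assertions made in the tests above.
 */
async function copyIntoDisk(disk: Disk, localPath: string, destination: string) {
  try {
    await disk.copyFromFs(localPath, destination)
    return true
  } catch (error) {
    if (error instanceof errors.E_CANNOT_WRITE_FILE) {
      // Disk wraps the original driver error and exposes it as "cause"
      console.error(error.message, error.cause)
      return false
    }
    throw error
  }
}
```
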
-------------------------------------------------------------------------------- /tests/drivers/gcs/env.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 8 | */ 9 | 10 | import { Env } from '@adonisjs/env' 11 | import { readFile } from 'node:fs/promises' 12 | 13 | Env.defineIdentifier('file', (value) => { 14 | return readFile(new URL(`../../../${value}`, import.meta.url), 'utf-8') 15 | }) 16 | 17 | const env = await Env.create(new URL('../../../', import.meta.url), { 18 | GCS_KEY: Env.schema.string(), 19 | GCS_BUCKET: Env.schema.string(), 20 | GCS_FINE_GRAINED_ACL_BUCKET: Env.schema.string(), 21 | }) 22 | 23 | export const GCS_BUCKET = env.get('GCS_BUCKET') 24 | export const GCS_KEY = JSON.parse(env.get('GCS_KEY')) 25 | export const GCS_FINE_GRAINED_ACL_BUCKET = env.get('GCS_FINE_GRAINED_ACL_BUCKET') 26 | -------------------------------------------------------------------------------- /tests/drivers/gcs/exists.spec.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 8 | */ 9 | 10 | import { test } from '@japa/runner' 11 | import string from '@poppinss/utils/string' 12 | import { Storage } from '@google-cloud/storage' 13 | import { GCS_BUCKET, GCS_KEY } from './env.js' 14 | import { GCSDriver } from '../../../drivers/gcs/driver.js' 15 | 16 | /** 17 | * Direct access to Google cloud storage bucket 18 | * via their SDK 19 | */ 20 | const bucket = new Storage({ 21 | credentials: GCS_KEY, 22 | }).bucket(GCS_BUCKET) 23 | 24 | test.group('GCS Driver | exists', (group) => { 25 | group.each.setup(() => { 26 | return async () => { 27 | await bucket.deleteFiles() 28 | } 29 | }) 30 | group.each.timeout(10_000) 31 | 32 | test('return true when file exists', async ({ assert }) => { 33 | const key = `${string.random(6)}.txt` 34 | const contents = 'Hello world' 35 | 36 | const fdgcs = new GCSDriver({ 37 | visibility: 'public', 38 | bucket: GCS_BUCKET, 39 | credentials: GCS_KEY, 40 | usingUniformAcl: true, 41 | }) 42 | await fdgcs.put(key, contents) 43 | 44 | assert.isTrue(await fdgcs.exists(key)) 45 | }) 46 | 47 | test('return false when file does not exist', async ({ assert }) => { 48 | const key = `${string.random(6)}.txt` 49 | 50 | const fdgcs = new GCSDriver({ 51 | visibility: 'public', 52 | bucket: GCS_BUCKET, 53 | credentials: GCS_KEY, 54 | usingUniformAcl: true, 55 | }) 56 | 57 | assert.isFalse(await fdgcs.exists(key)) 58 | }) 59 | 60 | test('return false when object is a folder', async ({ assert }) => { 61 | const key = `foo/${string.random(6)}.txt` 62 | const contents = 'Hello world' 63 | 64 | const fdgcs = new GCSDriver({ 65 | visibility: 'public', 66 | bucket: GCS_BUCKET, 67 | credentials: GCS_KEY, 68 | usingUniformAcl: true, 69 | }) 70 | 71 | await fdgcs.put(key, contents) 72 | 73 | assert.isFalse(await fdgcs.exists('foo')) 74 | }) 75 | }) 76 | -------------------------------------------------------------------------------- /tests/drivers/gcs/get.spec.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the 
LICENSE 7 | * file that was distributed with this source code. 8 | */ 9 | 10 | import getStream from 'get-stream' 11 | import { test } from '@japa/runner' 12 | import string from '@poppinss/utils/string' 13 | import { Storage } from '@google-cloud/storage' 14 | import { GCS_BUCKET, GCS_KEY } from './env.js' 15 | import { GCSDriver } from '../../../drivers/gcs/driver.js' 16 | 17 | /** 18 | * Direct access to Google cloud storage bucket 19 | * via their SDK 20 | */ 21 | const bucket = new Storage({ 22 | credentials: GCS_KEY, 23 | }).bucket(GCS_BUCKET) 24 | 25 | test.group('GCS Driver | get', (group) => { 26 | group.each.setup(() => { 27 | return async () => { 28 | await bucket.deleteFiles() 29 | } 30 | }) 31 | group.each.timeout(10_000) 32 | 33 | test('get file contents as a string', async ({ assert }) => { 34 | const key = `${string.random(6)}.txt` 35 | const contents = 'Hello world' 36 | 37 | const fdgcs = new GCSDriver({ 38 | visibility: 'public', 39 | bucket: GCS_BUCKET, 40 | credentials: GCS_KEY, 41 | usingUniformAcl: true, 42 | }) 43 | 44 | await fdgcs.put(key, contents) 45 | assert.equal(await fdgcs.get(key), contents) 46 | }) 47 | 48 | test('return error when file does not exist', async ({ assert }) => { 49 | const key = `${string.random(6)}.txt` 50 | const fdgcs = new GCSDriver({ 51 | visibility: 'public', 52 | bucket: GCS_BUCKET, 53 | credentials: GCS_KEY, 54 | usingUniformAcl: true, 55 | }) 56 | 57 | await assert.rejects(async () => { 58 | await fdgcs.get(key) 59 | }, /No such object:/) 60 | }) 61 | }) 62 | 63 | test.group('GCS Driver | getBytes', (group) => { 64 | group.each.setup(() => { 65 | return async () => { 66 | await bucket.deleteFiles() 67 | } 68 | }) 69 | group.each.timeout(10_000) 70 | 71 | test('get file contents as an arrayBuffer', async ({ assert }) => { 72 | const key = `${string.random(6)}.txt` 73 | const contents = 'Hello world' 74 | 75 | const fdgcs = new GCSDriver({ 76 | visibility: 'public', 77 | bucket: GCS_BUCKET, 78 | credentials: GCS_KEY, 79 | usingUniformAcl: true, 80 | }) 81 | 82 | await fdgcs.put(key, contents) 83 | assert.equal(new TextDecoder().decode(await fdgcs.getBytes(key)), contents) 84 | }) 85 | 86 | test('return error when file does not exist', async ({ assert }) => { 87 | const key = `${string.random(6)}.txt` 88 | const fdgcs = new GCSDriver({ 89 | visibility: 'public', 90 | bucket: GCS_BUCKET, 91 | credentials: GCS_KEY, 92 | usingUniformAcl: true, 93 | }) 94 | 95 | await assert.rejects(async () => { 96 | await fdgcs.getBytes(key) 97 | }, /No such object:/) 98 | }) 99 | }) 100 | 101 | test.group('GCS Driver | getStream', (group) => { 102 | group.each.setup(() => { 103 | return async () => { 104 | await bucket.deleteFiles() 105 | } 106 | }) 107 | group.each.timeout(10_000) 108 | 109 | test('get file contents as a stream', async ({ assert }) => { 110 | const key = `${string.random(6)}.txt` 111 | const contents = 'Hello world' 112 | 113 | const fdgcs = new GCSDriver({ 114 | visibility: 'public', 115 | bucket: GCS_BUCKET, 116 | credentials: GCS_KEY, 117 | usingUniformAcl: true, 118 | }) 119 | 120 | await fdgcs.put(key, contents) 121 | assert.equal(await getStream(await fdgcs.getStream(key)), contents) 122 | }) 123 | 124 | test('return error when file does not exist', async ({ assert }) => { 125 | const key = `${string.random(6)}.txt` 126 | const fdgcs = new GCSDriver({ 127 | visibility: 'public', 128 | bucket: GCS_BUCKET, 129 | credentials: GCS_KEY, 130 | usingUniformAcl: true, 131 | }) 132 | 133 | await assert.rejects(async () => { 134 | await 
getStream(await fdgcs.getStream(key)) 135 | }, /No such object:/) 136 | }) 137 | }) 138 | -------------------------------------------------------------------------------- /tests/drivers/gcs/get_metadata.spec.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 8 | */ 9 | 10 | import { test } from '@japa/runner' 11 | import string from '@poppinss/utils/string' 12 | import { Storage } from '@google-cloud/storage' 13 | import { GCS_BUCKET, GCS_KEY } from './env.js' 14 | import { GCSDriver } from '../../../drivers/gcs/driver.js' 15 | 16 | /** 17 | * Direct access to Google cloud storage bucket 18 | * via their SDK 19 | */ 20 | const bucket = new Storage({ 21 | credentials: GCS_KEY, 22 | }).bucket(GCS_BUCKET) 23 | 24 | test.group('GCS Driver | getMetaData', (group) => { 25 | group.each.setup(() => { 26 | return async () => { 27 | await bucket.deleteFiles() 28 | } 29 | }) 30 | group.each.timeout(10_000) 31 | 32 | test('get metaData of a file', async ({ assert }) => { 33 | const key = `${string.random(6)}.txt` 34 | const contents = 'Hello world' 35 | 36 | const fdgcs = new GCSDriver({ 37 | visibility: 'public', 38 | bucket: GCS_BUCKET, 39 | credentials: GCS_KEY, 40 | usingUniformAcl: true, 41 | }) 42 | 43 | await fdgcs.put(key, contents) 44 | const metaData = await fdgcs.getMetaData(key) 45 | 46 | assert.exists(metaData.etag) 47 | assert.isTrue(metaData.lastModified instanceof Date) 48 | assert.containsSubset(metaData, { 49 | contentLength: 11, 50 | contentType: 'text/plain', 51 | }) 52 | }) 53 | 54 | test('return error when file does not exist', async ({ assert }) => { 55 | const key = `${string.random(6)}.txt` 56 | 57 | const fdgcs = new GCSDriver({ 58 | visibility: 'public', 59 | bucket: GCS_BUCKET, 60 | credentials: GCS_KEY, 61 | usingUniformAcl: true, 62 | }) 63 | 64 | await assert.rejects(async () => { 65 | await fdgcs.getMetaData(key) 66 | }, /No such object:/) 67 | }) 68 | }) 69 | -------------------------------------------------------------------------------- /tests/drivers/gcs/list_all.spec.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code.
8 | */ 9 | 10 | import { test } from '@japa/runner' 11 | import string from '@poppinss/utils/string' 12 | import { Storage } from '@google-cloud/storage' 13 | import { GCS_BUCKET, GCS_KEY } from './env.js' 14 | import { GCSDriver } from '../../../drivers/gcs/driver.js' 15 | 16 | /** 17 | * Direct access to Google cloud storage bucket 18 | * via their SDK 19 | */ 20 | const bucket = new Storage({ 21 | credentials: GCS_KEY, 22 | }).bucket(GCS_BUCKET) 23 | 24 | test.group('GCS Driver | listAll | root dir', (group) => { 25 | group.each.setup(() => { 26 | return async () => { 27 | await bucket.deleteFiles() 28 | } 29 | }) 30 | group.each.timeout(10_000) 31 | 32 | test('list all files and top-level directories of the matching prefix', async ({ assert }) => { 33 | const fileName = `${string.random(10)}.txt` 34 | const keys = [fileName, `foo/bar/${fileName}`, `baz/${fileName}`] 35 | const contents = 'Hello world' 36 | 37 | const fdgcs = new GCSDriver({ 38 | visibility: 'public', 39 | bucket: GCS_BUCKET, 40 | credentials: GCS_KEY, 41 | }) 42 | 43 | for (const key of keys) { 44 | await fdgcs.put(key, contents) 45 | } 46 | 47 | const { objects } = await fdgcs.listAll('/') 48 | 49 | assert.includeDeepMembers(Array.from(objects), [ 50 | { 51 | isDirectory: true, 52 | isFile: false, 53 | name: 'baz', 54 | prefix: 'baz', 55 | }, 56 | { 57 | isDirectory: true, 58 | isFile: false, 59 | name: 'foo', 60 | prefix: 'foo', 61 | }, 62 | { 63 | isDirectory: false, 64 | isFile: true, 65 | name: fileName, 66 | key: fileName, 67 | }, 68 | ]) 69 | }) 70 | 71 | test('list all files recursively', async ({ assert }) => { 72 | const fileName = `${string.random(10)}.txt` 73 | const keys = [fileName, `foo/bar/${fileName}`, `baz/${fileName}`] 74 | const contents = 'Hello world' 75 | 76 | const fdgcs = new GCSDriver({ 77 | visibility: 'public', 78 | bucket: GCS_BUCKET, 79 | credentials: GCS_KEY, 80 | }) 81 | 82 | for (const key of keys) { 83 | await fdgcs.put(key, contents) 84 | } 85 | 86 | const { objects } = await fdgcs.listAll('/', { recursive: true }) 87 | assert.includeDeepMembers(Array.from(objects), [ 88 | { 89 | isDirectory: false, 90 | isFile: true, 91 | name: fileName, 92 | key: `baz/${fileName}`, 93 | }, 94 | { 95 | isDirectory: false, 96 | isFile: true, 97 | name: fileName, 98 | key: `foo/bar/${fileName}`, 99 | }, 100 | { 101 | isDirectory: false, 102 | isFile: true, 103 | name: fileName, 104 | key: fileName, 105 | }, 106 | ]) 107 | }) 108 | 109 | test('paginate recursive results', async ({ assert }) => { 110 | const fileName = `${string.random(10)}.txt` 111 | const keys = [fileName, `foo/bar/${fileName}`, `baz/${fileName}`] 112 | const contents = 'Hello world' 113 | 114 | const fdgcs = new GCSDriver({ 115 | visibility: 'public', 116 | bucket: GCS_BUCKET, 117 | credentials: GCS_KEY, 118 | }) 119 | 120 | for (const key of keys) { 121 | await fdgcs.put(key, contents) 122 | } 123 | 124 | /** 125 | * The expected result set. We compare the response to be a subset 126 | * of the expected result. 
127 | * 128 | * We use this approach over "deepEqual" because the order of objects 129 | * is not guaranteed by GCS 130 | */ 131 | const expectedResultSet = [ 132 | { 133 | isDirectory: false, 134 | isFile: true, 135 | name: fileName, 136 | key: `foo/bar/${fileName}`, 137 | }, 138 | { 139 | isDirectory: false, 140 | isFile: true, 141 | name: fileName, 142 | key: `baz/${fileName}`, 143 | }, 144 | { 145 | isDirectory: false, 146 | isFile: true, 147 | name: fileName, 148 | key: fileName, 149 | }, 150 | ] 151 | 152 | /** 153 | * Page 1 154 | */ 155 | const { objects, paginationToken } = await fdgcs.listAll('/', { 156 | recursive: true, 157 | maxResults: 1, 158 | }) 159 | assert.containsSubset(expectedResultSet, Array.from(objects)) 160 | 161 | /** 162 | * Page 2 163 | */ 164 | const { objects: page2Objects, paginationToken: page2PaginationToken } = await fdgcs.listAll( 165 | '/', 166 | { 167 | recursive: true, 168 | maxResults: 1, 169 | paginationToken, 170 | } 171 | ) 172 | assert.containsSubset(expectedResultSet, Array.from(page2Objects)) 173 | assert.notDeepEqual(Array.from(page2Objects), Array.from(objects)) 174 | 175 | /** 176 | * Page 3 177 | */ 178 | const { objects: page3Objects } = await fdgcs.listAll('/', { 179 | recursive: true, 180 | maxResults: 1, 181 | paginationToken: page2PaginationToken, 182 | }) 183 | assert.containsSubset(expectedResultSet, Array.from(page3Objects)) 184 | assert.notDeepEqual(Array.from(page3Objects), Array.from(page2Objects)) 185 | }) 186 | }) 187 | 188 | test.group('GCS Driver | listAll | nested dir', (group) => { 189 | group.each.setup(() => { 190 | return async () => { 191 | await bucket.deleteFiles() 192 | } 193 | }) 194 | group.each.timeout(10_000) 195 | 196 | test('list all files and top-level directories of the matching prefix', async ({ assert }) => { 197 | const fileName = `${string.random(10)}.txt` 198 | const keys = [fileName, `foo/bar/${fileName}`, `baz/${fileName}`] 199 | const contents = 'Hello world' 200 | 201 | const fdgcs = new GCSDriver({ 202 | visibility: 'public', 203 | bucket: GCS_BUCKET, 204 | credentials: GCS_KEY, 205 | }) 206 | 207 | for (const key of keys) { 208 | await fdgcs.put(key, contents) 209 | } 210 | 211 | const { objects } = await fdgcs.listAll('foo') 212 | assert.includeDeepMembers(Array.from(objects), [ 213 | { 214 | isDirectory: true, 215 | isFile: false, 216 | name: 'bar', 217 | prefix: 'foo/bar', 218 | }, 219 | ]) 220 | }) 221 | 222 | test('list all files recursively', async ({ assert }) => { 223 | const fileName = `${string.random(10)}.txt` 224 | const keys = [fileName, `foo/bar/${fileName}`, `baz/${fileName}`] 225 | const contents = 'Hello world' 226 | 227 | const fdgcs = new GCSDriver({ 228 | visibility: 'public', 229 | bucket: GCS_BUCKET, 230 | credentials: GCS_KEY, 231 | }) 232 | 233 | for (const key of keys) { 234 | await fdgcs.put(key, contents) 235 | } 236 | 237 | const { objects } = await fdgcs.listAll('foo', { recursive: true }) 238 | assert.includeDeepMembers(Array.from(objects), [ 239 | { 240 | isDirectory: false, 241 | isFile: true, 242 | name: fileName, 243 | key: `foo/bar/${fileName}`, 244 | }, 245 | ]) 246 | }) 247 | 248 | test('do not throw error when listing files of a non-existing directory', async ({ assert }) => { 249 | const fdgcs = new GCSDriver({ 250 | visibility: 'public', 251 | bucket: GCS_BUCKET, 252 | credentials: GCS_KEY, 253 | }) 254 | 255 | const { objects } = await fdgcs.listAll('foo', { recursive: true }) 256 | assert.deepEqual(Array.from(objects), []) 257 | }) 258 | }) 259 | 
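
The pagination tests above drive listAll with "maxResults" and feed the returned "paginationToken" back into the next call. Below is a minimal sketch of walking every page under a prefix, based only on the listAll contract exercised in these tests; the helper itself is an illustrative assumption, not part of the library.

```ts
import { GCSDriver } from '../../../drivers/gcs/driver.js'

/**
 * Illustrative helper (not part of the library): collect every object
 * under a prefix by following "paginationToken" until the driver stops
 * returning one, using the listAll() options shown in the tests above.
 */
async function listEverything(driver: GCSDriver, prefix: string) {
  let page = await driver.listAll(prefix, { recursive: true, maxResults: 100 })
  const found = [...page.objects]

  let token = page.paginationToken
  while (token) {
    page = await driver.listAll(prefix, {
      recursive: true,
      maxResults: 100,
      paginationToken: token,
    })
    found.push(...page.objects)
    token = page.paginationToken
  }

  return found
}
```
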
-------------------------------------------------------------------------------- /tests/drivers/gcs/move.spec.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 8 | */ 9 | 10 | import { test } from '@japa/runner' 11 | import string from '@poppinss/utils/string' 12 | import { Storage } from '@google-cloud/storage' 13 | import { GCS_BUCKET, GCS_FINE_GRAINED_ACL_BUCKET, GCS_KEY } from './env.js' 14 | import { GCSDriver } from '../../../drivers/gcs/driver.js' 15 | 16 | /** 17 | * Direct access to Google cloud storage bucket 18 | * via their SDK 19 | */ 20 | const bucket = new Storage({ 21 | credentials: GCS_KEY, 22 | }).bucket(GCS_BUCKET) 23 | const noUniformedAclBucket = new Storage({ 24 | credentials: GCS_KEY, 25 | }).bucket(GCS_FINE_GRAINED_ACL_BUCKET) 26 | 27 | test.group('GCS Driver | move', (group) => { 28 | group.each.setup(() => { 29 | return async () => { 30 | await bucket.deleteFiles() 31 | await noUniformedAclBucket.deleteFiles() 32 | } 33 | }) 34 | group.each.timeout(10_000) 35 | 36 | test('move file from source to the destination', async ({ assert }) => { 37 | const source = `${string.random(10)}.txt` 38 | const destination = `${string.random(10)}.txt` 39 | const contents = 'Hello world' 40 | 41 | const fdgcs = new GCSDriver({ 42 | visibility: 'public', 43 | bucket: GCS_BUCKET, 44 | credentials: GCS_KEY, 45 | usingUniformAcl: true, 46 | }) 47 | await fdgcs.put(source, contents) 48 | await fdgcs.move(source, destination) 49 | 50 | assert.equal(await fdgcs.get(destination), contents) 51 | 52 | const existsResponse = await bucket.file(source).exists() 53 | assert.isFalse(existsResponse[0]) 54 | }) 55 | 56 | test('move file from source to a nested directory', async ({ assert }) => { 57 | const source = `${string.random(10)}.txt` 58 | const destination = `foo/bar/baz/${string.random(10)}.txt` 59 | const contents = 'Hello world' 60 | 61 | const fdgcs = new GCSDriver({ 62 | visibility: 'public', 63 | bucket: GCS_BUCKET, 64 | credentials: GCS_KEY, 65 | usingUniformAcl: true, 66 | }) 67 | await fdgcs.put(source, contents) 68 | await fdgcs.move(source, destination) 69 | 70 | assert.equal(await fdgcs.get(destination), contents) 71 | 72 | const existsResponse = await bucket.file(source).exists() 73 | assert.isFalse(existsResponse[0]) 74 | }) 75 | 76 | test('return error when source file does not exist', async ({ assert }) => { 77 | const source = `${string.random(10)}.txt` 78 | const destination = `${string.random(10)}.txt` 79 | 80 | const fdgcs = new GCSDriver({ 81 | visibility: 'public', 82 | bucket: GCS_BUCKET, 83 | credentials: GCS_KEY, 84 | usingUniformAcl: true, 85 | }) 86 | await assert.rejects(async () => { 87 | await fdgcs.move(source, destination) 88 | }, /No such object:/) 89 | }) 90 | 91 | test('retain source file visibility and metadata during move', async ({ assert }) => { 92 | const source = `${string.random(10)}.txt` 93 | const destination = `${string.random(10)}.txt` 94 | const contents = 'Hello world' 95 | 96 | const fdgcs = new GCSDriver({ 97 | visibility: 'public', 98 | bucket: GCS_FINE_GRAINED_ACL_BUCKET, 99 | credentials: GCS_KEY, 100 | usingUniformAcl: false, 101 | }) 102 | await fdgcs.put(source, contents, { 103 | contentType: 'image/png', 104 | visibility: 'private', 105 | }) 106 | await fdgcs.move(source, destination) 107 | 108 | const metaData = 
await fdgcs.getMetaData(destination) 109 | const visibility = await fdgcs.getVisibility(destination) 110 | 111 | assert.equal(visibility, 'private') 112 | assert.equal(metaData.contentType, 'image/png') 113 | 114 | const existsResponse = await noUniformedAclBucket.file(source).exists() 115 | assert.isFalse(existsResponse[0]) 116 | }) 117 | }) 118 | -------------------------------------------------------------------------------- /tests/drivers/gcs/put.spec.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 8 | */ 9 | 10 | import { join } from 'node:path' 11 | import { test } from '@japa/runner' 12 | import { createReadStream } from 'node:fs' 13 | import string from '@poppinss/utils/string' 14 | import { Storage } from '@google-cloud/storage' 15 | import { GCSDriver } from '../../../drivers/gcs/driver.js' 16 | import { GCS_BUCKET, GCS_FINE_GRAINED_ACL_BUCKET, GCS_KEY } from './env.js' 17 | 18 | /** 19 | * Direct access to Google cloud storage bucket 20 | * via their SDK 21 | */ 22 | const bucket = new Storage({ 23 | credentials: GCS_KEY, 24 | }).bucket(GCS_BUCKET) 25 | const noUniformedAclBucket = new Storage({ 26 | credentials: GCS_KEY, 27 | }).bucket(GCS_FINE_GRAINED_ACL_BUCKET) 28 | 29 | test.group('GCS Driver | put', (group) => { 30 | group.each.setup(() => { 31 | return async () => { 32 | await bucket.deleteFiles() 33 | await noUniformedAclBucket.deleteFiles() 34 | } 35 | }) 36 | group.each.timeout(10_000) 37 | 38 | test('create file at the destination', async ({ assert }) => { 39 | const key = `${string.random(10)}.txt` 40 | const contents = 'Hello world' 41 | 42 | const fdfs = new GCSDriver({ 43 | visibility: 'public', 44 | bucket: GCS_BUCKET, 45 | credentials: GCS_KEY, 46 | usingUniformAcl: true, 47 | }) 48 | 49 | await fdfs.put(key, contents) 50 | 51 | /** 52 | * Verify put operation 53 | */ 54 | const response = await bucket.file(key).download() 55 | assert.equal(response[0].toString(), contents) 56 | }) 57 | 58 | test('create file from Uint8Array', async ({ assert }) => { 59 | const key = `${string.random(10)}.txt` 60 | const contents = 'Hello world' 61 | 62 | const fdfs = new GCSDriver({ 63 | visibility: 'public', 64 | bucket: GCS_BUCKET, 65 | credentials: GCS_KEY, 66 | usingUniformAcl: true, 67 | }) 68 | 69 | await fdfs.put(key, new TextEncoder().encode(contents)) 70 | 71 | /** 72 | * Verify put operation 73 | */ 74 | const response = await bucket.file(key).download() 75 | assert.equal(response[0].toString(), contents) 76 | }) 77 | 78 | test('overwrite contents of existing file', async ({ assert }) => { 79 | const key = `${string.random(10)}.txt` 80 | const contents = 'Hello world' 81 | const newContents = 'Hi world' 82 | 83 | const fdfs = new GCSDriver({ 84 | visibility: 'public', 85 | bucket: GCS_BUCKET, 86 | credentials: GCS_KEY, 87 | usingUniformAcl: true, 88 | }) 89 | 90 | await fdfs.put(key, contents) 91 | await fdfs.put(key, newContents) 92 | 93 | /** 94 | * Verify put operation 95 | */ 96 | const response = await bucket.file(key).download() 97 | assert.equal(response[0].toString(), newContents) 98 | }) 99 | 100 | test('create files at a nested destination', async ({ assert }) => { 101 | const key = `users/1/${string.random(10)}.txt` 102 | const contents = 'Hello world' 103 | 104 | const fdfs = new GCSDriver({ 105 | visibility: 'public', 106 | bucket: 
GCS_BUCKET, 107 | credentials: GCS_KEY, 108 | usingUniformAcl: true, 109 | }) 110 | 111 | await fdfs.put(key, contents) 112 | 113 | /** 114 | * Verify put operation 115 | */ 116 | const response = await bucket.file(key).download() 117 | assert.equal(response[0].toString(), contents) 118 | }) 119 | 120 | test('create file with custom metadata', async ({ assert }) => { 121 | const key = `${string.random(10)}.txt` 122 | const contents = 'Hello world' 123 | 124 | const fdfs = new GCSDriver({ 125 | visibility: 'public', 126 | bucket: GCS_BUCKET, 127 | credentials: GCS_KEY, 128 | usingUniformAcl: true, 129 | }) 130 | 131 | await fdfs.put(key, contents, { 132 | contentType: 'image/png', 133 | cacheControl: 'no-cache', 134 | contentEncoding: 'binary', 135 | }) 136 | 137 | /** 138 | * Verify put operation 139 | */ 140 | const response = await bucket.file(key).getMetadata() 141 | assert.equal(response[0].contentType, 'image/png') 142 | assert.equal(response[0].contentEncoding, 'binary') 143 | assert.equal(response[0].cacheControl, 'no-cache') 144 | }) 145 | 146 | test('create file with local visibility', async ({ assert }) => { 147 | const key = `${string.random(10)}.txt` 148 | const contents = 'Hello world' 149 | 150 | const fdfs = new GCSDriver({ 151 | visibility: 'public', 152 | bucket: GCS_FINE_GRAINED_ACL_BUCKET, 153 | credentials: GCS_KEY, 154 | usingUniformAcl: false, 155 | }) 156 | 157 | await fdfs.put(key, contents, { 158 | contentType: 'image/png', 159 | cacheControl: 'no-cache', 160 | contentEncoding: 'binary', 161 | }) 162 | 163 | /** 164 | * Verify put operation 165 | */ 166 | const response = await noUniformedAclBucket.file(key).isPublic() 167 | assert.isTrue(response[0]) 168 | }) 169 | 170 | test('create file with inline local visibility', async ({ assert }) => { 171 | const key = `${string.random(10)}.txt` 172 | const contents = 'Hello world' 173 | 174 | const fdfs = new GCSDriver({ 175 | visibility: 'public', 176 | bucket: GCS_FINE_GRAINED_ACL_BUCKET, 177 | credentials: GCS_KEY, 178 | usingUniformAcl: false, 179 | }) 180 | 181 | await fdfs.put(key, contents, { 182 | contentType: 'image/png', 183 | cacheControl: 'no-cache', 184 | contentEncoding: 'binary', 185 | visibility: 'private', 186 | }) 187 | 188 | /** 189 | * Verify put operation 190 | */ 191 | const response = await noUniformedAclBucket.file(key).isPublic() 192 | assert.isFalse(response[0]) 193 | }) 194 | }) 195 | 196 | test.group('GCS Driver | putStream', (group) => { 197 | group.each.setup(() => { 198 | return async () => { 199 | await bucket.deleteFiles() 200 | await noUniformedAclBucket.deleteFiles() 201 | } 202 | }) 203 | group.each.timeout(10_000) 204 | 205 | test('create file from readable stream', async ({ fs, assert }) => { 206 | const key = `${string.random(10)}.txt` 207 | const contents = 'Hello world' 208 | 209 | const fdfs = new GCSDriver({ 210 | visibility: 'public', 211 | bucket: GCS_BUCKET, 212 | credentials: GCS_KEY, 213 | usingUniformAcl: true, 214 | }) 215 | 216 | await fs.create(key, contents) 217 | await fdfs.putStream(key, createReadStream(join(fs.basePath, key))) 218 | 219 | /** 220 | * Verify put operation 221 | */ 222 | const response = await bucket.file(key).download() 223 | assert.equal(response[0].toString(), contents) 224 | }) 225 | 226 | test('create files at a nested destination', async ({ fs, assert }) => { 227 | const key = `users/1/${string.random(10)}.txt` 228 | const contents = 'Hello world' 229 | 230 | const fdfs = new GCSDriver({ 231 | visibility: 'public', 232 | bucket: GCS_BUCKET, 233 
| credentials: GCS_KEY, 234 | usingUniformAcl: true, 235 | }) 236 | 237 | await fs.create(key, contents) 238 | await fdfs.putStream(key, createReadStream(join(fs.basePath, key))) 239 | 240 | /** 241 | * Verify put operation 242 | */ 243 | const response = await bucket.file(key).download() 244 | assert.equal(response[0].toString(), contents) 245 | }) 246 | 247 | test('throw error when source stream returns an error', async ({ fs, assert }) => { 248 | const key = `users/1/${string.random(10)}.txt` 249 | 250 | const fdfs = new GCSDriver({ 251 | visibility: 'public', 252 | bucket: GCS_BUCKET, 253 | credentials: GCS_KEY, 254 | usingUniformAcl: true, 255 | }) 256 | 257 | await assert.rejects(async () => { 258 | await fdfs.putStream(key, createReadStream(join(fs.basePath, key))) 259 | }, /ENOENT: no such file or directory/) 260 | }) 261 | }) 262 | -------------------------------------------------------------------------------- /tests/drivers/gcs/url_generation.spec.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 8 | */ 9 | 10 | import got from 'got' 11 | import { test } from '@japa/runner' 12 | import string from '@poppinss/utils/string' 13 | import { Storage } from '@google-cloud/storage' 14 | import { GCSDriver } from '../../../drivers/gcs/driver.js' 15 | import { GCS_BUCKET, GCS_FINE_GRAINED_ACL_BUCKET, GCS_KEY } from './env.js' 16 | 17 | /** 18 | * Direct access to Google cloud storage bucket 19 | * via their SDK 20 | */ 21 | const bucket = new Storage({ 22 | credentials: GCS_KEY, 23 | }).bucket(GCS_BUCKET) 24 | const noUniformedAclBucket = new Storage({ 25 | credentials: GCS_KEY, 26 | }).bucket(GCS_FINE_GRAINED_ACL_BUCKET) 27 | 28 | test.group('GCS Driver | getUrl', (group) => { 29 | group.each.setup(() => { 30 | return async () => { 31 | await bucket.deleteFiles() 32 | await noUniformedAclBucket.deleteFiles() 33 | } 34 | }) 35 | group.each.timeout(10_000) 36 | 37 | test('get public URL of a file', async ({ assert }) => { 38 | const key = `${string.random(6)}.txt` 39 | 40 | const fdgcs = new GCSDriver({ 41 | visibility: 'public', 42 | bucket: GCS_FINE_GRAINED_ACL_BUCKET, 43 | credentials: GCS_KEY, 44 | usingUniformAcl: false, 45 | }) 46 | 47 | await fdgcs.put(key, 'hello world') 48 | 49 | const fileURL = await fdgcs.getUrl(key) 50 | assert.equal(fileURL, `https://storage.googleapis.com/${GCS_FINE_GRAINED_ACL_BUCKET}/${key}`) 51 | 52 | const fileContents = await got.get(fileURL) 53 | assert.equal(fileContents.body, 'hello world') 54 | }) 55 | 56 | test('use custom implementation for generating public URL', async ({ assert }) => { 57 | const key = `${string.random(6)}.txt` 58 | 59 | const fdgcs = new GCSDriver({ 60 | visibility: 'public', 61 | bucket: GCS_BUCKET, 62 | credentials: GCS_KEY, 63 | usingUniformAcl: true, 64 | urlBuilder: { 65 | async generateURL(fileKey, fileBucket) { 66 | return `https://cdn.example.com/${fileBucket}/${fileKey}` 67 | }, 68 | }, 69 | }) 70 | 71 | const fileURL = await fdgcs.getUrl(key) 72 | assert.equal(fileURL, `https://cdn.example.com/${GCS_BUCKET}/${key}`) 73 | }) 74 | }) 75 | 76 | test.group('GCS Driver | getSignedUrl', (group) => { 77 | group.each.setup(() => { 78 | return async () => { 79 | await bucket.deleteFiles() 80 | await noUniformedAclBucket.deleteFiles() 81 | } 82 | }) 83 | group.each.timeout(10_000) 84 | 85 | test('get signed URL 
of a file', async ({ assert }) => { 86 | const key = `${string.random(6)}.txt` 87 | 88 | const fdgcs = new GCSDriver({ 89 | visibility: 'public', 90 | bucket: GCS_BUCKET, 91 | credentials: GCS_KEY, 92 | usingUniformAcl: true, 93 | }) 94 | 95 | await fdgcs.put(key, 'hello world') 96 | 97 | const fileURL = new URL(await fdgcs.getSignedUrl(key)) 98 | assert.equal(fileURL.pathname, `/${GCS_BUCKET}/${key}`) 99 | assert.isTrue(fileURL.searchParams.has('Signature')) 100 | assert.isTrue(fileURL.searchParams.has('Expires')) 101 | 102 | const fileContents = await got.get(fileURL) 103 | assert.equal(fileContents.body, 'hello world') 104 | }) 105 | 106 | test('define content type for the file', async ({ assert }) => { 107 | const key = `${string.random(6)}.txt` 108 | 109 | const fdgcs = new GCSDriver({ 110 | visibility: 'public', 111 | bucket: GCS_BUCKET, 112 | credentials: GCS_KEY, 113 | usingUniformAcl: true, 114 | }) 115 | 116 | const fileURL = new URL( 117 | await fdgcs.getSignedUrl(key, { 118 | contentType: 'image/png', 119 | }) 120 | ) 121 | 122 | assert.equal(fileURL.searchParams.get('response-content-type'), 'image/png') 123 | }) 124 | 125 | test('define content disposition for the file', async ({ assert }) => { 126 | const key = `${string.random(6)}.txt` 127 | 128 | const fdgcs = new GCSDriver({ 129 | visibility: 'public', 130 | bucket: GCS_BUCKET, 131 | credentials: GCS_KEY, 132 | usingUniformAcl: true, 133 | }) 134 | 135 | const fileURL = new URL( 136 | await fdgcs.getSignedUrl(key, { 137 | contentDisposition: 'attachment', 138 | }) 139 | ) 140 | 141 | assert.equal(fileURL.searchParams.get('response-content-disposition'), 'attachment') 142 | }) 143 | 144 | test('use custom implementation for generating signed URL', async ({ assert }) => { 145 | const key = `${string.random(6)}.txt` 146 | 147 | const fdgcs = new GCSDriver({ 148 | visibility: 'public', 149 | bucket: GCS_BUCKET, 150 | credentials: GCS_KEY, 151 | usingUniformAcl: true, 152 | urlBuilder: { 153 | async generateSignedURL(fileKey, fileBucket, options, storage) { 154 | const response = await storage 155 | .bucket(fileBucket) 156 | .file(fileKey) 157 | .getSignedUrl({ 158 | ...options, 159 | cname: 'https://cdn.example.com', 160 | }) 161 | return response[0] 162 | }, 163 | }, 164 | }) 165 | 166 | const fileURL = new URL(await fdgcs.getSignedUrl(key)) 167 | assert.equal(fileURL.host, 'cdn.example.com') 168 | }) 169 | }) 170 | -------------------------------------------------------------------------------- /tests/drivers/gcs/visibility.spec.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 
8 | */ 9 | 10 | import { test } from '@japa/runner' 11 | import string from '@poppinss/utils/string' 12 | import { Storage } from '@google-cloud/storage' 13 | import { GCSDriver } from '../../../drivers/gcs/driver.js' 14 | import { GCS_BUCKET, GCS_FINE_GRAINED_ACL_BUCKET, GCS_KEY } from './env.js' 15 | 16 | /** 17 | * Direct access to Google cloud storage bucket 18 | * via their SDK 19 | */ 20 | const bucket = new Storage({ 21 | credentials: GCS_KEY, 22 | }).bucket(GCS_BUCKET) 23 | const noUniformedAclBucket = new Storage({ 24 | credentials: GCS_KEY, 25 | }).bucket(GCS_FINE_GRAINED_ACL_BUCKET) 26 | 27 | test.group('GCS Driver | visibility | uniform ACL', (group) => { 28 | group.each.setup(() => { 29 | return async () => { 30 | await bucket.deleteFiles() 31 | await noUniformedAclBucket.deleteFiles() 32 | } 33 | }) 34 | group.each.timeout(10_000) 35 | 36 | test('get visibility of a file', async ({ assert }) => { 37 | const key = `${string.random(6)}.txt` 38 | 39 | const fdgcs = new GCSDriver({ 40 | visibility: 'public', 41 | bucket: GCS_BUCKET, 42 | credentials: GCS_KEY, 43 | usingUniformAcl: true, 44 | }) 45 | 46 | const visibility = await fdgcs.getVisibility(key) 47 | assert.equal(visibility, 'private') 48 | }) 49 | 50 | test('throw error when trying to update visibility of a file', async ({ assert }) => { 51 | const key = `${string.random(6)}.txt` 52 | 53 | const fdgcs = new GCSDriver({ 54 | visibility: 'public', 55 | bucket: GCS_BUCKET, 56 | credentials: GCS_KEY, 57 | usingUniformAcl: true, 58 | }) 59 | 60 | await fdgcs.put(key, 'hello world') 61 | 62 | await assert.rejects(async () => { 63 | await fdgcs.setVisibility(key, 'public') 64 | }, /Cannot update access control for an object when uniform bucket-level access is enabled/) 65 | }) 66 | }) 67 | 68 | test.group('GCS Driver | visibility', (group) => { 69 | group.each.setup(() => { 70 | return async () => { 71 | await bucket.deleteFiles() 72 | await noUniformedAclBucket.deleteFiles() 73 | } 74 | }) 75 | group.each.timeout(10_000) 76 | 77 | test('get visibility of a file', async ({ assert }) => { 78 | const key = `${string.random(6)}.txt` 79 | 80 | const fdgcs = new GCSDriver({ 81 | visibility: 'public', 82 | bucket: GCS_FINE_GRAINED_ACL_BUCKET, 83 | credentials: GCS_KEY, 84 | usingUniformAcl: false, 85 | }) 86 | 87 | await fdgcs.put(key, 'hello world') 88 | 89 | const visibility = await fdgcs.getVisibility(key) 90 | assert.equal(visibility, 'public') 91 | }) 92 | 93 | test('make file private', async ({ assert }) => { 94 | const key = `${string.random(6)}.txt` 95 | 96 | const fdgcs = new GCSDriver({ 97 | visibility: 'public', 98 | bucket: GCS_FINE_GRAINED_ACL_BUCKET, 99 | credentials: GCS_KEY, 100 | usingUniformAcl: false, 101 | }) 102 | 103 | await fdgcs.put(key, 'hello world', { 104 | cacheControl: 'no-cache', 105 | }) 106 | assert.equal(await fdgcs.getVisibility(key), 'public') 107 | 108 | await fdgcs.setVisibility(key, 'private') 109 | assert.equal(await fdgcs.getVisibility(key), 'private') 110 | }) 111 | 112 | test('make file public', async ({ assert }) => { 113 | const key = `${string.random(6)}.txt` 114 | 115 | const fdgcs = new GCSDriver({ 116 | visibility: 'private', 117 | bucket: GCS_FINE_GRAINED_ACL_BUCKET, 118 | credentials: GCS_KEY, 119 | usingUniformAcl: false, 120 | }) 121 | 122 | await fdgcs.put(key, 'hello world', { 123 | cacheControl: 'no-cache', 124 | }) 125 | assert.equal(await fdgcs.getVisibility(key), 'private') 126 | 127 | await fdgcs.setVisibility(key, 'public') 128 | assert.equal(await fdgcs.getVisibility(key), 
'public') 129 | }) 130 | 131 | test('throw error when trying to update visibility of a non-existing file', async ({ 132 | assert, 133 | }) => { 134 | const key = `${string.random(6)}.txt` 135 | 136 | const fdgcs = new GCSDriver({ 137 | visibility: 'public', 138 | bucket: GCS_FINE_GRAINED_ACL_BUCKET, 139 | credentials: GCS_KEY, 140 | usingUniformAcl: false, 141 | }) 142 | 143 | await assert.rejects(async () => { 144 | await fdgcs.setVisibility(key, 'public') 145 | }, /No such object/) 146 | }) 147 | }) 148 | -------------------------------------------------------------------------------- /tests/drivers/s3/copy.spec.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 8 | */ 9 | 10 | import { test } from '@japa/runner' 11 | import string from '@poppinss/utils/string' 12 | import { S3Client } from '@aws-sdk/client-s3' 13 | 14 | import { S3Driver } from '../../../drivers/s3/driver.js' 15 | import { 16 | S3_REGION, 17 | S3_BUCKET, 18 | S3_ENDPOINT, 19 | AWS_ACCESS_KEY, 20 | AWS_ACCESS_SECRET, 21 | SUPPORTS_ACL, 22 | } from './env.js' 23 | import { deleteS3Objects } from '../../helpers.js' 24 | 25 | /** 26 | * Direct access to S3 client via their SDK 27 | */ 28 | const client = new S3Client({ 29 | credentials: { 30 | accessKeyId: AWS_ACCESS_KEY, 31 | secretAccessKey: AWS_ACCESS_SECRET, 32 | }, 33 | endpoint: S3_ENDPOINT, 34 | region: S3_REGION, 35 | }) 36 | 37 | test.group('S3 Driver | copy', (group) => { 38 | group.each.setup(() => { 39 | return async () => { 40 | await deleteS3Objects(client, S3_BUCKET, '/') 41 | } 42 | }) 43 | group.each.timeout(10_000) 44 | 45 | test('copy file from source to the destination', async ({ assert }) => { 46 | const source = `${string.random(6)}.txt` 47 | const destination = `${string.random(6)}.txt` 48 | const contents = 'Hello world' 49 | 50 | const s3fs = new S3Driver({ 51 | visibility: 'public', 52 | client: client, 53 | bucket: S3_BUCKET, 54 | supportsACL: SUPPORTS_ACL, 55 | }) 56 | await s3fs.put(source, contents) 57 | await s3fs.copy(source, destination) 58 | 59 | assert.equal(await s3fs.get(destination), contents) 60 | }) 61 | 62 | test('copy file from source to a nested directory', async ({ assert }) => { 63 | const source = `${string.random(6)}.txt` 64 | const destination = `foo/bar/baz/${string.random(6)}.txt` 65 | const contents = 'Hello world' 66 | 67 | const s3fs = new S3Driver({ 68 | visibility: 'public', 69 | client: client, 70 | bucket: S3_BUCKET, 71 | supportsACL: SUPPORTS_ACL, 72 | }) 73 | await s3fs.put(source, contents) 74 | await s3fs.copy(source, destination) 75 | 76 | assert.equal(await s3fs.get(destination), contents) 77 | }) 78 | 79 | test('return error when source file does not exist', async ({ assert }) => { 80 | const source = `${string.random(6)}.txt` 81 | const destination = `${string.random(6)}.txt` 82 | 83 | const s3fs = new S3Driver({ 84 | visibility: 'public', 85 | client: client, 86 | bucket: S3_BUCKET, 87 | supportsACL: SUPPORTS_ACL, 88 | }) 89 | await assert.rejects(async () => { 90 | await s3fs.copy(source, destination) 91 | }, /UnknownError|The specified key does not exist/) 92 | }) 93 | 94 | test('retain source file metadata during copy', async ({ assert }) => { 95 | const source = `${string.random(10)}.txt` 96 | const destination = `${string.random(10)}.txt` 97 | const contents = 'Hello world' 98 | 99 | 
const s3fs = new S3Driver({ 100 | visibility: 'public', 101 | client: client, 102 | bucket: S3_BUCKET, 103 | supportsACL: SUPPORTS_ACL, 104 | }) 105 | 106 | await s3fs.put(source, contents, { 107 | contentType: 'image/png', 108 | }) 109 | 110 | await s3fs.copy(source, destination) 111 | const metaData = await s3fs.getMetaData(destination) 112 | assert.equal(metaData.contentType, 'image/png') 113 | 114 | assert.isTrue(await s3fs.exists(source)) 115 | }) 116 | 117 | test('retain source file visibility during copy', async ({ assert }) => { 118 | const source = `${string.random(10)}.txt` 119 | const destination = `${string.random(10)}.txt` 120 | const contents = 'Hello world' 121 | 122 | const s3fs = new S3Driver({ 123 | visibility: 'public', 124 | client: client, 125 | bucket: S3_BUCKET, 126 | supportsACL: SUPPORTS_ACL, 127 | }) 128 | 129 | await s3fs.put(source, contents, { 130 | contentType: 'image/png', 131 | visibility: 'private', 132 | }) 133 | 134 | await s3fs.copy(source, destination) 135 | assert.equal(await s3fs.getVisibility(destination), 'private') 136 | 137 | assert.isTrue(await s3fs.exists(source)) 138 | }).skip(!SUPPORTS_ACL, 'Service does not support ACL. Hence, we cannot control file visibility') 139 | }) 140 | -------------------------------------------------------------------------------- /tests/drivers/s3/delete.spec.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 8 | */ 9 | 10 | import { test } from '@japa/runner' 11 | import string from '@poppinss/utils/string' 12 | import { S3Client } from '@aws-sdk/client-s3' 13 | 14 | import { S3Driver } from '../../../drivers/s3/driver.js' 15 | import { 16 | S3_REGION, 17 | S3_BUCKET, 18 | S3_ENDPOINT, 19 | AWS_ACCESS_KEY, 20 | AWS_ACCESS_SECRET, 21 | SUPPORTS_ACL, 22 | } from './env.js' 23 | import { deleteS3Objects } from '../../helpers.js' 24 | 25 | /** 26 | * Direct access to S3 client via their SDK 27 | */ 28 | const client = new S3Client({ 29 | credentials: { 30 | accessKeyId: AWS_ACCESS_KEY, 31 | secretAccessKey: AWS_ACCESS_SECRET, 32 | }, 33 | endpoint: S3_ENDPOINT, 34 | region: S3_REGION, 35 | }) 36 | 37 | test.group('S3 Driver | delete', (group) => { 38 | group.each.setup(() => { 39 | return async () => { 40 | await deleteS3Objects(client, S3_BUCKET, '/') 41 | } 42 | }) 43 | group.each.timeout(10_000) 44 | 45 | test('delete file', async ({ assert }) => { 46 | const key = `${string.random(6)}.txt` 47 | const contents = 'Hello world' 48 | 49 | const s3fs = new S3Driver({ 50 | visibility: 'public', 51 | client: client, 52 | bucket: S3_BUCKET, 53 | supportsACL: SUPPORTS_ACL, 54 | }) 55 | await s3fs.put(key, contents) 56 | await s3fs.delete(key) 57 | 58 | assert.isFalse(await s3fs.exists(key)) 59 | }) 60 | 61 | test('delete file at nested path', async ({ assert }) => { 62 | const key = `foo/bar/${string.random(6)}.txt` 63 | const contents = 'Hello world' 64 | 65 | const s3fs = new S3Driver({ 66 | visibility: 'public', 67 | client: client, 68 | bucket: S3_BUCKET, 69 | supportsACL: SUPPORTS_ACL, 70 | }) 71 | await s3fs.put(key, contents) 72 | await s3fs.delete(key) 73 | 74 | assert.isFalse(await s3fs.exists(key)) 75 | }) 76 | 77 | test('noop when trying to delete a non-existing file', async ({ assert }) => { 78 | const key = `foo/bar/${string.random(6)}.txt` 79 | const s3fs = new S3Driver({ 80 | visibility: 
'public', 81 | client: client, 82 | bucket: S3_BUCKET, 83 | supportsACL: SUPPORTS_ACL, 84 | }) 85 | await s3fs.delete(key) 86 | 87 | assert.isFalse(await s3fs.exists(key)) 88 | }) 89 | 90 | test('noop when trying to delete a directory', async ({ assert }) => { 91 | const key = `foo/bar/${string.random(6)}.txt` 92 | const contents = 'Hello world' 93 | 94 | const s3fs = new S3Driver({ 95 | visibility: 'public', 96 | client: client, 97 | bucket: S3_BUCKET, 98 | supportsACL: SUPPORTS_ACL, 99 | }) 100 | 101 | await s3fs.put(key, contents) 102 | 103 | /** 104 | * S3 considers this a 404 and hence no error is raised 105 | */ 106 | await s3fs.delete('foo/') 107 | assert.isTrue(await s3fs.exists(key)) 108 | }) 109 | }) 110 | 111 | test.group('S3 Driver | deleteAll', (group) => { 112 | group.each.setup(() => { 113 | return async () => { 114 | await deleteS3Objects(client, S3_BUCKET, '/') 115 | } 116 | }) 117 | group.each.timeout(10_000) 118 | 119 | test('delete all files matching the prefix', async ({ assert }) => { 120 | const key = `foo/${string.random(6)}.txt` 121 | const anotherKey = `${string.random(6)}.txt` 122 | const contents = 'Hello world' 123 | 124 | const s3fs = new S3Driver({ 125 | visibility: 'public', 126 | client: client, 127 | bucket: S3_BUCKET, 128 | supportsACL: SUPPORTS_ACL, 129 | }) 130 | 131 | await s3fs.put(key, contents) 132 | await s3fs.put(anotherKey, contents) 133 | 134 | await s3fs.deleteAll('foo') 135 | assert.equal(await s3fs.exists(key), false) 136 | assert.equal(await s3fs.exists(anotherKey), true) 137 | }) 138 | 139 | test('delete empty folders', async ({ assert }) => { 140 | const key = `foo/${string.random(6)}.txt` 141 | const anotherKey = `foo/bar/${string.random(6)}.txt` 142 | const contents = 'Hello world' 143 | 144 | const s3fs = new S3Driver({ 145 | visibility: 'public', 146 | client: client, 147 | bucket: S3_BUCKET, 148 | supportsACL: SUPPORTS_ACL, 149 | }) 150 | await s3fs.put(key, contents) 151 | await s3fs.put(anotherKey, contents) 152 | await s3fs.delete(anotherKey) 153 | 154 | /** 155 | * Since we have deleted the only file inside "foo/bar", the 156 | * "bar" directory will return an empty array of files 157 | */ 158 | const files = await s3fs.listAll('foo/bar/') 159 | assert.lengthOf(Array.from(files.objects), 0) 160 | 161 | /** 162 | * Now we delete all the remaining files within the bucket. 163 | */ 164 | await s3fs.deleteAll('foo') 165 | const allFiles = await s3fs.listAll('/') 166 | assert.lengthOf(Array.from(allFiles.objects), 0) 167 | }) 168 | 169 | test('noop when trying to delete non-existing prefixes', async () => { 170 | const s3fs = new S3Driver({ 171 | visibility: 'public', 172 | client: client, 173 | bucket: S3_BUCKET, 174 | supportsACL: SUPPORTS_ACL, 175 | }) 176 | await s3fs.deleteAll('foo') 177 | }) 178 | }) 179 | -------------------------------------------------------------------------------- /tests/drivers/s3/env.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code.
8 | */ 9 | 10 | import { Env } from '@adonisjs/env' 11 | 12 | const env = await Env.create(new URL('../../../', import.meta.url), { 13 | S3_SERVICE: Env.schema.enum(['r2', 'do'] as const), 14 | S3_BUCKET: Env.schema.string(), 15 | S3_ACCESS_KEY: Env.schema.string(), 16 | S3_ACCESS_SECRET: Env.schema.string(), 17 | S3_ENDPOINT: Env.schema.string(), 18 | S3_REGION: Env.schema.string(), 19 | S3_CDN_URL: Env.schema.string(), 20 | }) 21 | 22 | export const S3_SERVICE = env.get('S3_SERVICE') 23 | export const SUPPORTS_ACL = S3_SERVICE !== 'r2' 24 | export const S3_BUCKET = env.get('S3_BUCKET') 25 | export const S3_CDN_URL = env.get('S3_CDN_URL') 26 | export const S3_REGION = env.get('S3_REGION') 27 | export const S3_ENDPOINT = env.get('S3_ENDPOINT') 28 | export const AWS_ACCESS_KEY = env.get('S3_ACCESS_KEY') 29 | export const AWS_ACCESS_SECRET = env.get('S3_ACCESS_SECRET') 30 | -------------------------------------------------------------------------------- /tests/drivers/s3/exists.spec.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 8 | */ 9 | 10 | import { test } from '@japa/runner' 11 | import string from '@poppinss/utils/string' 12 | import { S3Client } from '@aws-sdk/client-s3' 13 | 14 | import { S3Driver } from '../../../drivers/s3/driver.js' 15 | import { 16 | S3_REGION, 17 | S3_BUCKET, 18 | S3_ENDPOINT, 19 | AWS_ACCESS_KEY, 20 | AWS_ACCESS_SECRET, 21 | SUPPORTS_ACL, 22 | } from './env.js' 23 | import { deleteS3Objects } from '../../helpers.js' 24 | 25 | /** 26 | * Direct access to S3 client via their SDK 27 | */ 28 | const client = new S3Client({ 29 | credentials: { 30 | accessKeyId: AWS_ACCESS_KEY, 31 | secretAccessKey: AWS_ACCESS_SECRET, 32 | }, 33 | endpoint: S3_ENDPOINT, 34 | region: S3_REGION, 35 | }) 36 | 37 | test.group('S3 Driver | exists', (group) => { 38 | group.each.setup(() => { 39 | return async () => { 40 | await deleteS3Objects(client, S3_BUCKET, '/') 41 | } 42 | }) 43 | group.each.timeout(10_000) 44 | 45 | test('return true when file exists', async ({ assert }) => { 46 | const key = `${string.random(6)}.txt` 47 | const contents = 'Hello world' 48 | 49 | const s3fs = new S3Driver({ 50 | visibility: 'public', 51 | client: client, 52 | bucket: S3_BUCKET, 53 | supportsACL: SUPPORTS_ACL, 54 | }) 55 | await s3fs.put(key, contents) 56 | 57 | assert.isTrue(await s3fs.exists(key)) 58 | }) 59 | 60 | test('return false when file does not exist', async ({ assert }) => { 61 | const key = `${string.random(6)}.txt` 62 | 63 | const s3fs = new S3Driver({ 64 | visibility: 'public', 65 | client: client, 66 | bucket: S3_BUCKET, 67 | supportsACL: SUPPORTS_ACL, 68 | }) 69 | 70 | assert.isFalse(await s3fs.exists(key)) 71 | }) 72 | 73 | test('return false when object is a folder', async ({ assert }) => { 74 | const key = `foo/${string.random(6)}.txt` 75 | const contents = 'Hello world' 76 | 77 | const s3fs = new S3Driver({ 78 | visibility: 'public', 79 | client: client, 80 | bucket: S3_BUCKET, 81 | supportsACL: SUPPORTS_ACL, 82 | }) 83 | 84 | await s3fs.put(key, contents) 85 | 86 | assert.isFalse(await s3fs.exists('foo')) 87 | }) 88 | }) 89 | -------------------------------------------------------------------------------- /tests/drivers/s3/get.spec.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) 
FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 8 | */ 9 | 10 | import getStream from 'get-stream' 11 | import { test } from '@japa/runner' 12 | import string from '@poppinss/utils/string' 13 | import { S3Client } from '@aws-sdk/client-s3' 14 | 15 | import { S3Driver } from '../../../drivers/s3/driver.js' 16 | import { 17 | S3_REGION, 18 | S3_BUCKET, 19 | S3_ENDPOINT, 20 | AWS_ACCESS_KEY, 21 | AWS_ACCESS_SECRET, 22 | SUPPORTS_ACL, 23 | } from './env.js' 24 | import { deleteS3Objects } from '../../helpers.js' 25 | 26 | /** 27 | * Direct access to S3 client via their SDK 28 | */ 29 | const client = new S3Client({ 30 | credentials: { 31 | accessKeyId: AWS_ACCESS_KEY, 32 | secretAccessKey: AWS_ACCESS_SECRET, 33 | }, 34 | endpoint: S3_ENDPOINT, 35 | region: S3_REGION, 36 | }) 37 | 38 | test.group('S3 Driver | get', (group) => { 39 | group.each.setup(() => { 40 | return async () => { 41 | await deleteS3Objects(client, S3_BUCKET, '/') 42 | } 43 | }) 44 | group.each.timeout(10_000) 45 | 46 | test('get file contents as a string', async ({ assert }) => { 47 | const key = `${string.random(6)}.txt` 48 | const contents = 'Hello world' 49 | 50 | const s3fs = new S3Driver({ 51 | visibility: 'public', 52 | client: client, 53 | bucket: S3_BUCKET, 54 | supportsACL: SUPPORTS_ACL, 55 | }) 56 | 57 | await s3fs.put(key, contents) 58 | assert.equal(await s3fs.get(key), contents) 59 | }) 60 | 61 | test('return error when file does not exist', async ({ assert }) => { 62 | const key = `${string.random(6)}.txt` 63 | const s3fs = new S3Driver({ 64 | visibility: 'public', 65 | client: client, 66 | bucket: S3_BUCKET, 67 | supportsACL: SUPPORTS_ACL, 68 | }) 69 | 70 | await assert.rejects(async () => { 71 | await s3fs.get(key) 72 | }, /UnknownError|The specified key does not exist/) 73 | }) 74 | }) 75 | 76 | test.group('S3 Driver | getBytes', (group) => { 77 | group.each.setup(() => { 78 | return async () => { 79 | await deleteS3Objects(client, S3_BUCKET, '/') 80 | } 81 | }) 82 | group.each.timeout(10_000) 83 | 84 | test('get file contents as an arrayBuffer', async ({ assert }) => { 85 | const key = `${string.random(6)}.txt` 86 | const contents = 'Hello world' 87 | 88 | const s3fs = new S3Driver({ 89 | visibility: 'public', 90 | client: client, 91 | bucket: S3_BUCKET, 92 | supportsACL: SUPPORTS_ACL, 93 | }) 94 | 95 | await s3fs.put(key, contents) 96 | assert.equal(new TextDecoder().decode(await s3fs.getBytes(key)), contents) 97 | }) 98 | 99 | test('return error when file does not exist', async ({ assert }) => { 100 | const key = `${string.random(6)}.txt` 101 | const s3fs = new S3Driver({ 102 | visibility: 'public', 103 | client: client, 104 | bucket: S3_BUCKET, 105 | supportsACL: SUPPORTS_ACL, 106 | }) 107 | 108 | await assert.rejects(async () => { 109 | await s3fs.getBytes(key) 110 | }, /UnknownError|The specified key does not exist/) 111 | }) 112 | }) 113 | 114 | test.group('S3 Driver | getStream', (group) => { 115 | group.each.setup(() => { 116 | return async () => { 117 | await deleteS3Objects(client, S3_BUCKET, '/') 118 | } 119 | }) 120 | group.each.timeout(10_000) 121 | 122 | test('get file contents as a stream', async ({ assert }) => { 123 | const key = `${string.random(6)}.txt` 124 | const contents = 'Hello world' 125 | 126 | const s3fs = new S3Driver({ 127 | visibility: 'public', 128 | client: client, 129 | bucket: S3_BUCKET, 130 | supportsACL: SUPPORTS_ACL, 131 | }) 132 | 133 | await s3fs.put(key, contents) 134 | 
assert.equal(await getStream(await s3fs.getStream(key)), contents) 135 | }) 136 | 137 | test('return error when file does not exist', async ({ assert }) => { 138 | const key = `${string.random(6)}.txt` 139 | const s3fs = new S3Driver({ 140 | visibility: 'public', 141 | client: client, 142 | bucket: S3_BUCKET, 143 | supportsACL: SUPPORTS_ACL, 144 | }) 145 | 146 | await assert.rejects(async () => { 147 | await getStream(await s3fs.getStream(key)) 148 | }, /UnknownError|The specified key does not exist/) 149 | }) 150 | }) 151 | -------------------------------------------------------------------------------- /tests/drivers/s3/get_metadata.spec.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 8 | */ 9 | 10 | import { test } from '@japa/runner' 11 | import string from '@poppinss/utils/string' 12 | import { S3Client } from '@aws-sdk/client-s3' 13 | 14 | import { S3Driver } from '../../../drivers/s3/driver.js' 15 | import { 16 | S3_REGION, 17 | S3_BUCKET, 18 | S3_ENDPOINT, 19 | AWS_ACCESS_KEY, 20 | AWS_ACCESS_SECRET, 21 | SUPPORTS_ACL, 22 | } from './env.js' 23 | import { deleteS3Objects } from '../../helpers.js' 24 | 25 | /** 26 | * Direct access to S3 client via their SDK 27 | */ 28 | const client = new S3Client({ 29 | credentials: { 30 | accessKeyId: AWS_ACCESS_KEY, 31 | secretAccessKey: AWS_ACCESS_SECRET, 32 | }, 33 | endpoint: S3_ENDPOINT, 34 | region: S3_REGION, 35 | }) 36 | 37 | test.group('S3 Driver | getMetaData', (group) => { 38 | group.each.setup(() => { 39 | return async () => { 40 | await deleteS3Objects(client, S3_BUCKET, '/') 41 | } 42 | }) 43 | group.each.timeout(10_000) 44 | 45 | test('get metaData of a file', async ({ assert }) => { 46 | const key = `${string.random(6)}.txt` 47 | const contents = 'Hello world' 48 | 49 | const s3fs = new S3Driver({ 50 | visibility: 'public', 51 | client: client, 52 | bucket: S3_BUCKET, 53 | supportsACL: SUPPORTS_ACL, 54 | }) 55 | 56 | await s3fs.put(key, contents) 57 | const metaData = await s3fs.getMetaData(key) 58 | 59 | assert.exists(metaData.etag) 60 | assert.isTrue(metaData.lastModified instanceof Date) 61 | assert.containsSubset(metaData, { 62 | contentLength: 11, 63 | contentType: 'text/plain', 64 | }) 65 | }) 66 | 67 | test('return error when file does not exist', async ({ assert }) => { 68 | const key = `${string.random(6)}.txt` 69 | 70 | const s3fs = new S3Driver({ 71 | visibility: 'public', 72 | client: client, 73 | bucket: S3_BUCKET, 74 | supportsACL: SUPPORTS_ACL, 75 | }) 76 | 77 | await assert.rejects(async () => { 78 | await s3fs.getMetaData(key) 79 | }, /UnknownError/) 80 | }) 81 | }) 82 | -------------------------------------------------------------------------------- /tests/drivers/s3/list_all.spec.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code.
8 | */ 9 | 10 | import { test } from '@japa/runner' 11 | import string from '@poppinss/utils/string' 12 | import { S3Client } from '@aws-sdk/client-s3' 13 | 14 | import { S3Driver } from '../../../drivers/s3/driver.js' 15 | import { 16 | S3_REGION, 17 | S3_BUCKET, 18 | S3_ENDPOINT, 19 | AWS_ACCESS_KEY, 20 | AWS_ACCESS_SECRET, 21 | SUPPORTS_ACL, 22 | } from './env.js' 23 | import { deleteS3Objects } from '../../helpers.js' 24 | 25 | /** 26 | * Direct access to S3 client via their SDK 27 | */ 28 | const client = new S3Client({ 29 | credentials: { 30 | accessKeyId: AWS_ACCESS_KEY, 31 | secretAccessKey: AWS_ACCESS_SECRET, 32 | }, 33 | endpoint: S3_ENDPOINT, 34 | region: S3_REGION, 35 | }) 36 | 37 | test.group('S3 Driver | listAll | root dir', (group) => { 38 | group.each.setup(() => { 39 | return async () => { 40 | await deleteS3Objects(client, S3_BUCKET, '/') 41 | } 42 | }) 43 | group.each.timeout(10_000) 44 | 45 | test('list all files and top-level directories of the matching prefix', async ({ assert }) => { 46 | const fileName = `${string.random(10)}.txt` 47 | const keys = [fileName, `foo/bar/${fileName}`, `baz/${fileName}`] 48 | const contents = 'Hello world' 49 | 50 | const s3fs = new S3Driver({ 51 | visibility: 'public', 52 | client: client, 53 | bucket: S3_BUCKET, 54 | supportsACL: SUPPORTS_ACL, 55 | }) 56 | 57 | for (const key of keys) { 58 | await s3fs.put(key, contents) 59 | } 60 | 61 | const { objects } = await s3fs.listAll('/') 62 | 63 | assert.includeDeepMembers(Array.from(objects), [ 64 | { 65 | isDirectory: true, 66 | isFile: false, 67 | name: 'baz', 68 | prefix: 'baz', 69 | }, 70 | { 71 | isDirectory: true, 72 | isFile: false, 73 | name: 'foo', 74 | prefix: 'foo', 75 | }, 76 | { 77 | isDirectory: false, 78 | isFile: true, 79 | name: fileName, 80 | key: fileName, 81 | }, 82 | ]) 83 | }) 84 | 85 | test('list all files recursively', async ({ assert }) => { 86 | const fileName = `${string.random(10)}.txt` 87 | const keys = [fileName, `foo/bar/${fileName}`, `baz/${fileName}`] 88 | const contents = 'Hello world' 89 | 90 | const s3fs = new S3Driver({ 91 | visibility: 'public', 92 | client: client, 93 | bucket: S3_BUCKET, 94 | supportsACL: SUPPORTS_ACL, 95 | }) 96 | 97 | for (const key of keys) { 98 | await s3fs.put(key, contents) 99 | } 100 | 101 | const { objects } = await s3fs.listAll('/', { recursive: true }) 102 | assert.includeDeepMembers(Array.from(objects), [ 103 | { 104 | isDirectory: false, 105 | isFile: true, 106 | name: fileName, 107 | key: `baz/${fileName}`, 108 | }, 109 | { 110 | isDirectory: false, 111 | isFile: true, 112 | name: fileName, 113 | key: `foo/bar/${fileName}`, 114 | }, 115 | { 116 | isDirectory: false, 117 | isFile: true, 118 | name: fileName, 119 | key: fileName, 120 | }, 121 | ]) 122 | }) 123 | 124 | test('paginate recursive results', async ({ assert }) => { 125 | const fileName = `${string.random(10)}.txt` 126 | const keys = [fileName, `foo/bar/${fileName}`, `baz/${fileName}`] 127 | const contents = 'Hello world' 128 | 129 | const s3fs = new S3Driver({ 130 | visibility: 'public', 131 | client: client, 132 | bucket: S3_BUCKET, 133 | supportsACL: SUPPORTS_ACL, 134 | }) 135 | 136 | for (const key of keys) { 137 | await s3fs.put(key, contents) 138 | } 139 | 140 | /** 141 | * The expected result set. We compare the response to be a subset 142 | * of the expected result. 
143 | * 144 | * We use this approach over "deepEqual" because the order of objects 145 | * is not guaranteed by S3 146 | */ 147 | const expectedResultSet = [ 148 | { 149 | isDirectory: false, 150 | isFile: true, 151 | name: fileName, 152 | key: `foo/bar/${fileName}`, 153 | }, 154 | { 155 | isDirectory: false, 156 | isFile: true, 157 | name: fileName, 158 | key: `baz/${fileName}`, 159 | }, 160 | { 161 | isDirectory: false, 162 | isFile: true, 163 | name: fileName, 164 | key: fileName, 165 | }, 166 | ] 167 | 168 | /** 169 | * Page 1 170 | */ 171 | const { objects, paginationToken } = await s3fs.listAll('/', { 172 | recursive: true, 173 | maxResults: 1, 174 | }) 175 | assert.containsSubset(expectedResultSet, Array.from(objects)) 176 | 177 | /** 178 | * Page 2 179 | */ 180 | const { objects: page2Objects, paginationToken: page2PaginationToken } = await s3fs.listAll( 181 | '/', 182 | { 183 | recursive: true, 184 | maxResults: 1, 185 | paginationToken, 186 | } 187 | ) 188 | assert.containsSubset(expectedResultSet, Array.from(page2Objects)) 189 | assert.notDeepEqual(Array.from(page2Objects), Array.from(objects)) 190 | 191 | /** 192 | * Page 3 193 | */ 194 | const { objects: page3Objects } = await s3fs.listAll('/', { 195 | recursive: true, 196 | maxResults: 1, 197 | paginationToken: page2PaginationToken, 198 | }) 199 | assert.containsSubset(expectedResultSet, Array.from(page3Objects)) 200 | assert.notDeepEqual(Array.from(page3Objects), Array.from(page2Objects)) 201 | }) 202 | }) 203 | 204 | test.group('S3 Driver | listAll | nested dir', (group) => { 205 | group.each.setup(() => { 206 | return async () => { 207 | await deleteS3Objects(client, S3_BUCKET, '/') 208 | } 209 | }) 210 | group.each.timeout(10_000) 211 | 212 | test('list all files and top-level directories of the matching prefix', async ({ assert }) => { 213 | const fileName = `${string.random(10)}.txt` 214 | const keys = [fileName, `foo/bar/${fileName}`, `baz/${fileName}`] 215 | const contents = 'Hello world' 216 | 217 | const s3fs = new S3Driver({ 218 | visibility: 'public', 219 | client: client, 220 | bucket: S3_BUCKET, 221 | supportsACL: SUPPORTS_ACL, 222 | }) 223 | 224 | for (const key of keys) { 225 | await s3fs.put(key, contents) 226 | } 227 | 228 | const { objects } = await s3fs.listAll('foo') 229 | assert.includeDeepMembers(Array.from(objects), [ 230 | { 231 | isDirectory: true, 232 | isFile: false, 233 | name: 'bar', 234 | prefix: 'foo/bar', 235 | }, 236 | ]) 237 | }) 238 | 239 | test('list all files recursively', async ({ assert }) => { 240 | const fileName = `${string.random(10)}.txt` 241 | const keys = [fileName, `foo/bar/${fileName}`, `baz/${fileName}`] 242 | const contents = 'Hello world' 243 | 244 | const s3fs = new S3Driver({ 245 | visibility: 'public', 246 | client: client, 247 | bucket: S3_BUCKET, 248 | supportsACL: SUPPORTS_ACL, 249 | }) 250 | 251 | for (const key of keys) { 252 | await s3fs.put(key, contents) 253 | } 254 | 255 | const { objects } = await s3fs.listAll('foo', { recursive: true }) 256 | assert.includeDeepMembers(Array.from(objects), [ 257 | { 258 | isDirectory: false, 259 | isFile: true, 260 | name: fileName, 261 | key: `foo/bar/${fileName}`, 262 | }, 263 | ]) 264 | }) 265 | 266 | test('do not throw error when listing files of a non-existing directory', async ({ assert }) => { 267 | const s3fs = new S3Driver({ 268 | visibility: 'public', 269 | client: client, 270 | bucket: S3_BUCKET, 271 | supportsACL: SUPPORTS_ACL, 272 | }) 273 | 274 | const { objects } = await s3fs.listAll('foo', { recursive: true })
275 | assert.deepEqual(Array.from(objects), []) 276 | }) 277 | }) 278 | -------------------------------------------------------------------------------- /tests/drivers/s3/move.spec.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 8 | */ 9 | 10 | import { test } from '@japa/runner' 11 | import string from '@poppinss/utils/string' 12 | import { S3Client } from '@aws-sdk/client-s3' 13 | 14 | import { S3Driver } from '../../../drivers/s3/driver.js' 15 | import { 16 | S3_REGION, 17 | S3_BUCKET, 18 | S3_ENDPOINT, 19 | AWS_ACCESS_KEY, 20 | AWS_ACCESS_SECRET, 21 | SUPPORTS_ACL, 22 | } from './env.js' 23 | import { deleteS3Objects } from '../../helpers.js' 24 | 25 | /** 26 | * Direct access to S3 client via their SDK 27 | */ 28 | const client = new S3Client({ 29 | credentials: { 30 | accessKeyId: AWS_ACCESS_KEY, 31 | secretAccessKey: AWS_ACCESS_SECRET, 32 | }, 33 | endpoint: S3_ENDPOINT, 34 | region: S3_REGION, 35 | }) 36 | 37 | test.group('S3 Driver | move', (group) => { 38 | group.each.setup(() => { 39 | return async () => { 40 | await deleteS3Objects(client, S3_BUCKET, '/') 41 | } 42 | }) 43 | group.each.timeout(10_000) 44 | 45 | test('move file from source to the destination', async ({ assert }) => { 46 | const source = `${string.random(10)}.txt` 47 | const destination = `${string.random(10)}.txt` 48 | const contents = 'Hello world' 49 | 50 | const s3fs = new S3Driver({ 51 | visibility: 'public', 52 | client: client, 53 | bucket: S3_BUCKET, 54 | supportsACL: SUPPORTS_ACL, 55 | }) 56 | await s3fs.put(source, contents) 57 | await s3fs.move(source, destination) 58 | 59 | assert.equal(await s3fs.get(destination), contents) 60 | assert.isFalse(await s3fs.exists(source)) 61 | }) 62 | 63 | test('move file from source to a nested directory', async ({ assert }) => { 64 | const source = `${string.random(10)}.txt` 65 | const destination = `foo/bar/baz/${string.random(10)}.txt` 66 | const contents = 'Hello world' 67 | 68 | const s3fs = new S3Driver({ 69 | visibility: 'public', 70 | client: client, 71 | bucket: S3_BUCKET, 72 | supportsACL: SUPPORTS_ACL, 73 | }) 74 | await s3fs.put(source, contents) 75 | await s3fs.move(source, destination) 76 | 77 | assert.equal(await s3fs.get(destination), contents) 78 | assert.isFalse(await s3fs.exists(source)) 79 | }) 80 | 81 | test('return error when source file does not exist', async ({ assert }) => { 82 | const source = `${string.random(10)}.txt` 83 | const destination = `${string.random(10)}.txt` 84 | 85 | const s3fs = new S3Driver({ 86 | visibility: 'public', 87 | client: client, 88 | bucket: S3_BUCKET, 89 | supportsACL: SUPPORTS_ACL, 90 | }) 91 | await assert.rejects(async () => { 92 | await s3fs.move(source, destination) 93 | }, /UnknownError|The specified key does not exist/) 94 | }) 95 | 96 | test('retain source file metadata during move', async ({ assert }) => { 97 | const source = `${string.random(10)}.txt` 98 | const destination = `${string.random(10)}.txt` 99 | const contents = 'Hello world' 100 | 101 | const s3fs = new S3Driver({ 102 | visibility: 'public', 103 | client: client, 104 | bucket: S3_BUCKET, 105 | supportsACL: SUPPORTS_ACL, 106 | }) 107 | 108 | await s3fs.put(source, contents, { 109 | contentType: 'image/png', 110 | }) 111 | await s3fs.move(source, destination) 112 | 113 | const metaData = await s3fs.getMetaData(destination) 
114 | assert.equal(metaData.contentType, 'image/png') 115 | assert.isFalse(await s3fs.exists(source)) 116 | }) 117 | 118 | test('retain source file visibility during move', async ({ assert }) => { 119 | const source = `${string.random(10)}.txt` 120 | const destination = `${string.random(10)}.txt` 121 | const contents = 'Hello world' 122 | 123 | const s3fs = new S3Driver({ 124 | visibility: 'public', 125 | client: client, 126 | bucket: S3_BUCKET, 127 | supportsACL: SUPPORTS_ACL, 128 | }) 129 | 130 | await s3fs.put(source, contents, { 131 | contentType: 'image/png', 132 | visibility: 'private', 133 | }) 134 | await s3fs.move(source, destination) 135 | 136 | const visibility = await s3fs.getVisibility(destination) 137 | 138 | assert.equal(visibility, 'private') 139 | assert.isFalse(await s3fs.exists(source)) 140 | }).skip(!SUPPORTS_ACL, 'Service does not support ACL. Hence, we cannot control file visibility') 141 | }) 142 | -------------------------------------------------------------------------------- /tests/drivers/s3/put.spec.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 8 | */ 9 | 10 | import { join } from 'node:path' 11 | import { test } from '@japa/runner' 12 | import { createReadStream } from 'node:fs' 13 | import string from '@poppinss/utils/string' 14 | import { GetObjectAclCommand, GetObjectCommand, S3Client } from '@aws-sdk/client-s3' 15 | 16 | import { S3Driver } from '../../../drivers/s3/driver.js' 17 | import { 18 | S3_REGION, 19 | S3_BUCKET, 20 | S3_ENDPOINT, 21 | AWS_ACCESS_KEY, 22 | AWS_ACCESS_SECRET, 23 | SUPPORTS_ACL, 24 | } from './env.js' 25 | import { deleteS3Objects } from '../../helpers.js' 26 | 27 | /** 28 | * Direct access to S3 client via their SDK 29 | */ 30 | const client = new S3Client({ 31 | credentials: { 32 | accessKeyId: AWS_ACCESS_KEY, 33 | secretAccessKey: AWS_ACCESS_SECRET, 34 | }, 35 | endpoint: S3_ENDPOINT, 36 | region: S3_REGION, 37 | }) 38 | 39 | test.group('S3 Driver | put', (group) => { 40 | group.each.setup(() => { 41 | return async () => { 42 | await deleteS3Objects(client, S3_BUCKET, '/') 43 | } 44 | }) 45 | group.each.timeout(10_000) 46 | 47 | test('create file at the destination', async ({ assert }) => { 48 | const key = `${string.random(10)}.txt` 49 | const contents = 'Hello world' 50 | 51 | const s3fs = new S3Driver({ 52 | visibility: 'public', 53 | client: client, 54 | bucket: S3_BUCKET, 55 | supportsACL: SUPPORTS_ACL, 56 | }) 57 | 58 | await s3fs.put(key, contents) 59 | 60 | /** 61 | * Verify put operation 62 | */ 63 | const response = await client.send(new GetObjectCommand({ Key: key, Bucket: S3_BUCKET })) 64 | assert.equal(await response.Body!.transformToString(), contents) 65 | }) 66 | 67 | test('create file from Uint8Array', async ({ assert }) => { 68 | const key = `${string.random(10)}.txt` 69 | const contents = 'Hello world' 70 | 71 | const s3fs = new S3Driver({ 72 | visibility: 'public', 73 | client: client, 74 | bucket: S3_BUCKET, 75 | supportsACL: SUPPORTS_ACL, 76 | }) 77 | 78 | await s3fs.put(key, new TextEncoder().encode(contents)) 79 | 80 | /** 81 | * Verify put operation 82 | */ 83 | const response = await client.send(new GetObjectCommand({ Key: key, Bucket: S3_BUCKET })) 84 | assert.equal(await response.Body!.transformToString(), contents) 85 | }) 86 | 87 | test('overwrite contents of existing file', async 
({ assert }) => { 88 | const key = `${string.random(10)}.txt` 89 | const contents = 'Hello world' 90 | const newContents = 'Hi world' 91 | 92 | const s3fs = new S3Driver({ 93 | visibility: 'public', 94 | client: client, 95 | bucket: S3_BUCKET, 96 | supportsACL: SUPPORTS_ACL, 97 | }) 98 | 99 | await s3fs.put(key, contents) 100 | await s3fs.put(key, newContents) 101 | 102 | /** 103 | * Verify put operation 104 | */ 105 | const response = await client.send(new GetObjectCommand({ Key: key, Bucket: S3_BUCKET })) 106 | assert.equal(await response.Body!.transformToString(), newContents) 107 | }) 108 | 109 | test('create files at a nested destination', async ({ assert }) => { 110 | const key = `users/1/${string.random(10)}.txt` 111 | const contents = 'Hello world' 112 | 113 | const s3fs = new S3Driver({ 114 | visibility: 'public', 115 | client: client, 116 | bucket: S3_BUCKET, 117 | supportsACL: SUPPORTS_ACL, 118 | }) 119 | 120 | await s3fs.put(key, contents) 121 | 122 | /** 123 | * Verify put operation 124 | */ 125 | const response = await client.send(new GetObjectCommand({ Key: key, Bucket: S3_BUCKET })) 126 | assert.equal(await response.Body!.transformToString(), contents) 127 | }) 128 | 129 | test('create file with custom metadata', async ({ assert }) => { 130 | const key = `${string.random(10)}.txt` 131 | const contents = 'Hello world' 132 | 133 | const s3fs = new S3Driver({ 134 | visibility: 'public', 135 | client: client, 136 | bucket: S3_BUCKET, 137 | supportsACL: SUPPORTS_ACL, 138 | }) 139 | 140 | await s3fs.put(key, contents, { 141 | contentType: 'image/png', 142 | cacheControl: 'no-cache', 143 | contentEncoding: 'binary', 144 | contentLanguage: 'en-IN', 145 | contentDisposition: 'attachment', 146 | }) 147 | 148 | /** 149 | * Verify put operation 150 | */ 151 | const response = await client.send(new GetObjectCommand({ Key: key, Bucket: S3_BUCKET })) 152 | assert.equal(response.ContentType, 'image/png') 153 | assert.equal(response.ContentEncoding, 'binary') 154 | assert.equal(response.CacheControl, 'no-cache') 155 | assert.equal(response.ContentDisposition, 'attachment') 156 | assert.equal(response.ContentLanguage, 'en-IN') 157 | }) 158 | 159 | test('create file with local visibility', async ({ assert }) => { 160 | const key = `${string.random(10)}.txt` 161 | const contents = 'Hello world' 162 | 163 | const s3fs = new S3Driver({ 164 | visibility: 'public', 165 | client: client, 166 | bucket: S3_BUCKET, 167 | supportsACL: SUPPORTS_ACL, 168 | }) 169 | 170 | await s3fs.put(key, contents, { 171 | contentType: 'image/png', 172 | cacheControl: 'no-cache', 173 | contentEncoding: 'binary', 174 | }) 175 | 176 | /** 177 | * Verify put operation 178 | */ 179 | const response = await client.send(new GetObjectAclCommand({ Key: key, Bucket: S3_BUCKET })) 180 | assert.equal( 181 | response.Grants?.find( 182 | (grant) => grant.Grantee?.URI === 'http://acs.amazonaws.com/groups/global/AllUsers' 183 | )?.Permission, 184 | 'READ' 185 | ) 186 | }).skip(!SUPPORTS_ACL, 'Service does not support ACL. 
Hence, we cannot control file visibility') 187 | 188 | test('create file with inline local visibility', async ({ assert }) => { 189 | const key = `${string.random(10)}.txt` 190 | const contents = 'Hello world' 191 | 192 | const s3fs = new S3Driver({ 193 | visibility: 'public', 194 | client: client, 195 | bucket: S3_BUCKET, 196 | supportsACL: SUPPORTS_ACL, 197 | }) 198 | 199 | await s3fs.put(key, contents, { 200 | contentType: 'image/png', 201 | cacheControl: 'no-cache', 202 | contentEncoding: 'binary', 203 | visibility: 'private', 204 | }) 205 | 206 | /** 207 | * Verify put operation 208 | */ 209 | const response = await client.send(new GetObjectAclCommand({ Key: key, Bucket: S3_BUCKET })) 210 | assert.equal( 211 | response.Grants?.find( 212 | (grant) => grant.Grantee?.URI === 'http://acs.amazonaws.com/groups/global/AllUsers' 213 | )?.Permission, 214 | undefined 215 | ) 216 | }).skip(!SUPPORTS_ACL, 'Service does not support ACL. Hence, we cannot control file visibility') 217 | }) 218 | 219 | test.group('S3 Driver | putStream', (group) => { 220 | group.each.setup(() => { 221 | return async () => { 222 | await deleteS3Objects(client, S3_BUCKET, '/') 223 | } 224 | }) 225 | group.each.timeout(10_000) 226 | 227 | test('create file from readable stream', async ({ fs, assert }) => { 228 | const key = `${string.random(10)}.txt` 229 | const contents = 'Hello world' 230 | 231 | const s3fs = new S3Driver({ 232 | visibility: 'public', 233 | client: client, 234 | bucket: S3_BUCKET, 235 | supportsACL: SUPPORTS_ACL, 236 | }) 237 | 238 | await fs.create(key, contents) 239 | await s3fs.putStream(key, createReadStream(join(fs.basePath, key))) 240 | 241 | /** 242 | * Verify put operation 243 | */ 244 | const response = await client.send(new GetObjectCommand({ Key: key, Bucket: S3_BUCKET })) 245 | assert.equal(await response.Body!.transformToString(), contents) 246 | }) 247 | 248 | test('create files at a nested destination', async ({ fs, assert }) => { 249 | const key = `users/1/${string.random(10)}.txt` 250 | const contents = 'Hello world' 251 | 252 | const s3fs = new S3Driver({ 253 | visibility: 'public', 254 | client: client, 255 | bucket: S3_BUCKET, 256 | supportsACL: SUPPORTS_ACL, 257 | }) 258 | 259 | await fs.create(key, contents) 260 | await s3fs.putStream(key, createReadStream(join(fs.basePath, key))) 261 | 262 | /** 263 | * Verify put operation 264 | */ 265 | const response = await client.send(new GetObjectCommand({ Key: key, Bucket: S3_BUCKET })) 266 | assert.equal(await response.Body!.transformToString(), contents) 267 | }) 268 | }) 269 | -------------------------------------------------------------------------------- /tests/drivers/s3/url_generation.spec.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 
8 | */ 9 | 10 | import got from 'got' 11 | import { test } from '@japa/runner' 12 | import string from '@poppinss/utils/string' 13 | import { GetObjectCommand, S3Client } from '@aws-sdk/client-s3' 14 | 15 | import { S3Driver } from '../../../drivers/s3/driver.js' 16 | import { 17 | S3_REGION, 18 | S3_BUCKET, 19 | S3_CDN_URL, 20 | S3_SERVICE, 21 | S3_ENDPOINT, 22 | SUPPORTS_ACL, 23 | AWS_ACCESS_KEY, 24 | AWS_ACCESS_SECRET, 25 | } from './env.js' 26 | import { deleteS3Objects } from '../../helpers.js' 27 | import { getSignedUrl } from '@aws-sdk/s3-request-presigner' 28 | 29 | /** 30 | * Direct access to S3 client via their SDK 31 | */ 32 | const client = new S3Client({ 33 | credentials: { 34 | accessKeyId: AWS_ACCESS_KEY, 35 | secretAccessKey: AWS_ACCESS_SECRET, 36 | }, 37 | endpoint: S3_ENDPOINT, 38 | region: S3_REGION, 39 | }) 40 | 41 | test.group('S3 Driver | getUrl', (group) => { 42 | group.each.setup(() => { 43 | return async () => { 44 | await deleteS3Objects(client, S3_BUCKET, '/') 45 | } 46 | }) 47 | group.each.timeout(10_000) 48 | 49 | test('get public URL of a file', async ({ assert }) => { 50 | const key = `${string.random(6)}.txt` 51 | 52 | const s3fs = new S3Driver({ 53 | visibility: 'public', 54 | client: client, 55 | bucket: S3_BUCKET, 56 | supportsACL: SUPPORTS_ACL, 57 | }) 58 | 59 | await s3fs.put(key, 'hello world') 60 | const fileURL = await s3fs.getUrl(key) 61 | 62 | assert.equal(fileURL, `${S3_ENDPOINT}/${S3_BUCKET}/${key}`) 63 | 64 | /** 65 | * R2 files are private unless a cdnURL is assigned to them 66 | */ 67 | if (S3_SERVICE !== 'r2') { 68 | const fileContents = await got.get(fileURL) 69 | assert.equal(fileContents.body, 'hello world') 70 | } 71 | }) 72 | 73 | test('use custom implementation for generating public URL', async ({ assert }) => { 74 | const key = `${string.random(6)}.txt` 75 | 76 | const s3fs = new S3Driver({ 77 | visibility: 'public', 78 | client: client, 79 | bucket: S3_BUCKET, 80 | supportsACL: SUPPORTS_ACL, 81 | urlBuilder: { 82 | async generateURL(fileKey, fileBucket) { 83 | return new URL(fileKey, `https://cdn.example.com/${fileBucket}/`).toString() 84 | }, 85 | }, 86 | }) 87 | 88 | const fileURL = await s3fs.getUrl(key) 89 | assert.equal(fileURL, `https://cdn.example.com/${S3_BUCKET}/${key}`) 90 | }) 91 | 92 | test('use CDN url for creating public URL', async ({ assert }) => { 93 | const key = `${string.random(6)}.txt` 94 | 95 | const s3fs = new S3Driver({ 96 | visibility: 'public', 97 | client: client, 98 | bucket: S3_BUCKET, 99 | supportsACL: SUPPORTS_ACL, 100 | cdnUrl: S3_CDN_URL, 101 | }) 102 | 103 | await s3fs.put(key, 'hello world') 104 | const fileURL = await s3fs.getUrl(key) 105 | 106 | const fileContents = await got.get(fileURL) 107 | 108 | assert.equal(fileURL, new URL(key, S3_CDN_URL).toString()) 109 | assert.equal(fileContents.body, 'hello world') 110 | }) 111 | }) 112 | 113 | test.group('S3 Driver | getSignedUrl', (group) => { 114 | group.each.setup(() => { 115 | return async () => { 116 | await deleteS3Objects(client, S3_BUCKET, '/') 117 | } 118 | }) 119 | group.each.timeout(10_000) 120 | 121 | test('get signed URL of a file', async ({ assert }) => { 122 | const key = `${string.random(6)}.txt` 123 | 124 | const s3fs = new S3Driver({ 125 | visibility: 'private', 126 | client: client, 127 | bucket: S3_BUCKET, 128 | supportsACL: SUPPORTS_ACL, 129 | }) 130 | 131 | await s3fs.put(key, 'hello world') 132 | 133 | const fileURL = new URL(await s3fs.getSignedUrl(key)) 134 | const fileContents = await got.get(fileURL) 135 | 136 | 
assert.include(fileURL.hostname, S3_BUCKET) 137 | assert.equal(fileURL.pathname, `/${key}`) 138 | assert.isTrue(fileURL.searchParams.has('X-Amz-Signature')) 139 | assert.isTrue(fileURL.searchParams.has('X-Amz-Expires')) 140 | 141 | assert.equal(fileContents.body, 'hello world') 142 | }) 143 | 144 | test('define content type for the file', async ({ assert }) => { 145 | const key = `${string.random(6)}.txt` 146 | 147 | const s3fs = new S3Driver({ 148 | visibility: 'public', 149 | client: client, 150 | bucket: S3_BUCKET, 151 | supportsACL: SUPPORTS_ACL, 152 | }) 153 | 154 | const fileURL = new URL( 155 | await s3fs.getSignedUrl(key, { 156 | contentType: 'image/png', 157 | }) 158 | ) 159 | 160 | assert.equal(fileURL.searchParams.get('response-content-type'), 'image/png') 161 | }) 162 | 163 | test('define content disposition for the file', async ({ assert }) => { 164 | const key = `${string.random(6)}.txt` 165 | 166 | const s3fs = new S3Driver({ 167 | visibility: 'public', 168 | client: client, 169 | bucket: S3_BUCKET, 170 | supportsACL: SUPPORTS_ACL, 171 | }) 172 | 173 | const fileURL = new URL( 174 | await s3fs.getSignedUrl(key, { 175 | contentDisposition: 'attachment', 176 | }) 177 | ) 178 | 179 | assert.equal(fileURL.searchParams.get('response-content-disposition'), 'attachment') 180 | }) 181 | 182 | test('use custom implementation for generating signed URL', async ({ assert }) => { 183 | const key = `${string.random(6)}.txt` 184 | 185 | const s3fs = new S3Driver({ 186 | visibility: 'public', 187 | client: client, 188 | bucket: S3_BUCKET, 189 | supportsACL: SUPPORTS_ACL, 190 | urlBuilder: { 191 | async generateSignedURL(_, options, s3Client) { 192 | return getSignedUrl( 193 | s3Client, 194 | new GetObjectCommand({ 195 | ...options, 196 | ResponseCacheControl: 'no-cache', 197 | }) 198 | ) 199 | }, 200 | }, 201 | }) 202 | 203 | const fileURL = new URL(await s3fs.getSignedUrl(key)) 204 | assert.equal(fileURL.searchParams.get('response-cache-control'), 'no-cache') 205 | }) 206 | }) 207 | -------------------------------------------------------------------------------- /tests/drivers/s3/visibility.spec.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 
8 | */ 9 | 10 | import { test } from '@japa/runner' 11 | import string from '@poppinss/utils/string' 12 | import { S3Client } from '@aws-sdk/client-s3' 13 | 14 | import { S3Driver } from '../../../drivers/s3/driver.js' 15 | import { 16 | S3_REGION, 17 | S3_BUCKET, 18 | S3_ENDPOINT, 19 | AWS_ACCESS_KEY, 20 | AWS_ACCESS_SECRET, 21 | SUPPORTS_ACL, 22 | } from './env.js' 23 | import { deleteS3Objects } from '../../helpers.js' 24 | 25 | /** 26 | * Direct access to S3 client via their SDK 27 | */ 28 | const client = new S3Client({ 29 | credentials: { 30 | accessKeyId: AWS_ACCESS_KEY, 31 | secretAccessKey: AWS_ACCESS_SECRET, 32 | }, 33 | endpoint: S3_ENDPOINT, 34 | region: S3_REGION, 35 | }) 36 | 37 | test.group('S3 Driver | visibility', (group) => { 38 | group.each.setup(() => { 39 | return async () => { 40 | await deleteS3Objects(client, S3_BUCKET, '/') 41 | } 42 | }) 43 | group.each.timeout(10_000) 44 | 45 | test('get visibility of a file', async ({ assert }) => { 46 | const key = `${string.random(6)}.txt` 47 | 48 | const s3fs = new S3Driver({ 49 | visibility: 'public', 50 | client: client, 51 | bucket: S3_BUCKET, 52 | supportsACL: SUPPORTS_ACL, 53 | }) 54 | 55 | await s3fs.put(key, 'hello world') 56 | 57 | const visibility = await s3fs.getVisibility(key) 58 | assert.equal(visibility, 'public') 59 | }) 60 | 61 | test('make file private', async ({ assert }) => { 62 | const key = `${string.random(6)}.txt` 63 | 64 | const s3fs = new S3Driver({ 65 | visibility: 'public', 66 | client: client, 67 | bucket: S3_BUCKET, 68 | supportsACL: SUPPORTS_ACL, 69 | }) 70 | 71 | await s3fs.put(key, 'hello world', { 72 | cacheControl: 'no-cache', 73 | }) 74 | assert.equal(await s3fs.getVisibility(key), 'public') 75 | 76 | await s3fs.setVisibility(key, 'private') 77 | 78 | /** 79 | * The file visibility won't change when service does not 80 | * support ACL 81 | */ 82 | if (SUPPORTS_ACL) { 83 | assert.equal(await s3fs.getVisibility(key), 'private') 84 | } else { 85 | assert.equal(await s3fs.getVisibility(key), 'public') 86 | } 87 | }) 88 | 89 | test('make file public', async ({ assert }) => { 90 | const key = `${string.random(6)}.txt` 91 | 92 | const s3fs = new S3Driver({ 93 | visibility: 'private', 94 | client: client, 95 | bucket: S3_BUCKET, 96 | supportsACL: SUPPORTS_ACL, 97 | }) 98 | 99 | await s3fs.put(key, 'hello world', { 100 | cacheControl: 'no-cache', 101 | }) 102 | assert.equal(await s3fs.getVisibility(key), 'private') 103 | 104 | await s3fs.setVisibility(key, 'public') 105 | 106 | /** 107 | * The file visibility won't change when service does not 108 | * support ACL 109 | */ 110 | if (SUPPORTS_ACL) { 111 | assert.equal(await s3fs.getVisibility(key), 'public') 112 | } else { 113 | assert.equal(await s3fs.getVisibility(key), 'private') 114 | } 115 | }) 116 | 117 | test('throw error when trying to update visibility of a non-existing file', async ({ 118 | assert, 119 | }) => { 120 | const key = `${string.random(6)}.txt` 121 | 122 | const s3fs = new S3Driver({ 123 | visibility: 'public', 124 | client: client, 125 | bucket: S3_BUCKET, 126 | supportsACL: SUPPORTS_ACL, 127 | }) 128 | 129 | await assert.rejects(async () => { 130 | await s3fs.setVisibility(key, 'public') 131 | }, /UnknownError/) 132 | }).skip(!SUPPORTS_ACL, 'Service does not support ACL. 
Hence, we cannot control file visibility') 133 | }) 134 | -------------------------------------------------------------------------------- /tests/helpers.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * flydrive 3 | * 4 | * (c) FlyDrive 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 8 | */ 9 | 10 | import { DeleteObjectsCommand, ListObjectsV2Command, S3Client } from '@aws-sdk/client-s3' 11 | 12 | /** 13 | * Delete S3 Objects 14 | */ 15 | export async function deleteS3Objects(client: S3Client, bucket: string, prefix: string) { 16 | const objects = await client.send( 17 | new ListObjectsV2Command({ 18 | Bucket: bucket, 19 | ...(prefix === '/' ? {} : { Prefix: prefix }), 20 | }) 21 | ) 22 | 23 | if (objects.Contents) { 24 | try { 25 | await client.send( 26 | new DeleteObjectsCommand({ 27 | Bucket: bucket, 28 | Delete: { 29 | Objects: objects.Contents.map((object) => { 30 | return { 31 | Key: object.Key, 32 | } 33 | }), 34 | Quiet: true, 35 | }, 36 | }) 37 | ) 38 | } catch (error) { 39 | console.log('======= BULK DELETE FAILURE START =======') 40 | console.log(objects) 41 | console.log(error.$response) 42 | console.log('======= BULK DELETE FAILURE END =======') 43 | } 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "@adonisjs/tsconfig/tsconfig.package.json", 3 | "compilerOptions": { 4 | "rootDir": "./", 5 | "outDir": "./build", 6 | } 7 | } 8 | -------------------------------------------------------------------------------- /tsnode.esm.js: -------------------------------------------------------------------------------- 1 | /* 2 | |-------------------------------------------------------------------------- 3 | | TS-Node ESM hook 4 | |-------------------------------------------------------------------------- 5 | | 6 | | Importing this file before any other file will allow you to run TypeScript 7 | | code directly using TS-Node + SWC. For example 8 | | 9 | | node --import="./tsnode.esm.js" bin/test.ts 10 | | node --import="./tsnode.esm.js" index.ts 11 | | 12 | | 13 | | Why not use "--loader=ts-node/esm"? 14 | | Because, loaders have been deprecated. 15 | */ 16 | 17 | import { register } from 'node:module' 18 | register('ts-node/esm', import.meta.url) 19 | --------------------------------------------------------------------------------