├── .env.default ├── .github └── workflows │ └── ci.yaml ├── .gitignore ├── LICENSE ├── README.md ├── adapter_api.md ├── changelog.md ├── config_urls.md ├── copy.ts ├── fixes └── glob │ ├── copy.ts │ └── index.d.ts ├── flake.lock ├── flake.nix ├── migration_to_api2.0.md ├── migration_to_api2.1.md ├── package-lock.json ├── package.json ├── publish ├── AdapterAmazonS3 │ ├── README.md │ ├── changelog.md │ ├── package-lock.json │ └── package.json ├── AdapterAzureBlob │ ├── README.md │ ├── changelog.md │ ├── package-lock.json │ └── package.json ├── AdapterBackblazeB2 │ ├── README.md │ ├── changelog.md │ ├── package-lock.json │ └── package.json ├── AdapterGoogleCloud │ ├── README.md │ ├── changelog.md │ ├── package-lock.json │ └── package.json ├── AdapterLocal │ ├── README.md │ ├── changelog.md │ ├── package-lock.json │ └── package.json ├── AdapterMinio │ ├── README.md │ ├── changelog.md │ ├── package-lock.json │ └── package.json └── Storage │ ├── README.md │ ├── changelog.md │ ├── package-lock.json │ └── package.json ├── src ├── AbstractAdapter.ts ├── AdapterAmazonS3.ts ├── AdapterAzureBlob.ts ├── AdapterBackblazeB2.ts ├── AdapterBackblazeB2F.ts ├── AdapterGoogleCloud.ts ├── AdapterLocal.ts ├── AdapterMinio.ts ├── Storage.ts ├── adapters.ts ├── indexes │ ├── AdapterAmazonS3.ts │ ├── AdapterAzureBlob.ts │ ├── AdapterBackblazeB2.ts │ ├── AdapterGoogleCloud.ts │ ├── AdapterLocal.ts │ ├── AdapterMinio.ts │ └── Storage.ts ├── template_class.ts ├── template_functional.ts ├── types │ ├── adapter_amazon_s3.ts │ ├── adapter_azure_blob.ts │ ├── adapter_backblaze_b2.ts │ ├── adapter_google_cloud.ts │ ├── adapter_local.ts │ ├── adapter_minio.ts │ ├── add_file_params.ts │ ├── general.ts │ └── result.ts └── util.ts ├── tests ├── config.ts ├── data │ ├── image1.jpg │ ├── image2.jpg │ ├── input.txt │ └── with space.jpg ├── test.jasmine.ts ├── test.ts ├── test_urls.ts └── util.ts ├── todo.md └── tsconfig.json /.env.default: 
-------------------------------------------------------------------------------- 1 | # The following environment variables will be read automatically by 2 | # the service client libraries that are uses in the adapters 3 | 4 | # Amazon S3 5 | AWS_ACCESS_KEY_ID="XXXXXXXXXX" 6 | AWS_SECRET_ACCESS_KEY="XXXXXXXXXX" 7 | AWS_REGION="eu-west-1" 8 | 9 | # Google Cloud Storage 10 | GOOGLE_APPLICATION_CREDENTIALS="path/to/keyFile.json" 11 | 12 | # Azure 13 | AZURE_STORAGE_ACCOUNT_KEY="XXXXXXXXXX" 14 | # or: 15 | AZURE_STORAGE_SAS_TOKEN="XXXXXXXXXX" 16 | # or: 17 | AZURE_STORAGE_CONNECTION_STRING="XXXXXXXXXX" 18 | -------------------------------------------------------------------------------- /.github/workflows/ci.yaml: -------------------------------------------------------------------------------- 1 | name: ci 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | pull_request: 8 | branches: 9 | - "*" 10 | 11 | jobs: 12 | test-local: 13 | runs-on: ubuntu-latest 14 | name: Test Node.JS ${{matrix.node-version}} 15 | strategy: 16 | matrix: 17 | node-version: ["18.x", "20.x", "21.x"] 18 | steps: 19 | - uses: actions/checkout@v4.1.1 20 | - uses: actions/setup-node@v4.0.0 21 | with: 22 | node-version: ${{matrix.node-version}} 23 | - uses: actions/cache@v3.3.2 24 | with: 25 | path: | 26 | node_modules 27 | key: node-modules-cache-${{matrix.node-version}}-${{runner.os}}-${{hashFiles('package-lock.json')}}-${{github.ref}} 28 | restore-keys: | 29 | node-modules-cache-${{matrix.node-version}}-${{runner.os}}-${{hashFiles('package-lock.json')}}- 30 | node-modules-cache-${{matrix.node-version}}-${{runner.os}}- 31 | - run: npm ci 32 | - run: npm run test-local 33 | # - run: npm run test-minio-2 34 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .env 2 | .idea 3 | .vscode 4 | gcs.json 5 | minio 6 | node_modules 7 | publish/**/src 8 | publish/**/src 9 | 
publish/**/src 10 | publish/**/dist/**/*.js 11 | publish/**/dist/**/*.js.map 12 | publish/**/dist/**/*.d.ts 13 | tests_local 14 | tests/test_directory 15 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Tweede golf 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /adapter_api.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tweedegolf/storage-abstraction/767900d9dbeca8a22cec9e99f47e22f84cd344c8/adapter_api.md -------------------------------------------------------------------------------- /changelog.md: -------------------------------------------------------------------------------- 1 | # 2.1.6 2 | 3 | - updated Google AzureBlob adapter to 1.0.7 4 | 5 | # 2.1.5 6 | 7 | - updated Google Cloud adapter to 1.0.8 8 | 9 | # 2.1.4 10 | 11 | - updated dependencies 12 | - remarkable dependency autolinker set to "^4.0.0" 13 | 14 | # 2.1.3 15 | 16 | - updated dependencies 17 | 18 | # 2.1.2 19 | 20 | - updated dependencies 21 | - fixed test scripts 22 | - fix for type error in @types/glob 23 | 24 | # 2.1.1 25 | 26 | - fix issue with optional `bucketName` argument 27 | 28 | # 2.1.0 29 | 30 | - reverted to v1 of the configuration URLs (with some improvements) 31 | - re-implemented storing the selected bucket in local state 32 | 33 | # 2.0.0 34 | 35 | - Complete overhaul of the API. 
If you are using API 1.x please read the [migration document](migration_to_api2.1.md) 36 | 37 | # 1.5.6 38 | 39 | - removed unnecessary call to getMetaData in GCS adapter 40 | - removed @ramda/zip dependency 41 | 42 | # 1.5.5 43 | 44 | - changed Backblaze B2 service client dependency to a [fork](https://www.npmjs.com/package/@nichoth/backblaze-b2) that fixes the long standing Axios security issue → all credits: [nichoth](https://github.com/nichoth) 45 | 46 | # 1.5.4 47 | 48 | - added `getFileAsURL` to Amazon S3 adapter → all credits: [DennisSnijder](https://github.com/DennisSnijder) 49 | 50 | # 1.5.3 51 | 52 | - fix for issue #48 → all credits: [Pezmc](https://github.com/Pezmc) 53 | 54 | # 1.4.7 - 1.5.2 55 | 56 | - Added support for Azure → all credits: [tesirm99](https://github.com/tesirm99) 57 | - Upgrade all packages 58 | - Fixed numerous async errors 59 | - AdapterAmazonS3: use `s3-request-presigner` to create links to objects 60 | - AdapterAmazonS3: added support for S3 compatible storages (tested with Cloudflare R2 and Backblaze S3) 61 | - AdapterLocal now treats values without prefix passed to `mode` as decimal number instead of octal numbers 62 | - AdapterLocal: if you pass the config as an object and you don't provide a value for `bucketName`, the bucketName will no longer be set to the last folder of the value you provide for `directory`. In other words: if you want to set a value for `bucketName` you have to add it specifically to the config object. 63 | 64 | # 1.4.5 65 | 66 | - Remove option 'slug' in config: this makes the user responsible for choosing a valid bucket name. 67 | - Add `options` to `createBucket`. 68 | - Add `options` to `addFileFromPath`, `addFileFromBuffer` and `addFileFromReadable`. 
69 | - Add `skipCheck` to configuration object and made all keys optional 70 | - Return public url after a file has been successfully added to a bucket 71 | 72 | # 1.4.4 73 | 74 | - use '@aws-sdk/client-s3' instead of 'aws-sdk' 75 | - eslint fixes 76 | 77 | # 1.4.3 78 | 79 | - Removed `await-to-js` dependency in local storage adapter 80 | 81 | # 1.4.2 82 | 83 | - Added `ConfigAmazonS3`, `ConfigBackblazeB2`, `ConfigGoogleCloud` and `ConfigLocal` to exported types 84 | - Removed `await-to-js` dependency 85 | 86 | # 1.4.1 87 | 88 | - Added `AdapterConfig` to exported types 89 | 90 | # 1.4.0 91 | 92 | - Changed the name of the 'functional classes' from Storage to Adapter, e.g. `StorageAmazonS3` became `AdapterAmazonS3` 93 | - Replaced `introspect()` by `getConfiguration():AdapterConfig` and `getType():string` 94 | - Added adapter class for BackBlaze B2 95 | - Made configuration more generic and extensible 96 | - Removed option to create a new Storage without configuration (StorageLocal) 97 | - Added default storage options that can be overruled or extended by the config object or url 98 | - Made slugify optional and turned it off by default for StorageLocal 99 | - Added API method `fileExists():Promise` 100 | - In the configuration object or string non-existent keys or invalid values are no longer filtered out: it is the programmer's responsibility to provide valid options 101 | - If no bucket name is provided the bucket name will always be an empty string "", not `undefined` or `null`. Also when no bucket is selected `bucketName` will be "". 102 | - Adapter modules are only loaded when needed (using `require`) 103 | - Removed options from both configuration and adapters. 104 | - Formalized return values 105 | 106 | # 1.3.1 107 | 108 | - Removed sloppy code: parsing and validation of configuration is now done in one [place](https://github.com/tweedegolf/storage-abstraction/blob/master/src/util.ts). 
109 | - Removed jasmine-ts dependency 110 | 111 | # 1.3.0 112 | 113 | - Removed `getFileByteRangeAsReadable` and merged the functionality in `getFileAsReadable` by adding a range parameter `{start: number, end: number}` 114 | - Removed the option to instantiate a specific storage type directly; all instantiating must be done with `new Storage(config)`. 115 | - Optimized `getFileAsReadable` for Google Cloud. 116 | - Implemented `addFileFromReadable`, fixes [issue#2](https://github.com/tweedegolf/storage-abstraction/issues/2) 117 | - Added configuration urls: all configuration options in a single string. 118 | - When creating a local storage without specifying a directory, the directory where the process runs will be used (in earlier versions the os' tmp folder was used) 119 | - When creating a local storage without specifying a bucket name, a directory named `local-bucket` will be created and used as selected bucket. 120 | - When using `new Storage()` without configuration you create a local storage instance with the default configuration (as described in the 2 bullets above). 121 | - Updated documentation. 122 | - Updated dependency version. 123 | - Added yarn.lock. 
124 | - Renamed 'functional classes' to 'adapter classes' 125 | 126 | # 1.2.1 127 | 128 | (Pull request #3)[https://github.com/tweedegolf/storage-abstraction/pull/3] 129 | 130 | - Implemented `sizeOf`, `getFileByteRangeAsReadable` 131 | - Improved AWS performance 132 | 133 | # 1.1.16 134 | 135 | (Pull request #1)[https://github.com/tweedegolf/storage-abstraction/pull/1] 136 | 137 | - Expanded the S3 configuration options 138 | -------------------------------------------------------------------------------- /config_urls.md: -------------------------------------------------------------------------------- 1 | ### Local storage 2 | 3 | ```typescript 4 | const url = "local://path/to/bucket@bucket_name?mode=511&extra_option2=value2..."; 5 | 6 | const config: AdapterConfigLocal = { 7 | bucketName: "bucket_name", 8 | directory: "path/to/bucket", 9 | mode: "511", 10 | }; 11 | ``` 12 | 13 | ### Amazon S3 14 | 15 | ```typescript 16 | const url = 17 | "s3://access_key_id:secret_access_key@bucket_name?region=region&extra_option2=value2..."; 18 | 19 | const config: AdapterConfigAmazonS3 = { 20 | bucketName: "bucket_name", 21 | accessKeyId: "access_key_id", 22 | secretAccessKey: "secret_access_key", 23 | region: "region", 24 | }; 25 | 26 | // read accessKeyId and secretAccessKey from environment 27 | const url = "s3://@bucket_name?region=region&extra_option2=value2..."; 28 | 29 | const config: AdapterConfigAmazonS3 = { 30 | bucketName: "bucket_name", 31 | region: "region", 32 | }; 33 | 34 | // read accessKeyId, secretAccessKey and region from environment 35 | const url = "s3://@bucket_name?extra_option1=value1&extra_option2=value2..."; 36 | 37 | const config: AdapterConfigAmazonS3 = { 38 | bucketName: "bucket_name", 39 | }; 40 | ``` 41 | 42 | ### Google Cloud Storage 43 | 44 | ```typescript 45 | const url = "gcs://path/to/key_file.json@bucket_name?extra_option1=value1&extra_option2=value2..."; 46 | 47 | const config: AdapterConfigGoogleCloud = { 48 | bucketName: "bucket_name", 49 
| keyFilename: "path/to/key_file.json", 50 | }; 51 | 52 | // read path to keyFile.json from environment 53 | const url = "gcs://@bucket_name?extra_option1=value1&extra_option2=value2..."; 54 | const config: AdapterConfigGoogleCloud = { 55 | bucketName: "bucket_name", 56 | }; 57 | ``` 58 | 59 | ### Backblaze B2 60 | 61 | ```typescript 62 | const url = 63 | "b2://application_key_id:application_key@bucket_name?extra_option1=value1&extra_option2=value2..."; 64 | 65 | const config: AdapterConfigBackblazeB2 = { 66 | bucketName: "bucket_name", 67 | applicationKeyId: "application_key_id", 68 | applicationKey: "application_key", 69 | }; 70 | ``` 71 | 72 | ### Azure Blob Storage 73 | 74 | ```typescript 75 | // both accountName and accountKey 76 | const url = 77 | "azure://account_name:account_key@container_name?extra_option1=value1&extra_option2=value2..."; 78 | 79 | const config: AdapterConfigAzureBlob = { 80 | bucketName: "container_name", 81 | accountName: "account_name", 82 | accountKey: "account_key", 83 | }; 84 | 85 | // passwordless 86 | const url = "azure://account_name@container_name?extra_option1=value1&extra_option2=value2..."; 87 | 88 | const config: AdapterConfigAzureBlob = { 89 | bucketName: "container_name", 90 | accountName: "account_name", 91 | }; 92 | 93 | // sasToken 94 | const url = "azure://account_name@container_name?sas_token=SAS_TOKEN&extra_option2=value2..."; 95 | 96 | const config: AdapterConfigAzureBlob = { 97 | bucketName: "container_name", 98 | accountName: "account_name", 99 | sasToken: "SAS_TOKEN", 100 | }; 101 | 102 | // connection string 103 | const url = 104 | "azure://account_name@container_name?connection_string=CONNECTION_STRING&extra_option2=value2..."; 105 | 106 | const config: AdapterConfigAzureBlob = { 107 | bucketName: "container_name", 108 | accountName: "account_name", 109 | connectionString: "CONNECTION_STRING", 110 | }; 111 | ``` 112 | 113 | ### MinIO 114 | 115 | ```typescript 116 | const url = 117 | 
"minio://access_key:secret_key@bucket_name?region=region&endPoint=END_POINT&port=PORT&useSSL=USE_SSL"; 118 | 119 | const config: AdapterConfigMinio = { 120 | bucketName: "bucket_name", 121 | accessKey: "access_key", 122 | secretKey: "secret_key", 123 | endPoint: "END_POINT", 124 | region: "region", 125 | useSSL: "USE_SSL", 126 | port: "PORT", 127 | }; 128 | 129 | // without region, defaults to "auto" 130 | const url = "minio://accessKey:secretKey@bucket_name?endPoint=END_POINT&port=PORT&useSSL=USE_SSL"; 131 | 132 | const config: AdapterConfigMinio = { 133 | bucketName: "bucket_name", 134 | accessKey: "access_key", 135 | secretKey: "secret_key", 136 | endPoint: "END_POINT", 137 | useSSL: "USE_SSL", 138 | port: "PORT", 139 | }; 140 | 141 | // without region, port and useSSL, will default to "auto", "443" and "true" 142 | const url = "minio://accessKey:secretKey@bucket_name?endPoint=END_POINT"; 143 | 144 | const config: AdapterConfigMinio = { 145 | bucketName: "bucket_name", 146 | accessKey: "access_key", 147 | secretKey: "secret_key", 148 | endPoint: "END_POINT", 149 | }; 150 | ``` 151 | -------------------------------------------------------------------------------- /copy.ts: -------------------------------------------------------------------------------- 1 | import fs from "fs"; 2 | import path from "path"; 3 | 4 | const classes = [ 5 | "Storage", 6 | "AdapterLocal", 7 | "AdapterMinio", 8 | "AdapterAmazonS3", 9 | "AdapterGoogleCloud", 10 | "AdapterAzureBlob", 11 | "AdapterBackblazeB2", 12 | ]; 13 | 14 | const extensions = ["js", "js.map", "d.ts"]; 15 | 16 | const types = ["general", "result", "add_file_params"]; 17 | 18 | const specificTypes = { 19 | Storage: [], 20 | AdapterLocal: ["adapter_local"], 21 | AdapterMinio: ["adapter_minio"], 22 | AdapterAmazonS3: ["adapter_amazon_s3"], 23 | AdapterGoogleCloud: ["adapter_google_cloud"], 24 | AdapterAzureBlob: ["adapter_azure_blob"], 25 | AdapterBackblazeB2: ["adapter_backblaze_b2"], 26 | }; 27 | 28 | async function 
beforeAll(): Promise { 29 | const promises = classes.reduce((acc: Array>, val: string) => { 30 | acc.push(fs.promises.rm(path.join("publish", val, "src"), { recursive: true, force: true })); 31 | acc.push(fs.promises.rm(path.join("publish", val, "dist"), { recursive: true, force: true })); 32 | return acc; 33 | }, []); 34 | 35 | return Promise.all(promises) 36 | .then(() => { 37 | return "ok"; 38 | }) 39 | .catch((e) => { 40 | return e.message; 41 | }); 42 | } 43 | 44 | async function createDirs(): Promise { 45 | try { 46 | for (let i = 0; i < classes.length; i++) { 47 | // await fs.promises.mkdir(path.join("publish", classes[i], "src")); 48 | await fs.promises.mkdir(path.join("publish", classes[i], "dist")); 49 | await fs.promises.mkdir(path.join("publish", classes[i], "dist", "types")); 50 | await fs.promises.mkdir(path.join("publish", classes[i], "dist", "index")); 51 | } 52 | return "ok"; 53 | } catch (e) { 54 | return e.message; 55 | } 56 | } 57 | 58 | async function copy(): Promise { 59 | const promises = classes.reduce((acc: Array>, val: string) => { 60 | extensions.forEach((ext) => { 61 | acc.push( 62 | fs.promises.copyFile( 63 | path.join("publish", "dist", `${val}.${ext}`), 64 | path.join("publish", val, "dist", `${val}.${ext}`) 65 | ) 66 | ); 67 | 68 | acc.push( 69 | fs.promises.copyFile( 70 | path.join("publish", "dist", "indexes", `${val}.${ext}`), 71 | path.join("publish", val, "dist", "index", `${val}.${ext}`) 72 | ) 73 | ); 74 | 75 | acc.push( 76 | fs.promises.copyFile( 77 | path.join("publish", "dist", `AbstractAdapter.${ext}`), 78 | path.join("publish", val, "dist", `AbstractAdapter.${ext}`) 79 | ) 80 | ); 81 | acc.push( 82 | fs.promises.copyFile( 83 | path.join("publish", "dist", `util.${ext}`), 84 | path.join("publish", val, "dist", `util.${ext}`) 85 | ) 86 | ); 87 | if (val === "Storage") { 88 | acc.push( 89 | fs.promises.copyFile( 90 | path.join("publish", "dist", `adapters.${ext}`), 91 | path.join("publish", val, "dist", `adapters.${ext}`) 92 
| ) 93 | ); 94 | } 95 | 96 | types.forEach((type) => { 97 | acc.push( 98 | fs.promises.copyFile( 99 | path.join("publish", "dist", "types", `${type}.${ext}`), 100 | path.join("publish", val, "dist", "types", `${type}.${ext}`) 101 | ) 102 | ); 103 | }); 104 | 105 | specificTypes[val].forEach((type: string) => { 106 | acc.push( 107 | fs.promises.copyFile( 108 | path.join("publish", "dist", "types", `${type}.${ext}`), 109 | path.join("publish", val, "dist", "types", `${type}.${ext}`) 110 | ) 111 | ); 112 | }); 113 | 114 | // copy README to Storage 115 | acc.push(fs.promises.copyFile("README.md", path.join("publish", "Storage", "README.md"))); 116 | }); 117 | 118 | // acc.push( 119 | // fs.promises.copyFile( 120 | // path.join("src", `${val}.ts`), 121 | // path.join("publish", val, "src", `${val}.ts`) 122 | // ) 123 | // ); 124 | 125 | return acc; 126 | }, []); 127 | 128 | return Promise.all(promises) 129 | .then(() => { 130 | return "ok"; 131 | }) 132 | .catch((e) => { 133 | return e.message; 134 | }); 135 | } 136 | 137 | async function run() { 138 | const s = await beforeAll(); 139 | if (s !== "ok") { 140 | console.log(`error beforeAll ${s}`); 141 | process.exit(1); 142 | } 143 | const t = await createDirs(); 144 | if (t !== "ok") { 145 | console.log(`error createDirs ${t}`); 146 | process.exit(1); 147 | } 148 | const u = await copy(); 149 | if (u !== "ok") { 150 | console.log(`error copy ${u}`); 151 | process.exit(1); 152 | } 153 | await fs.promises.rm(path.join("publish", "dist"), { recursive: true, force: true }); 154 | process.exit(0); 155 | } 156 | 157 | run(); 158 | -------------------------------------------------------------------------------- /fixes/glob/copy.ts: -------------------------------------------------------------------------------- 1 | import fs from "fs"; 2 | import path from "path"; 3 | 4 | async function run() { 5 | try { 6 | await fs.promises.copyFile( 7 | path.join(process.cwd(), "fixes", "glob", "index.d.ts"), 8 | path.join(process.cwd(), 
"node_modules", "@types", "glob", "index.d.ts") 9 | ); 10 | } catch (e) { 11 | console.error(e); 12 | } 13 | } 14 | 15 | run(); 16 | -------------------------------------------------------------------------------- /fixes/glob/index.d.ts: -------------------------------------------------------------------------------- 1 | // Type definitions for glob 8.1 2 | // Project: https://github.com/isaacs/node-glob 3 | // Definitions by: vvakame 4 | // voy 5 | // Klaus Meinhardt 6 | // Piotr Błażejewicz 7 | // Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped 8 | 9 | /// 10 | 11 | import events = require("events"); 12 | import minimatch = require("minimatch"); 13 | import fs = require("fs"); 14 | 15 | declare function G(pattern: string, cb: (err: Error | null, matches: string[]) => void): G.IGlob; 16 | declare function G( 17 | pattern: string, 18 | options: G.IOptions, 19 | cb: (err: Error | null, matches: string[]) => void 20 | ): G.IGlob; 21 | 22 | declare namespace G { 23 | function __promisify__(pattern: string, options?: IOptions): Promise; 24 | 25 | function sync(pattern: string, options?: IOptions): string[]; 26 | 27 | function hasMagic(pattern: string, options?: IOptions): boolean; 28 | 29 | let glob: typeof G; 30 | let Glob: IGlobStatic; 31 | let GlobSync: IGlobSyncStatic; 32 | 33 | interface IOptions extends minimatch.MinimatchOptions { 34 | cwd?: string | undefined; 35 | root?: string | undefined; 36 | dot?: boolean | undefined; 37 | nomount?: boolean | undefined; 38 | mark?: boolean | undefined; 39 | nosort?: boolean | undefined; 40 | stat?: boolean | undefined; 41 | silent?: boolean | undefined; 42 | strict?: boolean | undefined; 43 | cache?: { [path: string]: boolean | "DIR" | "FILE" | ReadonlyArray } | undefined; 44 | statCache?: { [path: string]: false | { isDirectory(): boolean } | undefined } | undefined; 45 | symlinks?: { [path: string]: boolean | undefined } | undefined; 46 | realpathCache?: { [path: string]: string } | undefined; 47 | sync?: 
boolean | undefined; 48 | nounique?: boolean | undefined; 49 | nonull?: boolean | undefined; 50 | debug?: boolean | undefined; 51 | nobrace?: boolean | undefined; 52 | noglobstar?: boolean | undefined; 53 | noext?: boolean | undefined; 54 | nocase?: boolean | undefined; 55 | matchBase?: any; // eslint-disable-line 56 | nodir?: boolean | undefined; 57 | ignore?: string | ReadonlyArray | undefined; 58 | follow?: boolean | undefined; 59 | realpath?: boolean | undefined; 60 | nonegate?: boolean | undefined; 61 | nocomment?: boolean | undefined; 62 | absolute?: boolean | undefined; 63 | allowWindowsEscape?: boolean | undefined; 64 | fs?: typeof fs; 65 | } 66 | 67 | interface IGlobStatic extends events.EventEmitter { 68 | new (pattern: string, cb?: (err: Error | null, matches: string[]) => void): IGlob; 69 | new ( 70 | pattern: string, 71 | options: IOptions, 72 | cb?: (err: Error | null, matches: string[]) => void 73 | ): IGlob; 74 | prototype: IGlob; 75 | } 76 | 77 | interface IGlobSyncStatic { 78 | new (pattern: string, options?: IOptions): IGlobBase; 79 | prototype: IGlobBase; 80 | } 81 | 82 | interface IGlobBase { 83 | minimatch: minimatch.Minimatch; 84 | options: IOptions; 85 | aborted: boolean; 86 | cache: { [path: string]: boolean | "DIR" | "FILE" | ReadonlyArray }; 87 | statCache: { [path: string]: false | { isDirectory(): boolean } | undefined }; 88 | symlinks: { [path: string]: boolean | undefined }; 89 | realpathCache: { [path: string]: string }; 90 | found: string[]; 91 | } 92 | 93 | interface IGlob extends IGlobBase, events.EventEmitter { 94 | pause(): void; 95 | resume(): void; 96 | abort(): void; 97 | } 98 | } 99 | 100 | export = G; 101 | -------------------------------------------------------------------------------- /flake.lock: -------------------------------------------------------------------------------- 1 | { 2 | "nodes": { 3 | "flake-utils": { 4 | "inputs": { 5 | "systems": "systems" 6 | }, 7 | "locked": { 8 | "lastModified": 1694529238, 9 | 
"narHash": "sha256-zsNZZGTGnMOf9YpHKJqMSsa0dXbfmxeoJ7xHlrt+xmY=", 10 | "owner": "numtide", 11 | "repo": "flake-utils", 12 | "rev": "ff7b65b44d01cf9ba6a71320833626af21126384", 13 | "type": "github" 14 | }, 15 | "original": { 16 | "owner": "numtide", 17 | "repo": "flake-utils", 18 | "type": "github" 19 | } 20 | }, 21 | "nixpkgs": { 22 | "locked": { 23 | "lastModified": 1697915759, 24 | "narHash": "sha256-WyMj5jGcecD+KC8gEs+wFth1J1wjisZf8kVZH13f1Zo=", 25 | "owner": "nixos", 26 | "repo": "nixpkgs", 27 | "rev": "51d906d2341c9e866e48c2efcaac0f2d70bfd43e", 28 | "type": "github" 29 | }, 30 | "original": { 31 | "owner": "nixos", 32 | "ref": "nixpkgs-unstable", 33 | "repo": "nixpkgs", 34 | "type": "github" 35 | } 36 | }, 37 | "root": { 38 | "inputs": { 39 | "flake-utils": "flake-utils", 40 | "nixpkgs": "nixpkgs" 41 | } 42 | }, 43 | "systems": { 44 | "locked": { 45 | "lastModified": 1681028828, 46 | "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", 47 | "owner": "nix-systems", 48 | "repo": "default", 49 | "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", 50 | "type": "github" 51 | }, 52 | "original": { 53 | "owner": "nix-systems", 54 | "repo": "default", 55 | "type": "github" 56 | } 57 | } 58 | }, 59 | "root": "root", 60 | "version": 7 61 | } 62 | -------------------------------------------------------------------------------- /flake.nix: -------------------------------------------------------------------------------- 1 | { 2 | description = "Nix flake for storage-abstraction"; 3 | 4 | inputs = { 5 | nixpkgs.url = "github:nixos/nixpkgs/nixpkgs-unstable"; 6 | flake-utils.url = "github:numtide/flake-utils"; 7 | }; 8 | 9 | outputs = { 10 | flake-utils, 11 | nixpkgs, 12 | ... 
13 | }: let 14 | systems = ["x86_64-linux" "aarch64-linux" "x86_64-darwin" "aarch64-darwin"]; 15 | outputs = flake-utils.lib.eachSystem systems (system: let 16 | pkgs = import nixpkgs { 17 | inherit system; 18 | overlays = []; 19 | }; 20 | in { 21 | # packages exported by the flake 22 | packages = {}; 23 | 24 | # nix run 25 | apps = {}; 26 | 27 | # nix fmt 28 | formatter = pkgs.alejandra; 29 | 30 | # nix develop -c $SHELL 31 | devShells.default = pkgs.mkShell { 32 | name = "default dev shell"; 33 | packages = with pkgs; [ 34 | nodejs_18 35 | ]; 36 | }; 37 | }); 38 | in 39 | outputs; 40 | } 41 | -------------------------------------------------------------------------------- /migration_to_api2.0.md: -------------------------------------------------------------------------------- 1 | - Every API method that accesses the cloud storage service returns a Promise that resolves to an object: 2 | 3 | ```typescript 4 | type ResultObject = { 5 | error: string | null; 6 | value: string | number | Array<[string, number]> | Array | Readable; // depends on method 7 | }; 8 | ``` 9 | 10 | - ~~No more local state: the storage instance will no longer hold a reference to the last used or selected bucket in its local state; you will have to provide a bucket name for every bucket operation, for instance `clearBucket`, but also `removeFile`.~~ 11 | - The storage instance will also no longer hold a reference to all available buckets; a call to `listBuckets` will access the cloud storage service every time it is called; this is handy in case another process or user has created or deleted a new bucket. 12 | - `createBucket` resolves with an error if that bucket already exists 13 | - `removeFile` has an additional optional boolean argument `allVersions`; if set to true all versions of the specified file will be removed. 
Default: false 14 | - `addFile` is added to the API; you can use this method instead of `addFileFromPath`, `addFileFromBuffer` or `addFileFromReadable` 15 | - Extended and updated the introspect API of the adapter: 16 | - `getConfig()` and `getType()` are implemented as getter as well, resp.: `storage.config` and `storage.type` 17 | - added `configError` and `storage.configError` 18 | - added `getServiceClient` and `storage.serviceClient` 19 | - ~~Configuration urls are now completely in the form of a query string: `s3://region=us-west-1&accessKeyId=KEYID&secretAccessKey=SECRET`~~ 20 | - Supported storages: 21 | - Amazon S3 22 | - Cubbit 23 | - Cloudflare R2 24 | - Backblaze B2 S3 compatible 25 | - Backblaze B2 26 | - Google Cloud 27 | - Azure Blob 28 | - MinIO 29 | - local storage 30 | 31 | ### Old API (1.5.x) compared to new API (2.x) 32 | 33 | #### init 34 | 35 | `init(config):Promise<boolean>`
36 | `N/A` 37 | 38 | #### test 39 | 40 | `test():Promise<string>`
41 | `N/A` 42 | 43 | #### selectBucket 44 | 45 | `selectBucket(name: string | null): Promise<void>`
46 | `N/A` → re-implemented in 2.1 47 | 48 | #### getSelectedBucket 49 | 50 | `getSelectedBucket(): string`
51 | `N/A` → re-implemented in 2.1 52 | 53 | #### validateName 54 | 55 | `validateName(name: string): string`
56 | `N/A` 57 | 58 | #### listBuckets 59 | 60 | `listBuckets(): Promise<string[]>`
61 | `listBuckets(): Promise<ResultObjectBuckets>` 62 | 63 | #### createBucket 64 | 65 | `createBucket(name?: string, options?: object): Promise<string>`
66 | `createBucket(name: string, options?: object): Promise<ResultObject>` 67 | 68 | #### clearBucket 69 | 70 | `clearBucket(name?: string): Promise<string>`
71 | `clearBucket(name: string): Promise<ResultObject>` 72 | 73 | #### deleteBucket 74 | 75 | `deleteBucket(name?: string): Promise<string>`
76 | `deleteBucket(name: string): Promise<ResultObject>` 77 | 78 | #### removeFile 79 | 80 | `removeFile(fileName: string): Promise<string>`
81 | `removeFile(bucketName: string, fileName: string): Promise<ResultObject>` 82 | 83 | #### listFiles 84 | 85 | `listFiles(): Promise<[string, number][]>`
86 | `listFiles(bucketName: string): Promise<ResultObjectFiles>` 87 | 88 | #### sizeOf 89 | 90 | `sizeOf(name: string): Promise<number>`
91 | `sizeOf(bucketName: string, fileName: string): Promise<ResultObjectNumber>` 92 | 93 | #### fileExists 94 | 95 | `fileExists(name: string): Promise<boolean>`
96 | `fileExists(bucketName: string, fileName: string): Promise<ResultObjectBoolean>` 97 | 98 | #### getFileAsReadable 99 | 100 | ```typescript 101 | getFileAsReadable( 102 | name: string, 103 | options?: { start?: number; end?: number } 104 | ): Promise<Readable> 105 | ``` 106 | 107 | ```typescript 108 | getFileAsStream( 109 | bucketName: string, 110 | fileName: string, 111 | options?: { [id: string]: any } 112 | ): Promise<ResultObjectStream> 113 | ``` 114 | 115 | #### addFileFromPath 116 | 117 | ```typescript 118 | addFileFromPath(origPath: string, targetPath: string, options: object = {}): Promise<string> 119 | ``` 120 | 121 | ```typescript 122 | addFileFromPath(params: FilePathParams): Promise<ResultObject> 123 | ``` 124 | 125 | #### addFileFromBuffer 126 | 127 | ```typescript 128 | addFileFromBuffer(buffer: Buffer, targetPath: string, options: object = {}): Promise<string> 129 | ``` 130 | 131 | ```typescript 132 | addFileFromBuffer(params: FileBufferParams): Promise<ResultObject> 133 | ``` 134 | 135 | #### addFileFromReadable 136 | 137 | ```typescript 138 | addFileFromReadable(stream: Readable, targetPath: string, options: object = {}): Promise<string> 139 | ``` 140 | 141 | ```typescript 142 | addFileFromStream(params: FileStreamParams): Promise<ResultObject> 143 | ``` 144 | 145 | ### The `init` function is not required anymore 146 | 147 | Only Backblaze B2 Native API storage requires initial authorization by calling the async `authorize` function. This authorization step was performed once by calling the `init` method. Although it would yield an error, it was still possible to call API methods without calling `init` prior to that. In the new version every API call checks if the initial authorization has been performed. 148 | 149 | Other storage services do not require initial authorization but their `init` method was used to select and/or create the bucket that was provided in the config. 150 | 151 | Because the new API seeks to be more transparent, there will be no more 'magic behind the screen'.
So if you want to create a bucket (provided you have the access rights to do so) you have to call `createBucket` explicitly. 152 | 153 | Also the new version tries to keep as little local state as possible so `selectBucket` and `getSelectedBucket` have been removed. 154 | 155 | Because of all the aforementioned changes the `init` is no longer required! You can start calling API methods right after instantiating a storage: 156 | 157 | ```typescript 158 | const b2 = new Storage("b2://applicationKeyId=your-key-id&applicationKey=your-key"); 159 | await b2.listBuckets(); 160 | ``` 161 | 162 | ### The bucket in the config is no longer automatically selected or created 163 | 164 | However, the bucket name that you've provided with the configuration url or object is available by calling `getConfig`: 165 | 166 | ```typescript 167 | const s3 = new Storage("s3://key=key&secret=secret&region=eu-west-2&bucketName=erwe"); 168 | await s3.listFiles(s3.config.bucketName); 169 | ``` 170 | -------------------------------------------------------------------------------- /migration_to_api2.1.md: -------------------------------------------------------------------------------- 1 | - Every API method that accesses the cloud storage service returns a Promise that resolves to an object: 2 | 3 | ```typescript 4 | type ResultObject = { 5 | error: string | null; 6 | value: string | number | Array<[string, number]> | Array<string> | Readable; // depends on method 7 | }; 8 | ``` 9 | 10 | - The storage instance will also no longer hold a reference to all available buckets; a call to `listBuckets` will access the cloud storage service every time it is called; this is handy in case another process or user has created or deleted a bucket. 11 | - `createBucket` resolves with an error if that bucket already exists 12 | - `removeFile` has an additional optional boolean argument `allVersions`; if set to true all versions of the specified file will be removed. 
Default: false 13 | - `addFile` is added to the API; you can use this method instead of `addFileFromPath`, `addFileFromBuffer` or `addFileFromReadable` 14 | - Extended and updated the introspect API of the adapter: 15 | - `getConfig()` and `getType()` are implemented as getter as well, resp.: `storage.config` and `storage.type` 16 | - added `configError` and `storage.configError` 17 | - added `getServiceClient` and `storage.serviceClient` 18 | - added getter and setter for (de)selecting the bucket; `storage.bucket = "the-buck"` and `console.log(storage.bucket)` 19 | - Supported storages: 20 | - Amazon S3 21 | - Cubbit 22 | - Cloudflare R2 23 | - Backblaze B2 S3 compatible 24 | - Backblaze B2 25 | - Google Cloud 26 | - Azure Blob 27 | - MinIO 28 | - local storage 29 | 30 | ### Configuration URLs 31 | 32 | The format of the configuration URLs is more consistent across the storage services. The `@region` part of the config URL of the AmazonS3 adapter has been moved to the query string. 33 | 34 | ```typescript 35 | // old format: 36 | const u = "s3://accessKeyId:secretAccessKey@region/bucketName?option1=value1&option2=value2..."; 37 | 38 | // new format: 39 | const v = "s3://accessKeyId:secretAccessKey@bucketName?region=us-west-1&option2=value2..."; 40 | ``` 41 | 42 | The general format is: 43 | 44 | ```typescript 45 | const u = "protocol://part1:part2@bucketName?option1=value1&option2=value2..."; 46 | ``` 47 | 48 | Which translates for most storage services into: 49 | 50 | ```typescript 51 | const u = "storage_type://credential1:credential2@bucketName?option1=value1&option2=value2..."; 52 | ``` 53 | 54 | ### Old API (1.5.x) compared to new API (2.x) 55 | 56 | #### init 57 | 58 | `init(config):Promise`
59 | `N/A` 60 | 61 | #### test 62 | 63 | `test():Promise`
64 | `N/A` 65 | 66 | #### selectBucket 67 | 68 | `selectBucket(name: string | null): Promise`
69 | `selectBucket(name: string | null): void`
70 | 71 | #### getSelectedBucket 72 | 73 | `getSelectedBucket(): string`
74 | `getSelectedBucket(): string | null`
75 | 76 | #### validateName 77 | 78 | `validateName(name: string): string`
79 | `N/A` 80 | 81 | #### listBuckets 82 | 83 | `listBuckets(): Promise`
84 | `listBuckets(): Promise` 85 | 86 | #### createBucket 87 | 88 | `createBucket(name: string, options?: object): Promise`
89 | `createBucket(name: string, options?: object): Promise` 90 | 91 | #### clearBucket 92 | 93 | `clearBucket(name?: string): Promise`
94 | `clearBucket(name: string): Promise` 95 | 96 | #### deleteBucket 97 | 98 | `deleteBucket(name?: string): Promise`
99 | `deleteBucket(name: string): Promise` 100 | 101 | #### removeFile 102 | 103 | `removeFile(fileName: string): Promise`
104 | `removeFile(bucketName: string, fileName: string): Promise` 105 | 106 | #### listFiles 107 | 108 | `listFiles(): Promise<[string, number][]>`
109 | `listFiles(bucketName: string): Promise` 110 | 111 | #### sizeOf 112 | 113 | `sizeOf(name: string): Promise`
114 | `sizeOf(bucketName: string, fileName: string): Promise` 115 | 116 | #### fileExists 117 | 118 | `fileExists(name: string): Promise`
119 | `fileExists(bucketName: string, fileName: string): Promise` 120 | 121 | #### getFileAsReadable 122 | 123 | ```typescript 124 | getFileAsReadable( 125 | name: string, 126 | options?: { start?: number; end?: number } 127 | ): Promise 128 | ``` 129 | 130 | ```typescript 131 | getFileAsStream( 132 | bucketName: string, 133 | fileName: string, 134 | options?: { [id: string]: any } 135 | ): Promise 136 | ``` 137 | 138 | #### addFileFromPath 139 | 140 | ```typescript 141 | addFileFromPath(origPath: string, targetPath: string, options: object = {}): Promise 142 | ``` 143 | 144 | ```typescript 145 | addFileFromPath(params: FilePathParams): Promise 146 | ``` 147 | 148 | #### addFileFromBuffer 149 | 150 | ```typescript 151 | addFileFromBuffer(buffer: Buffer, targetPath: string, options: object = {}): Promise 152 | ``` 153 | 154 | ```typescript 155 | addFileFromBuffer(params: FileBufferParams): Promise 156 | ``` 157 | 158 | #### addFileFromReadable 159 | 160 | ```typescript 161 | addFileFromReadable(stream: Readable, targetPath: string, options: object = {}): Promise 162 | ``` 163 | 164 | ```typescript 165 | addFileFromStream(params: FileStreamParams): Promise 166 | ``` 167 | 168 | ### The `init` function is not required anymore 169 | 170 | Only Backblaze B2 Native API storage requires initial authorization by calling the async `authorize` function. This authorization step was performed once by calling the `init` method. Although it would yield an error, it was still possible to call API methods without calling `init` prior to that. In the new version every API call checks if the initial authorization has been performed. 171 | 172 | Other storage services do not require initial authorization but their `init` method was used to select and/or create the bucket that was provided in the config. 173 | 174 | Because the new API seeks to be more transparent, there will be no more 'magic behind the screen'. 
So if you want to create a bucket (provided you have the access rights to do so) you have to call `createBucket` explicitly. 175 | 176 | Also the new version tries to keep as little local state as possible so `selectBucket` and `getSelectedBucket` have been removed. 177 | 178 | Because of all the aforementioned changes the `init` is no longer required! You can start calling API methods right after instantiating a storage: 179 | 180 | ```typescript 181 | const b2 = new Storage("b2://applicationKeyId=your-key-id&applicationKey=your-key"); 182 | await b2.listBuckets(); 183 | ``` 184 | 185 | ### The bucket in the config is no longer automatically selected or created 186 | 187 | However, the bucket name that you've provided with the configuration url or object is available by calling `getConfig`: 188 | 189 | ```typescript 190 | const s3 = new Storage("s3://key=key&secret=secret&region=eu-west-2&bucketName=erwe"); 191 | await s3.listFiles(s3.config.bucketName); 192 | ``` 193 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@tweedegolf/storage-abstraction", 3 | "version": "2.1.5", 4 | "description": "Provides an abstraction layer for interacting with a storage; the storage can be local or in the cloud.", 5 | "main": "dist/index.js", 6 | "types": "dist/index.d.ts", 7 | "homepage": "https://github.com/tweedegolf/storage-abstraction/", 8 | "repository": "https://github.com/tweedegolf/storage-abstraction/", 9 | "dependencies": { 10 | "@aws-sdk/client-s3": "^3.582.0", 11 | "@aws-sdk/s3-request-presigner": "^3.582.0", 12 | "@azure/identity": "^4.0.1", 13 | "@azure/storage-blob": "^12.17.0", 14 | "@google-cloud/storage": "^7.11.1", 15 | "@nichoth/backblaze-b2": "^1.7.1", 16 | "minio": "^8.0.0", 17 | "rimraf": "^5.0.5" 18 | }, 19 | "devDependencies": { 20 | "@types/jasmine": "^5.1.2", 21 | "@types/glob": "^8.1.0", 22 | 
"@types/minimatch": "^5.1.2", 23 | "@types/node": "^20.11.24", 24 | "@typescript-eslint/eslint-plugin": "^7.10.0", 25 | "@typescript-eslint/parser": "^7.10.0", 26 | "dotenv": "^16.4.5", 27 | "eslint": "^8.57.0", 28 | "eslint-config-prettier": "^9.0.0", 29 | "eslint-plugin-prettier": "^5.1.3", 30 | "jasmine": "^5.1.0", 31 | "markdown-toc": "^1.2.0", 32 | "prettier": "^3.2.5", 33 | "ts-node": "^10.9.2", 34 | "tsc-watch": "^6.0.4", 35 | "typescript": "^5.0.4" 36 | }, 37 | "scripts": { 38 | "test-all": "npm run test-local && npm run test-gcs && npm run test-s3 && npm run test-b2 && npm run test-azure", 39 | "test-jasmine": "ts-node ./node_modules/.bin/jasmine --random=false --fail-fast ./tests/test.jasmine.ts", 40 | "test-local": "LOCAL_DIRECTORY='tests/test_directory' npm run test-jasmine 0", 41 | "test-s3": "npm run test-jasmine 1", 42 | "test-gcs": "npm run test-jasmine 2", 43 | "test-b2": "npm run test-jasmine 3", 44 | "test-azure": "npm run test-jasmine 4", 45 | "test-minio": "npm run test-jasmine 5", 46 | "test-minio-2": "MINIO_ENDPOINT='play.min.io' MINIO_USE_SSL='true' MINIO_PORT='9000' MINIO_REGION='us-east-1' MINIO_ACCESS_KEY='Q3AM3UQ867SPQQA43P2F' MINIO_SECRET_KEY='zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG' npm run test-jasmine 5", 47 | "test": "ts-node ./tests/test.ts", 48 | "test-urls": "ts-node ./tests/test_urls.ts", 49 | "test-mode": "ts-node ./tests_local/test-mode.ts", 50 | "testB2": "ts-node ./tests_local/testB2.ts", 51 | "testS3": "ts-node ./tests_local/testS3.ts", 52 | "testR2": "ts-node ./tests_local/testR2.ts", 53 | "testB2S3": "ts-node ./tests_local/testB2S3.ts", 54 | "testGCS": "ts-node ./tests_local/testGCS.ts", 55 | "testAzure": "ts-node ./tests_local/testAzure.ts", 56 | "testCubbit": "ts-node ./tests_local/testCubbit.ts", 57 | "testMinio": "ts-node ./tests_local/testMinio.ts", 58 | "testLocal": "ts-node ./tests_local/testLocal.ts", 59 | "testBucketName": "ts-node ./tests_local/testBucketName.ts", 60 | "ts": "ts-node", 61 | "tsc": 
"node_modules/.bin/tsc", 62 | "fix-glob": "ts-node ./fixes/glob/copy.ts", 63 | "copy": "npm run tsc && ts-node copy.ts", 64 | "prepublishOnly": "", 65 | "toc": "markdown-toc -i README.md", 66 | "prepare": "npm run toc && npm run fix-glob && npm run copy" 67 | }, 68 | "keywords": [ 69 | "storage", 70 | "storage abstraction", 71 | "cloud filesystem", 72 | "google cloud", 73 | "amazon s3", 74 | "backblaze b2", 75 | "cloudflare r2", 76 | "cubbit", 77 | "minio", 78 | "azure" 79 | ], 80 | "author": "daniel@tweedegolf.nl", 81 | "license": "MIT", 82 | "publishConfig": { 83 | "access": "public" 84 | }, 85 | "prettier": { 86 | "semi": true, 87 | "trailingComma": "es5", 88 | "printWidth": 100 89 | }, 90 | "eslintConfig": { 91 | "root": true, 92 | "env": { 93 | "node": true, 94 | "browser": true, 95 | "es6": true, 96 | "jasmine": true 97 | }, 98 | "globals": {}, 99 | "extends": [ 100 | "eslint:recommended", 101 | "plugin:@typescript-eslint/recommended", 102 | "prettier" 103 | ], 104 | "ignorePatterns": [ 105 | "node_modules/", 106 | "dist/" 107 | ], 108 | "rules": { 109 | "prettier/prettier": "error", 110 | "no-dupe-class-members": "off", 111 | "@typescript-eslint/no-unused-vars": "off", 112 | "@typescript-eslint/no-inferrable-types": "off", 113 | "@typescript-eslint/no-empty-function": "off", 114 | "@typescript-eslint/interface-name-prefix": "off", 115 | "@typescript-eslint/no-var-requires": "off" 116 | }, 117 | "parser": "@typescript-eslint/parser", 118 | "plugins": [ 119 | "prettier" 120 | ] 121 | } 122 | } -------------------------------------------------------------------------------- /publish/AdapterAmazonS3/README.md: -------------------------------------------------------------------------------- 1 | # Amazon S3 Storage Adapter 2 | 3 | An adapter that provides an abstraction layer over the API of the Amazon S3 cloud storage service and S3 compatible services like Cubbit, Cloudflare R2 and Backblaze B2 S3. 
4 | 5 | This adapter is one of the adapters that is meant to be used as a plugin of the [Storage Abstraction package](https://www.npmjs.com/package/@tweedegolf/storage-abstraction). However it can be used standalone as well, see [below](#standalone). 6 | 7 | The [API](https://github.com/tweedegolf/storage-abstraction/tree/master?tab=readme-ov-file#adapter-api) of the adapter abstracts away the differences between the API's of cloud storage services. The API only supports the basic, most commonly used cloud service operations such as creating a bucket, storing files and so on. 8 | 9 | It is also possible to access all the specific functionality of the cloud service API through the service client of the adapter, see [here](https://github.com/tweedegolf/storage-abstraction/tree/master?tab=readme-ov-file#getserviceclient). 10 | 11 | If you are new to the Storage Abstraction library you may want to read [this](https://github.com/tweedegolf/storage-abstraction/blob/master/README.md#how-it-works) first. 12 | 13 | ```typescript 14 | import { Storage, StorageType } from "@tweedegolf/storage-abstraction"; 15 | 16 | const configuration = { 17 | type: StorageType.S3, 18 | }; 19 | 20 | const storage = new Storage(configuration); 21 | 22 | const result = await storage.listBuckets(); 23 | 24 | console.log(result); 25 | ``` 26 | 27 | The Storage class is cloud service agnostic and doesn't know anything about the adapter it uses and adapters are completely interchangeable. It only expects the adapter to have implemented all methods of the `IAdapter` interface, see the [API](https://github.com/tweedegolf/storage-abstraction/blob/master/README.md#adapter-api). 28 | 29 | When you create a Storage instance it checks the mandatory `type` key in the configuration object and then loads the appropriate adapter module automatically from your node_modules folder using `require()`. 
For more information please read [this](https://github.com/tweedegolf/storage-abstraction/blob/master/README.md#register-your-adapter). 30 | 31 | ## Configuration 32 | 33 | The configuration object that you pass to the Storage constructor is forwarded to the constructor of the adapter. 34 | 35 | The Storage constructor is only interested in the `type` key of the configuration object, all other keys are necessary for configuring the adapter. 36 | 37 | The Storage constructor expects the configuration to be of type `StorageAdapterConfig`. 38 | 39 | The adapter expects the configuration to be of type `AdapterConfig` or a type that extends this type. 40 | 41 | ```typescript 42 | export interface AdapterConfig { 43 | bucketName?: string; 44 | [id: string]: any; // any mandatory or optional key 45 | } 46 | 47 | export interface StorageAdapterConfig extends AdapterConfig { 48 | type: string; 49 | } 50 | ``` 51 | 52 | The type of the configuration object for this adapter: 53 | 54 | ```typescript 55 | export interface AdapterConfigS3 extends AdapterConfig { 56 | region?: string; 57 | endpoint?: string; 58 | credentials?: { 59 | accessKeyId?: string; 60 | secretAccessKey?: string; 61 | }; 62 | accessKeyId?: string; 63 | secretAccessKey?: string; 64 | } 65 | ``` 66 | 67 | ## Examples 68 | 69 | Examples with configuration object: 70 | 71 | ```typescript 72 | // Cubbit S3 compatible 73 | const s = new Storage({ 74 | type: StorageType.S3, 75 | accessKeyId: 'your-key-id' 76 | secretAccessKey: 'your-secret' 77 | endpoint: "https://s3.cubbit.eu/", 78 | region: "auto", 79 | }); 80 | 81 | const s = new Storage({ 82 | type: StorageType.S3, 83 | accessKeyId: 'your-key-id' 84 | secretAccessKey: 'your-secret' 85 | endpoint: "https://s3.cubbit.eu/", 86 | region: "auto", 87 | bucketName: "the-buck", 88 | }); 89 | ``` 90 | 91 | Same example with configuration url: 92 | 93 | ```typescript 94 | // Cubbit S3 compatible 95 | const s = new Storage( 96 | 
"s3://your-key-id:your-access-key?endpoint=https://s3.cubbit.eu/®ion=auto" 97 | ); 98 | 99 | const s = new Storage( 100 | "s3://your-key-id:your-access-key@the-buck?endpoint=https://s3.cubbit.eu/®ion=auto" 101 | ); 102 | ``` 103 | 104 | For more information about configuration urls please read [this](https://github.com/tweedegolf/storage-abstraction/blob/master/README.md#configuration-url). 105 | 106 | ### Amazon S3 107 | 108 | If you use this adapter to interact with the original Amazon S3 service it is possible to skip the passing in of the `accessKeyId`, `secretAccessKey` and `region`; the AWS SDK will automatically read it from a chain of providers, e.g. from environment variables or the ECS task role, so this will work: 109 | 110 | ```typescript 111 | // only for Amazon S3 112 | const s = new Storage({ type: StorageType.S3 }); 113 | // with a config url: 114 | const s = new Storage("s3://"); 115 | // and even: 116 | const s = new Storage("s3"); 117 | ``` 118 | 119 | The environment variables that you need to set for this are: 120 | 121 | ```shell 122 | AWS_ACCESS_KEY_ID="your access key" 123 | AWS_SECRET_ACCESS_KEY="your secret" 124 | AWS_REGION="eu-west-1" 125 | 126 | ``` 127 | 128 | Note that this does _not_ work for S3 compatible services because the AWS SDK doesn't read the endpoint from environment variables. 129 | 130 | Also, if you pass a value for `endpoint` in the config, for some reason AWS SDK does not read the environment variables `AWS_REGION` `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` anymore. 131 | 132 | So for S3 compatible services setting a value for `endpoint`, `accessKeyId` and `secretAccessKey` in the config is mandatory. 133 | 134 | For S3 compatible services `region` is mandatory as well but you don't have to pass this in the config because AWS SDK always reads the `AWS_REGION` environment variable if no value is provided in the config. Note that the names of the regions may differ from service to service, see below. 
135 | 136 | ### S3 Compatible Storage 137 | 138 | Cloudflare R2, Backblaze B2 and Cubbit are S3 compatible services. You can use the `AdapterAmazonS3` but you have to add a value for `endpoint` in the config. 139 | 140 | #### Cloudflare R2 141 | 142 | ```typescript 143 | const s = new Storage({ 144 | type: StorageType.S3, 145 | region: 'auto' 146 | endpoint: R2_ENDPOINT, 147 | accessKeyId: R2_ACCESS_KEY, 148 | secretAccessKey: R2_SECRET_KEY, 149 | }); 150 | ``` 151 | 152 | The endpoint is `https://..r2.cloudflarestorage.com`. 153 | 154 | Jurisdiction is optional, e.g. `eu`. 155 | 156 | It is mandatory to set a value for `region`, use one of these values: 157 | 158 | - `auto` 159 | - `wnam` 160 | - `enam` 161 | - `weur` 162 | - `eeur` 163 | - `apac` 164 | 165 | You can also set the region using the `AWS_REGION` environment variable. 166 | 167 | #### Backblaze S3 168 | 169 | ```typescript 170 | const s = new Storage({ 171 | type: StorageType.S3, 172 | region: "eu-central-003", 173 | endpoint: B2_ENDPOINT, 174 | accessKeyId: B2_APPLICATION_KEY_ID, 175 | secretAccessKey: B2_APPLICATION_KEY, 176 | }); 177 | ``` 178 | 179 | The endpoint is `https://s3..backblazeb2.com`. Although the region is part of the endpoint AWS SDK still expects you to set a value for `region` in the configuration or in the `AWS_REGION` environment variable. You can simply retrieve your region from the endpoint. 180 | 181 | Backblaze also has a native API. You can use [this adapter](https://www.npmjs.com/package/@tweedegolf/sab-adapter-backblaze-b2) if you want to use the native API. 
182 | 183 | ## Standalone 184 | 185 | You can also use the adapter standalone, without the need to create a Storage instance: 186 | 187 | ```typescript 188 | import { AdapterAmazonS3 } from "@tweedegolf/sab-adapter-amazon-s3"; 189 | 190 | const a = new AdapterAmazonS3(); 191 | const r = await a.listBuckets(); 192 | console.log(r); 193 | ``` 194 | 195 | ## API 196 | 197 | For a complete description of the Adapter API see [this part](https://github.com/tweedegolf/storage-abstraction/blob/master/README.md#adapter-api) of the Storage Abstraction package readme. 198 | -------------------------------------------------------------------------------- /publish/AdapterAmazonS3/changelog.md: -------------------------------------------------------------------------------- 1 | # 1.0.17 2 | 3 | - add support for optional key `useSignedUrl` in options object `getFileAsUrl` 4 | 5 | # 1.0.16 6 | 7 | - updated @aws-sdk to 3.582.0 8 | 9 | # 1.0.15 10 | 11 | - updated @aws-sdk to 3.550.0 12 | 13 | # 1.0.14 14 | 15 | - fix for issue #60 16 | - updated @aws-sdk to 3.525.0 17 | 18 | # 1.0.13 19 | 20 | - improved error logging 21 | 22 | # 1.0.12 23 | 24 | - fix bug in selected bucket 25 | 26 | # 1.0.10 27 | 28 | - revert to v1 format of config URLs 29 | - re-implement storing the selected bucket in local state 30 | - `selectBucket` and `getSelectedBucket` 31 | - also implemented as setter 32 | `storage.bucketName = "the-buck";` and getter `console.log(storage.bucketName);` 33 | - update @aws-sdk/client-s3 to 3.503.1 34 | -------------------------------------------------------------------------------- /publish/AdapterAmazonS3/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@tweedegolf/sab-adapter-amazon-s3", 3 | "version": "1.0.17", 4 | "description": "Provides an abstraction layer for interacting with Amazon S3 and S3 compatible cloud services.", 5 | "main": "dist/index/AdapterAmazonS3.js", 6 | "types": 
"dist/index/AdapterAmazonS3.d.ts", 7 | "homepage": "https://github.com/tweedegolf/storage-abstraction/", 8 | "repository": "https://github.com/tweedegolf/storage-abstraction/", 9 | "dependencies": { 10 | "@aws-sdk/client-s3": "^3.582.0", 11 | "@aws-sdk/s3-request-presigner": "^3.582.0" 12 | }, 13 | "scripts": {}, 14 | "keywords": [ 15 | "amazon s3", 16 | "storage", 17 | "storage abstraction", 18 | "cloud filesystem", 19 | "google cloud", 20 | "backblaze b2", 21 | "cloudflare r2", 22 | "cubbit", 23 | "minio", 24 | "azure" 25 | ], 26 | "author": "daniel@tweedegolf.nl", 27 | "license": "MIT", 28 | "publishConfig": { 29 | "access": "public" 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /publish/AdapterAzureBlob/README.md: -------------------------------------------------------------------------------- 1 | # Azure Blob Storage Adapter 2 | 3 | An adapter that provides an abstraction layer over the API of the Microsoft Azure Blob cloud storage service. 4 | 5 | This adapter is one of the adapters that is meant to be used as a plugin of the [Storage Abstraction package](https://www.npmjs.com/package/@tweedegolf/storage-abstraction). However it can be used standalone as well, see [below](#standalone). 6 | 7 | The [API](https://github.com/tweedegolf/storage-abstraction/tree/master?tab=readme-ov-file#adapter-api) of the adapter abstracts away the differences between the API's of cloud storage services. The API only supports the basic, most commonly used cloud service operations such as creating a bucket, storing files and so on. 8 | 9 | It is also possible to access all the specific functionality of the cloud service API through the service client of the adapter, see [here](https://github.com/tweedegolf/storage-abstraction/tree/master?tab=readme-ov-file#getserviceclient). 
10 | 11 | If you are new to the Storage Abstraction library you may want to read [this](https://github.com/tweedegolf/storage-abstraction/blob/master/README.md#how-it-works) first. 12 | 13 | ```typescript 14 | import { Storage, StorageType } from "@tweedegolf/storage-abstraction"; 15 | 16 | const configuration = { 17 | type: StorageType.AZURE, 18 | accountName: "yourAccount", 19 | accountKey: "yourKey", 20 | }; 21 | 22 | const storage = new Storage(configuration); 23 | 24 | const result = await storage.listBuckets(); 25 | 26 | console.log(result); 27 | ``` 28 | 29 | The Storage class is cloud service agnostic and doesn't know anything about the adapter it uses and adapters are completely interchangeable. It only expects the adapter to have implemented all methods of the `IAdapter` interface, see the [API](https://github.com/tweedegolf/storage-abstraction/blob/master/README.md#adapter-api). 30 | 31 | When you create a Storage instance it checks the mandatory `type` key in the configuration object and then loads the appropriate adapter module automatically from your node_modules folder using `require()`. For more information please read [this](https://github.com/tweedegolf/storage-abstraction/blob/master/README.md#register-your-adapter). 32 | 33 | ## Configuration 34 | 35 | The configuration object that you pass to the Storage constructor is forwarded to the constructor of the adapter. 36 | 37 | The Storage constructor is only interested in the `type` key of the configuration object, all other keys are necessary for configuring the adapter. 38 | 39 | The Storage constructor expects the configuration to be of type `StorageAdapterConfig`. 40 | 41 | The adapter expects the configuration to be of type `AdapterConfig` or a type that extends this type. 
42 | 43 | ```typescript 44 | export interface AdapterConfig { 45 | bucketName?: string; 46 | [id: string]: any; // any mandatory or optional key 47 | } 48 | 49 | export interface StorageAdapterConfig extends AdapterConfig { 50 | type: string; 51 | } 52 | ``` 53 | 54 | The type of the configuration object for this adapter: 55 | 56 | ```typescript 57 | export interface AdapterConfigAzure extends AdapterConfig { 58 | accountName?: string; 59 | connectionString?: string; 60 | accountKey?: string; 61 | sasToken?: string; 62 | blobDomain?: string; 63 | } 64 | ``` 65 | 66 | ## Examples 67 | 68 | Examples with configuration object: 69 | 70 | ```typescript 71 | const s = new Storage({ 72 | type: StorageType.AZURE, 73 | accountName: "your-account-name", 74 | accountKey: "your-account-key", 75 | }); 76 | 77 | const s = new Storage({ 78 | type: StorageType.AZURE, 79 | accountName: "your-account-name", 80 | accountKey: "your-account-key", 81 | bucketName: "the-buck" 82 | maxTries: 3 83 | }); 84 | 85 | const s = new Storage({ 86 | type: StorageType.AZURE, 87 | accountName: "your-account-name", 88 | sasToken: "your-sas-token", 89 | blobDomain: "your-blob-domain", // Defaults to blob.core.windows.net 90 | bucketName: "the-buck" 91 | }); 92 | ``` 93 | 94 | Same examples with configuration url: 95 | 96 | ```typescript 97 | const s = new Storage("azure://your-account-name:your-account-key"); 98 | 99 | const s = new Storage("azure://your-account-name:your-account-key@the-buck?maxTries=3"); 100 | ``` 101 | 102 | For more information about configuration urls please read [this](https://github.com/tweedegolf/storage-abstraction/blob/master/README.md#configuration-url). 103 | 104 | ## Microsoft Azure Blob Storage 105 | 106 | There are multiple ways to login to Azure Blob Storage. Microsoft recommends to use passwordless authorization, for this you need to provide a value for `accountName` which is the name of your storage account. 
Then you can either login using the Azure CLI command `az login` or by setting the following environment variables: 107 | 108 | ```shell 109 | AZURE_TENANT_ID 110 | AZURE_CLIENT_ID 111 | AZURE_CLIENT_SECRET 112 | 113 | ``` 114 | 115 | You can find these values in the Azure Portal 116 | 117 | Alternately you can login by: 118 | 119 | - providing a value for `connectionString` 120 | - providing a value for both `accountName` and `accountKey` 121 | - providing a value for both `accountName` and `sasToken` 122 | 123 | Note that if you don't use the `accountKey` for authorization and you add files to a bucket you will get this error message: 124 | 125 | `'Can only generate the SAS when the client is initialized with a shared key credential'` 126 | 127 | This does not mean that the file hasn't been uploaded, it simply means that no public url can been generated for this file. 128 | 129 | ## Standalone 130 | 131 | You can also use the adapter standalone, without the need to create a Storage instance: 132 | 133 | ```typescript 134 | import { AdapterAzureBlob } from "@tweedegolf/sab-adapter-azure-blob"; 135 | 136 | const a = new AdapterAzureBlob({ 137 | accountName: "yourAccount", 138 | }); 139 | const r = await a.listBuckets(); 140 | console.log(r); 141 | ``` 142 | 143 | ## API 144 | 145 | For a complete description of the Adapter API see [this part](https://github.com/tweedegolf/storage-abstraction/blob/master/README.md#adapter-api) documentation of the Storage Abstraction package readme. 
 146 | -------------------------------------------------------------------------------- /publish/AdapterAzureBlob/changelog.md: -------------------------------------------------------------------------------- 1 | # 1.0.7 2 | 3 | - add support for optional config option `blobDomain` 4 | 5 | # 1.0.6 6 | 7 | - add support for optional key `useSignedUrl` in options object `getFileAsUrl` 8 | 9 | # 1.0.5 10 | 11 | - fix bug in selected bucket 12 | 13 | # 1.0.3 14 | 15 | - revert to v1 format of config URLs 16 | - re-implement storing the selected bucket in local state 17 | - `selectBucket` and `getSelectedBucket` 18 | - also implemented as setter 19 | `storage.bucketName = "the-buck";` and getter `console.log(storage.bucketName);` 20 | - fix dependencies 21 | -------------------------------------------------------------------------------- /publish/AdapterAzureBlob/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@tweedegolf/sab-adapter-azure-blob", 3 | "version": "1.0.7", 4 | "description": "Provides an abstraction layer for interacting with Microsoft Azure Blob Storage cloud service.", 5 | "main": "dist/index/AdapterAzureBlob.js", 6 | "types": "dist/index/AdapterAzureBlob.d.ts", 7 | "homepage": "https://github.com/tweedegolf/storage-abstraction/", 8 | "repository": "https://github.com/tweedegolf/storage-abstraction/", 9 | "dependencies": { 10 | "@azure/storage-blob": "^12.17.0", 11 | "@azure/identity": "^4.0.1" 12 | }, 13 | "scripts": {}, 14 | "keywords": [ 15 | "microsoft azure blob", 16 | "amazon s3", 17 | "storage", 18 | "storage abstraction", 19 | "cloud filesystem", 20 | "google cloud", 21 | "backblaze b2", 22 | "cloudflare r2", 23 | "cubbit", 24 | "minio" 25 | ], 26 | "author": "daniel@tweedegolf.nl", 27 | "license": "MIT", 28 | "publishConfig": { 29 | "access": "public" 30 | } 31 | } 32 | -------------------------------------------------------------------------------- 
/publish/AdapterBackblazeB2/README.md: -------------------------------------------------------------------------------- 1 | # Backblaze B2 Storage Adapter 2 | 3 | An adapter that provides an abstraction layer over the API of the Backblaze B2 cloud storage service. 4 | 5 | This adapter is one of the adapters that is meant to be used as a plugin of the [Storage Abstraction package](https://www.npmjs.com/package/@tweedegolf/storage-abstraction). However it can be used standalone as well, see [below](#standalone). 6 | 7 | The [API](https://github.com/tweedegolf/storage-abstraction/tree/master?tab=readme-ov-file#adapter-api) of the adapter abstracts away the differences between the API's of cloud storage services. The API only supports the basic, most commonly used cloud service operations such as creating a bucket, storing files and so on. 8 | 9 | It is also possible to access all the specific functionality of the cloud service API through the service client of the adapter, see [here](https://github.com/tweedegolf/storage-abstraction/tree/master?tab=readme-ov-file#getserviceclient). 10 | 11 | If you are new to the Storage Abstraction library you may want to read [this](https://github.com/tweedegolf/storage-abstraction/blob/master/README.md#how-it-works) first. 12 | 13 | ```typescript 14 | import { Storage, StorageType } from "@tweedegolf/storage-abstraction"; 15 | 16 | const configuration = { 17 | type: StorageType.B2, 18 | applicationKey: "key", 19 | applicationKeyId: "keyId", 20 | }; 21 | 22 | const storage = new Storage(configuration); 23 | 24 | const result = await storage.listBuckets(); 25 | 26 | console.log(result); 27 | ``` 28 | 29 | The Storage class is cloud service agnostic and doesn't know anything about the adapter it uses and adapters are completely interchangeable. It only expects the adapter to have implemented all methods of the `IAdapter` interface, see the [API](https://github.com/tweedegolf/storage-abstraction/blob/master/README.md#adapter-api). 
30 | 31 | When you create a Storage instance it checks the mandatory `type` key in the configuration object and then loads the appropriate adapter module automatically from your node_modules folder using `require()`. For more information please read [this](https://github.com/tweedegolf/storage-abstraction/blob/master/README.md#register-your-adapter). 32 | 33 | ## Configuration 34 | 35 | The configuration object that you pass to the Storage constructor is forwarded to the constructor of the adapter. 36 | 37 | The Storage constructor is only interested in the `type` key of the configuration object, all other keys are necessary for configuring the adapter. 38 | 39 | The Storage constructor expects the configuration to be of type `StorageAdapterConfig`. 40 | 41 | The adapter expects the configuration to be of type `AdapterConfig` or a type that extends this type. 42 | 43 | ```typescript 44 | export interface AdapterConfig { 45 | bucketName?: string; 46 | [id: string]: any; // any mandatory or optional key 47 | } 48 | 49 | export interface StorageAdapterConfig extends AdapterConfig { 50 | type: string; 51 | } 52 | ``` 53 | 54 | The type of the configuration object for this adapter: 55 | 56 | ```typescript 57 | export interface AdapterConfigB2 extends AdapterConfig { 58 | applicationKey: string; 59 | applicationKeyId: string; 60 | } 61 | ``` 62 | 63 | ## Examples 64 | 65 | Examples with configuration object: 66 | 67 | ```typescript 68 | const s = new Storage({ 69 | type: StorageType.B2, 70 | applicationKeyId: "your-key-id", 71 | applicationKey: "your-key", 72 | }); 73 | 74 | const s = new Storage({ 75 | type: StorageType.B2, 76 | applicationKeyId: "your-key-id", 77 | applicationKey: "your-key", 78 | bucketName: "the-buck", 79 | }); 80 | ``` 81 | 82 | Same examples with configuration url: 83 | 84 | ```typescript 85 | const s = new Storage("b2://your-key-id:your-key"); 86 | 87 | const s = new Storage("b2://your-key-id:your-key@the-buck"); 88 | ``` 89 | 90 | For more 
information about configuration urls please read [this](https://github.com/tweedegolf/storage-abstraction/blob/master/README.md#configuration-url). 91 | 92 | ## Standalone 93 | 94 | You can also use the adapter standalone, without the need to create a Storage instance: 95 | 96 | ```typescript 97 | import { AdapterBackblazeB2 } from "@tweedegolf/sab-adapter-backblaze-b2"; 98 | 99 | const a = new AdapterBackblazeB2({ 100 | applicationKey: "key", 101 | applicationKeyId: "keyId", 102 | }); 103 | const r = await a.listBuckets(); 104 | console.log(r); 105 | ``` 106 | 107 | ## API 108 | 109 | For a complete description of the Adapter API see [this part](https://github.com/tweedegolf/storage-abstraction/blob/master/README.md#adapter-api) documentation of the Storage Abstraction package readme. 110 | -------------------------------------------------------------------------------- /publish/AdapterBackblazeB2/changelog.md: -------------------------------------------------------------------------------- 1 | # 1.0.5 2 | 3 | - fix bug in selected bucket 4 | 5 | # 1.0.3 6 | 7 | - revert to v1 format of config URLs 8 | - re-implement storing the selected bucket in local state 9 | - `selectBucket` and `geSelectedBucket` 10 | - also implemented as getter 11 | `storage.bucketName = "the-buck";` and setter `console.log(storage.bucketName);` 12 | -------------------------------------------------------------------------------- /publish/AdapterBackblazeB2/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@tweedegolf/sab-adapter-backblaze-b2", 3 | "version": "1.0.5", 4 | "lockfileVersion": 3, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "@tweedegolf/sab-adapter-backblaze-b2", 9 | "version": "1.0.5", 10 | "license": "MIT", 11 | "dependencies": { 12 | "@nichoth/backblaze-b2": "^1.7.1" 13 | } 14 | }, 15 | "node_modules/@babel/runtime": { 16 | "version": "7.23.9", 17 | "resolved": 
"https://registry.npmjs.org/@babel/runtime/-/runtime-7.23.9.tgz", 18 | "integrity": "sha512-0CX6F+BI2s9dkUqr08KFrAIZgNFj75rdBU/DjCyYLIaV/quFjkk6T+EJ2LkZHyZTbEV4L5p97mNkUsHl2wLFAw==", 19 | "dependencies": { 20 | "regenerator-runtime": "^0.14.0" 21 | }, 22 | "engines": { 23 | "node": ">=6.9.0" 24 | } 25 | }, 26 | "node_modules/@nichoth/backblaze-b2": { 27 | "version": "1.7.1", 28 | "resolved": "https://registry.npmjs.org/@nichoth/backblaze-b2/-/backblaze-b2-1.7.1.tgz", 29 | "integrity": "sha512-8b7CPSLvhrv0aMwprGhCk8KpXckEs/loAn2tpfWxg7D4tKPb3laf+rHBGhuADAov0lYmb1X1ZtOviV4l+hFSXw==", 30 | "dependencies": { 31 | "axios": "^1.6.2", 32 | "axios-retry": "^3.9.1", 33 | "lodash": "^4.17.21" 34 | }, 35 | "engines": { 36 | "node": ">=10.0" 37 | } 38 | }, 39 | "node_modules/asynckit": { 40 | "version": "0.4.0", 41 | "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", 42 | "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" 43 | }, 44 | "node_modules/axios": { 45 | "version": "1.6.7", 46 | "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.7.tgz", 47 | "integrity": "sha512-/hDJGff6/c7u0hDkvkGxR/oy6CbCs8ziCsC7SqmhjfozqiJGc8Z11wrv9z9lYfY4K8l+H9TpjcMDX0xOZmx+RA==", 48 | "dependencies": { 49 | "follow-redirects": "^1.15.4", 50 | "form-data": "^4.0.0", 51 | "proxy-from-env": "^1.1.0" 52 | } 53 | }, 54 | "node_modules/axios-retry": { 55 | "version": "3.9.1", 56 | "resolved": "https://registry.npmjs.org/axios-retry/-/axios-retry-3.9.1.tgz", 57 | "integrity": "sha512-8PJDLJv7qTTMMwdnbMvrLYuvB47M81wRtxQmEdV5w4rgbTXTt+vtPkXwajOfOdSyv/wZICJOC+/UhXH4aQ/R+w==", 58 | "dependencies": { 59 | "@babel/runtime": "^7.15.4", 60 | "is-retry-allowed": "^2.2.0" 61 | } 62 | }, 63 | "node_modules/combined-stream": { 64 | "version": "1.0.8", 65 | "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", 66 | "integrity": 
"sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", 67 | "dependencies": { 68 | "delayed-stream": "~1.0.0" 69 | }, 70 | "engines": { 71 | "node": ">= 0.8" 72 | } 73 | }, 74 | "node_modules/delayed-stream": { 75 | "version": "1.0.0", 76 | "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", 77 | "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", 78 | "engines": { 79 | "node": ">=0.4.0" 80 | } 81 | }, 82 | "node_modules/follow-redirects": { 83 | "version": "1.15.6", 84 | "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz", 85 | "integrity": "sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==", 86 | "funding": [ 87 | { 88 | "type": "individual", 89 | "url": "https://github.com/sponsors/RubenVerborgh" 90 | } 91 | ], 92 | "engines": { 93 | "node": ">=4.0" 94 | }, 95 | "peerDependenciesMeta": { 96 | "debug": { 97 | "optional": true 98 | } 99 | } 100 | }, 101 | "node_modules/form-data": { 102 | "version": "4.0.0", 103 | "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", 104 | "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", 105 | "dependencies": { 106 | "asynckit": "^0.4.0", 107 | "combined-stream": "^1.0.8", 108 | "mime-types": "^2.1.12" 109 | }, 110 | "engines": { 111 | "node": ">= 6" 112 | } 113 | }, 114 | "node_modules/is-retry-allowed": { 115 | "version": "2.2.0", 116 | "resolved": "https://registry.npmjs.org/is-retry-allowed/-/is-retry-allowed-2.2.0.tgz", 117 | "integrity": "sha512-XVm7LOeLpTW4jV19QSH38vkswxoLud8sQ57YwJVTPWdiaI9I8keEhGFpBlslyVsgdQy4Opg8QOLb8YRgsyZiQg==", 118 | "engines": { 119 | "node": ">=10" 120 | }, 121 | "funding": { 122 | "url": "https://github.com/sponsors/sindresorhus" 123 | } 124 | }, 125 | "node_modules/lodash": { 126 | "version": 
"4.17.21", 127 | "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", 128 | "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" 129 | }, 130 | "node_modules/mime-db": { 131 | "version": "1.52.0", 132 | "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", 133 | "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", 134 | "engines": { 135 | "node": ">= 0.6" 136 | } 137 | }, 138 | "node_modules/mime-types": { 139 | "version": "2.1.35", 140 | "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", 141 | "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", 142 | "dependencies": { 143 | "mime-db": "1.52.0" 144 | }, 145 | "engines": { 146 | "node": ">= 0.6" 147 | } 148 | }, 149 | "node_modules/proxy-from-env": { 150 | "version": "1.1.0", 151 | "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", 152 | "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==" 153 | }, 154 | "node_modules/regenerator-runtime": { 155 | "version": "0.14.1", 156 | "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz", 157 | "integrity": "sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==" 158 | } 159 | } 160 | } 161 | -------------------------------------------------------------------------------- /publish/AdapterBackblazeB2/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@tweedegolf/sab-adapter-backblaze-b2", 3 | "version": "1.0.5", 4 | "description": "Provides an abstraction layer for interacting with Backblaze B2 cloud service.", 5 | "main": "dist/index/AdapterBackblazeB2.js", 6 | "types": "dist/index/AdapterBackblazeB2.d.ts", 7 | 
"homepage": "https://github.com/tweedegolf/storage-abstraction/", 8 | "repository": "https://github.com/tweedegolf/storage-abstraction/", 9 | "dependencies": { 10 | "@nichoth/backblaze-b2": "^1.7.1" 11 | }, 12 | "scripts": {}, 13 | "keywords": [ 14 | "backblaze b2", 15 | "amazon s3", 16 | "storage", 17 | "storage abstraction", 18 | "cloud filesystem", 19 | "google cloud", 20 | "cloudflare r2", 21 | "cubbit", 22 | "minio", 23 | "azure" 24 | ], 25 | "author": "daniel@tweedegolf.nl", 26 | "license": "MIT", 27 | "publishConfig": { 28 | "access": "public" 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /publish/AdapterGoogleCloud/README.md: -------------------------------------------------------------------------------- 1 | # Google Cloud Storage Adapter 2 | 3 | An adapter that provides an abstraction layer over the API of the Google cloud storage service. 4 | 5 | This adapter is one of the adapters that is meant to be used as a plugin of the [Storage Abstraction package](https://www.npmjs.com/package/@tweedegolf/storage-abstraction). However it can be used standalone as well, see [below](#standalone). 6 | 7 | The [API](https://github.com/tweedegolf/storage-abstraction/tree/master?tab=readme-ov-file#adapter-api) of the adapter abstracts away the differences between the API's of cloud storage services. The API only supports the basic, most commonly used cloud service operations such as creating a bucket, storing files and so on. 8 | 9 | It is also possible to access all the specific functionality of the cloud service API through the service client of the adapter, see [here](https://github.com/tweedegolf/storage-abstraction/tree/master?tab=readme-ov-file#getserviceclient). 10 | 11 | If you are new to the Storage Abstraction library you may want to read [this](https://github.com/tweedegolf/storage-abstraction/blob/master/README.md#how-it-works) first. 
12 | 13 | ```typescript 14 | import { Storage, StorageType } from "@tweedegolf/storage-abstraction"; 15 | 16 | const configuration = { 17 | type: StorageType.GCS, 18 | keyFilename: "path/to/keyFile.json", 19 | }; 20 | 21 | const storage = new Storage(configuration); 22 | 23 | const result = await storage.listBuckets(); 24 | 25 | console.log(result); 26 | ``` 27 | 28 | The Storage class is cloud service agnostic and doesn't know anything about the adapter it uses and adapters are completely interchangeable. It only expects the adapter to have implemented all methods of the `IAdapter` interface, see the [API](https://github.com/tweedegolf/storage-abstraction/blob/master/README.md#adapter-api). 29 | 30 | When you create a Storage instance it checks the mandatory `type` key in the configuration object and then loads the appropriate adapter module automatically from your node_modules folder using `require()`. For more information please read [this](https://github.com/tweedegolf/storage-abstraction/blob/master/README.md#register-your-adapter). 31 | 32 | ## Configuration 33 | 34 | The configuration object that you pass to the Storage constructor is forwarded to the constructor of the adapter. 35 | 36 | The Storage constructor is only interested in the `type` key of the configuration object, all other keys are necessary for configuring the adapter. 37 | 38 | The Storage constructor expects the configuration to be of type `StorageAdapterConfig`. 39 | 40 | The adapter expects the configuration to be of type `AdapterConfig` or a type that extends this type. 
41 | 42 | ```typescript 43 | export interface AdapterConfig { 44 | bucketName?: string; 45 | [id: string]: any; // any mandatory or optional key 46 | } 47 | 48 | export interface StorageAdapterConfig extends AdapterConfig { 49 | type: string; 50 | } 51 | ``` 52 | 53 | The type of the configuration object for this adapter: 54 | 55 | ```typescript 56 | export interface AdapterConfigGoogle extends AdapterConfig { 57 | keyFilename?: string; 58 | } 59 | ``` 60 | 61 | ## Examples 62 | 63 | Examples with configuration object: 64 | 65 | ```typescript 66 | const s = new Storage({ 67 | type: StorageType.GCS, 68 | keyFilename: "path/to/keyFile.json", 69 | }); 70 | 71 | const s = new Storage({ 72 | type: StorageType.GCS, 73 | keyFilename: "path/to/keyFile.json", 74 | bucketName: "the-buck", 75 | }); 76 | ``` 77 | 78 | Same examples with configuration url: 79 | 80 | ```typescript 81 | const s = new Storage("gcs://path/to/keyFile.json"); 82 | 83 | const s = new Storage("gcs://path/to/keyFile.json@the-buck"); 84 | ``` 85 | 86 | For more information about configuration urls please read [this](https://github.com/tweedegolf/storage-abstraction/blob/master/README.md#configuration-url). 87 | 88 | ## Google Cloud Storage 89 | 90 | Google cloud service can read default credentials from an environment variable. 
91 | 92 | ```typescript 93 | const s = new Storage({ type: StorageType.GCS }); 94 | // using a config url: 95 | const s = new Storage("gcs://"); 96 | // and even: 97 | const s = new Storage("gcs"); 98 | ``` 99 | 100 | Environment variable that is automatically read: 101 | 102 | ```shell 103 | GOOGLE_APPLICATION_CREDENTIALS="path/to/keyFile.json" 104 | ``` 105 | 106 | ## Standalone 107 | 108 | You can also use the adapter standalone, without the need to create a Storage instance: 109 | 110 | ```typescript 111 | import { AdapterGoogleStorage } from "@tweedegolf/sab-adapter-google-storage"; 112 | 113 | const a = new AdapterGoogleStorage(); 114 | const r = await a.listBuckets(); 115 | console.log(r); 116 | ``` 117 | 118 | ## API 119 | 120 | For a complete description of the Adapter API see [this part](https://github.com/tweedegolf/storage-abstraction/blob/master/README.md#adapter-api) documentation of the Storage Abstraction package readme. 121 | -------------------------------------------------------------------------------- /publish/AdapterGoogleCloud/changelog.md: -------------------------------------------------------------------------------- 1 | # 1.0.8 2 | 3 | - fixed gcs signed url generation 4 | 5 | # 1.0.7 6 | 7 | - updated @google-cloud/storage to 7.11.1 8 | - add support for optional key `useSignedUrl` in options object `getFileAsUrl` 9 | 10 | # 1.0.6 11 | 12 | - updated @google-cloud/storage to 7.9.0 13 | 14 | # 1.0.5 15 | 16 | - fix bug in selected bucket 17 | 18 | # 1.0.3 19 | 20 | - revert to v1 format of config URLs 21 | - re-implement storing the selected bucket in local state 22 | - `selectBucket` and `geSelectedBucket` 23 | - also implemented as getter 24 | `storage.bucketName = "the-buck";` and setter `console.log(storage.bucketName);` 25 | -------------------------------------------------------------------------------- /publish/AdapterGoogleCloud/package.json: -------------------------------------------------------------------------------- 1 | { 2 
| "name": "@tweedegolf/sab-adapter-google-cloud", 3 | "version": "1.0.8", 4 | "description": "Provides an abstraction layer for interacting with Google Cloud storage service.", 5 | "main": "dist/index/AdapterGoogleCloud.js", 6 | "types": "dist/index/AdapterGoogleCloud.d.ts", 7 | "homepage": "https://github.com/tweedegolf/storage-abstraction/", 8 | "repository": { 9 | "type": "git", 10 | "url": "git+https://github.com/tweedegolf/storage-abstraction.git" 11 | }, 12 | "dependencies": { 13 | "@google-cloud/storage": "^7.11.1" 14 | }, 15 | "scripts": {}, 16 | "keywords": [ 17 | "google cloud", 18 | "backblaze b2", 19 | "amazon s3", 20 | "storage", 21 | "storage abstraction", 22 | "cloud filesystem", 23 | "cloudflare r2", 24 | "cubbit", 25 | "minio", 26 | "azure" 27 | ], 28 | "author": "daniel@tweedegolf.nl", 29 | "license": "MIT", 30 | "publishConfig": { 31 | "access": "public" 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /publish/AdapterLocal/README.md: -------------------------------------------------------------------------------- 1 | # Local Storage Adapter 2 | 3 | An adapter that mimics a cloud storage service and uses your local file system to store files and folders. 4 | 5 | This adapter is one of the adapters that is meant to be used as a plugin of the [Storage Abstraction package](https://www.npmjs.com/package/@tweedegolf/storage-abstraction). However it can be used standalone as well, see [below](#standalone). 6 | 7 | The [API](https://github.com/tweedegolf/storage-abstraction/tree/master?tab=readme-ov-file#adapter-api) of the adapter abstracts away the differences between the API's of cloud storage services. The API only supports the basic, most commonly used cloud service operations such as creating a bucket, storing files and so on. 
8 | 9 | This adapter is meant to be used in development phase; you can develop and test your application using the local storage adapter and seamlessly switch to one of the available adapters that interact with a cloud storage service on your production server. 10 | 11 | However you can use the local storage adapter on your production server as well; files will be stored on the hard disk of your server. 12 | 13 | If you are new to the Storage Abstraction library you may want to read [this](https://github.com/tweedegolf/storage-abstraction/blob/master/README.md#how-it-works) first. 14 | 15 | ```typescript 16 | import { Storage, StorageType } from "@tweedegolf/storage-abstraction"; 17 | 18 | const configuration = { 19 | type: StorageType.LOCAL, 20 | directory: "path/to/directory", 21 | mode: 750, 22 | }; 23 | 24 | const storage = new Storage(configuration); 25 | 26 | const result = await storage.listBuckets(); 27 | 28 | console.log(result); 29 | ``` 30 | 31 | The Storage class is cloud service agnostic and doesn't know anything about the adapter it uses and adapters are completely interchangeable. It only expects the adapter to have implemented all methods of the `IAdapter` interface, see the [API](https://github.com/tweedegolf/storage-abstraction/blob/master/README.md#adapter-api). 32 | 33 | When you create a Storage instance it checks the mandatory `type` key in the configuration object and then loads the appropriate adapter module automatically from your node_modules folder using `require()`. For more information please read [this](https://github.com/tweedegolf/storage-abstraction/blob/master/README.md#register-your-adapter). 34 | 35 | ## Configuration 36 | 37 | The configuration object that you pass to the Storage constructor is forwarded to the constructor of the adapter. 38 | 39 | The Storage constructor is only interested in the `type` key of the configuration object, all other keys are necessary for configuring the adapter. 
40 | 41 | The Storage constructor expects the configuration to be of type `StorageAdapterConfig`. 42 | 43 | The adapter expects the configuration to be of type `AdapterConfig` or a type that extends this type. 44 | 45 | ```typescript 46 | export interface AdapterConfig { 47 | bucketName?: string; 48 | [id: string]: any; // any mandatory or optional key 49 | } 50 | 51 | export interface StorageAdapterConfig extends AdapterConfig { 52 | type: string; 53 | } 54 | ``` 55 | 56 | The type of the configuration object for this adapter: 57 | 58 | ```typescript 59 | export interface AdapterConfigLocal extends AdapterConfig { 60 | directory: string; 61 | mode?: number; 62 | } 63 | ``` 64 | 65 | ## Examples 66 | 67 | Examples with configuration object: 68 | 69 | ```typescript 70 | const s = new Storage({ 71 | type: StorageType.LOCAL, 72 | directory: "path/to/directory", 73 | mode: 750, 74 | }); 75 | 76 | const s = new Storage({ 77 | type: StorageType.LOCAL, 78 | directory: "path/to/directory", 79 | mode: 750, 80 | bucketName: "the-buck", 81 | }); 82 | ``` 83 | 84 | Same examples with configuration url: 85 | 86 | ```typescript 87 | const s = new Storage("local://path/to/directory?mode=750"); 88 | 89 | const s = new Storage("local://path/to/directory@the-buck?mode=750"); 90 | ``` 91 | 92 | For more information about configuration urls please read [this](https://github.com/tweedegolf/storage-abstraction/blob/master/README.md#configuration-url). 93 | 94 | ## Local storage 95 | 96 | With the optional key `mode` you can set the access rights when you create new local buckets. The default value is `0o777`, note that the actual value is dependent on the umask settings on your system (Linux and MacOS only). You can pass this value both in decimal and in octal format. E.g. `rwxrwxrwx` is `0o777` in octal format or `511` in decimal format. 97 | 98 | When you use a configuration URL you can only pass values as strings. 
String values without radix prefix will be interpreted as decimal numbers, so "777" is _not_ the same as "0o777" and yields `41411`. This is probably not what you want. The configuration parser handles this by returning the default value in case you pass a value over decimal `511`. 99 | 100 | Examples: 101 | 102 | ```typescript 103 | const config = { 104 | type: StorageType.LOCAL, 105 | directory: "path/to/folder", 106 | mode: 488, // decimal literal 107 | }; 108 | const s = new Storage(config); 109 | 110 | // or 111 | const url = "local://directory=path/to/folder&mode=488"; 112 | const s = new Storage(url); 113 | 114 | // and the same with octal values: 115 | 116 | const config = { 117 | type: StorageType.LOCAL, 118 | directory: "path/to/folder", 119 | mode: 0o750, // octal literal 120 | }; 121 | const s = new Storage(config); 122 | 123 | // or 124 | const url = "local://directory=path/to/folder&mode=0o750"; 125 | const s = new Storage(url); 126 | ``` 127 | 128 | Buckets will be created inside the directory `path/to/folder`, parent folders will be created if necessary. 129 | 130 | ## Standalone 131 | 132 | You can also use the adapter standalone, without the need to create a Storage instance: 133 | 134 | ```typescript 135 | import { AdapterLocal } from "@tweedegolf/sab-adapter-local"; 136 | 137 | const a = new AdapterLocal({ 138 | directory: "path/to/directory", 139 | }); 140 | const r = await a.listBuckets(); 141 | console.log(r); 142 | ``` 143 | 144 | ## API 145 | 146 | For a complete description of the Adapter API see [this part](https://github.com/tweedegolf/storage-abstraction/blob/master/README.md#adapter-api) documentation of the Storage Abstraction package readme. 
147 | -------------------------------------------------------------------------------- /publish/AdapterLocal/changelog.md: -------------------------------------------------------------------------------- 1 | # 1.0.7 2 | 3 | - add support for urls (paths) without containing directory (credits: https://github.com/tesirm99) 4 | 5 | # 1.0.6 6 | 7 | - small cosmetic code changes (no change in functionality) 8 | - updated glob to 10.3.12 9 | 10 | # 1.0.5 11 | 12 | - fix bug in selected bucket 13 | 14 | # 1.0.3 15 | 16 | - revert to v1 format of config URLs 17 | - re-implement storing the selected bucket in local state 18 | - `selectBucket` and `getSelectedBucket` 19 | - also implemented as setter 20 | `storage.bucketName = "the-buck";` and getter `console.log(storage.bucketName);` 21 | -------------------------------------------------------------------------------- /publish/AdapterLocal/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@tweedegolf/sab-adapter-local", 3 | "version": "1.0.7", 4 | "description": "Provides an abstraction layer for interacting with local storage on your local machine or on your server.", 5 | "main": "dist/index/AdapterLocal.js", 6 | "types": "dist/index/AdapterLocal.d.ts", 7 | "homepage": "https://github.com/tweedegolf/storage-abstraction/", 8 | "repository": "https://github.com/tweedegolf/storage-abstraction/", 9 | "dependencies": { 10 | "glob": "^10.3.12", 11 | "rimraf": "^5.0.5" 12 | }, 13 | "scripts": {}, 14 | "keywords": [ 15 | "local file system", 16 | "google cloud", 17 | "backblaze b2", 18 | "amazon s3", 19 | "storage", 20 | "storage abstraction", 21 | "cloud filesystem", 22 | "cloudflare r2", 23 | "cubbit", 24 | "minio", 25 | "azure" 26 | ], 27 | "author": "daniel@tweedegolf.nl", 28 | "license": "MIT", 29 | "publishConfig": { 30 | "access": "public" 31 | } 32 | } 33 | --------------------------------------------------------------------------------
/publish/AdapterMinio/README.md: -------------------------------------------------------------------------------- 1 | # MinIO Storage Adapter 2 | 3 | An adapter that provides an abstraction layer over the API of the MinIO cloud storage service. 4 | 5 | This adapter is one of the adapters that is meant to be used as a plugin of the [Storage Abstraction package](https://www.npmjs.com/package/@tweedegolf/storage-abstraction). However it can be used standalone as well, see [below](#standalone). 6 | 7 | The [API](https://github.com/tweedegolf/storage-abstraction/tree/master?tab=readme-ov-file#adapter-api) of the adapter abstracts away the differences between the API's of cloud storage services. The API only supports the basic, most commonly used cloud service operations such as creating a bucket, storing files and so on. 8 | 9 | It is also possible to access all the specific functionality of the cloud service API through the service client of the adapter, see [here](https://github.com/tweedegolf/storage-abstraction/tree/master?tab=readme-ov-file#getserviceclient). 10 | 11 | If you are new to the Storage Abstraction library you may want to read [this](https://github.com/tweedegolf/storage-abstraction/blob/master/README.md#how-it-works) first. 12 | 13 | ```typescript 14 | import { Storage, StorageType } from "@tweedegolf/storage-abstraction"; 15 | 16 | const configuration = { 17 | type: StorageType.MINIO, 18 | port: 9000, 19 | useSSL: true, 20 | region: "us-east-1", 21 | endPoint: "play.min.io", 22 | accessKey: "Q3AM3UQ867SPQQA43P2F", 23 | secretKey: "zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG", 24 | }; 25 | 26 | const storage = new Storage(configuration); 27 | 28 | const result = await storage.listBuckets(); 29 | 30 | console.log(result); 31 | ``` 32 | 33 | The Storage class is cloud service agnostic and doesn't know anything about the adapter it uses and adapters are completely interchangeable. 
It only expects the adapter to have implemented all methods of the `IAdapter` interface, see the [API](https://github.com/tweedegolf/storage-abstraction/blob/master/README.md#adapter-api). 34 | 35 | When you create a Storage instance it checks the mandatory `type` key in the configuration object and then loads the appropriate adapter module automatically from your node_modules folder using `require`. For more information please read [this](https://github.com/tweedegolf/storage-abstraction/blob/master/README.md#register-your-adapter). 36 | 37 | ## Configuration 38 | 39 | The configuration object that you pass to the Storage constructor is forwarded to the constructor of the adapter. 40 | 41 | The Storage constructor is only interested in the `type` key of the configuration object, all other keys are necessary for configuring the adapter. 42 | 43 | The Storage constructor expects the configuration to be of type `StorageAdapterConfig`. 44 | 45 | The adapter expects the configuration to be of type `AdapterConfig` or a type that extends this type. 
46 | 47 | ```typescript 48 | export interface AdapterConfig { 49 | bucketName?: string; 50 | [id: string]: any; // any mandatory or optional key 51 | } 52 | 53 | export interface StorageAdapterConfig extends AdapterConfig { 54 | type: string; 55 | } 56 | ``` 57 | 58 | The type of the configuration object for this adapter: 59 | 60 | ```typescript 61 | export interface AdapterConfigMinio extends AdapterConfig { 62 | endPoint: string; 63 | accessKey: string; 64 | secretKey: string; 65 | region?: string; 66 | useSSL?: boolean; 67 | port?: number; 68 | } 69 | ``` 70 | 71 | ## Examples 72 | 73 | Examples with configuration object: 74 | 75 | ```typescript 76 | const s = new Storage({ 77 | type: StorageType.MINIO, 78 | endPoint: "play.min.io", 79 | accessKey: "your-access-key", 80 | secretKey: "your-secret-key", 81 | }); 82 | 83 | const s = new Storage({ 84 | type: StorageType.MINIO, 85 | endPoint: "play.min.io", 86 | accessKey: "your-access-key", 87 | secretKey: "your-secret-key", 88 | bucketName: "the-buck", 89 | port: 9000, 90 | }); 91 | 92 | const s = new Storage({ 93 | type: StorageType.MINIO, 94 | endPoint: "play.min.io", 95 | accessKey: "your-access-key", 96 | secretKey: "your-secret-key", 97 | port: 9000, 98 | }); 99 | ``` 100 | 101 | Same examples with configuration url: 102 | 103 | ```typescript 104 | const s = new Storage( 105 | "minio://your-access-key:your-secret-key?endPoint=play.min.io" 106 | ); 107 | 108 | const s = new Storage( 109 | "minio://your-access-key:your-secret-key@the-buck:9000?endPoint=play.min.io" 110 | ); 111 | const s = new Storage( 112 | "minio://your-access-key:your-secret-key:9000?endPoint=play.min.io" 113 | ); 114 | ``` 115 | 116 | You can pass the port using a colon but you can also pass it as a query param, the following urls are equal because the `searchParams` object will be flattened into the config object: 117 | 118 | ```typescript 119 | const s = new Storage("minio://your-access-key:your-secret-key@the-buck:9000"); 120 | const p = { 121 
| protocol: "minio", 122 | username: "your-access-key", 123 | password: "your-secret-key", 124 | host: "the-buck", 125 | port: "9000", 126 | path: null, 127 | searchParams: null, 128 | }; 129 | 130 | // same as: 131 | const s = new Storage("minio://your-access-key:your-secret-key@the-buck?port=9000"); 132 | const p = { 133 | protocol: "minio", 134 | username: "your-access-key", 135 | password: "your-secret-key", 136 | host: "the-buck", 137 | port: null, 138 | path: null, 139 | searchParams: { port: "9000" }, 140 | }; 141 | 142 | // both are converted to: 143 | const c: AdapterConfigMinio = { 144 | type: "minio", 145 | accessKey: "your-access-key", 146 | secretKey: "your-secret-key", 147 | bucketName: "the-buck", 148 | port: 9000, 149 | }; 150 | ``` 151 | 152 | For more information about configuration urls please read [this](https://github.com/tweedegolf/storage-abstraction/blob/master/README.md#configuration-url). 153 | 154 | ## Standalone 155 | 156 | You can also use the adapter standalone, without the need to create a Storage instance: 157 | 158 | ```typescript 159 | import { AdapterMinio } from "@tweedegolf/sab-adapter-minio"; 160 | 161 | const adapter = new AdapterMinio({ 162 | endPoint: "play.min.io", 163 | accessKey: "Q3AM3UQ867SPQQA43P2F", 164 | secretKey: "zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG", 165 | }); 166 | 167 | const result = await adapter.listBuckets(); 168 | console.log(result); 169 | ``` 170 | 171 | ## API 172 | 173 | For a complete description of the Adapter API see [this part](https://github.com/tweedegolf/storage-abstraction/blob/master/README.md#adapter-api) documentation of the Storage Abstraction package readme. 
174 | -------------------------------------------------------------------------------- /publish/AdapterMinio/changelog.md: -------------------------------------------------------------------------------- 1 | # 1.0.10 2 | 3 | - update minio npm package to 8.0.0 4 | - add support for optional key `useSignedUrl` in options object `getFileAsUrl` 5 | 6 | # 1.0.9 7 | 8 | - fix bug in selected bucket 9 | 10 | # 1.0.7 11 | 12 | - revert to v1 format of config URLs 13 | - re-implement storing the selected bucket in local state 14 | - `selectBucket` and `getSelectedBucket` 15 | - also implemented as setter 16 | `storage.bucketName = "the-buck";` and getter `console.log(storage.bucketName);` 17 | -------------------------------------------------------------------------------- /publish/AdapterMinio/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@tweedegolf/sab-adapter-minio", 3 | "version": "1.0.10", 4 | "description": "Provides an abstraction layer for interacting with MinIO cloud storage service.", 5 | "main": "dist/index/AdapterMinio.js", 6 | "types": "dist/index/AdapterMinio.d.ts", 7 | "homepage": "https://github.com/tweedegolf/storage-abstraction/", 8 | "repository": "https://github.com/tweedegolf/storage-abstraction/", 9 | "dependencies": { 10 | "minio": "^8.0.0" 11 | }, 12 | "scripts": {}, 13 | "keywords": [ 14 | "minio", 15 | "google cloud", 16 | "backblaze b2", 17 | "amazon s3", 18 | "storage", 19 | "storage abstraction", 20 | "cloud filesystem", 21 | "cloudflare r2", 22 | "cubbit", 23 | "azure" 24 | ], 25 | "author": "daniel@tweedegolf.nl", 26 | "license": "MIT", 27 | "publishConfig": { 28 | "access": "public" 29 | } 30 | } -------------------------------------------------------------------------------- /publish/Storage/changelog.md: -------------------------------------------------------------------------------- 1 | # 2.1.6 2 | 3 | - updated Azure Blob adapter to 1.0.7 4 | 5 | # 2.1.5 6 | 7 | - 
updated Google Cloud adapter to 1.0.8 8 | 9 | # 2.1.4 10 | 11 | - updated dependencies 12 | - remarkable dependency autolinker set to "^4.0.0" 13 | 14 | # 2.1.3 15 | 16 | - updated dependencies 17 | 18 | # 2.1.2 19 | 20 | - updated dependencies 21 | - fixed test scripts 22 | - fix for type error in @types/glob 23 | 24 | # 2.1.1 25 | 26 | - fix issue with optional `bucketName` argument 27 | 28 | # 2.1.0 29 | 30 | - reverted to v1 of the configuration URLs (with some improvements) 31 | - re-implemented storing the selected bucket in local state 32 | 33 | # 2.0.0 34 | 35 | - Complete overhaul of the API. If you are using API 1.x please read the [migration document](migration_to_api2.1.md) 36 | -------------------------------------------------------------------------------- /publish/Storage/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@tweedegolf/storage-abstraction", 3 | "version": "2.1.6", 4 | "lockfileVersion": 3, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "@tweedegolf/storage-abstraction", 9 | "version": "2.1.6", 10 | "license": "MIT" 11 | } 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /publish/Storage/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@tweedegolf/storage-abstraction", 3 | "version": "2.1.6", 4 | "description": "Provides an abstraction layer for interacting with a storage; the storage can be local or any of the supported cloud storage services.", 5 | "src": "src/Storage.js", 6 | "main": "dist/index/Storage.js", 7 | "types": "dist/index/Storage.d.ts", 8 | "homepage": "https://github.com/tweedegolf/storage-abstraction/", 9 | "repository": "https://github.com/tweedegolf/storage-abstraction/", 10 | "scripts": {}, 11 | "keywords": [ 12 | "storage abstraction", 13 | "cloud storage", 14 | "storage", 15 | "cloud", 16 | "cloud filesystem", 17 | 
"google cloud", 18 | "amazon s3", 19 | "backblaze b2", 20 | "cloudflare r2", 21 | "cubbit", 22 | "minio", 23 | "azure" 24 | ], 25 | "author": "daniel@tweedegolf.nl", 26 | "license": "MIT", 27 | "publishConfig": { 28 | "access": "public" 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /src/AbstractAdapter.ts: -------------------------------------------------------------------------------- 1 | import { AdapterConfig, IAdapter, Options, StreamOptions } from "./types/general"; 2 | import { FileBufferParams, FilePathParams, FileStreamParams } from "./types/add_file_params"; 3 | import { 4 | ResultObject, 5 | ResultObjectBoolean, 6 | ResultObjectBuckets, 7 | ResultObjectFiles, 8 | ResultObjectNumber, 9 | ResultObjectStream, 10 | } from "./types/result"; 11 | 12 | export abstract class AbstractAdapter implements IAdapter { 13 | protected _type = "abstract-adapter"; 14 | protected _config: AdapterConfig | null; 15 | protected _configError: string | null = null; 16 | protected _bucketName: string = null; 17 | protected _client: any = null; // eslint-disable-line 18 | 19 | constructor(config: string | AdapterConfig) {} 20 | 21 | get type(): string { 22 | return this._type; 23 | } 24 | 25 | getType(): string { 26 | return this.type; 27 | } 28 | 29 | get config(): AdapterConfig { 30 | return this._config; 31 | } 32 | 33 | getConfig(): AdapterConfig { 34 | return this.config; 35 | } 36 | 37 | get configError(): string { 38 | return this._configError; 39 | } 40 | 41 | getConfigError(): string { 42 | return this.configError; 43 | } 44 | 45 | // eslint-disable-next-line 46 | get serviceClient(): any { 47 | return this._client; 48 | } 49 | 50 | // eslint-disable-next-line 51 | getServiceClient(): any { 52 | return this._client; 53 | } 54 | 55 | setSelectedBucket(bucketName: string | null) { 56 | this._bucketName = bucketName; 57 | } 58 | 59 | getSelectedBucket(): string | null { 60 | return this._bucketName; 61 | } 62 | 63 | 
set(bucketName: string | null) { 64 | this._bucketName = bucketName; 65 | } 66 | 67 | get bucketName(): string | null { 68 | return this._bucketName; 69 | } 70 | 71 | async addFileFromPath(params: FilePathParams): Promise { 72 | return await this.addFile(params); 73 | } 74 | 75 | async addFileFromBuffer(params: FileBufferParams): Promise { 76 | return await this.addFile(params); 77 | } 78 | 79 | async addFileFromStream(params: FileStreamParams): Promise { 80 | return await this.addFile(params); 81 | } 82 | 83 | protected _getFileAndBucket( 84 | arg1: string, 85 | arg2?: string 86 | ): { bucketName: string; fileName: string; error: string } { 87 | let bucketName: string = null; 88 | let fileName: string = null; 89 | if (typeof arg2 === "string") { 90 | bucketName = arg1; 91 | fileName = arg2; 92 | } else if (typeof arg2 === "undefined") { 93 | fileName = arg1; 94 | if (this._bucketName === null) { 95 | return { 96 | bucketName, 97 | fileName, 98 | error: "no bucket selected", 99 | }; 100 | } 101 | bucketName = this._bucketName; 102 | } 103 | return { 104 | bucketName, 105 | fileName, 106 | error: null, 107 | }; 108 | } 109 | 110 | // protected stubs 111 | 112 | protected abstract _clearBucket(name: string): Promise; 113 | 114 | protected abstract _deleteBucket(name: string): Promise; 115 | 116 | protected abstract _bucketExists(name: string): Promise; 117 | 118 | protected abstract _listFiles(bucketName: string, numFiles: number): Promise; 119 | 120 | protected abstract _sizeOf(bucketName: string, fileName: string): Promise; 121 | 122 | protected abstract _addFile( 123 | params: FilePathParams | FileBufferParams | FileStreamParams 124 | ): Promise; 125 | 126 | protected abstract _fileExists( 127 | bucketName: string, 128 | fileName: string 129 | ): Promise; 130 | 131 | protected abstract _getFileAsStream( 132 | bucketName: string, 133 | fileName: string, 134 | options: StreamOptions 135 | ): Promise; 136 | 137 | protected abstract _getFileAsURL( 138 | bucketName: 
string, 139 | fileName: string, 140 | options: Options // e.g. { expiresIn: 3600 } 141 | ): Promise; 142 | 143 | protected abstract _removeFile( 144 | bucketName: string, 145 | fileName: string, 146 | allVersions: boolean 147 | ): Promise; 148 | 149 | // public stubs 150 | 151 | abstract listBuckets(): Promise; 152 | 153 | abstract createBucket(name: string, options?: Options): Promise; 154 | 155 | // public 156 | 157 | public async clearBucket(name?: string): Promise { 158 | if (this._configError !== null) { 159 | return { value: null, error: this.configError }; 160 | } 161 | if (typeof name === "undefined") { 162 | if (this._bucketName === null) { 163 | return { 164 | value: null, 165 | error: "no bucket selected", 166 | }; 167 | } 168 | name = this._bucketName; 169 | } 170 | return this._clearBucket(name); 171 | } 172 | 173 | public async deleteBucket(name?: string): Promise { 174 | if (this._configError !== null) { 175 | return { value: null, error: this.configError }; 176 | } 177 | if (typeof name === "undefined") { 178 | if (this._bucketName === null) { 179 | return { 180 | value: null, 181 | error: "no bucket selected", 182 | }; 183 | } 184 | name = this._bucketName; 185 | } 186 | return this._deleteBucket(name); 187 | } 188 | 189 | public async bucketExists(name?: string): Promise { 190 | if (this._configError !== null) { 191 | return { value: null, error: this.configError }; 192 | } 193 | if (typeof name === "undefined") { 194 | if (this._bucketName === null) { 195 | return { 196 | value: null, 197 | error: "no bucket selected", 198 | }; 199 | } 200 | name = this._bucketName; 201 | } 202 | return this._bucketExists(name); 203 | } 204 | 205 | public async listFiles(bucketName: string, numFiles?: number): Promise; 206 | public async listFiles(numFiles?: number): Promise; 207 | public async listFiles(arg1?: number | string, arg2?: number): Promise { 208 | if (this._configError !== null) { 209 | return { value: null, error: this.configError }; 210 | } 211 | 
212 | let bucketName: string; 213 | let numFiles: number = 10000; 214 | 215 | if (typeof arg1 === "number") { 216 | if (this._bucketName === null) { 217 | return { 218 | value: null, 219 | error: "no bucket selected", 220 | }; 221 | } 222 | bucketName = this._bucketName; 223 | numFiles = arg1; 224 | } else if (typeof arg1 === "string") { 225 | bucketName = arg1; 226 | if (typeof arg2 === "number") { 227 | numFiles = arg2; 228 | } 229 | } else { 230 | if (this._bucketName === null) { 231 | return { 232 | value: null, 233 | error: "no bucket selected", 234 | }; 235 | } 236 | bucketName = this._bucketName; 237 | } 238 | return this._listFiles(bucketName, numFiles); 239 | } 240 | 241 | public async addFile( 242 | params: FilePathParams | FileBufferParams | FileStreamParams 243 | ): Promise { 244 | if (this._configError !== null) { 245 | return { value: null, error: this.configError }; 246 | } 247 | 248 | if (typeof params.bucketName === "undefined") { 249 | if (this._bucketName === null) { 250 | return { 251 | value: null, 252 | error: "no bucket selected", 253 | }; 254 | } 255 | params.bucketName = this._bucketName; 256 | } 257 | if (typeof params.options !== "object") { 258 | params.options = {}; 259 | } 260 | return this._addFile(params); 261 | } 262 | 263 | public async getFileAsStream( 264 | bucketName: string, 265 | fileName: string, 266 | options?: StreamOptions 267 | ): Promise; 268 | public async getFileAsStream( 269 | fileName: string, 270 | options?: StreamOptions 271 | ): Promise; 272 | public async getFileAsStream( 273 | arg1: string, 274 | arg2?: StreamOptions | string, 275 | arg3?: StreamOptions 276 | ): Promise { 277 | if (this.configError !== null) { 278 | return { error: this.configError, value: null }; 279 | } 280 | 281 | let bucketName: string; 282 | let fileName: string; 283 | let options: StreamOptions = {}; 284 | if (typeof arg1 === "string" && typeof arg2 === "string") { 285 | bucketName = arg1; 286 | fileName = arg2; 287 | if (typeof arg3 !== 
"undefined") { 288 | options = arg3; 289 | } 290 | } else if (typeof arg1 === "string" && typeof arg2 !== "string") { 291 | if (this._bucketName === null) { 292 | return { 293 | value: null, 294 | error: "no bucket selected", 295 | }; 296 | } 297 | bucketName = this._bucketName; 298 | fileName = arg1; 299 | if (typeof arg2 !== "undefined") { 300 | options = arg2; 301 | } 302 | } 303 | return this._getFileAsStream(bucketName, fileName, options); 304 | } 305 | 306 | public async getFileAsURL( 307 | bucketName: string, 308 | fileName: string, 309 | options?: Options // e.g. { expiresIn: 3600 } 310 | ): Promise; 311 | public async getFileAsURL(fileName: string, options?: Options): Promise; 312 | public async getFileAsURL( 313 | arg1: string, 314 | arg2?: Options | string, 315 | arg3?: Options 316 | ): Promise { 317 | if (this._configError !== null) { 318 | return { value: null, error: this.configError }; 319 | } 320 | 321 | let bucketName: string; 322 | let fileName: string; 323 | let options: Options = {}; 324 | if (typeof arg1 === "string" && typeof arg2 === "string") { 325 | bucketName = arg1; 326 | fileName = arg2; 327 | if (typeof arg3 !== "undefined") { 328 | options = arg3; 329 | } 330 | } else if (typeof arg1 === "string" && typeof arg2 !== "string") { 331 | if (this._bucketName === null) { 332 | return { 333 | value: null, 334 | error: "no bucket selected", 335 | }; 336 | } 337 | bucketName = this._bucketName; 338 | fileName = arg1; 339 | if (typeof arg2 !== "undefined") { 340 | options = arg2; 341 | } 342 | } 343 | return this._getFileAsURL(bucketName, fileName, options); 344 | } 345 | 346 | public async sizeOf(bucketName: string, fileName: string): Promise; 347 | public async sizeOf(fileName: string): Promise; 348 | public async sizeOf(arg1: string, arg2?: string): Promise { 349 | if (this.configError !== null) { 350 | return { value: null, error: this.configError }; 351 | } 352 | const { bucketName, fileName, error } = this._getFileAndBucket(arg1, arg2); 
353 | if (error !== null) { 354 | return { value: null, error }; 355 | } 356 | return this._sizeOf(bucketName, fileName); 357 | } 358 | 359 | public async fileExists(bucketName: string, fileName: string): Promise; 360 | public async fileExists(fileName: string): Promise; 361 | public async fileExists(arg1: string, arg2?: string): Promise { 362 | if (this.configError !== null) { 363 | return { value: null, error: this.configError }; 364 | } 365 | const { bucketName, fileName, error } = this._getFileAndBucket(arg1, arg2); 366 | if (error !== null) { 367 | return { value: null, error }; 368 | } 369 | return this._fileExists(bucketName, fileName); 370 | } 371 | 372 | public async removeFile( 373 | bucketName: string, 374 | fileName: string, 375 | allVersions?: boolean 376 | ): Promise; 377 | public async removeFile(fileName: string, allVersions?: boolean): Promise; 378 | public async removeFile( 379 | arg1: string, 380 | arg2?: boolean | string, 381 | arg3?: boolean 382 | ): Promise { 383 | if (this.configError !== null) { 384 | return { value: null, error: this.configError }; 385 | } 386 | 387 | let bucketName: string; 388 | let fileName: string; 389 | let allVersions: boolean = false; 390 | 391 | if (typeof arg1 === "string" && typeof arg2 === "string") { 392 | bucketName = arg1; 393 | fileName = arg2; 394 | if (typeof arg3 === "boolean") { 395 | allVersions = arg3; 396 | } 397 | } else if (typeof arg1 === "string" && typeof arg2 !== "string") { 398 | if (this._bucketName === null) { 399 | return { 400 | value: null, 401 | error: "No bucket selected", 402 | }; 403 | } 404 | bucketName = this._bucketName; 405 | fileName = arg1; 406 | if (typeof arg2 === "boolean") { 407 | allVersions = arg2; 408 | } 409 | } 410 | return this._removeFile(bucketName, fileName, allVersions); 411 | } 412 | } 413 | -------------------------------------------------------------------------------- /src/AdapterBackblazeB2F.ts: 
-------------------------------------------------------------------------------- 1 | import fs from "fs"; 2 | import B2 from "@nichoth/backblaze-b2"; 3 | 4 | import { Options, StreamOptions, StorageType, IAdapter } from "./types/general"; 5 | import { FileBufferParams, FilePathParams, FileStreamParams } from "./types/add_file_params"; 6 | import { 7 | ResultObject, 8 | ResultObjectBoolean, 9 | ResultObjectBuckets, 10 | ResultObjectFiles, 11 | ResultObjectNumber, 12 | ResultObjectStream, 13 | } from "./types/result"; 14 | import { AdapterConfigBackblazeB2 } from "./types/adapter_backblaze_b2"; 15 | 16 | import { parseQueryString, parseUrl, validateName } from "./util"; 17 | 18 | const getConfig = (): AdapterConfigBackblazeB2 => { 19 | return { 20 | type: StorageType.B2, 21 | applicationKeyId: "", 22 | applicationKey: "", 23 | }; 24 | }; 25 | 26 | const getType = (): string => "string"; 27 | 28 | const getConfigError = (): string => "string"; 29 | 30 | const getServiceClient = (): any => {}; // eslint-disable-line 31 | 32 | const createBucket = async (name: string, options?: Options): Promise => { 33 | const error = validateName(name); 34 | if (error !== null) { 35 | return { value: null, error }; 36 | } 37 | return { value: "ok", error: null }; 38 | }; 39 | 40 | const clearBucket = async (name?: string): Promise => { 41 | return { value: "ok", error: null }; 42 | }; 43 | 44 | const deleteBucket = async (name?: string): Promise => { 45 | return { value: "ok", error: null }; 46 | }; 47 | 48 | const listBuckets = async (): Promise => { 49 | return { value: ["string", "string"], error: null }; 50 | }; 51 | 52 | const addFileFromPath = async (params: FilePathParams): Promise => { 53 | return { value: "public url", error: null }; 54 | }; 55 | 56 | const addFileFromBuffer = async (params: FileBufferParams): Promise => { 57 | return { value: "public url", error: null }; 58 | }; 59 | 60 | const addFileFromStream = async (params: FileStreamParams): Promise => { 61 | return { 
value: "public url", error: null }; 62 | }; 63 | 64 | const addFile = async ( 65 | params: FilePathParams | FileBufferParams | FileStreamParams 66 | ): Promise => { 67 | return { value: "public url", error: null }; 68 | }; 69 | 70 | const getFileAsStream = async ( 71 | arg1: string, 72 | arg2: StreamOptions | string, 73 | arg3?: StreamOptions 74 | ): Promise => { 75 | return { value: fs.createReadStream(""), error: null }; 76 | }; 77 | 78 | const getFileAsURL = async ( 79 | arg1: string, 80 | arg2: Options | string, 81 | arg3?: Options 82 | ): Promise => { 83 | return { value: "url", error: null }; 84 | }; 85 | 86 | const removeFile = async ( 87 | arg1: string, 88 | arg2?: boolean | string, 89 | arg3?: boolean 90 | ): Promise => { 91 | return { value: "ok", error: null }; 92 | }; 93 | 94 | const listFiles = async (arg1: number | string, arg2?: number): Promise => { 95 | return { value: [["s", 0]], error: null }; 96 | }; 97 | 98 | const sizeOf = async (bucketName: string, fileName: string): Promise => { 99 | return { value: 42, error: null }; 100 | }; 101 | 102 | const fileExists = async (bucketName: string, fileName?: string): Promise => { 103 | return { value: true, error: null }; 104 | }; 105 | 106 | const bucketExists = async (bucketName: string): Promise => { 107 | return { value: true, error: null }; 108 | }; 109 | 110 | const adapter: IAdapter = { 111 | get type() { 112 | return getType(); 113 | }, 114 | get config() { 115 | return getConfig(); 116 | }, 117 | get configError() { 118 | return getConfigError(); 119 | }, 120 | get serviceClient() { 121 | return getServiceClient(); 122 | }, 123 | get bucketName() { 124 | return getServiceClient(); 125 | }, 126 | set(bucketName: string): void {}, 127 | setSelectedBucket(bucketName: string): void {}, 128 | getSelectedBucket(): string { 129 | return "bucketName"; 130 | }, 131 | getType, 132 | getConfigError, 133 | getConfig, 134 | getServiceClient, 135 | createBucket, 136 | clearBucket, 137 | deleteBucket, 138 | 
listBuckets, 139 | addFile, 140 | addFileFromPath, 141 | addFileFromBuffer, 142 | addFileFromStream, 143 | getFileAsStream, 144 | getFileAsURL, 145 | removeFile, 146 | listFiles, 147 | sizeOf, 148 | bucketExists, 149 | fileExists, 150 | }; 151 | 152 | const createAdapter = (config: string | AdapterConfigBackblazeB2): IAdapter => { 153 | console.log("create functional adapter"); 154 | const configError = null; 155 | let conf: AdapterConfigBackblazeB2; 156 | if (typeof config === "string") { 157 | conf = parseQueryString(config) as AdapterConfigBackblazeB2; 158 | } else { 159 | conf = { ...config }; 160 | } 161 | 162 | const state = { 163 | applicationKey: conf.applicationKey, 164 | applicationKeyId: conf.applicationKeyId, 165 | configError, 166 | }; 167 | 168 | return adapter; 169 | }; 170 | 171 | export { createAdapter }; 172 | -------------------------------------------------------------------------------- /src/AdapterGoogleCloud.ts: -------------------------------------------------------------------------------- 1 | import fs from "fs"; 2 | import { Readable } from "stream"; 3 | import { Storage as GoogleCloudStorage } from "@google-cloud/storage"; 4 | import { AbstractAdapter } from "./AbstractAdapter"; 5 | import { Options, StreamOptions, StorageType } from "./types/general"; 6 | import { FileBufferParams, FilePathParams, FileStreamParams } from "./types/add_file_params"; 7 | import { 8 | ResultObject, 9 | ResultObjectBoolean, 10 | ResultObjectBuckets, 11 | ResultObjectFiles, 12 | ResultObjectNumber, 13 | ResultObjectStream, 14 | } from "./types/result"; 15 | import { AdapterConfigGoogleCloud } from "./types/adapter_google_cloud"; 16 | import { parseUrl, validateName } from "./util"; 17 | 18 | export class AdapterGoogleCloud extends AbstractAdapter { 19 | protected _type = StorageType.GCS; 20 | protected _config: AdapterConfigGoogleCloud; 21 | protected _configError: string | null = null; 22 | protected _client: GoogleCloudStorage; 23 | 24 | 
constructor(config?: string | AdapterConfigGoogleCloud) { 25 | super(config); 26 | if (typeof config !== "string") { 27 | this._config = { ...config }; 28 | } else { 29 | const { value, error } = parseUrl(config); 30 | if (error !== null) { 31 | this._configError = `[configError] ${error}`; 32 | } else { 33 | const { protocol: type, username: accessKeyId, host: bucketName, searchParams } = value; 34 | if (searchParams !== null) { 35 | this._config = { type, ...searchParams }; 36 | } else { 37 | this._config = { type }; 38 | } 39 | if (accessKeyId !== null) { 40 | this._config.accessKeyId = accessKeyId; 41 | } 42 | if (bucketName !== null) { 43 | this._config.bucketName = bucketName; 44 | } 45 | } 46 | } 47 | 48 | try { 49 | this._client = new GoogleCloudStorage(this._config as object); 50 | } catch (e) { 51 | this._configError = `[configError] ${e.message}`; 52 | } 53 | 54 | if (typeof this.config.bucketName !== "undefined") { 55 | this._bucketName = this.config.bucketName; 56 | } 57 | } 58 | 59 | // protected, called by methods of public API via AbstractAdapter 60 | protected async _getFileAsURL( 61 | bucketName: string, 62 | fileName: string, 63 | options: Options 64 | ): Promise { 65 | try { 66 | const file = this._client.bucket(bucketName).file(fileName); 67 | if (options.useSignedUrl) { 68 | return { 69 | value: (await file.getSignedUrl({ 70 | action: "read", 71 | expires: options.expiresOn || 86400, 72 | }))[0], 73 | error: null, 74 | }; 75 | } else { 76 | return { value: file.publicUrl(), error: null }; 77 | } 78 | } catch (e) { 79 | return { value: null, error: e.message }; 80 | } 81 | } 82 | 83 | protected async _getFileAsStream( 84 | bucketName: string, 85 | fileName: string, 86 | options: StreamOptions 87 | ): Promise { 88 | try { 89 | const file = this._client.bucket(bucketName).file(fileName); 90 | const [exists] = await file.exists(); 91 | if (exists) { 92 | return { value: file.createReadStream(options as object), error: null }; 93 | } else { 94 | 
return { 95 | value: null, 96 | error: `File '${fileName}' does not exist in bucket '${bucketName}'.`, 97 | }; 98 | } 99 | } catch (e) { 100 | return { 101 | value: null, 102 | error: `File ${fileName} could not be retrieved from bucket ${bucketName}`, 103 | }; 104 | } 105 | } 106 | 107 | protected async _removeFile( 108 | bucketName: string, 109 | fileName: string, 110 | allVersions: boolean 111 | ): Promise { 112 | try { 113 | const file = this._client.bucket(bucketName).file(fileName); 114 | const [exists] = await file.exists(); 115 | if (exists) { 116 | await this._client.bucket(bucketName).file(fileName).delete(); 117 | return { value: "ok", error: null }; 118 | } 119 | // no fail if the file does not exist 120 | return { value: "ok", error: null }; 121 | } catch (e) { 122 | return { value: null, error: e.message }; 123 | } 124 | } 125 | 126 | protected async _addFile( 127 | params: FilePathParams | FileBufferParams | FileStreamParams 128 | ): Promise { 129 | try { 130 | let readStream: Readable; 131 | if (typeof (params as FilePathParams).origPath === "string") { 132 | const f = (params as FilePathParams).origPath; 133 | if (!fs.existsSync(f)) { 134 | return { value: null, error: `File with given path: ${f}, was not found` }; 135 | } 136 | readStream = fs.createReadStream(f); 137 | } else if (typeof (params as FileBufferParams).buffer !== "undefined") { 138 | readStream = new Readable(); 139 | readStream._read = (): void => {}; // _read is required but you can noop it 140 | readStream.push((params as FileBufferParams).buffer); 141 | readStream.push(null); 142 | } else if (typeof (params as FileStreamParams).stream !== "undefined") { 143 | readStream = (params as FileStreamParams).stream; 144 | } 145 | 146 | const file = this._client.bucket(params.bucketName).file(params.targetPath, params.options); 147 | const writeStream = file.createWriteStream(params.options); 148 | return new Promise((resolve) => { 149 | readStream 150 | .pipe(writeStream) 151 | 
.on("error", (e: Error) => { 152 | resolve({ value: null, error: e.message }); 153 | }) 154 | .on("finish", () => { 155 | resolve({ value: file.publicUrl(), error: null }); 156 | }); 157 | writeStream.on("error", (e: Error) => { 158 | resolve({ value: null, error: e.message }); 159 | }); 160 | }); 161 | } catch (e) { 162 | return { value: null, error: e.message }; 163 | } 164 | } 165 | 166 | protected async _listFiles(bucketName: string, numFiles: number): Promise { 167 | try { 168 | const data = await this._client.bucket(bucketName).getFiles(); 169 | return { 170 | value: data[0].map((f) => [f.name, parseInt(f.metadata.size as string, 10)]), 171 | error: null, 172 | }; 173 | } catch (e) { 174 | return { value: null, error: e.message }; 175 | } 176 | } 177 | 178 | protected async _sizeOf(bucketName: string, fileName: string): Promise { 179 | try { 180 | const file = this._client.bucket(bucketName).file(fileName); 181 | const [metadata] = await file.getMetadata(); 182 | return { value: parseInt(metadata.size as string, 10), error: null }; 183 | } catch (e) { 184 | return { value: null, error: e.message }; 185 | } 186 | } 187 | 188 | protected async _bucketExists(name: string): Promise { 189 | try { 190 | const data = await this._client.bucket(name).exists(); 191 | // console.log(data); 192 | return { value: data[0], error: null }; 193 | } catch (e) { 194 | return { value: null, error: e.message }; 195 | } 196 | } 197 | 198 | protected async _fileExists(bucketName: string, fileName: string): Promise { 199 | try { 200 | const data = await this._client.bucket(bucketName).file(fileName).exists(); 201 | // console.log(data); 202 | return { value: data[0], error: null }; 203 | } catch (e) { 204 | return { value: null, error: e.message }; 205 | } 206 | } 207 | 208 | protected async _deleteBucket(name: string): Promise { 209 | try { 210 | await this.clearBucket(name); 211 | } catch (e) { 212 | return { value: null, error: e.message }; 213 | } 214 | try { 215 | await 
this._client.bucket(name).delete(); 216 | return { value: "ok", error: null }; 217 | } catch (e) { 218 | return { value: null, error: e.message }; 219 | } 220 | } 221 | 222 | protected async _clearBucket(name: string): Promise { 223 | try { 224 | await this._client.bucket(name).deleteFiles({ force: true }); 225 | return { value: "ok", error: null }; 226 | } catch (e) { 227 | return { value: null, error: e.message }; 228 | } 229 | } 230 | 231 | //public 232 | 233 | get config(): AdapterConfigGoogleCloud { 234 | return this._config as AdapterConfigGoogleCloud; 235 | } 236 | 237 | getConfig(): AdapterConfigGoogleCloud { 238 | return this._config as AdapterConfigGoogleCloud; 239 | } 240 | 241 | get serviceClient(): GoogleCloudStorage { 242 | return this._client as GoogleCloudStorage; 243 | } 244 | 245 | getServiceClient(): GoogleCloudStorage { 246 | return this._client as GoogleCloudStorage; 247 | } 248 | 249 | public async listBuckets(): Promise { 250 | if (this.configError !== null) { 251 | return { value: null, error: this.configError }; 252 | } 253 | 254 | try { 255 | const [buckets] = await this._client.getBuckets(); 256 | return { value: buckets.map((b) => b.name), error: null }; 257 | } catch (e) { 258 | return { value: null, error: e.message }; 259 | } 260 | } 261 | 262 | public async createBucket(name: string, options: Options = {}): Promise { 263 | if (this.configError !== null) { 264 | return { value: null, error: this.configError }; 265 | } 266 | 267 | const error = validateName(name); 268 | if (error !== null) { 269 | return { value: null, error }; 270 | } 271 | 272 | try { 273 | const bucket = this._client.bucket(name, options); 274 | const [exists] = await bucket.exists(); 275 | if (exists) { 276 | return { value: null, error: "bucket exists" }; 277 | } 278 | } catch (e) { 279 | return { value: null, error: e.message }; 280 | } 281 | 282 | try { 283 | await this._client.createBucket(name, options); 284 | return { value: "ok", error: null }; 285 | } catch 
(e) { 286 | return { value: null, error: e.message }; 287 | } 288 | } 289 | } 290 | -------------------------------------------------------------------------------- /src/AdapterLocal.ts: -------------------------------------------------------------------------------- 1 | import fs from "fs"; 2 | import path from "path"; 3 | import { glob } from "glob"; 4 | import { rimraf } from "rimraf"; 5 | import { Readable } from "stream"; 6 | import { Options, StreamOptions, StorageType } from "./types/general"; 7 | import { FileBufferParams, FilePathParams, FileStreamParams } from "./types/add_file_params"; 8 | import { 9 | ResultObject, 10 | ResultObjectBoolean, 11 | ResultObjectBuckets, 12 | ResultObjectFiles, 13 | ResultObjectNumber, 14 | ResultObjectStream, 15 | ResultObjectStringArray, 16 | } from "./types/result"; 17 | import { AdapterConfigLocal } from "./types/adapter_local"; 18 | import { AbstractAdapter } from "./AbstractAdapter"; 19 | import { parseMode, parseUrl, validateName } from "./util"; 20 | 21 | export class AdapterLocal extends AbstractAdapter { 22 | protected _type = StorageType.LOCAL; 23 | protected _config: AdapterConfigLocal; 24 | protected _configError: string | null = null; 25 | 26 | constructor(config: AdapterConfigLocal) { 27 | super(config); 28 | if (typeof config !== "string") { 29 | this._config = { ...config }; 30 | } else { 31 | const { value, error } = parseUrl(config); 32 | if (error !== null) { 33 | this._configError = `[configError] ${error}`; 34 | } else { 35 | const { protocol: type, username: directory, host: bucketName, searchParams } = value; 36 | if (searchParams !== null) { 37 | this._config = { type, directory, ...searchParams }; 38 | } else { 39 | this._config = { type, directory }; 40 | } 41 | if (bucketName !== null) { 42 | this._config.bucketName = bucketName; 43 | } 44 | } 45 | // console.log(this._config); 46 | } 47 | 48 | if (typeof this.config.mode !== "undefined") { 49 | const { value, error } = 
parseMode(this.config.mode); 50 | if (error !== null) { 51 | this._configError = `[configError] ${error}`; 52 | } else { 53 | this._config.mode = value; 54 | } 55 | } else { 56 | this._config.mode = 0o777; 57 | } 58 | if (typeof this.config.directory !== "string") { 59 | this._configError = 60 | "[configError] You must specify a value for 'directory' for storage type 'local'"; 61 | } 62 | if (typeof this.config.bucketName !== "undefined") { 63 | this._bucketName = this.config.bucketName; 64 | } 65 | } 66 | 67 | /** 68 | * @param path 69 | * creates a directory if it doesn't exist 70 | */ 71 | private async createDirectory(path: string): Promise { 72 | try { 73 | await fs.promises.access(path, this._config.mode); 74 | // return { value: false, error: `directory ${path} already exists` }; 75 | return { value: true, error: null }; 76 | } catch (e) { 77 | try { 78 | await fs.promises.mkdir(path, { 79 | recursive: true, 80 | mode: this._config.mode, 81 | }); 82 | // const m = (await fs.promises.stat(path)).mode; 83 | // console.log(m, this.options.mode); 84 | return { value: true, error: null }; 85 | } catch (e) { 86 | return { value: null, error: e.message }; 87 | } 88 | } 89 | } 90 | 91 | private async globFiles( 92 | folder: string, 93 | pattern: string = "**/*.*" 94 | ): Promise { 95 | try { 96 | const files = await glob(`${folder}/${pattern}`, {}); 97 | return { value: files, error: null }; 98 | } catch (e) { 99 | return { value: null, error: e.message }; 100 | } 101 | } 102 | 103 | // protected, called by methods of public API via AbstractAdapter 104 | 105 | protected async _addFile( 106 | params: FilePathParams | FileBufferParams | FileStreamParams 107 | ): Promise { 108 | const dest = path.join(this._config.directory, params.bucketName, params.targetPath); 109 | 110 | const { error } = await this.createDirectory(path.dirname(dest)); 111 | if (error !== null) { 112 | return { value: null, error }; 113 | } 114 | 115 | try { 116 | let readStream: Readable; 117 | if 
(typeof (params as FilePathParams).origPath === "string") { 118 | await fs.promises.copyFile((params as FilePathParams).origPath, dest); 119 | return { value: dest, error: null }; 120 | } else if (typeof (params as FileBufferParams).buffer !== "undefined") { 121 | readStream = new Readable(); 122 | readStream._read = (): void => {}; // _read is required but you can noop it 123 | readStream.push((params as FileBufferParams).buffer); 124 | readStream.push(null); 125 | } else if (typeof (params as FileStreamParams).stream !== "undefined") { 126 | readStream = (params as FileStreamParams).stream; 127 | } 128 | // console.time(); 129 | const writeStream = fs.createWriteStream(dest, params.options); 130 | return new Promise((resolve) => { 131 | readStream 132 | .pipe(writeStream) 133 | .on("error", (e) => { 134 | resolve({ value: null, error: `[readStream error] ${e.message}` }); 135 | }) 136 | .on("finish", () => { 137 | resolve({ value: dest, error: null }); 138 | }); 139 | writeStream.on("error", (e) => { 140 | resolve({ value: null, error: `[writeStream error] ${e.message}` }); 141 | }); 142 | }); 143 | // console.timeEnd(); 144 | } catch (e) { 145 | return { value: null, error: e.message }; 146 | } 147 | } 148 | 149 | protected async _clearBucket(name: string): Promise { 150 | try { 151 | // remove all files and folders inside bucket directory, but not the directory itself 152 | const p = path.join(this._config.directory, name); 153 | await rimraf(p, { preserveRoot: false }); 154 | return { value: "ok", error: null }; 155 | } catch (e) { 156 | return { value: null, error: e.message }; 157 | } 158 | } 159 | 160 | protected async _deleteBucket(name: string): Promise { 161 | try { 162 | const p = path.join(this._config.directory, name); 163 | await rimraf(p); 164 | return { value: "ok", error: null }; 165 | } catch (e) { 166 | return { value: null, error: e.message }; 167 | } 168 | } 169 | 170 | protected async _listFiles(bucketName: string): Promise { 171 | try { 172 
| const storagePath = path.join(this._config.directory, bucketName); 173 | const { value: files, error } = await this.globFiles(storagePath); 174 | if (error !== null) { 175 | return { value: null, error }; 176 | } 177 | const result: [string, number][] = []; 178 | for (let i = 0; i < files.length; i += 1) { 179 | const f = files[i]; 180 | const stat = await fs.promises.stat(f); 181 | // result.push([path.basename(f), stat.size]) 182 | result.push([f.replace(`${storagePath}/`, ""), stat.size]); 183 | } 184 | return { value: result, error: null }; 185 | } catch (e) { 186 | return { value: null, error: e.message }; 187 | } 188 | } 189 | 190 | protected async _getFileAsStream( 191 | bucketName: string, 192 | fileName: string, 193 | options: StreamOptions 194 | ): Promise { 195 | try { 196 | const p = path.join(this._config.directory, bucketName, fileName); 197 | await fs.promises.access(p); 198 | const stream = fs.createReadStream(p, options); 199 | return { value: stream, error: null }; 200 | } catch (e) { 201 | return { value: null, error: e }; 202 | } 203 | } 204 | 205 | protected async _getFileAsURL( 206 | bucketName: string, 207 | fileName: string, 208 | options: Options 209 | ): Promise { 210 | try { 211 | const p = path.join(this._config.directory, bucketName, fileName); 212 | try { 213 | await fs.promises.access(p); 214 | } catch (e) { 215 | return { value: null, error: e }; 216 | } 217 | if (options.withoutDirectory) { 218 | return { value: path.join(bucketName, fileName), error: null }; 219 | } 220 | return { value: p, error: null }; 221 | } catch (e) { 222 | return { value: null, error: e.message }; 223 | } 224 | } 225 | 226 | protected async _removeFile( 227 | bucketName: string, 228 | fileName: string, 229 | allVersions: boolean 230 | ): Promise { 231 | try { 232 | const p = path.join(this._config.directory, bucketName, fileName); 233 | if (!fs.existsSync(p)) { 234 | return { value: "ok", error: null }; 235 | } 236 | await fs.promises.unlink(p); 237 | 
return { value: "ok", error: null }; 238 | } catch (e) { 239 | return { value: null, error: e.message }; 240 | } 241 | } 242 | 243 | protected async _sizeOf(bucketName: string, fileName: string): Promise { 244 | try { 245 | const p = path.join(this._config.directory, bucketName, fileName); 246 | const { size } = await fs.promises.stat(p); 247 | return { value: size, error: null }; 248 | } catch (e) { 249 | return { value: null, error: e.message }; 250 | } 251 | } 252 | 253 | protected async _bucketExists(bucketName: string): Promise { 254 | try { 255 | const p = path.join(this._config.directory, bucketName); 256 | // const r = fs.existsSync(p); 257 | const m = await fs.promises.stat(p); 258 | return { value: true, error: null }; 259 | } catch (e) { 260 | // console.log(e); 261 | // error only means that the directory does not exist 262 | return { value: false, error: null }; 263 | } 264 | } 265 | 266 | protected async _fileExists(bucketName: string, fileName: string): Promise { 267 | try { 268 | await fs.promises.access(path.join(this._config.directory, bucketName, fileName)); 269 | return { value: true, error: null }; 270 | } catch (e) { 271 | return { value: false, error: null }; 272 | } 273 | } 274 | 275 | // public 276 | 277 | get config(): AdapterConfigLocal { 278 | return this._config; 279 | } 280 | 281 | getConfig(): AdapterConfigLocal { 282 | return this.config; 283 | } 284 | 285 | public async listBuckets(): Promise { 286 | if (this.configError !== null) { 287 | return { value: null, error: this.configError }; 288 | } 289 | 290 | try { 291 | const dirents = await fs.promises.readdir(this._config.directory, { withFileTypes: true }); 292 | const files = dirents 293 | .filter((dirent) => dirent.isFile() === false) 294 | .map((dirent) => dirent.name); 295 | // const stats = await Promise.all( 296 | // files.map((f) => fs.promises.stat(path.join(this._config.directory, f))) 297 | // ); 298 | return { value: files, error: null }; 299 | } catch (e) { 300 | return 
{ value: null, error: e.message }; 301 | } 302 | } 303 | 304 | public async createBucket(name: string, options?: Options): Promise { 305 | if (this.configError !== null) { 306 | return { value: null, error: this.configError }; 307 | } 308 | 309 | const error = validateName(name); 310 | if (error !== null) { 311 | return { value: null, error }; 312 | } 313 | 314 | try { 315 | const p = path.join(this._config.directory, name); 316 | const created = await this.createDirectory(p); 317 | if (created.error === null) { 318 | return { value: "ok", error: null }; 319 | } else { 320 | return { value: null, error: `Could not create bucket ${p}` }; 321 | } 322 | } catch (e) { 323 | return { value: null, error: e.message }; 324 | } 325 | } 326 | } 327 | -------------------------------------------------------------------------------- /src/AdapterMinio.ts: -------------------------------------------------------------------------------- 1 | import fs from "fs"; 2 | import * as Minio from "minio"; 3 | import { Readable } from "stream"; 4 | import { AbstractAdapter } from "./AbstractAdapter"; 5 | import { Options, StreamOptions, StorageType } from "./types/general"; 6 | import { FileBufferParams, FilePathParams, FileStreamParams } from "./types/add_file_params"; 7 | import { 8 | ResultObject, 9 | ResultObjectBoolean, 10 | ResultObjectBuckets, 11 | ResultObjectFiles, 12 | ResultObjectNumber, 13 | ResultObjectStream, 14 | } from "./types/result"; 15 | import { AdapterConfigMinio } from "./types/adapter_minio"; 16 | import { parseUrl, validateName } from "./util"; 17 | 18 | export class AdapterMinio extends AbstractAdapter { 19 | protected _type = StorageType.MINIO; 20 | protected _client: Minio.Client; 21 | protected _configError: string | null = null; 22 | protected _config: AdapterConfigMinio; 23 | 24 | constructor(config: string | AdapterConfigMinio) { 25 | super(config); 26 | if (typeof config !== "string") { 27 | this._config = { ...config }; 28 | } else { 29 | const { value, error } =
parseUrl(config); 30 | if (error !== null) { 31 | this._configError = `[configError] ${error}`; 32 | } else { 33 | const { 34 | protocol: type, 35 | username: accessKey, 36 | password: secretKey, 37 | host: bucketName, 38 | searchParams, 39 | } = value; 40 | let endPoint: string; 41 | if (searchParams !== null) { 42 | ({ endPoint } = searchParams); 43 | delete searchParams.endPoint; 44 | this._config = { type, accessKey, secretKey, endPoint, ...searchParams }; 45 | } else { 46 | this._config = { type, accessKey, secretKey, endPoint }; 47 | } 48 | if (bucketName !== null) { 49 | this._config.bucketName = bucketName; 50 | } 51 | } 52 | // console.log(this._config); 53 | } 54 | 55 | if (!this.config.accessKey || !this.config.secretKey || !this.config.endPoint) { 56 | this._configError = 'Please provide a value for "accessKey", "secretKey and "endPoint"'; 57 | } else { 58 | const useSSL = this.config.useSSL; 59 | if (typeof useSSL === "undefined") { 60 | this.config.useSSL = true; 61 | } 62 | if (typeof useSSL === "string") { 63 | this.config.useSSL = useSSL === "true"; 64 | } 65 | const port = this.config.port; 66 | if (typeof port === "undefined") { 67 | this.config.port = this.config.useSSL ? 
443 : 80; 68 | } 69 | if (typeof port === "string") { 70 | this.config.port = parseInt(port, 10); 71 | } 72 | const region = this.config.region; 73 | if (typeof region !== "string") { 74 | this.config.region = "auto"; 75 | } 76 | // console.log(useSSL, port, region); 77 | const c = { 78 | endPoint: this.config.endPoint, 79 | region: this.config.region, 80 | port: this.config.port, 81 | useSSL: this.config.useSSL, 82 | accessKey: this.config.accessKey, 83 | secretKey: this.config.secretKey, 84 | }; 85 | // console.log(c); 86 | try { 87 | this._client = new Minio.Client(c); 88 | } catch (e) { 89 | this._configError = `[configError] ${e.message}`; 90 | } 91 | } 92 | 93 | if (typeof this.config.bucketName !== "undefined") { 94 | this._bucketName = this.config.bucketName; 95 | } 96 | } 97 | 98 | // protected, called by methods of public API via AbstractAdapter 99 | 100 | protected async _getFileAsStream( 101 | bucketName: string, 102 | fileName: string, 103 | options: StreamOptions 104 | ): Promise { 105 | const { start, end } = options; 106 | let offset: number; 107 | let length: number; 108 | if (typeof start !== "undefined") { 109 | offset = start; 110 | } else { 111 | offset = 0; 112 | } 113 | if (typeof end !== "undefined") { 114 | length = end - offset + 1; 115 | } 116 | 117 | try { 118 | let stream: Readable; 119 | if (typeof length !== "undefined") { 120 | stream = await this._client.getPartialObject(bucketName, fileName, offset, length); 121 | } else { 122 | stream = await this._client.getPartialObject(bucketName, fileName, offset); 123 | } 124 | return { value: stream, error: null }; 125 | } catch (e) { 126 | return { value: null, error: e.message }; 127 | } 128 | } 129 | 130 | protected async _removeFile( 131 | bucketName: string, 132 | fileName: string, 133 | allVersions: boolean 134 | ): Promise { 135 | try { 136 | await this._client.removeObject(bucketName, fileName); 137 | return { value: "ok", error: null }; 138 | } catch (e) { 139 | return { value: 
null, error: e.message }; 140 | } 141 | } 142 | 143 | protected async _clearBucket(name: string): Promise { 144 | const { value: files, error } = await this.listFiles(name); 145 | if (error !== null) { 146 | return { value: null, error }; 147 | } 148 | 149 | try { 150 | await this._client.removeObjects( 151 | name, 152 | files.map((t) => t[0]) 153 | ); 154 | return { value: "ok", error: null }; 155 | } catch (e) { 156 | return { value: null, error: e.message }; 157 | } 158 | } 159 | 160 | protected async _deleteBucket(name: string): Promise { 161 | try { 162 | await this.clearBucket(name); 163 | await this._client.removeBucket(name); 164 | return { value: "ok", error: null }; 165 | } catch (e) { 166 | return { value: null, error: e.message }; 167 | } 168 | } 169 | 170 | protected async _addFile( 171 | params: FilePathParams | FileBufferParams | FileStreamParams 172 | ): Promise { 173 | try { 174 | let fileData: Readable | Buffer; 175 | let size: number; 176 | if (typeof (params as FilePathParams).origPath !== "undefined") { 177 | const f = (params as FilePathParams).origPath; 178 | if (!fs.existsSync(f)) { 179 | return { value: null, error: `File with given path: ${f}, was not found` }; 180 | } 181 | try { 182 | const stats = await fs.promises.stat(f); 183 | size = stats.size; 184 | } catch (e) { 185 | return { value: null, error: `Cannot access file ${f} Error: ${e}` }; 186 | } 187 | fileData = fs.createReadStream(f); 188 | } else if (typeof (params as FileBufferParams).buffer !== "undefined") { 189 | fileData = (params as FileBufferParams).buffer; 190 | size = fileData.buffer.byteLength; 191 | } else if (typeof (params as FileStreamParams).stream !== "undefined") { 192 | fileData = (params as FileStreamParams).stream; 193 | } 194 | 195 | const { bucketName, targetPath } = params; 196 | const response = await this._client.putObject( 197 | bucketName, 198 | targetPath, 199 | fileData, 200 | size, 201 | params.options 202 | ); 203 | return 
this.getFileAsURL(params.bucketName, params.targetPath, params.options); 204 | } catch (e) { 205 | return { value: null, error: e.message }; 206 | } 207 | } 208 | 209 | protected async _getFileAsURL( 210 | bucketName: string, 211 | fileName: string, 212 | options: Options // e.g. { expiry: 3600 } 213 | ): Promise { 214 | const expiry = options.expiry || 7 * 24 * 60 * 60; 215 | let url = ""; 216 | try { 217 | if (options.useSignedUrl) { 218 | url = await this._client.presignedUrl("GET", bucketName, fileName, expiry, options); 219 | } else { 220 | url = `https://${this.config.endPoint}/`; 221 | if (this.config.port) { 222 | url += `:${this.config.port}`; 223 | } 224 | url += `/${bucketName}/${fileName}`; 225 | } 226 | 227 | return { value: url, error: null }; 228 | } catch (e) { 229 | return { value: null, error: e.message }; 230 | } 231 | } 232 | 233 | protected async _listFiles(bucketName: string, numFiles: number): Promise { 234 | try { 235 | const stream = this._client.listObjectsV2(bucketName, "", true); 236 | const files: Array<[string, number]> = []; 237 | const { error: streamError } = await new Promise((resolve) => { 238 | stream.on("data", function (obj) { 239 | files.push([obj.name, obj.size]); 240 | }); 241 | stream.on("end", function () { 242 | resolve({ value: files, error: null }); 243 | }); 244 | stream.on("error", function (e) { 245 | resolve({ value: null, error: e.message }); 246 | }); 247 | }); 248 | if (streamError !== null) { 249 | return { value: null, error: streamError }; 250 | } 251 | return { value: files, error: null }; 252 | } catch (e) { 253 | return { value: null, error: e.message }; 254 | } 255 | } 256 | 257 | protected async _sizeOf(bucketName: string, fileName: string): Promise { 258 | try { 259 | const stats = await this._client.statObject(bucketName, fileName); 260 | return { value: stats.size, error: null }; 261 | } catch (e) { 262 | return { value: null, error: e.message }; 263 | } 264 | } 265 | 266 | protected async 
_bucketExists(bucketName: string): Promise { 267 | try { 268 | const exists = await this._client.bucketExists(bucketName); 269 | return { value: exists, error: null }; 270 | } catch (e) { 271 | return { value: null, error: e.message }; 272 | } 273 | } 274 | 275 | protected async _fileExists(bucketName: string, fileName: string): Promise { 276 | try { 277 | const stats = await this._client.statObject(bucketName, fileName); 278 | return { value: stats !== null, error: null }; 279 | } catch (e) { 280 | return { value: false, error: null }; 281 | } 282 | } 283 | 284 | // public 285 | 286 | get config(): AdapterConfigMinio { 287 | return this._config as AdapterConfigMinio; 288 | } 289 | 290 | public getConfig(): AdapterConfigMinio { 291 | return this._config as AdapterConfigMinio; 292 | } 293 | 294 | get serviceClient(): Minio.Client { 295 | return this._client as Minio.Client; 296 | } 297 | 298 | public getServiceClient(): Minio.Client { 299 | return this._client as Minio.Client; 300 | } 301 | 302 | public async listBuckets(): Promise { 303 | if (this.configError !== null) { 304 | return { value: null, error: this.configError }; 305 | } 306 | 307 | try { 308 | const buckets = await this._client.listBuckets(); 309 | return { value: buckets.map((b) => b.name), error: null }; 310 | } catch (e) { 311 | return { value: null, error: e.message }; 312 | } 313 | } 314 | 315 | public async createBucket(name: string, options: Options = {}): Promise { 316 | if (this.configError !== null) { 317 | return { value: null, error: this.configError }; 318 | } 319 | 320 | const error = validateName(name); 321 | if (error !== null) { 322 | return { value: null, error }; 323 | } 324 | 325 | try { 326 | const e = await this._client.bucketExists(name); 327 | if (e) { 328 | return { value: null, error: "bucket exists" }; 329 | } 330 | } catch (e) { 331 | return { value: null, error: e.message }; 332 | } 333 | 334 | try { 335 | const { region } = this._config; 336 | await 
this._client.makeBucket(name, region, options as Minio.MakeBucketOpt); 337 | return { value: "ok", error: null }; 338 | } catch (e) { 339 | return { value: null, error: e.message }; 340 | } 341 | } 342 | } 343 | -------------------------------------------------------------------------------- /src/Storage.ts: -------------------------------------------------------------------------------- 1 | import path from "path"; 2 | import { 3 | IAdapter, 4 | AdapterConfig, 5 | Options, 6 | StreamOptions, 7 | StorageAdapterConfig, 8 | } from "./types/general"; 9 | import { FileBufferParams, FilePathParams, FileStreamParams } from "./types/add_file_params"; 10 | import { 11 | ResultObject, 12 | ResultObjectBoolean, 13 | ResultObjectBuckets, 14 | ResultObjectFiles, 15 | ResultObjectNumber, 16 | ResultObjectStream, 17 | } from "./types/result"; 18 | import { adapterClasses, adapterFunctions, getAvailableAdapters } from "./adapters"; 19 | 20 | const availableAdapters: string = getAvailableAdapters(); 21 | 22 | export class Storage implements IAdapter { 23 | private _adapter: IAdapter; 24 | // public ready: Promise; 25 | 26 | constructor(config: string | StorageAdapterConfig) { 27 | // this.ready = this.switchAdapter(config); 28 | this.switchAdapter(config); 29 | } 30 | 31 | // public async switchAdapter(config: string | AdapterConfig): Promise { 32 | public switchAdapter(config: string | StorageAdapterConfig): void { 33 | // console.log(config); 34 | // at this point we are only interested in the type of the config 35 | let type: string; 36 | if (typeof config === "string") { 37 | if (config.indexOf("://") !== -1) { 38 | type = config.substring(0, config.indexOf("://")); 39 | } else { 40 | // you can also pass a string that only contains the type, e.g. 
"gcs" 41 | type = config; 42 | } 43 | } else { 44 | type = config.type; 45 | } 46 | // console.log("type", type); 47 | // console.log("adapterClasses", adapterClasses); 48 | // console.log("class", adapterClasses[type], "function", adapterFunctions[type]); 49 | if (!adapterClasses[type] && !adapterFunctions[type]) { 50 | throw new Error(`unsupported storage type, must be one of ${availableAdapters}`); 51 | } 52 | if (adapterClasses[type]) { 53 | const adapterName = adapterClasses[type][0]; 54 | const adapterPath = adapterClasses[type][1]; 55 | // const AdapterClass = require(path.join(__dirname, name)); 56 | let AdapterClass: any; // eslint-disable-line 57 | try { 58 | AdapterClass = require(adapterPath)[adapterName]; 59 | // console.log(`using remote adapter class ${adapterName}`); 60 | } catch (e) { 61 | // console.log(`using local adapter class ${adapterName}`); 62 | // console.log(e.message); 63 | try { 64 | AdapterClass = require(path.join(__dirname, adapterName))[adapterName]; 65 | } catch (e) { 66 | throw new Error(e.message); 67 | } 68 | } 69 | this._adapter = new AdapterClass(config); 70 | // const AdapterClass = await import(`./${name}`); 71 | // this.adapter = new AdapterClass[name](args); 72 | } else if (adapterFunctions[type]) { 73 | const adapterName = adapterFunctions[type][0]; 74 | const adapterPath = adapterFunctions[type][1]; 75 | // const module = require(path.join(__dirname, name)); 76 | let module: any; // eslint-disable-line 77 | try { 78 | module = require(adapterPath); 79 | } catch (e) { 80 | module = require(path.join(__dirname, adapterName)); 81 | } 82 | this._adapter = module.createAdapter(config); 83 | } 84 | } 85 | 86 | // introspective adapter API 87 | 88 | setSelectedBucket(bucketName: string | null) { 89 | this.adapter.bucketName = bucketName; 90 | } 91 | 92 | getSelectedBucket(): string | null { 93 | return this.adapter.bucketName; 94 | } 95 | 96 | set(bucketName: string | null) { 97 | this.adapter.bucketName = bucketName; 98 |
} 99 | 100 | get bucketName(): string | null { 101 | return this.adapter.bucketName; 102 | } 103 | 104 | get adapter(): IAdapter { 105 | return this._adapter; 106 | } 107 | 108 | public getAdapter(): IAdapter { 109 | return this.adapter; 110 | } 111 | 112 | get type(): string { 113 | return this.adapter.type; 114 | } 115 | 116 | public getType(): string { 117 | return this.adapter.type; 118 | } 119 | 120 | get config(): AdapterConfig { 121 | return this.adapter.config; 122 | } 123 | 124 | public getConfig(): AdapterConfig { 125 | return this.adapter.config; 126 | } 127 | 128 | get configError(): string { 129 | return this.adapter.configError; 130 | } 131 | 132 | public getConfigError(): string { 133 | return this.adapter.configError; 134 | } 135 | //eslint-disable-next-line 136 | get serviceClient(): any { 137 | return this.adapter.serviceClient; 138 | } 139 | //eslint-disable-next-line 140 | public getServiceClient(): any { 141 | return this.adapter.serviceClient; 142 | } 143 | 144 | // public adapter API 145 | 146 | public async addFile( 147 | paramObject: FilePathParams | FileBufferParams | FileStreamParams 148 | ): Promise { 149 | return this.adapter.addFile(paramObject); 150 | } 151 | 152 | async addFileFromPath(params: FilePathParams): Promise { 153 | return this.adapter.addFileFromPath(params); 154 | } 155 | 156 | async addFileFromBuffer(params: FileBufferParams): Promise { 157 | return this.adapter.addFileFromBuffer(params); 158 | } 159 | 160 | async addFileFromStream(params: FileStreamParams): Promise { 161 | return this.adapter.addFileFromStream(params); 162 | } 163 | 164 | async createBucket(bucketName: string, options?: object): Promise { 165 | return this.adapter.createBucket(bucketName, options); 166 | } 167 | 168 | async clearBucket(bucketName?: string): Promise { 169 | return this.adapter.clearBucket(bucketName); 170 | } 171 | 172 | async deleteBucket(bucketName?: string): Promise { 173 | return this.adapter.deleteBucket(bucketName); 174 | } 175 | 
176 | async listBuckets(): Promise { 177 | return this.adapter.listBuckets(); 178 | } 179 | 180 | async getFileAsStream( 181 | bucketName: string, 182 | fileName: string, 183 | options?: StreamOptions 184 | ): Promise; 185 | async getFileAsStream(fileName: string, options?: StreamOptions): Promise; 186 | async getFileAsStream( 187 | arg1: string, 188 | arg2?: StreamOptions | string, 189 | arg3?: StreamOptions 190 | ): Promise { 191 | return this.adapter.getFileAsStream(arg1, arg2, arg3); 192 | } 193 | 194 | async getFileAsURL( 195 | bucketName: string, 196 | fileName: string, 197 | options?: Options 198 | ): Promise; 199 | async getFileAsURL(fileName: string, options?: Options): Promise; 200 | async getFileAsURL(arg1: string, arg2?: Options | string, arg3?: Options): Promise { 201 | return this.adapter.getFileAsURL(arg1, arg2, arg3); 202 | } 203 | 204 | async removeFile( 205 | bucketName: string, 206 | fileName: string, 207 | allVersions?: boolean 208 | ): Promise; 209 | async removeFile(fileName: string, allVersions?: boolean): Promise; 210 | async removeFile( 211 | arg1: string, 212 | arg2?: boolean | string, 213 | arg3: boolean = false 214 | ): Promise { 215 | return this.adapter.removeFile(arg1, arg2, arg3); 216 | } 217 | 218 | async listFiles(bucketName: string, numFiles?: number): Promise; 219 | async listFiles(numFiles?: number): Promise; 220 | async listFiles(arg1?: number | string, arg2?: number): Promise { 221 | return this.adapter.listFiles(arg1, arg2); 222 | } 223 | 224 | async sizeOf(bucketName: string, fileName: string): Promise; 225 | async sizeOf(fileName: string): Promise; 226 | async sizeOf(arg1: string, arg2?: string): Promise { 227 | return this.adapter.sizeOf(arg1, arg2); 228 | } 229 | 230 | async bucketExists(bucketName?: string): Promise { 231 | return this.adapter.bucketExists(bucketName); 232 | } 233 | 234 | async fileExists(bucketName: string, fileName: string): Promise; 235 | async fileExists(fileName: string): Promise; 236 | async 
fileExists(arg1: string, arg2?: string): Promise { 237 | return this.adapter.fileExists(arg1, arg2); 238 | } 239 | } 240 | -------------------------------------------------------------------------------- /src/adapters.ts: -------------------------------------------------------------------------------- 1 | // add new storage adapters here 2 | export const adapterClasses = { 3 | b2: ["AdapterBackblazeB2", "@tweedegolf/sab-adapter-backblaze-b2"], 4 | s3: ["AdapterAmazonS3", "@tweedegolf/sab-adapter-amazon-s3"], 5 | gcs: ["AdapterGoogleCloud", "@tweedegolf/sab-adapter-google-cloud"], 6 | local: ["AdapterLocal", "@tweedegolf/sab-adapter-local"], 7 | azure: ["AdapterAzureBlob", "@tweedegolf/sab-adapter-azure-blob"], 8 | minio: ["AdapterMinio", "@tweedegolf/sab-adapter-minio"], 9 | }; 10 | 11 | // or here for functional adapters 12 | export const adapterFunctions = { 13 | b2f: ["AdapterBackblazeB2F", "@tweedegolf/sab-adapter-backblaze-b2f"], 14 | }; 15 | 16 | export function getAvailableAdapters(): string { 17 | return Object.keys(adapterClasses) 18 | .concat(Object.keys(adapterFunctions)) 19 | .reduce((acc, val) => { 20 | if (acc.findIndex((v) => v === val) === -1) { 21 | acc.push(val); 22 | } 23 | return acc; 24 | }, []) 25 | .sort() 26 | .join(", "); 27 | } 28 | -------------------------------------------------------------------------------- /src/indexes/AdapterAmazonS3.ts: -------------------------------------------------------------------------------- 1 | export { AdapterAmazonS3 } from "../AdapterAmazonS3"; 2 | export * from "../types/adapter_amazon_s3"; 3 | export * from "../types/general"; 4 | export * from "../types/result"; 5 | export * from "../types/add_file_params"; 6 | -------------------------------------------------------------------------------- /src/indexes/AdapterAzureBlob.ts: -------------------------------------------------------------------------------- 1 | export { AdapterAzureBlob } from "../AdapterAzureBlob"; 2 | export * from
"../types/adapter_azure_blob"; 3 | export * from "../types/general"; 4 | export * from "../types/result"; 5 | export * from "../types/add_file_params"; 6 | -------------------------------------------------------------------------------- /src/indexes/AdapterBackblazeB2.ts: -------------------------------------------------------------------------------- 1 | export { AdapterBackblazeB2 } from "../AdapterBackblazeB2"; 2 | export * from "../types/adapter_backblaze_b2"; 3 | export * from "../types/general"; 4 | export * from "../types/result"; 5 | export * from "../types/add_file_params"; 6 | -------------------------------------------------------------------------------- /src/indexes/AdapterGoogleCloud.ts: -------------------------------------------------------------------------------- 1 | export { AdapterGoogleCloud } from "../AdapterGoogleCloud"; 2 | export * from "../types/adapter_google_cloud"; 3 | export * from "../types/general"; 4 | export * from "../types/result"; 5 | export * from "../types/add_file_params"; 6 | -------------------------------------------------------------------------------- /src/indexes/AdapterLocal.ts: -------------------------------------------------------------------------------- 1 | export { AdapterLocal } from "../AdapterLocal"; 2 | export * from "../types/adapter_local"; 3 | export * from "../types/general"; 4 | export * from "../types/result"; 5 | export * from "../types/add_file_params"; 6 | -------------------------------------------------------------------------------- /src/indexes/AdapterMinio.ts: -------------------------------------------------------------------------------- 1 | export { AdapterMinio } from "../AdapterMinio"; 2 | export * from "../types/adapter_minio"; 3 | export * from "../types/general"; 4 | export * from "../types/result"; 5 | export * from "../types/add_file_params"; 6 | -------------------------------------------------------------------------------- /src/indexes/Storage.ts: 
-------------------------------------------------------------------------------- 1 | export { Storage } from "../Storage"; 2 | export * from "../adapters"; 3 | export { StorageType, IAdapter, AdapterConfig, StorageAdapterConfig } from "../types/general"; 4 | -------------------------------------------------------------------------------- /src/template_class.ts: -------------------------------------------------------------------------------- 1 | import fs from "fs"; 2 | import { Readable } from "stream"; 3 | import { AbstractAdapter } from "./AbstractAdapter"; 4 | // Use ConfigTemplate as starting point for your own configuration object 5 | import { parseUrl, validateName } from "./util"; 6 | import { AdapterConfig, Options, StreamOptions } from "./types/general"; 7 | import { 8 | ResultObject, 9 | ResultObjectBoolean, 10 | ResultObjectBuckets, 11 | ResultObjectFiles, 12 | ResultObjectNumber, 13 | ResultObjectStream, 14 | } from "./types/result"; 15 | import { FilePathParams, FileBufferParams, FileStreamParams } from "./types/add_file_params"; 16 | 17 | // stub of a 3rd-party service client library to silence ts-lint 18 | // see the last line of the constructor below 19 | export const WrapperLibrary = function (config: string | AdapterConfig) {}; 20 | 21 | export class AdapterTemplate extends AbstractAdapter { 22 | // Your storage type, add this type to the enum StorageType in ./types.ts 23 | protected _type: string; 24 | 25 | // The instance of the service client if you use another library as a wrapper 26 | // around the API of your storage service, e.g. aws-sdk for Amazon S3. 27 | protected _client: any; // eslint-disable-line 28 | 29 | // The constructor can take both a string and an object. You should define an interface 30 | // for the object that extends IConfig, see the file ./types.ts. You can use any name 31 | // for your interface but it is convenient if you start your name with Config and then 32 | // the name of your storage service in camelcase, e.g. 
ConfigMyStorageType. 33 | constructor(config: string | AdapterConfig) { 34 | super(config); 35 | if (typeof config === "string") { 36 | // you may want to implement your own parser instead of the default query string parser 37 | const { value, error } = parseUrl(config); 38 | if (error) { 39 | this._configError = `[configError] ${error}`; 40 | } 41 | this._config = value; 42 | } else { 43 | this._config = { ...config }; 44 | } 45 | 46 | // you might need to perform some extra checks 47 | if (!this._config?.someKey || !this._config?.someOtherKey) { 48 | throw new Error( 49 | "You must specify a value for both 'someKey' and 'someOtherKey' for storage type 'yourtype'" 50 | ); 51 | } 52 | 53 | // If you are using a wrapper library, create an instance here 54 | this._client = new WrapperLibrary(this._config); 55 | } 56 | 57 | async _getFileAsStream( 58 | bucketName: string, 59 | fileName: string, 60 | options: StreamOptions 61 | ): Promise { 62 | // Return a stream that you've created somehow in your adapter or that you pipe 63 | // directly from your cloud storage. 64 | const r: Readable = fs.createReadStream("path"); 65 | return { value: r, error: null }; 66 | } 67 | 68 | async _getFileAsURL( 69 | bucketName: string, 70 | fileName: string, 71 | options: Options 72 | ): Promise { 73 | // Return a public url to the file. Note that you might need extra right to 74 | // be able to create a public url. In the options object you can pass extra 75 | // parameters such as an expiration date of the url 76 | return { value: "https://public.url.to.your.file", error: null }; 77 | } 78 | 79 | async _removeFile(bucketName: string, fileName: string): Promise { 80 | return { value: "ok", error: null }; 81 | } 82 | 83 | // In the super class AbstractStorage there are 3 API methods connected to `addFile()`: 84 | // The API methods are: 85 | // 1. addFileFromPath 86 | // 2. addFileFromBuffer 87 | // 3. 
addFileFromStream 88 | async _addFile( 89 | params: FilePathParams | FileBufferParams | FileStreamParams 90 | ): Promise { 91 | return { value: "ok", error: null }; 92 | } 93 | 94 | async addFileFromPath(params: FilePathParams): Promise { 95 | return await this.addFile(params); 96 | } 97 | 98 | async addFileFromBuffer(params: FileBufferParams): Promise { 99 | return await this.addFile(params); 100 | } 101 | 102 | async addFileFromStream(params: FileStreamParams): Promise { 103 | return await this.addFile(params); 104 | } 105 | 106 | async createBucket(bucketName: string): Promise { 107 | // Usually your cloud service will check if a valid bucket name has been provided. 108 | // However, in general `null`, `undefined` and empty strings are not allowed (nor desirable) 109 | // so you may want to perform this check locally using the validateName function in ./src/util.ts 110 | const error = validateName(bucketName); 111 | if (error !== null) { 112 | return { value: null, error }; 113 | } 114 | return { value: "ok", error: null }; 115 | } 116 | 117 | async _clearBucket(bucketName: string): Promise { 118 | return { value: "ok", error: null }; 119 | } 120 | 121 | async _deleteBucket(bucketName: string): Promise { 122 | return { value: "ok", error: null }; 123 | } 124 | 125 | // Returns the names of all existing buckets, should may be renamed to listBucketNames 126 | async listBuckets(): Promise { 127 | return { 128 | value: ["bucket1", "bucket2"], 129 | error: null, 130 | }; 131 | } 132 | 133 | async _listFiles(bucketName: string, numFiles: number = 1000): Promise { 134 | return { 135 | value: [ 136 | ["file.txt", 3000], 137 | ["image.jpg", 4567], 138 | ], 139 | error: null, 140 | }; 141 | } 142 | 143 | async _sizeOf(bucketName: string, fileName: string): Promise { 144 | return { value: 42, error: null }; 145 | } 146 | 147 | async _bucketExists(bucketName: string): Promise { 148 | return { value: true, error: null }; 149 | } 150 | 151 | async _fileExists(bucketName: 
string, fileName: string): Promise { 152 | return { value: true, error: null }; 153 | } 154 | } 155 | -------------------------------------------------------------------------------- /src/template_functional.ts: -------------------------------------------------------------------------------- 1 | import fs from "fs"; 2 | import { validateName } from "./util"; 3 | import { AdapterConfig, IAdapter, Options, StreamOptions } from "./types/general"; 4 | import { 5 | ResultObject, 6 | ResultObjectBoolean, 7 | ResultObjectBuckets, 8 | ResultObjectFiles, 9 | ResultObjectNumber, 10 | ResultObjectStream, 11 | } from "./types/result"; 12 | import { FilePathParams, FileBufferParams, FileStreamParams } from "./types/add_file_params"; 13 | 14 | /** 15 | * You can use this template as a starting point for your own functional adapter. You are 16 | * totally free in the way you setup your code as long as it exports a function with the name 17 | * `createAdapter()` that returns an object that should match the IStorage interface. 18 | * 19 | * You can use some util functions that are used in the class AbstractAdapter because they 20 | * are defined in a separate file ./src/util.ts 21 | */ 22 | 23 | const getType = (): string => "string"; 24 | const getConfig = (): AdapterConfig => ({}) as AdapterConfig; 25 | const getConfigError = (): string | null => null; 26 | const getServiceClient = (): any => "instance of your 3-rd party service client"; // eslint-disable-line 27 | const getSelectedBucket = (): string => "the-buck"; 28 | const setSelectedBucket = (bucketName: string): void => {}; 29 | 30 | const createBucket = async (name: string): Promise => { 31 | // Usually your cloud service will check if a valid bucket name has been provided. 
32 | // However, in general `null`, `undefined` and empty strings are not allowed (nor desirable) 33 | // so you may want to perform this check locally using the validateName function in ./src/util.ts 34 | const error = validateName(name); 35 | if (error !== null) { 36 | return { value: null, error }; 37 | } 38 | return { value: "ok", error: null }; 39 | }; 40 | 41 | const clearBucket = async (name: string): Promise => { 42 | return { value: "ok", error: null }; 43 | }; 44 | 45 | const deleteBucket = async (name: string): Promise => { 46 | return { value: "ok", error: null }; 47 | }; 48 | 49 | const listBuckets = async (): Promise => { 50 | return { value: ["bucket1", "bucket2"], error: null }; 51 | }; 52 | 53 | const addFileFromPath = async (params: FilePathParams): Promise => ({ 54 | value: "https://public.url", 55 | error: null, 56 | }); 57 | 58 | const addFileFromBuffer = async (params: FileBufferParams): Promise => ({ 59 | value: "https://public.url", 60 | error: null, 61 | }); 62 | 63 | const addFileFromStream = async (params: FileStreamParams): Promise => ({ 64 | value: "https://public.url", 65 | error: null, 66 | }); 67 | 68 | const addFile = async ( 69 | params: FileStreamParams | FileBufferParams | FileStreamParams 70 | ): Promise => ({ 71 | value: "https://public.url", 72 | error: null, 73 | }); 74 | 75 | /** 76 | * arg1: bucketName or fileName 77 | * arg2: fileName or options 78 | * arg3: options 79 | */ 80 | const getFileAsURL = async ( 81 | arg1: string, 82 | arg2?: string | Options, 83 | arg3?: Options 84 | ): Promise => { 85 | return { value: "https://public.url", error: null }; 86 | }; 87 | 88 | /** 89 | * arg1: bucketName or fileName 90 | * arg2: fileName or options 91 | * arg3: options 92 | */ 93 | const getFileAsStream = async ( 94 | arg1: string, 95 | arg2?: string | StreamOptions, 96 | arg3?: StreamOptions 97 | ): Promise => { 98 | return { value: fs.createReadStream(""), error: null }; 99 | }; 100 | 101 | /** 102 | * arg1: bucketName or 
fileName 103 | * arg2: fileName or options 104 | * arg3: options 105 | */ 106 | const removeFile = async ( 107 | arg1: string, 108 | arg2?: string | boolean, 109 | arg3?: boolean 110 | ): Promise => { 111 | return { value: "ok", error: null }; 112 | }; 113 | 114 | /** 115 | * arg1: bucketName or numFiles or undefined 116 | * arg2: numFiles or undefined 117 | */ 118 | const listFiles = async (arg1?: string | number, arg2?: number): Promise => { 119 | return { 120 | value: [ 121 | ["file.txt", 4000], 122 | ["img.jpg", 54000], 123 | ], 124 | error: null, 125 | }; 126 | }; 127 | 128 | /** 129 | * arg1: bucketName or fileName 130 | * arg2: fileName or undefined 131 | */ 132 | const sizeOf = async (arg1: string, arg2?: string): Promise => { 133 | return { value: 42, error: null }; 134 | }; 135 | 136 | const bucketExists = async (bucketName?: string): Promise => { 137 | return { value: true, error: null }; 138 | }; 139 | 140 | /** 141 | * arg1: bucketName or fileName 142 | * arg2: fileName or undefined 143 | */ 144 | const fileExists = async (arg1: string, arg2?: string): Promise => { 145 | return { value: true, error: null }; 146 | }; 147 | 148 | const adapter: IAdapter = { 149 | getType, 150 | get type() { 151 | return this.getType(); 152 | }, 153 | getConfig, 154 | get config() { 155 | return getConfig(); 156 | }, 157 | getConfigError, 158 | get configError() { 159 | return getConfigError(); 160 | }, 161 | getServiceClient, 162 | get serviceClient() { 163 | return getServiceClient(); 164 | }, 165 | getSelectedBucket, 166 | get bucketName() { 167 | return getSelectedBucket(); 168 | }, 169 | setSelectedBucket(bucketName: string): void { 170 | this.bucketName = bucketName; 171 | }, 172 | set(bucketName: string): void { 173 | setSelectedBucket(bucketName); 174 | }, 175 | createBucket, 176 | clearBucket, 177 | deleteBucket, 178 | listBuckets, 179 | addFile, 180 | addFileFromPath, 181 | addFileFromBuffer, 182 | addFileFromStream, 183 | getFileAsURL, 184 | getFileAsStream, 
185 | removeFile, 186 | listFiles, 187 | sizeOf, 188 | fileExists, 189 | bucketExists, 190 | }; 191 | 192 | const createAdapter = (config: AdapterConfig): IAdapter => { 193 | console.log("create adapter", config); 194 | return adapter; 195 | }; 196 | 197 | export { createAdapter }; 198 | -------------------------------------------------------------------------------- /src/types/adapter_amazon_s3.ts: -------------------------------------------------------------------------------- 1 | import { AdapterConfig } from "./general"; 2 | 3 | export enum S3Compatible { 4 | Amazon, 5 | R2, 6 | Backblaze, 7 | Cubbit, 8 | } 9 | 10 | export interface AdapterConfigAmazonS3 extends AdapterConfig { 11 | region?: string; 12 | endpoint?: string; 13 | credentials?: { 14 | accessKeyId?: string; 15 | secretAccessKey?: string; 16 | }; 17 | accessKeyId?: string; 18 | secretAccessKey?: string; 19 | } 20 | -------------------------------------------------------------------------------- /src/types/adapter_azure_blob.ts: -------------------------------------------------------------------------------- 1 | import { AdapterConfig } from "./general"; 2 | 3 | export interface AdapterConfigAzureBlob extends AdapterConfig { 4 | accountName?: string; 5 | connectionString?: string; 6 | accountKey?: string; 7 | sasToken?: string; 8 | blobDomain?: string; 9 | } 10 | -------------------------------------------------------------------------------- /src/types/adapter_backblaze_b2.ts: -------------------------------------------------------------------------------- 1 | import { AdapterConfig } from "./general"; 2 | 3 | export interface AdapterConfigBackblazeB2 extends AdapterConfig { 4 | applicationKey: string; 5 | applicationKeyId: string; 6 | } 7 | 8 | export type BackblazeAxiosResponse = { 9 | response: { 10 | data: { 11 | code: string; 12 | message: string; 13 | status: number; 14 | allowed?: { 15 | capabilities: Array; 16 | }; 17 | buckets?: Array; // eslint-disable-line 18 | }; 19 | }; 20 | }; 21 | 22 
| export type BackblazeB2Bucket = { 23 | accountId: "string"; 24 | bucketId: "string"; 25 | bucketInfo: "object"; 26 | bucketName: "string"; 27 | bucketType: "string"; 28 | corsRules: string[]; 29 | lifecycleRules: string[]; 30 | options: string[]; 31 | revision: number; 32 | }; 33 | 34 | export type BackblazeB2File = { 35 | accountId: string; 36 | action: string; 37 | bucketId: string; 38 | contentLength: number; 39 | contentMd5: string; 40 | contentSha1: string; 41 | contentType: string; 42 | fileId: string; 43 | fileInfo: [object]; 44 | fileName: string; 45 | uploadTimestamp: number; 46 | }; 47 | 48 | export type BucketB2 = { 49 | id: string; 50 | name: string; 51 | }; 52 | 53 | export type FileB2 = { 54 | id: string; 55 | name: string; 56 | contentType: string; 57 | contentLength: number; 58 | }; 59 | 60 | export type BackblazeBucketOptions = { 61 | bucketType: string; 62 | }; 63 | 64 | export type ResultObjectBucketsB2 = { 65 | error: string | null; 66 | value: Array | null; 67 | }; 68 | 69 | export type ResultObjectBucketB2 = { 70 | error: string | null; 71 | value: BucketB2 | null; 72 | }; 73 | 74 | export type ResultObjectFileB2 = { 75 | error: string | null; 76 | value: FileB2 | null; 77 | }; 78 | 79 | export type ResultObjectFilesB2 = { 80 | error: string | null; 81 | value: Array | null; 82 | }; 83 | -------------------------------------------------------------------------------- /src/types/adapter_google_cloud.ts: -------------------------------------------------------------------------------- 1 | import { AdapterConfig } from "./general"; 2 | 3 | export interface AdapterConfigGoogleCloud extends AdapterConfig { 4 | keyFilename?: string; 5 | } 6 | -------------------------------------------------------------------------------- /src/types/adapter_local.ts: -------------------------------------------------------------------------------- 1 | import { AdapterConfig } from "./general"; 2 | 3 | export interface AdapterConfigLocal extends AdapterConfig { 4 | 
directory: string; 5 | mode?: number; 6 | } 7 | -------------------------------------------------------------------------------- /src/types/adapter_minio.ts: -------------------------------------------------------------------------------- 1 | import { AdapterConfig } from "./general"; 2 | 3 | export interface AdapterConfigMinio extends AdapterConfig { 4 | endPoint: string; 5 | accessKey: string; 6 | secretKey: string; 7 | region?: string; 8 | useSSL?: boolean; 9 | port?: number; 10 | } 11 | -------------------------------------------------------------------------------- /src/types/add_file_params.ts: -------------------------------------------------------------------------------- 1 | import { Readable } from "stream"; 2 | import { Options } from "./general"; 3 | 4 | /** 5 | * @param bucketName name of the bucket you want to use 6 | * @param origPath path of the file to be copied 7 | * @param targetPath path to copy the file to, folders will be created automatically 8 | * @param options additional option such as access rights 9 | **/ 10 | export type FilePathParams = { 11 | bucketName?: string; 12 | origPath: string; 13 | targetPath: string; 14 | options?: Options; 15 | }; 16 | 17 | /** 18 | * @param bucketName name of the bucket you want to use 19 | * @param buffer file as buffer 20 | * @param targetPath path to the file to save the buffer to, folders will be created automatically 21 | * @param options additional option such as access rights 22 | **/ 23 | export type FileBufferParams = { 24 | bucketName?: string; 25 | buffer: Buffer; 26 | targetPath: string; 27 | options?: Options; 28 | }; 29 | 30 | /** 31 | * @param bucketName name of the bucket you want to use 32 | * @param stream a read stream 33 | * @param targetPath path to the file to save the stream to, folders will be created automatically 34 | * @param options additional option such as access rights 35 | **/ 36 | export type FileStreamParams = { 37 | bucketName?: string; 38 | stream: Readable; 39 | 
targetPath: string; 40 | options?: Options; 41 | }; 42 | -------------------------------------------------------------------------------- /src/types/general.ts: -------------------------------------------------------------------------------- 1 | import { 2 | ResultObject, 3 | ResultObjectBoolean, 4 | ResultObjectBuckets, 5 | ResultObjectFiles, 6 | ResultObjectNumber, 7 | ResultObjectStream, 8 | } from "./result"; 9 | import { FileBufferParams, FilePathParams, FileStreamParams } from "./add_file_params"; 10 | 11 | // add your custom type here 12 | export enum StorageType { 13 | LOCAL = "local", 14 | GCS = "gcs", // Google Cloud Storage 15 | GS = "gs", // Google Cloud Storage 16 | S3 = "s3", // Amazon S3 17 | B2 = "b2", // BackBlaze B2 18 | AZURE = "azure", // Azure Storage Blob 19 | MINIO = "minio", 20 | } 21 | 22 | export interface AdapterConfig { 23 | bucketName?: string; 24 | [id: string]: any; // eslint-disable-line 25 | } 26 | 27 | export interface StorageAdapterConfig extends AdapterConfig { 28 | type: string; 29 | } 30 | export interface Options { 31 | [id: string]: any; // eslint-disable-line 32 | } 33 | 34 | export interface StreamOptions extends Options { 35 | start?: number; 36 | end?: number; 37 | } 38 | 39 | export interface IAdapter { 40 | getServiceClient(): any; // eslint-disable-line 41 | 42 | serviceClient: any; // eslint-disable-line 43 | 44 | /** 45 | * Returns the storage type, e.g. 'gcs', 'b2', 'local' etc. 46 | */ 47 | getType(): string; 48 | 49 | /** 50 | * Same as `getType` but implemented as getter 51 | * @returns adapter type, e.g. 'gcs', 'b2', 'local' etc. 52 | */ 53 | type: string; 54 | 55 | /** 56 | * Returns configuration settings that you've provided when instantiating as an object. 57 | * Use this only for debugging and with great care as it may expose sensitive information. 58 | * 59 | * The object contains the key `bucketName` which is the initial value that you've set during 60 | * initialization. 
61 | * 62 | * The object also contains the key `options` which are only the options passed in during 63 | * initialization; if you want all options, including the default options use `getOptions()` 64 | * 65 | * @returns adapter configuration as object 66 | */ 67 | getConfig(): AdapterConfig; 68 | 69 | /** 70 | * Same as `getConfiguration` but implemented as getter 71 | * @returns adapter configuration as object 72 | */ 73 | config: AdapterConfig; 74 | 75 | getConfigError(): null | string; 76 | 77 | configError: null | string; 78 | 79 | getSelectedBucket(): null | string; 80 | 81 | setSelectedBucket(bucketName: null | string): void; 82 | 83 | bucketName: null | string; 84 | 85 | set(bucketName: null | string): void; 86 | 87 | /** 88 | * Returns an object that contains both the options passed with the configuration and the 89 | * default options of the storage type if not overruled by the options you passed in. 90 | */ 91 | // getOptions(): JSON; 92 | 93 | /** 94 | * @param bucketName name of the bucket to create, returns "ok" once the bucket has been created but 95 | * also when the bucket already exists. 96 | * @param options: additional options for creating a bucket such as access rights 97 | * @returns string or error 98 | */ 99 | createBucket(bucketName: string, options?: Options): Promise; 100 | 101 | /** 102 | * @param bucketName: deletes all file in the bucket. 
103 | */ 104 | clearBucket(bucketName?: string): Promise; 105 | 106 | /** 107 | * deletes the bucket with the provided name 108 | * @param {string} bucketName name of the bucket 109 | * @returns {Promise} a promise that always resolves in a ResultObject: 110 | * ```typescript 111 | * { error: null | string, value: null | string } 112 | * ``` 113 | */ 114 | deleteBucket(bucketName?: string): Promise; 115 | 116 | /** 117 | * @returns an array of the names of the buckets in this storage 118 | */ 119 | listBuckets(): Promise; 120 | 121 | /** 122 | * @param {filePathParams | FileBufferParams | FileStreamParams} params related to the file to be added 123 | * @returns the public url to the file 124 | * Called internally by addFileFromPath, addFileFromBuffer and addFileFromReadable 125 | */ 126 | addFile(params: FilePathParams | FileBufferParams | FileStreamParams): Promise; 127 | 128 | /** 129 | * @param {FilePathParams} params object that has the following keys: 130 | * ```typescript 131 | * { 132 | * bucketName: string 133 | * origPath: string //path to the file that you want to add, e.g. 
/home/user/Pictures/image1.jpg 134 | * targetPath: string //path on the storage, you can add a path or only provide name of the file 135 | * options?: object 136 | * } 137 | * ``` 138 | * @returns {ResultObject} a promise that always resolves in a ResultObject: 139 | * ```typescript 140 | * { 141 | * value: string | null 142 | * error: string | null 143 | * } 144 | * ``` 145 | */ 146 | addFileFromPath(params: FilePathParams): Promise; 147 | 148 | /** 149 | * @param {FileBufferParams} params 150 | * @property {string} FilePath.bucketName 151 | * @property {Buffer} FilePath.buffer - buffer 152 | * @property {string} FilePath.targetPath - path on the storage, you can add a path or only provide name of the file 153 | * @property {object} FilePath.options 154 | */ 155 | addFileFromBuffer(params: FileBufferParams): Promise; 156 | 157 | /** 158 | * @param {FileStreamParams} params object that contains the following keys: 159 | * ```typescript 160 | * { 161 | * bucketName: string 162 | * readable: Readable // stream from the local file, e.g. fs.createReadStream(path) 163 | * targetPath: string // path on the storage, you can add a path or only provide name of the file 164 | * options?: object 165 | * } 166 | * ``` 167 | * @returns {ResultObject} a promise that always resolves in a ResultObject 168 | * ```typescript 169 | * { 170 | * value: string | null // if success value is the public url to the file 171 | * error: string | null // if fails error is the error message 172 | * } 173 | * ``` 174 | */ 175 | addFileFromStream(params: FileStreamParams): Promise; 176 | 177 | /** 178 | * @param bucketName name of the bucket where the file is stored 179 | * @param fileName name of the file to be returned as a readable stream 180 | * @param start? the byte of the file where the stream starts (default: 0) 181 | * @param end? 
the byte in the file where the stream ends (default: last byte of file) 182 | */ 183 | getFileAsStream( 184 | bucketName: string, 185 | fileName: string, 186 | options?: StreamOptions 187 | ): Promise; 188 | getFileAsStream(fileName: string, options?: StreamOptions): Promise; 189 | getFileAsStream( 190 | arg1: string, 191 | arg2?: StreamOptions | string, 192 | arg3?: StreamOptions 193 | ): Promise; 194 | 195 | /** 196 | * @param bucketName name of the bucket where the file is stored 197 | * @param fileName name of the file 198 | */ 199 | getFileAsURL(bucketName: string, fileName: string, options?: Options): Promise; 200 | getFileAsURL(fileName: string, options?: Options): Promise; 201 | getFileAsURL(arg1: string, arg2?: Options | string, arg3?: Options): Promise; 202 | 203 | /** 204 | * @param {string} bucketName name of the bucket where the file is stored 205 | * @param {string} fileName name of the file to be removed 206 | * @param {boolean} [allVersions = true] in case there are more versions of this file you can choose to remove 207 | * all of them in one go or delete only the latest version (only if applicable such as with Backblaze B2 and S3 208 | * when you've enabled versioning) 209 | */ 210 | removeFile(bucketName: string, fileName: string, allVersions?: boolean): Promise; 211 | removeFile(fileName: string, allVersions?: boolean): Promise; 212 | removeFile(arg1: string, arg2?: boolean | string, arg3?: boolean): Promise; 213 | 214 | /** 215 | * @param bucketName name of the bucket 216 | * @param numFiles optional, only works for S3 compatible storages: the maximal number of files to retrieve 217 | * @returns an array of tuples containing the file path and the file size of all files in the bucket. 
218 | */ 219 | listFiles(numFiles?: number): Promise; 220 | listFiles(bucketName: string, numFiles?: number): Promise; 221 | listFiles(arg1?: number | string, arg2?: number): Promise; 222 | 223 | /** 224 | * @param bucketName name of the bucket where the file is stored 225 | * @param fileName name of the file 226 | * @returns the size of the file in bytes 227 | */ 228 | sizeOf(bucketName: string, fileName: string): Promise; 229 | 230 | /** 231 | * @param bucketName name of the bucket 232 | * @returns boolean 233 | */ 234 | bucketExists(bucketName?: string): Promise; 235 | 236 | /** 237 | * @param bucketName name of the bucket where the file is stored 238 | * @param fileName name of the file 239 | */ 240 | fileExists(bucketName: string, fileName: string): Promise; 241 | } 242 | -------------------------------------------------------------------------------- /src/types/result.ts: -------------------------------------------------------------------------------- 1 | import { Readable } from "stream"; 2 | 3 | export type ParseUrlResult = { 4 | error: string | null; 5 | value: { 6 | protocol: string; 7 | username: string; 8 | password: string; 9 | host: string; 10 | port: string; 11 | path: string; 12 | searchParams: { [key: string]: string }; 13 | }; 14 | }; 15 | 16 | export interface ResultObject { 17 | error: string | null; 18 | value: string | null; 19 | } 20 | 21 | export type ResultObjectNumber = { 22 | error: string | null; 23 | value: number | null; 24 | }; 25 | 26 | export type ResultObjectBoolean = { 27 | error: string | null; 28 | value: boolean | null; 29 | }; 30 | 31 | export type ResultObjectFiles = { 32 | error: string | null; 33 | value: Array<[string, number]> | null; // file name, file size 34 | }; 35 | 36 | export type ResultObjectBuckets = { 37 | error: string | null; 38 | value: Array | null; 39 | }; 40 | 41 | export type ResultObjectStringArray = { 42 | error: string | null; 43 | value: Array | null; 44 | }; 45 | 46 | export type ResultObjectKeyValue 
= { 47 | error: string | null; 48 | value: { [key: string]: any } | null; // eslint-disable-line 49 | }; 50 | 51 | export type ResultObjectStream = { 52 | error: string | null; 53 | value: Readable | null; 54 | }; 55 | -------------------------------------------------------------------------------- /src/util.ts: -------------------------------------------------------------------------------- 1 | import { URL } from "url"; 2 | import { StorageType } from "./types/general"; 3 | import { ParseUrlResult, ResultObjectNumber } from "./types/result"; 4 | 5 | /** 6 | * @param {string} url 7 | * strips off the querystring of an url and returns it as an object 8 | */ 9 | export const parseQueryString = (url: string): { [id: string]: string } => { 10 | let options = {}; 11 | const questionMark = url.indexOf("?"); 12 | if (questionMark !== -1) { 13 | options = url 14 | .substring(questionMark + 1) 15 | .split("&") 16 | .map((pair) => pair.split("=")) 17 | .reduce((acc, val) => { 18 | // acc[val[0]] = `${val[1]}`.valueOf(); 19 | acc[val[0]] = val[1]; 20 | return acc; 21 | }, {}); 22 | } 23 | return options; 24 | }; 25 | 26 | /** 27 | * @param {string} url 28 | * Parses a config url string into fragments and parses the query string into a 29 | * key-value object. 
30 | */ 31 | export const parseUrlStandard = (url: string, checkType = false): ParseUrlResult => { 32 | let parsed = null; 33 | let searchParams = null; 34 | 35 | if (isBlankString(url)) { 36 | return { 37 | value: null, 38 | error: "please provide a configuration url", 39 | }; 40 | } 41 | 42 | try { 43 | parsed = new URL(url); 44 | } catch (e) { 45 | return { value: null, error: e.message }; 46 | } 47 | 48 | if (Object.keys(parsed.searchParams)) { 49 | searchParams = {}; 50 | for (const [key, val] of parsed.searchParams) { 51 | searchParams[key] = val; 52 | } 53 | } 54 | 55 | return { 56 | value: { 57 | protocol: parsed.protocol || null, 58 | username: parsed.username || null, 59 | password: parsed.password || null, 60 | host: parsed.host || null, 61 | port: parsed.port || null, 62 | path: parsed.path || null, 63 | searchParams, 64 | }, 65 | error: null, 66 | }; 67 | }; 68 | 69 | /** 70 | * @param {string} url 71 | * Parses a config url string into fragments and parses the query string into a 72 | * key-value object. 
73 | */ 74 | export const parseUrl = (url: string, checkType = false): ParseUrlResult => { 75 | let protocol = null; 76 | let username = null; 77 | let password = null; 78 | let port = null; 79 | let path = null; 80 | let host = null; 81 | let searchParams = null; 82 | 83 | if (isBlankString(url)) { 84 | return { 85 | value: null, 86 | error: "please provide a configuration url", 87 | }; 88 | } 89 | 90 | const p = url.indexOf("://"); 91 | if (p === -1) { 92 | return { 93 | value: { protocol: url, username, password, host, port, path, searchParams }, 94 | error: null, 95 | }; 96 | } 97 | protocol = url.substring(0, p); 98 | if ( 99 | checkType === true && 100 | Object.values(StorageType).includes(protocol as StorageType) === false 101 | ) { 102 | return { value: null, error: `"${protocol}" is not a valid storage type` }; 103 | } 104 | 105 | let config = url.substring(p + 3); 106 | const at = config.indexOf("@"); 107 | const questionMark = config.indexOf("?"); 108 | 109 | // parse options 110 | if (questionMark !== -1) { 111 | searchParams = parseQueryString(url); 112 | config = config.substring(0, questionMark); 113 | } 114 | 115 | // get host (bucket name) 116 | if (at !== -1) { 117 | host = config.substring(at + 1); 118 | // remove port 119 | const colon = host.indexOf(":"); 120 | if (colon !== -1) { 121 | port = host.substring(colon + 1); 122 | host = host.substring(0, colon); 123 | } 124 | // console.log(colon, port); 125 | if (questionMark !== -1) { 126 | host = host.substring(0, questionMark); 127 | } 128 | if (isBlankString(host)) { 129 | host = null; 130 | } 131 | config = config.substring(0, at); 132 | } 133 | 134 | // get credentials 135 | const colon = config.indexOf(":"); 136 | if (colon !== -1) { 137 | if (port === null) { 138 | [username, password, port] = config.split(":"); 139 | if (typeof port === "undefined") { 140 | port = null; 141 | } 142 | } else { 143 | [username, password] = config.split(":"); 144 | } 145 | } else if (config !== "") { 146 | 
username = config; 147 | } 148 | 149 | // remove path from port in case it hasn't been removed 150 | if (port !== null) { 151 | const slash = port.indexOf("/"); 152 | if (slash !== -1) { 153 | path = port.substring(slash + 1); 154 | port = port.substring(0, slash); 155 | } 156 | } 157 | 158 | // remove path from bucketName in case it hasn't been removed 159 | if (host !== null) { 160 | const slash = host.indexOf("/"); 161 | if (slash !== -1) { 162 | path = host.substring(slash + 1); 163 | host = host.substring(0, slash); 164 | } 165 | } 166 | 167 | return { 168 | value: { protocol, username, password, host, port, path, searchParams }, 169 | error: null, 170 | }; 171 | }; 172 | 173 | /** 174 | * @param {string} s 175 | * 176 | * Parses a string that contains a radix prefix to a number 177 | * 178 | */ 179 | export const parseIntFromString = (s: string): number => { 180 | if (s.startsWith("0o")) { 181 | return parseInt(s, 8); 182 | } 183 | if (s.startsWith("0x") || s.startsWith("0X")) { 184 | return parseInt(s, 16); 185 | } 186 | if (s.startsWith("0b") || s.startsWith("0B")) { 187 | return parseInt(s, 2); 188 | } 189 | return parseInt(s); 190 | }; 191 | 192 | export const parseMode = (mode: number | string): ResultObjectNumber => { 193 | // if mode is a number, parseMode assumes it is a decimal number 194 | if (typeof mode === "number") { 195 | if (mode < 0) { 196 | return { 197 | value: null, 198 | error: `The argument 'mode' must be a 32-bit unsigned integer or an octal string. Received ${mode}`, 199 | }; 200 | } 201 | return { value: mode, error: null }; 202 | } 203 | 204 | // mode is a string 205 | 206 | // e.g "0o755" (octal string) 207 | if (mode.startsWith("0o")) { 208 | return { value: parseInt(mode.substring(2), 8), error: null }; 209 | } 210 | // e.g '511' (decimal) 211 | const i = parseInt(mode, 10); 212 | // quick fix for erroneously passed octal number as string (without 0o prefix) 213 | return { value: i > 511 ? 
511 : i, error: null }; 214 | }; 215 | 216 | /** 217 | * @param {string} str 218 | * 219 | * Checks if the value of the name is not null or undefined 220 | */ 221 | export const isBlankString = (str: string): boolean => { 222 | return !str || /^\s*$/.test(str); 223 | }; 224 | 225 | /** 226 | * @param {string} name 227 | * 228 | * Checks if the value of the name is not null, undefined or an empty string 229 | */ 230 | export const validateName = (name: string): string => { 231 | if (name === null) { 232 | return "Bucket name can not be `null`"; 233 | } 234 | if (name === "null") { 235 | return 'Please do not use the string "null" as bucket name'; 236 | } 237 | if (typeof name === "undefined") { 238 | return "Bucket name can not be `undefined`"; 239 | } 240 | if (name === "undefined") { 241 | return 'Please do not use the string "undefined" as bucket name'; 242 | } 243 | if (isBlankString(name)) { 244 | return "Bucket name can not be an empty string"; 245 | } 246 | return null; 247 | }; 248 | -------------------------------------------------------------------------------- /tests/config.ts: -------------------------------------------------------------------------------- 1 | import "jasmine"; 2 | import path from "path"; 3 | import dotenv from "dotenv"; 4 | import { StorageAdapterConfig, StorageType } from "../src/types/general"; 5 | 6 | export function getConfig(t: string = StorageType.LOCAL): string | StorageAdapterConfig { 7 | dotenv.config(); 8 | 9 | let config: StorageAdapterConfig | string = ""; 10 | if (t === StorageType.LOCAL) { 11 | config = { 12 | type: StorageType.LOCAL, 13 | bucketName: process.env.BUCKET_NAME, 14 | directory: process.env.LOCAL_DIRECTORY, 15 | }; 16 | } else if (t === StorageType.GCS) { 17 | config = { 18 | type: StorageType.GCS, 19 | bucketName: process.env.BUCKET_NAME, 20 | keyFilename: process.env.GOOGLE_CLOUD_KEY_FILENAME, 21 | }; 22 | } else if (t === StorageType.S3) { 23 | config = { 24 | type: StorageType.S3, 25 | bucketName: 
process.env.BUCKET_NAME, 26 | accessKeyId: process.env.AWS_ACCESS_KEY_ID, 27 | secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY, 28 | region: process.env.AWS_REGION, 29 | }; 30 | } else if (t === "S3-Cloudflare-R2") { 31 | config = { 32 | type: StorageType.S3, 33 | region: process.env.R2_REGION, 34 | bucketName: process.env.BUCKET_NAME, 35 | endpoint: process.env.R2_ENDPOINT, 36 | accessKeyId: process.env.R2_ACCESS_KEY_ID, 37 | secretAccessKey: process.env.R2_SECRET_ACCESS_KEY, 38 | }; 39 | } else if (t === "S3-Backblaze-B2") { 40 | config = { 41 | type: StorageType.S3, 42 | bucketName: process.env.BUCKET_NAME, 43 | region: process.env.B2_S3_REGION, 44 | endpoint: process.env.B2_S3_ENDPOINT, 45 | accessKeyId: process.env.B2_S3_ACCESS_KEY_ID, 46 | secretAccessKey: process.env.B2_S3_SECRET_ACCESS_KEY, 47 | }; 48 | } else if (t === "S3-Cubbit") { 49 | config = { 50 | type: StorageType.S3, 51 | bucketName: process.env.BUCKET_NAME, 52 | endpoint: process.env.CUBBIT_ENDPOINT, 53 | accessKeyId: process.env.CUBBIT_ACCESS_KEY_ID, 54 | secretAccessKey: process.env.CUBBIT_SECRET_ACCESS_KEY, 55 | }; 56 | } else if (t === StorageType.B2) { 57 | config = { 58 | type: StorageType.B2, 59 | bucketName: process.env.BUCKET_NAME, 60 | applicationKeyId: process.env.B2_APPLICATION_KEY_ID, 61 | applicationKey: process.env.B2_APPLICATION_KEY, 62 | }; 63 | } else if (t === StorageType.AZURE) { 64 | config = { 65 | type: StorageType.AZURE, 66 | bucketName: process.env.BUCKET_NAME, 67 | accountName: process.env.AZURE_STORAGE_ACCOUNT_NAME, 68 | accountKey: process.env.AZURE_STORAGE_ACCOUNT_KEY, 69 | }; 70 | } else if (t === StorageType.MINIO) { 71 | config = { 72 | type: StorageType.MINIO, 73 | endPoint: process.env.MINIO_ENDPOINT, 74 | port: process.env.MINIO_PORT, 75 | useSSL: process.env.MINIO_USE_SSL, 76 | region: process.env.MINIO_REGION, 77 | accessKey: process.env.MINIO_ACCESS_KEY, 78 | secretKey: process.env.MINIO_SECRET_KEY, 79 | }; 80 | } else { 81 | // const p = 
path.join(process.cwd(), "tests", "test_directory"); 82 | const p = path.join("tests", "test_directory"); 83 | config = process.env.CONFIG_URL || `local://directory=${p}`; 84 | } 85 | 86 | return config; 87 | } 88 | /* 89 | function tmp() { 90 | if (typeof (this.config as ConfigAmazonS3).region === "undefined") { 91 | if (this.s3Compatible === S3Compatible.R2) { 92 | this.config.region = "auto"; 93 | this.region = this.config.region; 94 | } else if (this.s3Compatible === S3Compatible.Backblaze) { 95 | let ep = this.config.endpoint; 96 | ep = ep.substring(ep.indexOf("s3.") + 3); 97 | this.config.region = ep.substring(0, ep.indexOf(".")); 98 | // console.log(this.config.region); 99 | this.region = this.config.region; 100 | } 101 | } else { 102 | this.region = (this.config as ConfigAmazonS3).region; 103 | } 104 | if (typeof this.config.endpoint === "undefined") { 105 | this.storage = new S3Client({ region: this.region }); 106 | } else { 107 | this.storage = new S3Client({ 108 | region: this.region, 109 | endpoint: this.config.endpoint, 110 | credentials: { 111 | accessKeyId: this.config.accessKeyId, 112 | secretAccessKey: this.config.secretAccessKey, 113 | }, 114 | }); 115 | } 116 | } 117 | */ 118 | -------------------------------------------------------------------------------- /tests/data/image1.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tweedegolf/storage-abstraction/767900d9dbeca8a22cec9e99f47e22f84cd344c8/tests/data/image1.jpg -------------------------------------------------------------------------------- /tests/data/image2.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tweedegolf/storage-abstraction/767900d9dbeca8a22cec9e99f47e22f84cd344c8/tests/data/image2.jpg -------------------------------------------------------------------------------- /tests/data/input.txt: 
-------------------------------------------------------------------------------- 1 | hello world -------------------------------------------------------------------------------- /tests/data/with space.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tweedegolf/storage-abstraction/767900d9dbeca8a22cec9e99f47e22f84cd344c8/tests/data/with space.jpg -------------------------------------------------------------------------------- /tests/test.ts: -------------------------------------------------------------------------------- 1 | import fs from "fs"; 2 | import path from "path"; 3 | import { rimraf } from "rimraf"; 4 | import { Storage } from "../src/Storage"; 5 | import { IAdapter, StorageType } from "../src/types/general"; 6 | import { getConfig } from "./config"; 7 | import { saveFile, timeout } from "./util"; 8 | 9 | const newBucketName1 = "bucket-test-sab-1"; 10 | const newBucketName2 = "bucket-test-sab-2"; 11 | 12 | let storage: Storage; 13 | let bucketName: string; 14 | 15 | function colorLog(s: string): string { 16 | return `\x1b[96m [${s}]\x1b[0m`; 17 | } 18 | 19 | const types = [ 20 | StorageType.LOCAL, // 0 21 | StorageType.S3, // 1 22 | StorageType.B2, // 2 23 | StorageType.GCS, // 3 24 | StorageType.AZURE, // 4 25 | StorageType.MINIO, // 5 26 | "S3-Cubbit", // 6 27 | "S3-Cloudflare-R2", // 7 28 | "S3-Backblaze-B2", // 8 29 | ]; 30 | 31 | let index = 0; 32 | // console.log(process.argv); 33 | if (process.argv[2]) { 34 | index = parseInt(process.argv[2], 10); 35 | } 36 | 37 | async function init() { 38 | // select the type of storage you want to test or pass it on the command line 39 | storage = new Storage(getConfig(types[index])); 40 | bucketName = storage.config.bucketName || newBucketName1; 41 | console.log(colorLog("init"), storage.config); 42 | 43 | await fs.promises.stat(path.join(process.cwd(), "tests", "test_directory")).catch(async (e) => { 44 | await 
fs.promises.mkdir(path.join(process.cwd(), "tests", "test_directory")); 45 | }); 46 | } 47 | 48 | async function cleanup() { 49 | const p = path.normalize(path.join(process.cwd(), "tests", "test_directory")); 50 | await rimraf(p, { 51 | preserveRoot: false, 52 | }); 53 | } 54 | 55 | async function listBuckets(): Promise | null> { 56 | const r = await storage.listBuckets(); 57 | console.log(colorLog("listBuckets"), r); 58 | return r.value; 59 | } 60 | 61 | async function bucketExists() { 62 | const r = await storage.bucketExists(bucketName); 63 | console.log(colorLog("bucketExists"), r); 64 | } 65 | 66 | async function createBucket() { 67 | const r = await storage.createBucket(newBucketName2); 68 | console.log(colorLog("createBucket"), r); 69 | } 70 | 71 | async function clearBucket() { 72 | const r = await storage.clearBucket(newBucketName2); 73 | console.log(colorLog("clearBucket"), r); 74 | } 75 | 76 | async function deleteBucket() { 77 | const r = await storage.deleteBucket(newBucketName2); 78 | console.log(colorLog("deleteBucket"), r); 79 | } 80 | 81 | async function listFiles() { 82 | const r = await storage.listFiles(newBucketName2); 83 | console.log(colorLog("listFiles"), r); 84 | } 85 | 86 | async function addFileFromPath() { 87 | const r = await storage.addFileFromPath({ 88 | bucketName: newBucketName2, 89 | origPath: "./tests/data/image1.jpg", 90 | targetPath: "image1-path.jpg", 91 | }); 92 | console.log(colorLog("addFileFromPath"), r); 93 | } 94 | 95 | async function addFileFromBuffer() { 96 | const buffer = await fs.promises.readFile("./tests/data/image1.jpg"); 97 | const r = await storage.addFileFromBuffer({ 98 | bucketName: newBucketName2, 99 | buffer, 100 | targetPath: "image1-buffer.jpg", 101 | }); 102 | console.log(colorLog("addFileFromPath"), r); 103 | } 104 | 105 | async function addFileFromStream() { 106 | const stream = fs.createReadStream("./tests/data/image1.jpg"); 107 | const r = await storage.addFileFromStream({ 108 | bucketName: 
newBucketName2, 109 | stream, 110 | targetPath: "image1-stream.jpg", 111 | }); 112 | console.log(colorLog("addFileFromPath"), r); 113 | } 114 | 115 | async function getFileAsStream() { 116 | const { value, error } = await storage.getFileAsStream(newBucketName2, "image1-path.jpg"); 117 | console.log(colorLog("getFileAsStream"), error); 118 | if (value !== null) { 119 | const filePath = path.join( 120 | process.cwd(), 121 | "tests", 122 | "test_directory", 123 | `test-${storage.getType()}-full.jpg` 124 | ); 125 | const writeStream = fs.createWriteStream(filePath); 126 | await saveFile(value, writeStream); 127 | } 128 | } 129 | 130 | async function getFileAsStreamPartial() { 131 | const { value, error } = await storage.getFileAsStream(newBucketName2, "image1-path.jpg", { 132 | start: 0, 133 | end: 2000, 134 | }); 135 | console.log(colorLog("getFileAsStream"), error); 136 | if (value !== null) { 137 | const filePath = path.join( 138 | process.cwd(), 139 | "tests", 140 | "test_directory", 141 | `test-${storage.getType()}-partial.jpg` 142 | ); 143 | const writeStream = fs.createWriteStream(filePath); 144 | await saveFile(value, writeStream); 145 | } 146 | } 147 | 148 | async function getFileAsStreamPartial2() { 149 | const { value, error } = await storage.getFileAsStream(newBucketName2, "image1-path.jpg", { 150 | end: 2000, 151 | }); 152 | console.log(colorLog("getFileAsStream"), error); 153 | if (value !== null) { 154 | const filePath = path.join( 155 | process.cwd(), 156 | "tests", 157 | "test_directory", 158 | `test-${storage.getType()}-partial2.jpg` 159 | ); 160 | const writeStream = fs.createWriteStream(filePath); 161 | await saveFile(value, writeStream); 162 | } 163 | } 164 | async function getFileAsStreamPartial3() { 165 | const { value, error } = await storage.getFileAsStream(newBucketName2, "image1-path.jpg", { 166 | start: 2000, 167 | }); 168 | console.log(colorLog("getFileAsStream"), error); 169 | if (value !== null) { 170 | const filePath = path.join( 171 | 
process.cwd(), 172 | "tests", 173 | "test_directory", 174 | `test-${storage.getType()}-partial3.jpg` 175 | ); 176 | const writeStream = fs.createWriteStream(filePath); 177 | await saveFile(value, writeStream); 178 | } 179 | } 180 | 181 | async function fileExists() { 182 | const r = await storage.fileExists(newBucketName2, "image1-path.jpg"); 183 | console.log(colorLog("fileExists"), r); 184 | } 185 | 186 | async function sizeOf() { 187 | const r = await storage.sizeOf(newBucketName2, "image1-path.jpg"); 188 | console.log(colorLog("sizeOf"), r); 189 | } 190 | 191 | async function removeFile() { 192 | const r = await storage.removeFile(newBucketName2, "image1-path.jpg"); 193 | console.log(colorLog("removeFile"), r); 194 | } 195 | 196 | async function deleteAllBuckets(list: Array, storage: IAdapter, delay: number = 500) { 197 | for (let i = 0; i < list.length; i++) { 198 | const b = list[i]; 199 | console.log(colorLog("remove bucket"), b); 200 | try { 201 | await storage.clearBucket(b); 202 | if (delay) { 203 | await timeout(delay); 204 | } 205 | // const files = await storage.listFiles(); 206 | // console.log(`\tfiles: ${files}`); 207 | await storage.deleteBucket(b); 208 | } catch (e) { 209 | console.error("\x1b[31m", "[Error removeAllBuckets]", b, e.message); 210 | } 211 | } 212 | } 213 | 214 | async function run() { 215 | await init(); 216 | 217 | // const r = await storage.serviceClient.config.region(); 218 | // console.log(r); 219 | 220 | const buckets = await listBuckets(); 221 | if (buckets !== null && buckets.length > 0) { 222 | await deleteAllBuckets(buckets, storage); 223 | } 224 | 225 | // await bucketExists(); 226 | // await createBucket(); 227 | // await listBuckets(); 228 | 229 | // await addFileFromPath(); 230 | // await addFileFromBuffer(); 231 | // await addFileFromStream(); 232 | // await addFileFromPath(); 233 | // await addFileFromBuffer(); 234 | // await addFileFromStream(); 235 | // await listFiles(); 236 | 237 | // await getFileAsStream(); 238 
| // await getFileAsStreamPartial(); 239 | // await getFileAsStreamPartial2(); 240 | // await getFileAsStreamPartial3(); 241 | 242 | // process.exit(); 243 | 244 | // await fileExists(); 245 | // await sizeOf(); 246 | // await removeFile(); 247 | // await removeFile(); 248 | // await removeFile(); 249 | // await fileExists(); 250 | // await clearBucket(); 251 | // await listFiles(); 252 | // await deleteBucket(); 253 | // await listBuckets(); 254 | 255 | // await cleanup(); 256 | } 257 | 258 | run(); 259 | -------------------------------------------------------------------------------- /tests/test_urls.ts: -------------------------------------------------------------------------------- 1 | import { parseUrl } from "../src/util"; 2 | import { parseUrlStandard } from "../src/util"; 3 | 4 | const urls = [ 5 | "local://path/to/bucket@bucket_name?mode=511&extra_option2=value2", 6 | "s3://access_key_id:secret_access_key@bucket_name?region=region&extra_option2=value2", 7 | "s3://@bucket_name?region=region&extra_option2=value2", 8 | "s3://@bucket_name", 9 | "s3://", 10 | // "s3", 11 | "s3://@bucket_name?extra_option1=value1&extra_option2=value2", 12 | "gcs://path/to/key_file.json@bucket_name?extra_option1=value1&extra_option2=value2", 13 | "b2://application_key_id:application_key@bucket_name?extra_option1=value1&extra_option2=value2", 14 | "b2://application_key_id:application_key@bucket_name", 15 | "azure://account_name:account_key@container_name?extra_option1=value1&extra_option2=value2", 16 | "azure://account_name@container_name", 17 | "azure://account_name@container_name?sas_token=SAS_TOKEN", 18 | "azure://account_name@container_name?connection_string=CONNECTION_STRING&extra_option2=value2", 19 | "minio://access_key:secret_key@bucket_name?region=region&endPoint=END_POINT&port=PORT&useSSL=USE_SSL", 20 | "minio://accessKey:secretKey@bucket_name?endPoint=END_POINT", 21 | "minio://accessKey:secretKey@bucket_name:9000?endPoint=END_POINT", 22 | ]; 23 | 24 | // 
urls.forEach((u: string) => { 25 | // const r = parseUrl(u); 26 | // console.log(u); 27 | // console.log(r); 28 | // }); 29 | 30 | // console.log(parseUrl("minio://accessKey:secretKey@bucket_name:9000?endPoint=END_POINT")); 31 | 32 | // console.log(parseUrlStandard("minio://accessKey:secretKey@bucket_name:9000?endPoint=END_POINT")); 33 | 34 | // console.log(parseUrl("minio://accessKey:secretKey:9000?endPoint=END_POINT")); 35 | // console.log(parseUrl("minio://accessKey:secretKey@:9000?endPoint=END_POINT")); 36 | // console.log(parseUrl("minio://accessKey:secretKey@the-buck:9000?endPoint=END_POINT")); 37 | // console.log(parseUrl("minio://@the-buck:9000?endPoint=END_POINT")); 38 | // console.log(parseUrl("minio://@the-buck?endPoint=END_POINT")); 39 | // console.log(parseUrl("minio://@the-buck:9000")); 40 | // console.log(parseUrl("minio://@the-buck")); 41 | 42 | // console.log(parseUrlStandard("minio://accessKey:secretKey@:9000?endPoint=END_POINT")); 43 | 44 | // port and bucket 45 | const u1 = "protocol://part1:part2@bucket:port/path/to/object?region=auto&option2=value2"; 46 | 47 | // no bucket but with @ 48 | const u2 = "protocol://part1:part2@:port/path/to/object?region=auto&option2=value2"; 49 | 50 | // no bucket 51 | const u3 = "protocol://part1:part2:port/path/to/object?region=auto&option2=value2"; 52 | 53 | // no credentials, note: @ is mandatory in order to be able to parse the bucket name 54 | const u4 = "protocol://@bucket/path/to/object?region=auto&option2=value2"; 55 | 56 | // no credentials, no bucket 57 | const u5 = "protocol:///path/to/object?region=auto&option2=value2"; 58 | 59 | // no credentials, no bucket, no extra options (query string) 60 | const u6 = "protocol:///path/to/object"; 61 | 62 | // only protocol 63 | const u7 = "protocol://"; 64 | 65 | // bare 66 | const u8 = "protocol"; 67 | 68 | [u1, u2, u3, u4, u5, u6, u7, u8].forEach((u: string) => { 69 | const r = parseUrl(u); 70 | console.log(u); 71 | console.log(r); 72 | }); 73 | 74 | // 
console.log(parseUrl("minio://your-access-key:your-secret-key@the-buck:9000")); 75 | // console.log(parseUrl("minio://your-access-key:your-secret-key@the-buck?port=9000")); 76 | -------------------------------------------------------------------------------- /tests/util.ts: -------------------------------------------------------------------------------- 1 | import { Readable, Stream, Writable } from "stream"; 2 | 3 | /** 4 | * Utility function that connects a read-stream (from the storage) to a write-stream (to a local file) 5 | */ 6 | export const saveFile = ( 7 | readStream: Readable, 8 | writeStream: Writable, 9 | log: boolean = false 10 | ): Promise => { 11 | return new Promise((resolve, reject) => { 12 | readStream 13 | .pipe(writeStream) 14 | .on("error", (e) => { 15 | console.error("\x1b[31m", e, "\n"); 16 | reject(); 17 | }) 18 | .on("finish", () => { 19 | if (log) { 20 | console.log("read finished"); 21 | } 22 | }); 23 | writeStream 24 | .on("error", (e) => { 25 | console.error("\x1b[31m", e, "\n"); 26 | reject(); 27 | }) 28 | .on("finish", () => { 29 | if (log) { 30 | console.log("write finished"); 31 | } 32 | resolve(); 33 | }); 34 | }); 35 | }; 36 | 37 | export async function waitABit(ms = 100): Promise { 38 | return new Promise((resolve) => { 39 | setTimeout(() => { 40 | // console.log(`just wait a bit (${millis}ms)`); 41 | resolve(); 42 | }, ms); 43 | }); 44 | } 45 | 46 | export function streamToString(stream: Readable) { 47 | const chunks: Array = []; 48 | return new Promise((resolve, reject) => { 49 | stream.on("data", (chunk) => chunks.push(Buffer.from(chunk))); 50 | stream.on("error", (err) => reject(err)); 51 | stream.on("end", () => resolve(Buffer.concat(chunks).toString("utf8"))); 52 | }); 53 | } 54 | 55 | export async function timeout(millis: number): Promise { 56 | return new Promise((resolve) => { 57 | setTimeout(() => { 58 | return resolve(); 59 | }, millis); 60 | }); 61 | } 62 | 63 | // credits: 
https://stackoverflow.com/questions/14269233/node-js-how-to-read-a-stream-into-a-buffer 64 | export async function stream2buffer(stream: Stream): Promise { 65 | return new Promise((resolve, reject) => { 66 | const _buf = Array(); // eslint-disable-line 67 | 68 | stream.on("data", (chunk) => _buf.push(chunk)); 69 | stream.on("end", () => resolve(Buffer.concat(_buf))); 70 | stream.on("error", (err) => reject(`error converting stream - ${err}`)); 71 | }); 72 | } 73 | -------------------------------------------------------------------------------- /todo.md: -------------------------------------------------------------------------------- 1 | ```typescript 2 | enum FileReturnType = { 3 | Stream, 4 | SomethingElse, 5 | ... 6 | } 7 | 8 | type GetFile = { 9 | bucketName: string, 10 | fileName: string, 11 | type: FileReturnType, 12 | options?: { start?: number; end?: number } 13 | } 14 | 15 | getFile(GetFile): Promise 16 | ``` 17 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "es2021", 4 | "lib": [ 5 | "es2021" 6 | ], 7 | // "types": ["reflect-metadata"], 8 | "outDir": "publish/dist", 9 | "module": "commonjs", 10 | "moduleResolution": "node", 11 | "esModuleInterop": true, 12 | "resolveJsonModule": true, 13 | "experimentalDecorators": true, 14 | "emitDecoratorMetadata": true, 15 | "preserveSymlinks": true, 16 | "sourceMap": true, 17 | "declaration": true, 18 | "allowSyntheticDefaultImports": true 19 | }, 20 | "include": [ 21 | "src/*.ts", 22 | "src/types/*.ts", 23 | "src/indexes/*.ts" 24 | ], 25 | // "include": ["src/index.ts"], 26 | "exclude": [ 27 | "node_modules", 28 | "publish", 29 | "spec", 30 | "src/template_class.ts", 31 | "src/template_functional.ts", 32 | "tests", 33 | "tests_local" 34 | ] 35 | } --------------------------------------------------------------------------------