├── .gitignore
├── .prettierrc
├── LICENSE
├── README.md
├── lerna.json
├── package.json
├── packages
│   ├── aws
│   │   ├── README.md
│   │   ├── package.json
│   │   ├── src
│   │   │   ├── AwsS3Storage.ts
│   │   │   ├── AwsS3StorageOptions.ts
│   │   │   └── index.ts
│   │   └── tsconfig.json
│   ├── azure
│   │   ├── README.md
│   │   ├── package.json
│   │   ├── src
│   │   │   ├── AzureBlobStorage.ts
│   │   │   ├── AzureStorageAccount.ts
│   │   │   └── index.ts
│   │   └── tsconfig.json
│   ├── core
│   │   ├── README.md
│   │   ├── package.json
│   │   ├── src
│   │   │   ├── Factory.ts
│   │   │   ├── Files.ts
│   │   │   ├── LocalStorage.ts
│   │   │   ├── Manager.ts
│   │   │   ├── Storage.ts
│   │   │   ├── StorageProvider.ts
│   │   │   ├── index.ts
│   │   │   └── utils
│   │   │       ├── Streams.ts
│   │   │       └── index.ts
│   │   └── tsconfig.json
│   ├── gcp
│   │   ├── README.md
│   │   ├── package.json
│   │   ├── src
│   │   │   ├── GcpBucketStorage.ts
│   │   │   ├── GcpBucketStorageOptions.ts
│   │   │   └── index.ts
│   │   └── tsconfig.json
│   └── samples
│       ├── README.md
│       ├── package.json
│       └── ts
│           ├── AwsS3Storage.ts
│           ├── AzureBlobStorage.ts
│           ├── GcpBucketStorage.ts
│           └── LocalStorage.ts
├── tsconfig.json
└── yarn.lock

/.gitignore:
--------------------------------------------------------------------------------
# Ignore the compiled output.
dist/

# TypeScript incremental compilation cache
*.tsbuildinfo

# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*

# Coverage (from Jest)
coverage/

# JUnit Reports (used mainly in CircleCI)
reports/
junit.xml

# Node modules
node_modules/

# Mac OS
.DS_Store

# Intellij Configuration Files
.idea/

data/
--------------------------------------------------------------------------------
/.prettierrc:
--------------------------------------------------------------------------------
{
  "semi": false,
  "singleQuote": true,
  "trailingComma": "none",
  "bracketSpacing": true,
  "endOfLine": "lf",
  "arrowParens": "avoid"
}
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
MIT License

Copyright (c) 2020 DataTorch

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# Node File Storage

In many applications the ability to dynamically change or read files from
different storage providers is a must. These packages provide an
implementation of `Storage` to standardize common functions.

This repository contains the core class along with implementations.

Usage samples can be found on the repository [here](https://github.com/datatorch/node-storage/tree/master/packages/samples).

## The gist

Install the packages you need:

```sh
# Local Storage
$ yarn add storage-core

# Azure Storage
$ yarn add storage-azure

# AWS Storage
$ yarn add storage-aws

# Google Cloud Storage
$ yarn add storage-gcp
```

Create a storage instance with the required configuration and write to it.

```ts
import { LocalStorage } from 'storage-core'
import { AzureBlobStorage } from 'storage-azure'
import { AwsS3Storage } from 'storage-aws'

// Write to a local folder
const ls = new LocalStorage({ path: './data' })
await ls.writeFile('local-write/test.txt', 'using local storage')

// Write to an Azure blob container
const az = new AzureBlobStorage({
  container: process.env.CONTAINER,
  accountName: process.env.ACCOUNT_NAME,
  accountKey: process.env.ACCOUNT_KEY
})
await az.writeFile('azure-write/test.txt', 'using azure storage')

// Write to an AWS S3 bucket
const aws = new AwsS3Storage({
  bucket: process.env.BUCKET,
  accessKeyId: process.env.ACCESS_KEY,
  secretAccessKey: process.env.SECRET_KEY
})
await aws.writeFile('aws-write/test.txt', 'using aws s3 storage')
```
--------------------------------------------------------------------------------
/lerna.json:
--------------------------------------------------------------------------------
{
  "packages": [
    "packages/*"
  ],
  "version": "0.7.2",
  "npmClient": "yarn",
  "useWorkspaces": true
}
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
{
  "name": "@storage/mono-repo",
  "license": "MIT",
  "private": true,
  "version": "0.7.2",
  "scripts": {
    "bootstrap": "lerna bootstrap",
    "build": "lerna run build",
    "clean": "rimraf packages/**/dist packages/**/*.tsbuildinfo",
    "update": "yarn upgrade-interactive --latest",
    "release": "yarn clean && yarn build && lerna publish"
  },
  "devDependencies": {
    "@types/node": "^14.6.2",
    "lerna": "^3.22.1",
    "prettier": "^2.1.1",
    "rimraf": "^3.0.2",
    "ts-node": "^9.0.0",
    "typescript": "^4.0.2"
  },
  "files": [
    "!tsconfig*"
  ],
  "workspaces": [
    "packages/*"
  ],
  "dependencies": {
    "storage-gcp": "^0.6.4"
  }
}
--------------------------------------------------------------------------------
/packages/aws/README.md:
--------------------------------------------------------------------------------
# Storage AWS S3

This package exports an implementation of `Storage` that allows using AWS S3
buckets as a generic storage unit.
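
## Usage

A minimal sketch of reading and writing through `AwsS3Storage`, mirroring
`packages/samples/ts/AwsS3Storage.ts`. The bucket name and credentials here
are assumed to come from environment variables:

```ts
import { AwsS3Storage } from 'storage-aws'

const storage = new AwsS3Storage({
  bucket: process.env.BUCKET || '',
  accessKeyId: process.env.ACCESS_KEY || '',
  secretAccessKey: process.env.SECRET_KEY || ''
})

// Write a small text file, read it back, then delete it
await storage.writeFile('aws-write/test.txt', 'using aws s3 storage')
const buffer = await storage.readFile('aws-write/test.txt')
console.log(buffer.toString())
await storage.deleteFile('aws-write/test.txt')
```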
--------------------------------------------------------------------------------
/packages/aws/package.json:
--------------------------------------------------------------------------------
{
  "name": "storage-aws",
  "version": "0.7.2",
  "description": "AWS storage implementation",
  "author": "Justin Brooks ",
  "homepage": "https://github.com/datatorch/node-storage#readme",
  "license": "MIT",
  "main": "./dist/index.js",
  "types": "./dist/index.d.ts",
  "repository": {
    "type": "git",
    "url": "git+https://github.com/datatorch/node-storage.git"
  },
  "scripts": {
    "build": "tsc",
    "test": "echo \"Error: run tests from root\" && exit 1"
  },
  "bugs": {
    "url": "https://github.com/datatorch/node-storage/issues"
  },
  "files": [
    "dist"
  ],
  "gitHead": "b05db75f50e3a44fd01b8c95d114f3fb5c9801c3",
  "dependencies": {
    "aws-sdk": "^2.743.0",
    "storage-core": "^0.7.2"
  },
  "keywords": [
    "node",
    "storage",
    "aws",
    "s3",
    "files"
  ]
}
--------------------------------------------------------------------------------
/packages/aws/src/AwsS3Storage.ts:
--------------------------------------------------------------------------------
import { Storage, FilesReadable, ListResult, PathAbs } from 'storage-core'
import { S3, AWSError } from 'aws-sdk'
import pathModule from 'path'

import { Readable, PassThrough, Writable } from 'stream'
import { AwsS3StorageOptions } from './AwsS3StorageOptions'
import { PromiseResult } from 'aws-sdk/lib/request'

const formatContent = (f: S3.Object) => ({
  path: f.Key || '',
  name: pathModule.basename(f.Key || ''),
  size: f.Size,
  updatedAt: f.LastModified,
  md5Hash: f.ETag,
  isFile: true,
  raw: f as object
})

export class AwsS3Storage extends Storage<AwsS3StorageOptions> {
  s3: S3

  constructor(options: AwsS3StorageOptions) {
    super(options)
    this.s3 = new S3({ ...options })
  }

  @PathAbs()
  async getTopLevel(path?: string): Promise<ListResult[]> {
    const request = await this.s3
      .listObjectsV2({
        Bucket: this.options.bucket,
        Prefix: path ? `${path}/` : '',
        Delimiter: '/'
      })
      .promise()

    const { CommonPrefixes, Contents } = request

    const dirs: ListResult[] =
      (CommonPrefixes &&
        CommonPrefixes.map(p => ({
          name: pathModule.basename(p.Prefix || ''),
          path: (p.Prefix || '').slice(0, -1),
          isFile: false,
          raw: p
        }))) ||
      []

    const files: ListResult[] =
      (Contents &&
        Contents.filter(c => c.Key && c.Key[c.Key.length - 1] !== '/').map(c =>
          formatContent(c)
        )) ||
      []

    return dirs.concat(files)
  }

  @PathAbs()
  getFilesStream(path?: string): Readable {
    let request: PromiseResult<S3.ListObjectsV2Output, AWSError> | undefined
    let ContinuationToken: string | undefined

    return new FilesReadable(async () => {
      if (request && !request.IsTruncated) return null

      request = await this.s3
        .listObjectsV2({
          Bucket: this.options.bucket,
          Prefix: path,
          ContinuationToken
        })
        .promise()

      ContinuationToken = request.NextContinuationToken
      const contents = request.Contents
      return (
        contents &&
        contents
          .filter(c => c.Key && c.Key[c.Key.length - 1] !== '/')
          .map(f => formatContent(f))
      )
    })
  }

  @PathAbs()
  async readFile(filePath: string): Promise<Buffer> {
    const r = await this.s3
      .getObject({ Bucket: this.options.bucket, Key: filePath })
      .promise()

    let buffer: Buffer | undefined = undefined
    if (r.Body instanceof Buffer) buffer = r.Body
    if (typeof r.Body === 'string') buffer = Buffer.from(r.Body, 'utf8')
    if (r.Body instanceof Readable) {
      const chunks = []
      for await (let chunk of r.Body) {
        chunks.push(chunk)
      }
      buffer = Buffer.concat(chunks)
    }

    if (!buffer || !r.Body)
      throw new Error('Could not convert return body to buffer.')

    return buffer
  }

  @PathAbs()
  async writeFile(filePath: string, data: string | Buffer): Promise<void> {
    await this.s3
      .putObject({
        Bucket: this.options.bucket,
        Key: filePath,
        Body: data
      })
      .promise()
  }

  @PathAbs()
  async deleteFile(filePath: string): Promise<void> {
    await this.s3
      .deleteObject({ Bucket: this.options.bucket, Key: filePath })
      .promise()
  }

  @PathAbs()
  async getFileSize(filePath: string): Promise<number> {
    const r = await this.s3
      .headObject({ Bucket: this.options.bucket, Key: filePath })
      .promise()
    return r.ContentLength || 0
  }

  @PathAbs()
  async createWriteStream(filePath: string): Promise<Writable> {
    const pass = new PassThrough()
    this.s3
      .upload({ Bucket: this.options.bucket, Key: filePath, Body: pass })
      .promise()
    return pass
  }

  @PathAbs()
  async createReadStream(filePath: string): Promise<Readable> {
    return this.s3
      .getObject({ Bucket: this.options.bucket, Key: filePath })
      .createReadStream()
  }

  async makeDir(_: string): Promise<void> {}
}
--------------------------------------------------------------------------------
/packages/aws/src/AwsS3StorageOptions.ts:
--------------------------------------------------------------------------------
import { StorageOptions } from 'storage-core'

export interface AwsS3StorageOptions extends StorageOptions {
  bucket: string
  accessKeyId: string
  secretAccessKey: string
  region?: string
  endpoint?: string
  useAccelerateEndpoint?: boolean
}
--------------------------------------------------------------------------------
/packages/aws/src/index.ts:
--------------------------------------------------------------------------------
export * from './AwsS3Storage'
export * from './AwsS3StorageOptions'
--------------------------------------------------------------------------------
/packages/aws/tsconfig.json:
--------------------------------------------------------------------------------
{
  "extends": "../../tsconfig.json",
  "compilerOptions": {
    "rootDir": "./src",
    "outDir": "./dist"
  },
  "references": [{ "path": "../core" }],
  "include": ["./src"]
}
--------------------------------------------------------------------------------
/packages/azure/README.md:
--------------------------------------------------------------------------------
# Storage Azure Storage

This package exports an implementation of `Storage` that allows using Azure
Blob containers as a generic storage unit.

## Usage
--------------------------------------------------------------------------------
/packages/azure/package.json:
--------------------------------------------------------------------------------
{
  "name": "storage-azure",
  "version": "0.7.2",
  "description": "Azure blob storage implementation",
  "author": "Justin Brooks ",
  "homepage": "https://github.com/datatorch/node-storage#readme",
  "license": "MIT",
  "main": "./dist/index.js",
  "types": "./dist/index.d.ts",
  "repository": {
    "type": "git",
    "url": "git+https://github.com/datatorch/node-storage.git"
  },
  "scripts": {
    "build": "tsc",
    "test": "echo \"Error: run tests from root\" && exit 1"
  },
  "bugs": {
    "url": "https://github.com/datatorch/node-storage/issues"
  },
  "files": [
    "dist"
  ],
  "gitHead": "b05db75f50e3a44fd01b8c95d114f3fb5c9801c3",
  "dependencies": {
    "@azure/storage-blob": "12.2.0-preview.1",
    "storage-core": "^0.7.2"
  },
  "keywords": [
    "node",
    "storage",
    "azure",
    "files"
  ]
}
--------------------------------------------------------------------------------
/packages/azure/src/AzureBlobStorage.ts:
--------------------------------------------------------------------------------
import { Storage, FilesReadable, PathAbs, StorageOptions } from 'storage-core'
import pathModule from 'path'
import { BlobServiceClient, ContainerClient } from '@azure/storage-blob'
import { Readable, PassThrough } from 'stream'
import { ListResult } from 'storage-core'
import {
  AzureStorageAccount,
  AzureStorageAccountOptions
} from './AzureStorageAccount'

export interface AzureBlobStorageOptions
  extends StorageOptions,
    AzureStorageAccountOptions {
  container: string
}

export class AzureBlobStorage extends Storage<AzureBlobStorageOptions> {
  blobClient: BlobServiceClient
  containerClient: ContainerClient

  constructor(
    options: AzureBlobStorageOptions,
    provider?: AzureStorageAccount
  ) {
    super(options)

    this.blobClient = (provider || new AzureStorageAccount(options)).blobClient
    this.containerClient = this.blobClient.getContainerClient(options.container)
  }

  @PathAbs()
  async getTopLevel(path?: string): Promise<ListResult[]> {
    const iterator = this.containerClient
      .listBlobsByHierarchy('/', { prefix: path ? `${path}/` : '' })
      .byPage({ maxPageSize: 1000 })

    const { value } = await iterator.next()
    const { blobItems, blobPrefixes } = value.segment

    const dirs: ListResult[] = blobPrefixes.map((b: any) => ({
      name: pathModule.basename(b.name),
      path: b.name.slice(0, -1),
      isFile: false,
      raw: b
    }))
    const files: ListResult[] = blobItems.map((b: any) => ({
      name: pathModule.basename(b.name),
      path: b.name,
      size: b.properties.contentLength,
      createdAt: b.properties.createdOn,
      updatedAt: b.properties.lastModified,
      isFile: true,
      raw: b
    }))

    return dirs.concat(files)
  }

  @PathAbs()
  async getFileSize(path: string): Promise<number> {
    const props = await this.containerClient
      .getBlockBlobClient(path)
      .getProperties()
    return props.contentLength || 0
  }

  @PathAbs()
  getFilesStream(path?: string): Readable {
    let iterator = this.containerClient.listBlobsFlat({ prefix: path })
    return new FilesReadable(async () => {
      const { value } = await iterator.next()
      return (
        value && [
          {
            path: value.name,
            name: value.name,
            size: value.properties.contentLength,
            createdAt: value.properties.createdOn,
            updatedAt: value.properties.lastModified,
            raw: value
          }
        ]
      )
    })
  }

  @PathAbs()
  readFile(filePath: string): Promise<Buffer> {
    return this.containerClient.getBlockBlobClient(filePath).downloadToBuffer()
  }

  @PathAbs()
  async writeFile(filePath: string, data: string | Buffer): Promise<void> {
    await this.containerClient
      .getBlockBlobClient(filePath)
      .upload(data, data.length)
  }

  @PathAbs()
  async deleteFile(filePath: string): Promise<void> {
    await this.containerClient.getBlockBlobClient(filePath).delete()
  }

  @PathAbs()
  async createWriteStream(filePath: string) {
    // The upload stream fails if the blob has not already been created,
    // so write an empty blob first.
    await this.writeFile(filePath, '')

    const stream = new PassThrough()
    this.containerClient.getBlockBlobClient(filePath).uploadStream(stream)
    return stream
  }

  @PathAbs()
  async createReadStream(filePath: string) {
    const download = await this.containerClient
      .getBlockBlobClient(filePath)
      .download()
    const stream = download.readableStreamBody
    if (!stream) throw new Error('Readable stream is undefined.')
    return stream as Readable
  }

  async makeDir(_: string): Promise<void> {}
}
--------------------------------------------------------------------------------
/packages/azure/src/AzureStorageAccount.ts:
--------------------------------------------------------------------------------
import {
  BlobServiceClient,
  StorageSharedKeyCredential
} from '@azure/storage-blob'
import { StorageProvider } from 'storage-core'
import { AzureBlobStorage } from './AzureBlobStorage'

export interface AzureStorageAccountOptions {
  accountName: string
  accountKey: string
  endpoint?: string
}

export class AzureStorageAccount extends StorageProvider {
  blobClient: BlobServiceClient

  constructor(private options: AzureStorageAccountOptions) {
    super()

    const credentials = new StorageSharedKeyCredential(
      options.accountName,
      options.accountKey
    )
    this.blobClient = new BlobServiceClient(
      options.endpoint ||
        `https://${options.accountName}.blob.core.windows.net`,
      credentials
    )
  }

  getStorage(name: string) {
    return new Promise<AzureBlobStorage>(resolve =>
      resolve(new AzureBlobStorage({ ...this.options, container: name }))
    )
  }

  async createStorage(name: string) {
    const container = await this.blobClient.createContainer(name)
    const storage = new AzureBlobStorage({ ...this.options, container: name })
    storage.containerClient = container.containerClient
    return storage
  }

  deleteStorage(name: string) {
    return this.blobClient.deleteContainer(name)
  }
}
--------------------------------------------------------------------------------
/packages/azure/src/index.ts:
--------------------------------------------------------------------------------
export * from './AzureBlobStorage'
export * from './AzureStorageAccount'
--------------------------------------------------------------------------------
/packages/azure/tsconfig.json:
--------------------------------------------------------------------------------
{
  "extends": "../../tsconfig.json",
  "compilerOptions": {
    "rootDir": "./src",
    "outDir": "./dist"
  },
  "references": [{ "path": "../core" }],
  "include": ["./src"]
}
--------------------------------------------------------------------------------
/packages/core/README.md:
--------------------------------------------------------------------------------
# Storage Core

In many applications the ability to dynamically change storage is a must. This
package provides an abstract `Storage` class and implements a basic local
storage class.

Built with extensibility in mind, you can also implement your own storage
mount by extending `Storage` from `storage-core`; a minimal sketch is shown at
the end of this README.

List of currently supported storage options:

- [Local](https://www.npmjs.com/package/storage-core)
- [AWS S3](https://www.npmjs.com/package/storage-aws)
- [Azure Blob](https://www.npmjs.com/package/storage-azure)
- [Google Cloud Bucket](https://www.npmjs.com/package/storage-gcp)

If you have created a storage that implements the `Storage` class, please
share it on the GitHub repository to have it added to this list.

Usage samples can be found on the repository [here](https://github.com/datatorch/node-storage/tree/master/packages/samples).
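
As a sketch of what a custom mount could look like, here is a hypothetical
in-memory storage (the class name and its `Map`-backed store are illustrative,
not part of any package) that implements every abstract member of `Storage`
and registers itself with the factory:

```ts
import { Readable, Writable, PassThrough } from 'stream'
import {
  Storage,
  StorageOptions,
  ListResult,
  getStorageFactory
} from 'storage-core'

// Illustrative only: files are flat keys in a Map instead of a real backend.
export class MemoryStorage extends Storage<StorageOptions> {
  private store = new Map<string, Buffer>()

  async getTopLevel(path?: string): Promise<ListResult[]> {
    const prefix = path ? `${path}/` : ''
    return [...this.store.keys()]
      .filter(k => k.startsWith(prefix))
      .map(k => ({ name: k, path: k, isFile: true, raw: {} }))
  }

  getFilesStream(path?: string): Readable {
    const prefix = path ? `${path}/` : ''
    const files = [...this.store.keys()]
      .filter(k => k.startsWith(prefix))
      .map(k => ({ name: k, path: k, raw: {} }))
    return Readable.from(files)
  }

  async readFile(filePath: string): Promise<Buffer> {
    const data = this.store.get(filePath)
    if (!data) throw new Error('File not found.')
    return data
  }

  async writeFile(filePath: string, data: string | Buffer): Promise<void> {
    this.store.set(filePath, typeof data === 'string' ? Buffer.from(data) : data)
  }

  async deleteFile(filePath: string): Promise<void> {
    this.store.delete(filePath)
  }

  async getFileSize(filePath: string): Promise<number> {
    return (await this.readFile(filePath)).length
  }

  async createWriteStream(filePath: string): Promise<Writable> {
    // Collect the written chunks and commit them on 'finish'.
    const pass = new PassThrough()
    const chunks: Buffer[] = []
    pass.on('data', c => chunks.push(Buffer.from(c)))
    pass.on('finish', () => this.store.set(filePath, Buffer.concat(chunks)))
    return pass
  }

  async createReadStream(filePath: string): Promise<Readable> {
    return Readable.from([await this.readFile(filePath)])
  }

  // Nothing to do: paths are flat keys in the map.
  async makeDir(_: string): Promise<void> {}
}

getStorageFactory().register('memory', MemoryStorage)
```

Once registered, the mount can be constructed through
`getStorageFactory().create({ type: 'memory' })`, or created and cached by id
through the storage manager's `getOrCreate`, just like the built-in storages.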
--------------------------------------------------------------------------------
/packages/core/package.json:
--------------------------------------------------------------------------------
{
  "name": "storage-core",
  "version": "0.7.2",
  "description": "Local storage implementation and storage interfaces",
  "author": "Justin Brooks ",
  "homepage": "https://github.com/datatorch/node-storage#readme",
  "license": "MIT",
  "main": "./dist/index.js",
  "types": "./dist/index.d.ts",
  "files": [
    "dist"
  ],
  "repository": {
    "type": "git",
    "url": "git+https://github.com/datatorch/node-storage.git"
  },
  "scripts": {
    "build": "tsc -p tsconfig.json",
    "test": "echo \"Error: run tests from root\" && exit 1"
  },
  "bugs": {
    "url": "https://github.com/datatorch/node-storage/issues"
  },
  "dependencies": {
    "@types/lru-cache": "^5.1.0",
    "globby": "^11.0.1",
    "lru-cache": "^6.0.0"
  },
  "gitHead": "b05db75f50e3a44fd01b8c95d114f3fb5c9801c3",
  "keywords": [
    "node",
    "storage",
    "files"
  ]
}
--------------------------------------------------------------------------------
/packages/core/src/Factory.ts:
--------------------------------------------------------------------------------
import { Storage, StorageOptions } from './Storage'
import { LocalStorage } from './LocalStorage'

export type FactoryStorageOption<O extends StorageOptions = StorageOptions> = O & {
  type: string
}

export type FactoryProviderOptions<O = any> = O & { type: string }

export class StorageFactory {
  private map = new Map<string, new (options: any) => Storage<any>>()

  constructor() {
    this.register('local', LocalStorage)
  }

  register<O extends StorageOptions, T extends Storage<O>>(
    type: string,
    StorageType: { new (options: any): T }
  ): void {
    this.map.set(type, StorageType)
  }

  create<O extends StorageOptions = StorageOptions>(
    options: FactoryStorageOption<O>
  ): Storage<O> {
    const StorageType = this.map.get(options.type)
    if (!StorageType)
      throw new Error(`'${options.type}' has not been registered.`)
    return new StorageType(options)
  }
}

const storageFactory = new StorageFactory()

export function getStorageFactory() {
  return storageFactory
}
--------------------------------------------------------------------------------
/packages/core/src/Files.ts:
--------------------------------------------------------------------------------
export interface ListResult extends ListFile {
  isFile: boolean
}

export interface ListFile {
  name: string
  path: string
  size?: number
  md5Hash?: string
  createdAt?: Date
  updatedAt?: Date
  raw: object
}
--------------------------------------------------------------------------------
/packages/core/src/LocalStorage.ts:
--------------------------------------------------------------------------------
import * as fs from 'fs'

import pathModule from 'path'
import globby from 'globby'

import { PathAbs, Storage, StorageOptions } from './Storage'
import { FilesTransform } from './utils'
import { Readable } from 'stream'
import { ListResult } from './Files'

export interface LocalStorageOptions extends StorageOptions {
  path: string
}

export class LocalStorage extends Storage<LocalStorageOptions> {
  constructor(options: LocalStorageOptions) {
    super(options)
    this.initialize()
  }

  async initialize(): Promise<void> {
    const [, folder] = await Promise.all([
      fs.promises.access(
        this.options.path,
        fs.constants.F_OK | fs.constants.R_OK | fs.constants.W_OK
      ),
      fs.promises.lstat(this.options.path)
    ])
    if (!folder.isDirectory()) throw new Error('Path is not a directory')
  }

  async terminate(): Promise<void> {}

  @PathAbs()
  async getTopLevel(path?: string): Promise<ListResult[]> {
    const fullPath = this.fullPath(path)
    const files: any[] = await globby(`${fullPath}/*`, {
      onlyFiles: false,
      onlyDirectories: false,
      objectMode: true
    })

    const rootPath = pathModule.resolve(this.fullPath())
    const formatPath = (path: string) =>
      pathModule.resolve(path).replace(`${rootPath}/`, '')

    return files.map(f => ({
      path: formatPath(f.path),
      name: pathModule.basename(f.path),
      raw: f,
      isFile: f.dirent.isFile()
    }))
  }

  @PathAbs()
  getFilesStream(path?: string): Readable {
    const fullPath = this.fullPath(path)

    const rootPath = pathModule.resolve(this.fullPath())
    const formatPath = (path: string) =>
      pathModule.resolve(path).replace(`${rootPath}/`, '')

    const trans = new FilesTransform((f: any) => ({
      name: pathModule.basename(f.path),
      path: formatPath(f.path),
      size: f.stats.size,
      raw: f
    }))
    return globby
      .stream(`${fullPath}/**/*`, {
        objectMode: true,
        onlyFiles: true,
        stats: true
      })
      .pipe(trans)
  }

  @PathAbs()
  async getFileSize(filePath: string): Promise<number> {
    const stat = await fs.promises.stat(this.fullPath(filePath))
    return stat.size
  }

  @PathAbs()
  async readFile(filePath: string): Promise<Buffer> {
    const fullPath = this.fullPath(filePath)
    return fs.promises.readFile(fullPath)
  }

  @PathAbs()
  async writeFile(filePath: string, data: string | Buffer): Promise<void> {
    await this.makeDir(filePath)
    const fullPath = this.fullPath(filePath)
    await fs.promises.writeFile(fullPath, data)
  }

  @PathAbs()
  async deleteFile(filePath: string): Promise<void> {
    await fs.promises.unlink(this.fullPath(filePath))
  }

  @PathAbs()
  async makeDir(path: string) {
    const dir = pathModule.dirname(path)
    await fs.promises.mkdir(this.fullPath(dir), { recursive: true })
  }

  @PathAbs()
  async createWriteStream(path: string) {
    await this.makeDir(path)
    return fs.createWriteStream(this.fullPath(path))
  }

  @PathAbs()
  async createReadStream(path: string, options?: any) {
    return fs.createReadStream(this.fullPath(path), options)
  }

  @PathAbs()
  fullPath(path?: string): string {
    return path ? pathModule.join(this.options.path, path) : this.options.path
  }
}
--------------------------------------------------------------------------------
/packages/core/src/Manager.ts:
--------------------------------------------------------------------------------
import LRUCache from 'lru-cache'

import { Storage, StorageOptions } from './Storage'
import { getStorageFactory, FactoryStorageOption } from './Factory'

const DEFAULT_MAX_SIZE = 1000
const defaultSizeCalculator = () => 1

export type ManagerStorageOption<
  O extends StorageOptions = any
> = FactoryStorageOption<O> & { id: string }

export type StorageManagerOptions = {
  maxSize?: number
  sizeCalculator?: (value: Storage, key: string) => number
}

const DEFAULT_MANAGER: StorageManagerOptions = {
  maxSize: DEFAULT_MAX_SIZE,
  sizeCalculator: defaultSizeCalculator
}

export class StorageManager {
  /** Cache map */
  private storages: LRUCache<string, Storage>

  constructor(options: StorageManagerOptions = DEFAULT_MANAGER) {
    const { maxSize, sizeCalculator } = options
    this.storages = new LRUCache<string, Storage>({
      max: maxSize,
      length: sizeCalculator
    })
  }

  /**
   * Checks if the cache contains a storage unit.
   *
   * @param id identifier associated with the storage unit
   */
  has(id: string): boolean {
    return this.storages.has(id)
  }

  /**
   * Retrieves a storage unit from the cache.
   *
   * @param id identifier associated with the storage unit
   */
  get(id: string): Storage {
    const storage = this.storages.get(id)
    if (!storage) throw new Error('Storage not found.')
    return storage
  }

  /**
   * Gets or creates a storage unit from the cache.
   *
   * @remarks
   * The created storage will also be cached. Requires the storage type to be
   * registered in the storage factory.
   *
   * @param options options of the storage
   * @param ttl max age in milliseconds before the storage is removed
   */
  async getOrCreate(
    options: ManagerStorageOption,
    ttl?: number
  ): Promise<Storage<any>> {
    if (!options.id) return this.create(options, ttl)
    try {
      return this.get(options.id)
    } catch (ex) {
      return this.create(options, ttl)
    }
  }

  /**
   * Creates a storage unit based on the configuration parameters.
   *
   * @remarks
   * The created storage will also be cached. Requires the storage type to be
   * registered in the storage factory.
   *
   * @param options options of the storage
   * @param ttl max age in milliseconds before the storage is removed
   */
  async create(
    options: ManagerStorageOption,
    ttl?: number
  ): Promise<Storage<any>> {
    const storage = getStorageFactory().create(options)
    this.storages.set(options.id, storage, ttl)
    return storage
  }
}

const storageManager = new StorageManager()

export function getStorageManager(): StorageManager {
  return storageManager
}
--------------------------------------------------------------------------------
/packages/core/src/Storage.ts:
--------------------------------------------------------------------------------
import { Readable, Writable } from 'stream'
import { ListResult } from './Files'

export const PathAbs = (index: number = 0) => (
  _: any,
  __: string,
  propDesc: PropertyDescriptor
) => {
  let originalFunction: Function = propDesc.value
  propDesc.value = function () {
    let argValue = arguments[index]
    let newArgs = []
    for (let i = 0; i < arguments.length; i++) newArgs.push(arguments[i])
    newArgs[index] = (this as any).pathAbs(argValue)

    return originalFunction.apply(this, newArgs)
  }
  return propDesc
}

export interface StorageOptions {
  directory?: string
}

export abstract class Storage<O extends StorageOptions = StorageOptions> {
  public readonly options: O
  public directoryNormalized: string

  constructor(options: O) {
    this.options = options
    const directory = this.options.directory || ''
    this.directoryNormalized =
      directory.charAt(0) === '/' ? directory.substring(1) : directory
  }

  abstract getTopLevel(path?: string): Promise<ListResult[]>

  abstract getFilesStream(path?: string): Readable

  abstract readFile(filePath: string): Promise<Buffer>

  abstract writeFile(filePath: string, data: string | Buffer): Promise<void>

  abstract deleteFile(filePath: string): Promise<void>

  abstract getFileSize(filePath: string): Promise<number>

  abstract createWriteStream(filePath: string): Promise<Writable>

  abstract createReadStream(filePath: string): Promise<Readable>

  abstract makeDir(path: string): Promise<void>

  public pathAbs(path: string) {
    const pathNormalized = path.charAt(0) === '/' ? path.substring(1) : path
    return this.directoryNormalized.length > 0
      ? `${this.directoryNormalized}/${pathNormalized}`
      : pathNormalized
  }
}
--------------------------------------------------------------------------------
/packages/core/src/StorageProvider.ts:
--------------------------------------------------------------------------------
import { getStorageManager, StorageManager } from './Manager'
import { Storage } from './Storage'

export abstract class StorageProvider {
  readonly manager: StorageManager

  constructor(manager?: StorageManager) {
    this.manager = manager || getStorageManager()
  }

  abstract getStorage(name: string): Promise<Storage>
  abstract createStorage(name: string): Promise<Storage>
  abstract deleteStorage(name: string): Promise<any>
}
--------------------------------------------------------------------------------
/packages/core/src/index.ts:
--------------------------------------------------------------------------------
export * from './LocalStorage'
export * from './Storage'
export * from './Factory'
export * from './Manager'
export * from './utils'
export * from './Files'
export * from './StorageProvider'
--------------------------------------------------------------------------------
/packages/core/src/utils/Streams.ts:
--------------------------------------------------------------------------------
import { Readable, Transform } from 'stream'
import { ListFile } from '../Files'
export class FilesTransform extends Transform {
  constructor(private transform: (object: any) => ListFile) {
    super({ objectMode: true })
  }

  _transform(chunk: object, _: any, done: () => void) {
    this.push(this.transform(chunk))
    done()
  }
}

export class FilesReadable extends Readable {
  array: Array<ListFile> | null | undefined = null

  constructor(
    private readMore: () => Promise<ListFile[] | null | undefined>,
    objectMode: boolean = true
  ) {
    super({ objectMode })
  }

  popPush() {
    if (!this.array) {
      this._read()
      return
    }

    const value = this.array.pop()
    if (!value) return

    const more = this.push(value)
    if (more) this.popPush()
  }

  _read() {
    if (!this.array || this.array.length === 0) {
      this.readMore()
        .then(array => {
          if (!array) {
            this.push(null)
            return
          }
          this.array = array
          this.popPush()
        })
        .catch(err => this.destroy(err))
      return
    }
    this.popPush()
  }
}
--------------------------------------------------------------------------------
/packages/core/src/utils/index.ts:
--------------------------------------------------------------------------------
export * from './Streams'
--------------------------------------------------------------------------------
/packages/core/tsconfig.json:
--------------------------------------------------------------------------------
{
  "extends": "../../tsconfig.json",
  "compilerOptions": {
    "rootDir": "./src",
    "outDir": "./dist"
  },
  "include": ["./src"]
}
--------------------------------------------------------------------------------
/packages/gcp/README.md:
--------------------------------------------------------------------------------
# Storage Google Cloud Platform

This package exports an implementation of `Storage` that allows using Google
Cloud Platform storage buckets as a generic storage unit.

## Usage

```typescript
import { GcpBucketStorage, GcpBucketStorageOptions } from 'storage-gcp'

const options: GcpBucketStorageOptions = {
  // ...
}
const storage = new GcpBucketStorage(options)
```

Optional: Register the storage with `StorageFactory`.

```typescript
import { getStorageFactory } from 'storage-core'
import { GcpBucketStorage } from 'storage-gcp'

getStorageFactory().register('google-cloud', GcpBucketStorage)

//...

const options: GcpBucketStorageOptions = {
  // ...
}
const storage = getStorageFactory().create({ type: 'google-cloud', ...options })
```
--------------------------------------------------------------------------------
/packages/gcp/package.json:
--------------------------------------------------------------------------------
{
  "name": "storage-gcp",
  "version": "0.7.2",
  "description": "Google cloud storage implementation",
  "author": "Justin Brooks ",
  "homepage": "https://github.com/datatorch/node-storage#readme",
  "license": "MIT",
  "main": "./dist/index.js",
  "types": "./dist/index.d.ts",
  "repository": {
    "type": "git",
    "url": "git+https://github.com/datatorch/node-storage.git"
  },
  "scripts": {
    "build": "tsc",
    "test": "echo \"Error: run tests from root\" && exit 1"
  },
  "bugs": {
    "url": "https://github.com/datatorch/node-storage/issues"
  },
  "files": [
    "dist"
  ],
  "gitHead": "b05db75f50e3a44fd01b8c95d114f3fb5c9801c3",
  "dependencies": {
    "@google-cloud/common": "^3.3.3",
    "@google-cloud/storage": "^5.3.0",
    "storage-core": "^0.7.2"
  },
  "keywords": [
    "node",
    "storage",
    "gcp",
    "files"
  ]
}
--------------------------------------------------------------------------------
/packages/gcp/src/GcpBucketStorage.ts:
--------------------------------------------------------------------------------
import { Storage, FilesTransform, ListResult, PathAbs } from 'storage-core'
import pathModule from 'path'

import {
  Storage as GoogleStorage,
  Bucket as GoogleBucket,
  CreateReadStreamOptions,
  CreateWriteStreamOptions,
  StorageOptions,
  File
} from '@google-cloud/storage'

import { GcpBucketStorageOptions } from './GcpBucketStorageOptions'
import { Readable, Writable } from 'stream'

const formatFile = (f: File) => ({
  name: pathModule.basename(f.name),
  path: f.name,
  size: f.metadata.size,
  md5Hash: f.metadata.md5Hash,
  createdAt: new Date(f.metadata.timeCreated),
  updatedAt: new Date(f.metadata.updated),
  isFile: true,
  raw: f.metadata
})

export class GcpBucketStorage extends Storage<GcpBucketStorageOptions> {
  googleStorage: GoogleStorage
  bucket: GoogleBucket

  constructor(options: GcpBucketStorageOptions) {
    super(options)

    const gcpOptions: StorageOptions = {
      ...options,
      credentials: {
        client_email: options.clientEmail,
        private_key: options.privateKey
      }
    }

    this.googleStorage = new GoogleStorage(gcpOptions)

    const { bucket } = this.options
    this.bucket = this.googleStorage.bucket(bucket)
  }

  @PathAbs()
  getTopLevel(path?: string): Promise<ListResult[]> {
    return new Promise((resolve, reject) => {
      this.bucket.getFiles(
        {
          autoPaginate: false,
          directory: path,
          delimiter: '/'
        },
        (err, files, _, { prefixes } = {}) => {
          if (err) return reject(err)

          const dirs: ListResult[] = (prefixes || []).map((d: string) => ({
            name: pathModule.basename(d),
            path: d.slice(0, -1),
            isFile: false,
            raw: d
          }))
          const f: ListResult[] = (files || []).map(formatFile)

          resolve(dirs.concat(f))
        }
      )
    })
  }

  @PathAbs()
  async getFiles(path?: string): Promise<string[]> {
    const [files] = await this.bucket.getFiles({ directory: path })
    return files.map(f => f.name)
  }

  @PathAbs()
  getFilesStream(path?: string): Readable {
    const trans = new FilesTransform(formatFile)
    return this.bucket.getFilesStream({ directory: path }).pipe(trans)
  }

  @PathAbs()
  async getFileSize(filePath: string): Promise<number> {
    const stat = (await this.bucket.file(filePath).getMetadata()).find(r => r)
    return (stat && stat.size) || 0
  }

  @PathAbs()
  async readFile(filePath: string): Promise<Buffer> {
    const [buffer] = await this.bucket.file(filePath).download()
    return buffer
  }

  @PathAbs()
  async writeFile(filePath: string, data: string | Buffer): Promise<void> {
    await this.bucket.file(filePath).save(data)
  }

  @PathAbs()
  async deleteFile(filePath: string): Promise<void> {
    await this.bucket.file(filePath).delete()
  }

  @PathAbs()
  async createWriteStream(
    filePath: string,
    options?: CreateWriteStreamOptions
  ): Promise<Writable> {
    return this.bucket.file(filePath).createWriteStream(options)
  }

  @PathAbs()
  async createReadStream(
    filePath: string,
    options?: CreateReadStreamOptions
  ): Promise<Readable> {
    const file = this.bucket.file(filePath)
    return file.createReadStream(options)
  }

  async makeDir(_path: string): Promise<void> {}
}
--------------------------------------------------------------------------------
/packages/gcp/src/GcpBucketStorageOptions.ts:
--------------------------------------------------------------------------------
import { StorageOptions } from 'storage-core'

export interface GcpBucketStorageOptions extends StorageOptions {
  bucket: string
  projectId?: string
  clientEmail: string
  privateKey: string
  autoRetry?: boolean
  autoRetries?: number
}
--------------------------------------------------------------------------------
/packages/gcp/src/index.ts:
--------------------------------------------------------------------------------
export { GcpBucketStorage } from './GcpBucketStorage'
export { GcpBucketStorageOptions } from './GcpBucketStorageOptions'
--------------------------------------------------------------------------------
/packages/gcp/tsconfig.json:
--------------------------------------------------------------------------------
{
  "extends": "../../tsconfig.json",
  "compilerOptions": {
    "rootDir": "./src",
    "outDir": "./dist"
  },
  "references": [{ "path": "../core" }],
  "include": ["./src"]
}
--------------------------------------------------------------------------------
/packages/samples/README.md:
--------------------------------------------------------------------------------
# Samples

The `ts` folder contains TypeScript samples.
The `js` folder contains JavaScript samples.
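
The TypeScript samples can be run directly with `ts-node`, which is a
dependency of this package. For example (assuming any environment variables a
sample reads at the top of its config object are set):

```sh
yarn ts-node ts/LocalStorage.ts
```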
--------------------------------------------------------------------------------
/packages/samples/package.json:
--------------------------------------------------------------------------------
{
  "name": "samples",
  "version": "0.7.2",
  "description": "Samples",
  "author": "Justin Brooks ",
  "homepage": "https://github.com/datatorch/node-storage#readme",
  "private": true,
  "repository": {
    "type": "git",
    "url": "git+https://github.com/datatorch/node-storage.git"
  },
  "dependencies": {
    "storage-aws": "^0.7.2",
    "storage-azure": "^0.7.2",
    "storage-core": "^0.7.2",
    "storage-gcp": "^0.7.2",
    "ts-node": "^9.0.0"
  }
}
--------------------------------------------------------------------------------
/packages/samples/ts/AwsS3Storage.ts:
--------------------------------------------------------------------------------
import { AwsS3Storage } from 'storage-aws'
import { getStorageManager, getStorageFactory } from 'storage-core'
/**
 * Samples using AWS S3 Buckets
 * - Read/Write
 * - Read/Write using StorageManager
 * - WriteStream
 */
;(async () => {
  const config = {
    id: 'example-id',
    type: 'aws-s3',
    bucket: process.env.BUCKET || '',
    accessKeyId: process.env.ACCESS_KEY || '',
    secretAccessKey: process.env.SECRET_KEY || ''
  }

  // Create aws s3 storage
  const ls = new AwsS3Storage(config)

  // Write data to test.txt
  await ls.writeFile('aws-write/test.txt', 'using aws storage')
  console.log(await ls.readFile('aws-write/test.txt'))

  // Using storage manager

  // Register aws storage
  getStorageFactory().register('aws-s3', AwsS3Storage)

  // Create will also insert storage into cache
  const ls2 = await getStorageManager().create(config)
  await ls2.writeFile('aws-write/test2.txt', 'using storage create')
  console.log(await ls2.readFile('aws-write/test2.txt'))
  console.log(await ls2.getFileSize('aws-write/test2.txt'))

  // Get storage from cache
  const ls3 = getStorageManager().get('example-id')
  await ls3.writeFile('aws-write/test3.txt', 'using storage manager get')
  console.log(await ls3.readFile('aws-write/test3.txt'))

  const ws = await ls3.createWriteStream('aws-write/stream.txt')

  ws.write('writing\n')
  ws.write('using\n')
  ws.write('a\n')
  ws.write('stream\n')
  ws.end()

  await new Promise<void>(resolve => ws.on('finish', () => resolve()))

  // Read created files from directory
  for await (let path of ls.getFilesStream('aws-write')) {
    console.log(path.name)
  }

  for (let result of await ls.getTopLevel('aws-write')) {
    console.log(`${result.isFile} | ${result.path}`)
  }
})()
--------------------------------------------------------------------------------
/packages/samples/ts/AzureBlobStorage.ts:
--------------------------------------------------------------------------------
import { AzureBlobStorage } from 'storage-azure'

import { getStorageManager, getStorageFactory } from 'storage-core'
/**
 * Samples using Azure Storage
 * - Read/Write
 * - Read/Write using StorageManager
 * - WriteStream
 */
;(async () => {
  const config = {
    directory: '/sub/dir',
    id: 'example-id',
    type: 'azure-blob',
    container: process.env.CONTAINER || '',
    accountName: process.env.ACCOUNT_NAME || '',
    accountKey: process.env.ACCOUNT_KEY || ''
  }

  // Create azure blob storage
  const ls = new AzureBlobStorage(config)

  // Write data to test.txt
  await ls.writeFile('azure-write/test.txt', 'using azure storage')
  console.log(await ls.readFile('azure-write/test.txt'))

  // Using storage manager

  // Register azure storage
  getStorageFactory().register('azure-blob', AzureBlobStorage)

  // Create will also insert storage into cache
  const ls2 = await getStorageManager().create(config)
  await ls2.writeFile('azure-write/test2.txt', 'using storage create')
  console.log(await ls2.readFile('azure-write/test2.txt'))
  console.log(await ls2.getFileSize('azure-write/test2.txt'))

  // Get storage from cache
  const ls3 = getStorageManager().get('example-id')
  await ls3.writeFile('azure-write/test3.txt', 'using storage manager get')
  console.log(await ls3.readFile('azure-write/test3.txt'))

  const ws = await ls3.createWriteStream('azure-write/stream.txt')
  ws.write('writing\n')
  ws.write('using\n')
  ws.write('a\n')
  ws.write('stream\n')
  ws.end()

  // Read created files from directory
  for await (let path of ls.getFilesStream('azure-write')) {
    console.log(path.name)
  }

  for (let result of await ls.getTopLevel('azure-write')) {
    console.log(`${result.isFile} | ${result.path}`)
  }
})()
--------------------------------------------------------------------------------
/packages/samples/ts/GcpBucketStorage.ts:
--------------------------------------------------------------------------------
import { GcpBucketStorage } from 'storage-gcp'
import { getStorageManager, getStorageFactory } from 'storage-core'
/**
 * Samples using Google Cloud buckets
 * - Read/Write
 * - Read/Write using StorageManager
 * - WriteStream
 */
;(async () => {
  const config = {
    id: 'example-id',
    type: 'gcp-bucket',
    bucket: process.env.BUCKET || '',
    projectId: process.env.PROJECT_ID,
    clientEmail: process.env.CLIENT_EMAIL || '',
    privateKey: (process.env.PRIVATE_KEY || '').replace(
      new RegExp('\\\\n', 'g'),
      '\n'
    )
  }

  // Create gcp storage
  const ls = new GcpBucketStorage(config)

  // Write data to test.txt
  await ls.writeFile('gcp-write/test.txt', 'using gcp storage')
  console.log(await ls.readFile('gcp-write/test.txt'))

  // Using storage manager
  // Register gcp storage
  getStorageFactory().register('gcp-bucket', GcpBucketStorage)

  // Create will also insert storage into cache
  const ls2 = await getStorageManager().create(config)
  await ls2.writeFile('gcp-write/test2.txt', 'using storage create')
  console.log(await ls2.readFile('gcp-write/test2.txt'))
  console.log(await ls2.getFileSize('gcp-write/test2.txt'))

  // Get storage from cache
  const ls3 = getStorageManager().get('example-id')
  await ls3.writeFile('gcp-write/test3.txt', 'using storage manager get')
  console.log(await ls3.readFile('gcp-write/test3.txt'))

  const ws = await ls3.createWriteStream('gcp-write/stream.txt')
  ws.write('writing\n')
  ws.write('using\n')
  ws.write('a\n')
  ws.write('stream\n')
  ws.end()

  // Read created files from directory
  for await (let path of ls.getFilesStream('gcp-write')) {
    console.log(path.name)
  }

  for (let result of await ls.getTopLevel('gcp-write')) {
    console.log(`${result.isFile} | ${result.path}`)
  }
})()
--------------------------------------------------------------------------------
/packages/samples/ts/LocalStorage.ts:
--------------------------------------------------------------------------------
import { LocalStorage, getStorageManager } from 'storage-core'
/**
 * Samples using LocalStorage
 * - Read/Write
 * - Read/Write using StorageManager
 * - WriteStream
 */
;(async () => {
  console.log('Buffers will be written to console.')
  // Create local storage
  const ls = new LocalStorage({ path: './data' })
  // Write data to test.txt
  await ls.writeFile('local-write/test.txt', 'using local storage')
  console.log(await ls.readFile('local-write/test.txt'))

  // Using storage manager
  const config = {
    id: 'example-id',
    type: 'local',
    path: './data'
  }

  // Create will also insert storage into cache
  const ls2 = await getStorageManager().create(config)
  await ls2.writeFile('local-write/test2.txt', 'using storage create')
  console.log(await ls2.readFile('local-write/test2.txt'))

  // Get storage from cache
  const ls3 = getStorageManager().get('example-id')
  await ls3.writeFile('local-write/test3.txt', 'using storage manager get')
  console.log(await ls3.readFile('local-write/test3.txt'))

  const ws = await ls3.createWriteStream('local-write/stream.txt')
  ws.write('writing\n')
  ws.write('using\n')
  ws.write('a\n')
  ws.write('stream\n')
  ws.end()

  // Read created files from directory
  for await (let path of ls.getFilesStream('local-write')) {
    console.log(`${path.path} [${path.size}]`)
  }

  for (let result of await ls.getTopLevel('local-write')) {
    console.log(`${result.isFile} | ${result.path}`)
  }
})()
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
{
  "compilerOptions": {
    "composite": true,
    "target": "ES2020",
    "module": "commonjs",
    "outDir": "./dist",
    "declaration": true,
    "declarationMap": true,
    "moduleResolution": "node",

    "strict": true,
    "noImplicitAny": true,
    "strictNullChecks": true,
    "strictFunctionTypes": true,
    "noUnusedLocals": true,
    "noUnusedParameters": true,
    "noImplicitReturns": true,
    "esModuleInterop": true,
    "allowSyntheticDefaultImports": true,
    "noImplicitThis": true,

    "experimentalDecorators": true,
    "emitDecoratorMetadata": true,
    "traceResolution": true,
    "removeComments": false,

    "forceConsistentCasingInFileNames": true,
    "lib": ["es2017", "esnext.asynciterable"],
    "types": ["node"],
    "baseUrl": ".",
    "paths": {
      "*": ["types/*"]
    }
  },
  "exclude": ["node_modules", "dist", "**/*.spec.ts"]
}
--------------------------------------------------------------------------------