├── spec ├── support │ ├── jasmine.json │ ├── helper.js │ ├── jasmine.js │ ├── request.js │ └── server.js ├── integration.spec.js ├── mocks │ └── s3adapter.js └── test.spec.js ├── .releaserc ├── template.hbs ├── footer.hbs ├── header.hbs └── commit.hbs ├── .nycrc ├── config └── default.json ├── .github ├── dependabot.yml └── workflows │ ├── release-automated.yml │ └── ci.yml ├── .gitignore ├── eslint.config.js ├── LICENSE ├── package.json ├── CODE_OF_CONDUCT.md ├── .releaserc.js ├── lib └── optionsFromArguments.js ├── index.js ├── CHANGELOG.md └── README.md /spec/support/jasmine.json: -------------------------------------------------------------------------------- 1 | { 2 | "spec_dir": "spec", 3 | "spec_files": [ 4 | "**/*[sS]pec.js" 5 | ], 6 | "helpers": [ 7 | "support/helper.js", 8 | "support/jasmine.js" 9 | ], 10 | "random": true 11 | } 12 | -------------------------------------------------------------------------------- /.releaserc/template.hbs: -------------------------------------------------------------------------------- 1 | {{> header}} 2 | 3 | {{#each commitGroups}} 4 | 5 | {{#if title}} 6 | ### {{title}} 7 | 8 | {{/if}} 9 | {{#each commits}} 10 | {{> commit root=@root}} 11 | {{/each}} 12 | {{/each}} 13 | 14 | {{> footer}} 15 | -------------------------------------------------------------------------------- /.nycrc: -------------------------------------------------------------------------------- 1 | { 2 | "all": true, 3 | "reporter": [ 4 | "lcov", 5 | "text-summary" 6 | ], 7 | "include": [ 8 | "src/**/*.js", 9 | "lib/**/*.js", 10 | "index.js" 11 | ], 12 | "exclude": [ 13 | "**/spec/**" 14 | ] 15 | } 16 | -------------------------------------------------------------------------------- /.releaserc/footer.hbs: -------------------------------------------------------------------------------- 1 | {{#if noteGroups}} 2 | {{#each noteGroups}} 3 | 4 | ### {{title}} 5 | 6 | {{#each notes}} 7 | * {{#if commit.scope}}**{{commit.scope}}:** {{/if}}{{text}} 
([{{commit.shortHash}}]({{commit.shortHash}})) 8 | {{/each}} 9 | {{/each}} 10 | 11 | {{/if}} 12 | -------------------------------------------------------------------------------- /config/default.json: -------------------------------------------------------------------------------- 1 | { 2 | "accessKey": "accessKey", 3 | "secretKey": "secretKey", 4 | "insufficientOptions": { 5 | "accessKey": "accessKey", 6 | "secretKey": "secretKey" 7 | }, 8 | "bucket": "bucket", 9 | "objectWithBucket": { 10 | "bucket": "bucket" 11 | }, 12 | "emptyObject": {}, 13 | "paramsObjectWBucket": { 14 | "params": { 15 | "Bucket": "bucket" 16 | } 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # Dependabot dependency updates 2 | # Docs: https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates 3 | 4 | version: 2 5 | updates: 6 | - package-ecosystem: "npm" 7 | # Location of package-lock.json 8 | directory: "/" 9 | # Check daily for updates 10 | schedule: 11 | interval: "daily" 12 | commit-message: 13 | # Set commit message prefix 14 | prefix: "refactor" -------------------------------------------------------------------------------- /spec/support/helper.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | const { SpecReporter } = require('jasmine-spec-reporter'); 3 | const { startServer, stopServer, reconfigureServer } = require('./server'); 4 | 5 | // Setup jasmine 6 | jasmine.DEFAULT_TIMEOUT_INTERVAL = process.env.TESTING_TIMEOUT || "360000"; 7 | jasmine.getEnv().addReporter(new SpecReporter()); 8 | 9 | beforeAll(async () => { 10 | await startServer(); 11 | }); 12 | 13 | afterAll(async () => { 14 | await stopServer(); 15 | }); 16 | 17 | beforeEach(async () => { 18 | await reconfigureServer(); 19 | }); 20 | 
-------------------------------------------------------------------------------- /.releaserc/header.hbs: -------------------------------------------------------------------------------- 1 | {{#if isPatch~}} 2 | ## 3 | {{~else~}} 4 | # 5 | {{~/if}} {{#if @root.linkCompare~}} 6 | [{{version}}]( 7 | {{~#if @root.repository~}} 8 | {{~#if @root.host}} 9 | {{~@root.host}}/ 10 | {{~/if}} 11 | {{~#if @root.owner}} 12 | {{~@root.owner}}/ 13 | {{~/if}} 14 | {{~@root.repository}} 15 | {{~else}} 16 | {{~@root.repoUrl}} 17 | {{~/if~}} 18 | /compare/{{previousTag}}...{{currentTag}}) 19 | {{~else}} 20 | {{~version}} 21 | {{~/if}} 22 | {{~#if title}} "{{title}}" 23 | {{~/if}} 24 | {{~#if date}} ({{date}}) 25 | {{/if}} 26 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | 6 | # Runtime data 7 | pids 8 | *.pid 9 | *.seed 10 | 11 | # Directory for instrumented libs generated by jscoverage/JSCover 12 | lib-cov 13 | 14 | # Coverage directory used by tools like istanbul 15 | .nyc_output 16 | coverage 17 | 18 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) 19 | .grunt 20 | 21 | # node-waf configuration 22 | .lock-wscript 23 | 24 | # Compiled binary addons (http://nodejs.org/api/addons.html) 25 | build/Release 26 | 27 | # Dependency directory 28 | node_modules 29 | 30 | # Optional npm cache directory 31 | .npm 32 | 33 | # Optional REPL history 34 | .node_repl_history 35 | 36 | # Webstorm 37 | .idea/ 38 | 39 | # Lint 40 | .eslintcache 41 | 42 | # Parse Server 43 | test_logs -------------------------------------------------------------------------------- /.github/workflows/release-automated.yml: -------------------------------------------------------------------------------- 1 | name: release-automated 2 | on: 3 | push: 4 | branches: [ master, release, alpha, beta ] 5 | jobs: 
6 | release: 7 | runs-on: ubuntu-latest 8 | steps: 9 | - name: Checkout repository 10 | uses: actions/checkout@v4 11 | with: 12 | persist-credentials: false 13 | - name: Setup Node 14 | uses: actions/setup-node@v4 15 | with: 16 | node-version: 20 17 | cache: npm 18 | - name: Install dependencies 19 | run: npm ci 20 | - name: Run semantic-release 21 | run: npx semantic-release 22 | env: 23 | GH_TOKEN: ${{ secrets.RELEASE_GITHUB_TOKEN }} 24 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 25 | NPM_TOKEN: ${{ secrets.NPM_TOKEN }} 26 | -------------------------------------------------------------------------------- /spec/support/jasmine.js: -------------------------------------------------------------------------------- 1 | const semver = require('semver'); 2 | 3 | const satisfiesParseServerVersion = version => { 4 | const envVersion = process.env.PARSE_SERVER_VERSION; 5 | const semverVersion = semver.coerce(envVersion); 6 | console.log(`satisfiesParseServerVersion: envVersion: ${version}`); 7 | console.log(`satisfiesParseServerVersion: version: ${envVersion}`); 8 | return !envVersion || !semverVersion || semver.satisfies(semverVersion, version); 9 | } 10 | 11 | global.it_only_parse_server_version = version => satisfiesParseServerVersion(version) ? it : xit; 12 | global.fit_only_parse_server_version = version => satisfiesParseServerVersion(version) ? fit : xit; 13 | global.describe_only_parse_server_version = version => satisfiesParseServerVersion(version) ? describe : xdescribe; 14 | global.fdescribe_only_parse_server_version = version => satisfiesParseServerVersion(version) ? 
fdescribe : xdescribe; 15 | -------------------------------------------------------------------------------- /spec/integration.spec.js: -------------------------------------------------------------------------------- 1 | const { httpRequest } = require('./support/request'); 2 | 3 | const fileName = 'file.txt'; 4 | const fileData = 'hello world'; 5 | 6 | describe_only_parse_server_version('>=7')('Parse Server >=7 integration test', () => { 7 | it('stores a file', async () => { 8 | const base64 = Buffer.from(fileData).toString('base64'); 9 | const file = new Parse.File(fileName, { base64 }); 10 | await file.save(); 11 | 12 | expect(file).toBeDefined(); 13 | expect(file.url()).toMatch(/file.txt$/); 14 | }); 15 | 16 | it('reads the contents of a file', async () => { 17 | const base64 = Buffer.from(fileData).toString('base64'); 18 | const file = new Parse.File(fileName, { base64 }); 19 | await file.save(); 20 | const fileLink = file.url(); 21 | 22 | const response = await httpRequest(fileLink); 23 | const text = response.toString(); 24 | 25 | expect(text).toBe(fileData); 26 | }); 27 | 28 | it('deletes a file', async () => { 29 | const base64 = Buffer.from(fileData).toString('base64'); 30 | const file = new Parse.File(fileName, { base64 }); 31 | await file.save(); 32 | 33 | const fileLink = file.url(); 34 | await file.destroy(); 35 | 36 | await expectAsync(httpRequest(fileLink)).toBeRejectedWithError( 37 | 'Request failed with status code 404' 38 | ); 39 | }); 40 | }); 41 | -------------------------------------------------------------------------------- /spec/support/request.js: -------------------------------------------------------------------------------- 1 | const http = require('http'); 2 | const https = require('https'); 3 | 4 | /** 5 | * Makes an HTTP or HTTPS request. 6 | * @param {string} url - The URL to request. 7 | * @returns {Promise} - A promise that resolves with the response data or rejects with an error. 
8 | */ 9 | function httpRequest(url) { 10 | return new Promise((resolve, reject) => { 11 | // Determine the appropriate module to use based on the URL protocol 12 | const client = url.startsWith('https') ? https : http; 13 | 14 | // Make the request 15 | client 16 | .get(url, response => { 17 | let data = ''; 18 | 19 | // Collect the data chunks 20 | response.on('data', chunk => { 21 | data += chunk; 22 | }); 23 | 24 | // When the response ends, resolve or reject the promise 25 | response.on('end', () => { 26 | if (response.statusCode && response.statusCode && response.statusCode >= 200 && response.statusCode < 300) { 27 | resolve(data); // Resolve with the collected data 28 | } else { 29 | reject(new Error(`Request failed with status code ${response.statusCode}`)); 30 | } 31 | }); 32 | }) 33 | .on('error', error => { 34 | reject(new Error(`Error making request: ${error.message}`)); // Reject on error 35 | }); 36 | }); 37 | } 38 | 39 | module.exports = { httpRequest }; 40 | -------------------------------------------------------------------------------- /eslint.config.js: -------------------------------------------------------------------------------- 1 | const js = require('@eslint/js'); 2 | const globals = require('globals'); 3 | 4 | module.exports = [ 5 | { 6 | ignores: ['coverage/*'], 7 | }, 8 | js.configs.recommended, 9 | { 10 | languageOptions: { 11 | ecmaVersion: 2020, 12 | sourceType: 'module', 13 | globals: { 14 | ...globals.node, 15 | Parse: 'readonly', 16 | }, 17 | }, 18 | rules: { 19 | indent: ['error', 2, { SwitchCase: 1 }], 20 | 'linebreak-style': ['error', 'unix'], 21 | 'no-trailing-spaces': 2, 22 | 'eol-last': 2, 23 | 'space-in-parens': ['error', 'never'], 24 | 'no-multiple-empty-lines': 1, 25 | 'prefer-const': 'error', 26 | 'space-infix-ops': 'error', 27 | 'no-useless-escape': 'off', 28 | 'require-atomic-updates': 'off', 29 | 'object-curly-spacing': ['error', 'always'], 30 | curly: ['error', 'all'], 31 | 'block-spacing': ['error', 'always'], 32 | 
}, 33 | }, 34 | { 35 | files: ['spec/**/*.js'], 36 | languageOptions: { 37 | globals: { 38 | ...globals.node, 39 | ...globals.jasmine, 40 | Parse: 'readonly', 41 | reconfigureServer: 'readonly', 42 | it_only_parse_server_version: 'readonly', 43 | fit_only_parse_server_version: 'readonly', 44 | describe_only_parse_server_version: 'readonly', 45 | fdescribe_only_parse_server_version: 'readonly', 46 | }, 47 | }, 48 | rules: { 49 | 'no-console': 0, 50 | 'no-var': 'error', 51 | }, 52 | }, 53 | ]; 54 | -------------------------------------------------------------------------------- /.releaserc/commit.hbs: -------------------------------------------------------------------------------- 1 | *{{#if scope}} **{{scope}}:** 2 | {{~/if}} {{#if subject}} 3 | {{~subject}} 4 | {{~else}} 5 | {{~header}} 6 | {{~/if}} 7 | 8 | {{~!-- commit link --}} {{#if @root.linkReferences~}} 9 | ([{{shortHash}}]( 10 | {{~#if @root.repository}} 11 | {{~#if @root.host}} 12 | {{~@root.host}}/ 13 | {{~/if}} 14 | {{~#if @root.owner}} 15 | {{~@root.owner}}/ 16 | {{~/if}} 17 | {{~@root.repository}} 18 | {{~else}} 19 | {{~@root.repoUrl}} 20 | {{~/if}}/ 21 | {{~@root.commit}}/{{hash}})) 22 | {{~else}} 23 | {{~shortHash}} 24 | {{~/if}} 25 | 26 | {{~!-- commit references --}} 27 | {{~#if references~}} 28 | , closes 29 | {{~#each references}} {{#if @root.linkReferences~}} 30 | [ 31 | {{~#if this.owner}} 32 | {{~this.owner}}/ 33 | {{~/if}} 34 | {{~this.repository}}#{{this.issue}}]( 35 | {{~#if @root.repository}} 36 | {{~#if @root.host}} 37 | {{~@root.host}}/ 38 | {{~/if}} 39 | {{~#if this.repository}} 40 | {{~#if this.owner}} 41 | {{~this.owner}}/ 42 | {{~/if}} 43 | {{~this.repository}} 44 | {{~else}} 45 | {{~#if @root.owner}} 46 | {{~@root.owner}}/ 47 | {{~/if}} 48 | {{~@root.repository}} 49 | {{~/if}} 50 | {{~else}} 51 | {{~@root.repoUrl}} 52 | {{~/if}}/ 53 | {{~@root.issue}}/{{this.issue}}) 54 | {{~else}} 55 | {{~#if this.owner}} 56 | {{~this.owner}}/ 57 | {{~/if}} 58 | {{~this.repository}}#{{this.issue}} 
59 | {{~/if}}{{/each}} 60 | {{~/if}} 61 | 62 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD License 2 | 3 | For Parse Server software 4 | 5 | Copyright (c) 2015-present, Parse, LLC. All rights reserved. 6 | 7 | Redistribution and use in source and binary forms, with or without modification, 8 | are permitted provided that the following conditions are met: 9 | 10 | * Redistributions of source code must retain the above copyright notice, this 11 | list of conditions and the following disclaimer. 12 | 13 | * Redistributions in binary form must reproduce the above copyright notice, 14 | this list of conditions and the following disclaimer in the documentation 15 | and/or other materials provided with the distribution. 16 | 17 | * Neither the name Parse nor the names of its contributors may be used to 18 | endorse or promote products derived from this software without specific 19 | prior written permission. 20 | 21 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 22 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 23 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 24 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR 25 | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 26 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 27 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON 28 | ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 29 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 30 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
31 | 32 | ----- 33 | 34 | As of April 5, 2017, Parse, LLC has transferred this code to the parse-community organization, and will no longer be contributing to or distributing this code. 35 | -------------------------------------------------------------------------------- /spec/mocks/s3adapter.js: -------------------------------------------------------------------------------- 1 | const { Readable } = require('stream'); 2 | const S3Adapter = require('../../index.js'); 3 | const { GetObjectCommand, PutObjectCommand, DeleteObjectCommand } = require('@aws-sdk/client-s3'); 4 | 5 | function getMockS3Adapter(options) { 6 | const accessKey = process.env.TEST_S3_ACCESS_KEY || 'ACCESS_KEY'; 7 | const secretKey = process.env.TEST_S3_SECRET_KEY || 'SECRET_KEY'; 8 | const bucket = process.env.TEST_S3_BUCKET || 'BUCKET'; 9 | const region = process.env.TEST_S3_REGION || 'us-east-1'; 10 | 11 | const s3 = new S3Adapter(Object.assign({ 12 | accessKey, 13 | secretKey, 14 | bucket, 15 | }, options)); 16 | 17 | const objects = {}; 18 | 19 | s3._s3Client = { 20 | // @ts-ignore 21 | send: command => { 22 | if (command instanceof PutObjectCommand) { 23 | const { Key, Body } = command.input; 24 | objects[Key] = Body; 25 | return Promise.resolve({ Location: `https://${bucket}.s3.${region}.amazonaws.com/${Key}` }); 26 | } 27 | 28 | if (command instanceof DeleteObjectCommand) { 29 | const { Key } = command.input; 30 | delete objects[Key]; 31 | return Promise.resolve({}); 32 | } 33 | 34 | if (command instanceof GetObjectCommand) { 35 | const { Key } = command.input; 36 | 37 | if (objects[Key]) { 38 | const stream = new Readable(); 39 | stream.push('hello world'); 40 | // End of stream 41 | stream.push(null); 42 | return { 43 | Body: stream, 44 | AcceptRanges: 'bytes', 45 | ContentLength: 36, 46 | ContentRange: 'bytes 0-35/36', 47 | ContentType: 'text/plain', 48 | }; 49 | } else { 50 | return Promise.reject(new Error('Not found')); 51 | } 52 | } 53 | return Promise.resolve(); 54 | }, 55 | }; 56 
| 57 | return s3; 58 | } 59 | 60 | module.exports = { getMockS3Adapter }; 61 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@parse/s3-files-adapter", 3 | "version": "4.2.0", 4 | "description": "AWS S3 adapter for parse-server", 5 | "main": "index.js", 6 | "repository": { 7 | "type": "git", 8 | "url": "git+https://github.com/parse-community/parse-server-s3-adapter.git" 9 | }, 10 | "keywords": [ 11 | "parse-server", 12 | "AWS", 13 | "S3" 14 | ], 15 | "author": "Parse", 16 | "license": "ISC", 17 | "bugs": { 18 | "url": "https://github.com/parse-community/parse-server-s3-adapter/issues" 19 | }, 20 | "homepage": "https://github.com/parse-community/parse-server-s3-adapter#readme", 21 | "dependencies": { 22 | "@aws-sdk/client-s3": "3.858.0", 23 | "@aws-sdk/s3-request-presigner": "3.952.0" 24 | }, 25 | "devDependencies": { 26 | "@semantic-release/changelog": "6.0.3", 27 | "@semantic-release/commit-analyzer": "13.0.1", 28 | "@semantic-release/git": "10.0.1", 29 | "@semantic-release/github": "12.0.2", 30 | "@semantic-release/npm": "13.1.2", 31 | "@semantic-release/release-notes-generator": "14.1.0", 32 | "config": "4.1.1", 33 | "cross-env": "10.1.0", 34 | "eslint": "9.39.2", 35 | "express": "5.2.1", 36 | "jasmine": "5.13.0", 37 | "jasmine-spec-reporter": "7.0.0", 38 | "mongodb-runner": "6.4.0", 39 | "nyc": "17.1.0", 40 | "parse": "8.0.0", 41 | "parse-server": "9.1.0", 42 | "rewire": "7.0.0", 43 | "semantic-release": "25.0.2", 44 | "semver": "7.7.3" 45 | }, 46 | "scripts": { 47 | "lint": "eslint --cache ./", 48 | "lint:fix": "eslint --fix --cache ./", 49 | "pretest": "npm run test:mongodb:runnerstart", 50 | "posttest": "npm run test:mongodb:runnerstop", 51 | "test": "npm run test:only", 52 | "test:only": "TESTING=1 nyc jasmine", 53 | "test:mongodb:runnerstart": "mongodb-runner start -t standalone -- --port 27017", 54 | 
"test:mongodb:runnerstop": "mongodb-runner stop --all" 55 | }, 56 | "engines": { 57 | "node": ">=18.20.5 <19.0.0 || >=20.18.1 <21.0.0 || >=22.12.0 <23.0.0" 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: ci 2 | on: 3 | push: 4 | branches: 5 | - master 6 | pull_request: 7 | branches: 8 | - '**' 9 | jobs: 10 | lint: 11 | name: Lint 12 | timeout-minutes: 15 13 | runs-on: ubuntu-latest 14 | steps: 15 | - uses: actions/checkout@v4 16 | - name: Use Node.js 17 | uses: actions/setup-node@v4 18 | with: 19 | cache: npm 20 | - name: Install dependencies 21 | run: npm ci 22 | - name: Lint 23 | run: npm run lint 24 | test: 25 | strategy: 26 | matrix: 27 | include: 28 | - name: Parse Server 8, Node.js 18 29 | NODE_VERSION: 18.20.4 30 | PARSE_SERVER_VERSION: 8 31 | - name: Parse Server 8, Node.js 20 32 | NODE_VERSION: 20.15.1 33 | PARSE_SERVER_VERSION: 8 34 | - name: Parse Server 8, Node.js 22 35 | NODE_VERSION: 22.4.1 36 | PARSE_SERVER_VERSION: 8 37 | - name: Parse Server 7, Node.js 18 38 | NODE_VERSION: 18.20.4 39 | PARSE_SERVER_VERSION: 7 40 | - name: Parse Server 7, Node.js 20 41 | NODE_VERSION: 20.15.1 42 | PARSE_SERVER_VERSION: 7 43 | - name: Parse Server 7, Node.js 22 44 | NODE_VERSION: 22.4.1 45 | PARSE_SERVER_VERSION: 7 46 | fail-fast: false 47 | name: ${{ matrix.name }} 48 | timeout-minutes: 15 49 | runs-on: ubuntu-latest 50 | env: 51 | NODE_VERSION: ${{ matrix.NODE_VERSION }} 52 | PARSE_SERVER_VERSION: ${{ matrix.PARSE_SERVER_VERSION }} 53 | steps: 54 | - uses: actions/checkout@v4 55 | - name: Use Node.js ${{ matrix.NODE_VERSION }} 56 | uses: actions/setup-node@v4 57 | with: 58 | node-version: ${{ matrix.NODE_VERSION }} 59 | cache: npm 60 | - name: Install Parse Server ${{ matrix.PARSE_SERVER_VERSION }} 61 | run: npm i -DE parse-server@${{ matrix.PARSE_SERVER_VERSION }} 62 | - name: Install 
dependencies 63 | run: npm ci 64 | - name: Run tests 65 | run: npm run test 66 | - name: Upload code coverage 67 | uses: codecov/codecov-action@v4 68 | with: 69 | fail_ci_if_error: false 70 | token: ${{ secrets.CODECOV_TOKEN }} 71 | env: 72 | CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} 73 | 74 | concurrency: 75 | group: ${{ github.workflow }}-${{ github.ref }} 76 | cancel-in-progress: true 77 | -------------------------------------------------------------------------------- /spec/support/server.js: -------------------------------------------------------------------------------- 1 | const { ParseServer } = require('parse-server'); 2 | const express = require('express'); 3 | const http = require('http'); 4 | const { getMockS3Adapter } = require('../mocks/s3adapter.js'); 5 | const Config = require('../../node_modules/parse-server/lib/Config.js'); 6 | 7 | const expressApp = express(); 8 | const S3Adapter = getMockS3Adapter(); 9 | 10 | let serverState = {}; 11 | 12 | const defaultConfig = { 13 | databaseURI: 'mongodb://127.0.0.1:27017/s3-adapter', 14 | appId: 'test', 15 | masterKey: 'test', 16 | serverURL: 'http://127.0.0.1:1327/api/parse', 17 | port: 1327, 18 | mountPath: '/api/parse', 19 | verbose: false, 20 | silent: true, 21 | fileUpload: { 22 | enableForPublic: true, 23 | enableForAnonymousUser: true, 24 | enableForAuthenticatedUser: true, 25 | }, 26 | filesAdapter: S3Adapter, 27 | verifyUserEmails: false, 28 | }; 29 | 30 | async function startServer(config = {}) { 31 | if (!process.env.TESTING) { 32 | throw 'requires test environment to run'; 33 | } 34 | 35 | // Compose server config 36 | const serverConfig = Object.assign({}, config, defaultConfig); 37 | 38 | // Launch parse server 39 | const parseServer = ParseServer(serverConfig); 40 | await parseServer.start(); 41 | expressApp.use(serverConfig.mountPath, parseServer.app); 42 | 43 | // Launch http server 44 | const httpServer = http.createServer(expressApp); 45 | await new Promise((resolve, reject) => { 46 | 
httpServer.listen(serverConfig.port) 47 | .once('listening', resolve) 48 | .once('error', e => reject(e)); 49 | }).catch(e => { 50 | console.log(`parse-server failed to launch with error: ${e}`); 51 | }); 52 | 53 | // Update server state 54 | Object.assign(serverState, { 55 | parseServer, 56 | httpServer, 57 | serverConfig, 58 | }); 59 | } 60 | 61 | async function stopServer() { 62 | if (!process.env.TESTING) { 63 | throw 'requires test environment to run'; 64 | } 65 | 66 | // Clear database 67 | await Parse.User.logOut(); 68 | const app = Config.get(defaultConfig.appId); 69 | await app?.database.deleteEverything(true); 70 | 71 | // Stop server 72 | const { httpServer } = serverState; 73 | await new Promise(resolve => httpServer.close(resolve)); 74 | serverState = {}; 75 | } 76 | 77 | async function reconfigureServer(config = {}) { 78 | await stopServer(); 79 | return await startServer(config); 80 | } 81 | 82 | module.exports = { 83 | reconfigureServer, 84 | startServer, 85 | stopServer, 86 | }; 87 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | In the interest of fostering an open and welcoming environment, we as 6 | contributors and maintainers pledge to making participation in our project and 7 | our community a harassment-free experience for everyone, regardless of age, body 8 | size, disability, ethnicity, sex characteristics, gender identity and expression, 9 | level of experience, education, socio-economic status, nationality, personal 10 | appearance, race, religion, or sexual identity and orientation. 
11 | 12 | ## Our Standards 13 | 14 | Examples of behavior that contributes to creating a positive environment 15 | include: 16 | 17 | * Using welcoming and inclusive language 18 | * Being respectful of differing viewpoints and experiences 19 | * Gracefully accepting constructive criticism 20 | * Focusing on what is best for the community 21 | * Showing empathy towards other community members 22 | 23 | Examples of unacceptable behavior by participants include: 24 | 25 | * The use of sexualized language or imagery and unwelcome sexual attention or 26 | advances 27 | * Trolling, insulting/derogatory comments, and personal or political attacks 28 | * Public or private harassment 29 | * Publishing others' private information, such as a physical or electronic 30 | address, without explicit permission 31 | * Other conduct which could reasonably be considered inappropriate in a 32 | professional setting 33 | 34 | ## Our Responsibilities 35 | 36 | Project maintainers are responsible for clarifying the standards of acceptable 37 | behavior and are expected to take appropriate and fair corrective action in 38 | response to any instances of unacceptable behavior. 39 | 40 | Project maintainers have the right and responsibility to remove, edit, or 41 | reject comments, commits, code, wiki edits, issues, and other contributions 42 | that are not aligned to this Code of Conduct, or to ban temporarily or 43 | permanently any contributor for other behaviors that they deem inappropriate, 44 | threatening, offensive, or harmful. 45 | 46 | ## Scope 47 | 48 | This Code of Conduct applies both within project spaces and in public spaces 49 | when an individual is representing the project or its community. Examples of 50 | representing a project or community include using an official project e-mail 51 | address, posting via an official social media account, or acting as an appointed 52 | representative at an online or offline event. 
Representation of a project may be 53 | further defined and clarified by project maintainers. 54 | 55 | ## Enforcement 56 | 57 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 58 | reported by contacting the project team at codeofconduct@parseplatform.org. All 59 | complaints will be reviewed and investigated and will result in a response that 60 | is deemed necessary and appropriate to the circumstances. The project team is 61 | obligated to maintain confidentiality with regard to the reporter of an incident. 62 | Further details of specific enforcement policies may be posted separately. 63 | 64 | Project maintainers who do not follow or enforce the Code of Conduct in good 65 | faith may face temporary or permanent repercussions as determined by other 66 | members of the project's leadership. 67 | 68 | ## Attribution 69 | 70 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, 71 | available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html 72 | 73 | [homepage]: https://www.contributor-covenant.org 74 | 75 | For answers to common questions about this code of conduct, see 76 | https://www.contributor-covenant.org/faq 77 | -------------------------------------------------------------------------------- /.releaserc.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Semantic Release Config 3 | */ 4 | 5 | const { readFile } = require('fs').promises; 6 | const { resolve } = require('path'); 7 | 8 | // For ES6 modules use: 9 | // import { readFile } from 'fs/promises'; 10 | // import { resolve, dirname } from 'path'; 11 | // import { fileURLToPath } from 'url'; 12 | 13 | // Get env vars 14 | const ref = process.env.GITHUB_REF; 15 | const serverUrl = process.env.GITHUB_SERVER_URL; 16 | const repository = process.env.GITHUB_REPOSITORY; 17 | const repositoryUrl = serverUrl + '/' + repository; 18 | 19 | // Declare params 20 | const 
resourcePath = './.releaserc/'; 21 | const templates = { 22 | main: { file: 'template.hbs', text: undefined }, 23 | header: { file: 'header.hbs', text: undefined }, 24 | commit: { file: 'commit.hbs', text: undefined }, 25 | footer: { file: 'footer.hbs', text: undefined }, 26 | }; 27 | 28 | // Declare semantic config 29 | async function config() { 30 | 31 | // Get branch 32 | const branch = ref?.split('/')?.pop()?.split('-')[0] || '(current branch could not be determined)'; 33 | console.log(`Running on branch: ${branch}`); 34 | 35 | // Set changelog file 36 | // const changelogFile = `./changelogs/CHANGELOG_${branch}.md`; 37 | const changelogFile = `./CHANGELOG.md`; 38 | console.log(`Changelog file output to: ${changelogFile}`); 39 | 40 | // Load template file contents 41 | await loadTemplates(); 42 | 43 | const config = { 44 | branches: [ 45 | 'master', 46 | 'main', 47 | 'release', 48 | { name: 'alpha', prerelease: true }, 49 | { name: 'beta', prerelease: true }, 50 | ], 51 | dryRun: false, 52 | debug: true, 53 | ci: true, 54 | tagFormat: '${version}', 55 | plugins: [ 56 | ['@semantic-release/commit-analyzer', { 57 | preset: 'angular', 58 | releaseRules: [ 59 | { type: 'docs', scope: 'README', release: 'patch' }, 60 | { scope: 'no-release', release: false }, 61 | ], 62 | parserOpts: { 63 | noteKeywords: ['BREAKING CHANGE'], 64 | }, 65 | }], 66 | ['@semantic-release/release-notes-generator', { 67 | preset: 'angular', 68 | parserOpts: { 69 | noteKeywords: ['BREAKING CHANGE'] 70 | }, 71 | writerOpts: { 72 | commitsSort: ['subject', 'scope'], 73 | mainTemplate: templates.main.text, 74 | headerPartial: templates.header.text, 75 | commitPartial: templates.commit.text, 76 | footerPartial: templates.footer.text, 77 | }, 78 | }], 79 | ['@semantic-release/changelog', { 80 | 'changelogFile': changelogFile, 81 | }], 82 | ['@semantic-release/npm', { 83 | 'npmPublish': true, 84 | }], 85 | ['@semantic-release/git', { 86 | assets: [changelogFile, 'package.json', 
'package-lock.json', 'npm-shrinkwrap.json'], 87 | }], 88 | ['@semantic-release/github', { 89 | successComment: getReleaseComment(), 90 | labels: ['type:ci'], 91 | releasedLabels: ['state:released<%= nextRelease.channel ? `-\${nextRelease.channel}` : "" %>'] 92 | }], 93 | ], 94 | }; 95 | 96 | return config; 97 | } 98 | 99 | async function loadTemplates() { 100 | for (const template of Object.keys(templates)) { 101 | 102 | // For ES6 modules use: 103 | // const fileUrl = import.meta.url; 104 | // const __dirname = dirname(fileURLToPath(fileUrl)); 105 | 106 | const filePath = resolve(__dirname, resourcePath, templates[template].file); 107 | const text = await readFile(filePath, 'utf-8'); 108 | templates[template].text = text; 109 | } 110 | } 111 | 112 | function getReleaseComment() { 113 | const url = repositoryUrl + '/releases/tag/${nextRelease.gitTag}'; 114 | const comment = '🎉 This change has been released in version [${nextRelease.version}](' + url + ')'; 115 | return comment; 116 | } 117 | 118 | module.exports = config(); 119 | -------------------------------------------------------------------------------- /lib/optionsFromArguments.js: -------------------------------------------------------------------------------- 1 | const DEFAULT_S3_REGION = 'us-east-1'; 2 | 3 | function requiredOrFromEnvironment(options, key, env) { 4 | const opts = Object.assign({}, options); 5 | opts[key] = options[key] || process.env[env]; 6 | if (!opts[key]) { 7 | throw new Error(`S3Adapter requires option '${key}' or env. 
variable ${env}`); 8 | } 9 | return opts; 10 | } 11 | 12 | function fromEnvironmentOrDefault(options, key, env, defaultValue) { 13 | const opts = Object.assign({}, options); 14 | opts[key] = options[key] || process.env[env] || defaultValue; 15 | // If we used the overrides, 16 | // make sure they take priority 17 | if (opts.s3overrides) { 18 | if (opts.s3overrides[key]) { 19 | opts[key] = opts.s3overrides[key]; 20 | } else if (opts.s3overrides.params && opts.s3overrides.params.Bucket) { 21 | opts.bucket = opts.s3overrides.params.Bucket; 22 | } 23 | } 24 | return opts; 25 | } 26 | 27 | function fromOptionsDictionaryOrDefault(options, key, defaultValue) { 28 | const opts = Object.assign({}, options); 29 | opts[key] = options[key] || defaultValue; 30 | return opts; 31 | } 32 | 33 | const optionsFromArguments = function optionsFromArguments(args) { 34 | const stringOrOptions = args[0]; 35 | let options = {}; 36 | let s3overrides = {}; 37 | let otherOptions; 38 | 39 | if (typeof stringOrOptions === 'string') { 40 | if (args.length === 1) { 41 | options.bucket = stringOrOptions; 42 | } else if (args.length === 2) { 43 | options.bucket = stringOrOptions; 44 | if (typeof args[1] !== 'object') { 45 | throw new Error("Failed to configure S3Adapter. Arguments don't make sense"); 46 | } 47 | otherOptions = args[1]; 48 | } else if (args.length > 2) { 49 | if (typeof args[1] !== 'string' || typeof args[2] !== 'string') { 50 | throw new Error("Failed to configure S3Adapter. 
Arguments don't make sense"); 51 | } 52 | options.accessKey = args[0]; 53 | options.secretKey = args[1]; 54 | options.bucket = args[2]; 55 | otherOptions = args[3]; 56 | } 57 | 58 | if (otherOptions) { 59 | options.bucketPrefix = otherOptions.bucketPrefix; 60 | options.credentials = otherOptions.credentials; 61 | options.directAccess = otherOptions.directAccess; 62 | options.fileAcl = otherOptions.fileAcl; 63 | options.baseUrl = otherOptions.baseUrl; 64 | options.baseUrlDirect = otherOptions.baseUrlDirect; 65 | options.signatureVersion = otherOptions.signatureVersion; 66 | options.globalCacheControl = otherOptions.globalCacheControl; 67 | options.presignedUrl = otherOptions.presignedUrl; 68 | options.presignedUrlExpires = otherOptions.presignedUrlExpires; 69 | options.ServerSideEncryption = otherOptions.ServerSideEncryption; 70 | options.generateKey = otherOptions.generateKey; 71 | options.validateFilename = otherOptions.validateFilename; 72 | s3overrides = otherOptions.s3overrides; 73 | } 74 | } else if (args.length === 1) { 75 | Object.assign(options, stringOrOptions); 76 | } else if (args.length === 2) { 77 | Object.assign(options, stringOrOptions); 78 | s3overrides = args[1]; 79 | 80 | if (s3overrides.params) { 81 | options.bucket = s3overrides.params.Bucket; 82 | } 83 | } else if (args.length > 2) { 84 | throw new Error("Failed to configure S3Adapter. 
Arguments don't make sense"); 85 | } 86 | 87 | options = fromOptionsDictionaryOrDefault(options, 's3overrides', s3overrides); 88 | options = requiredOrFromEnvironment(options, 'bucket', 'S3_BUCKET'); 89 | options = fromEnvironmentOrDefault(options, 'accessKey', 'S3_ACCESS_KEY', null); 90 | options = fromEnvironmentOrDefault(options, 'secretKey', 'S3_SECRET_KEY', null); 91 | options = fromEnvironmentOrDefault(options, 'bucketPrefix', 'S3_BUCKET_PREFIX', ''); 92 | options = fromEnvironmentOrDefault(options, 'region', 'S3_REGION', DEFAULT_S3_REGION); 93 | options = fromEnvironmentOrDefault(options, 'directAccess', 'S3_DIRECT_ACCESS', false); 94 | options = fromEnvironmentOrDefault(options, 'fileAcl', 'S3_FILE_ACL', null); 95 | options = fromEnvironmentOrDefault(options, 'baseUrl', 'S3_BASE_URL', null); 96 | options = fromEnvironmentOrDefault(options, 'baseUrlDirect', 'S3_BASE_URL_DIRECT', false); 97 | options = fromEnvironmentOrDefault(options, 'signatureVersion', 'S3_SIGNATURE_VERSION', 'v4'); 98 | options = fromEnvironmentOrDefault(options, 'globalCacheControl', 'S3_GLOBAL_CACHE_CONTROL', null); 99 | options = fromEnvironmentOrDefault(options, 'presignedUrl', 'S3_PRESIGNED_URL', false); 100 | options = fromEnvironmentOrDefault(options, 'presignedUrlExpires', 'S3_PRESIGNED_URL_EXPIRES', null); 101 | options = fromOptionsDictionaryOrDefault(options, 'generateKey', null); 102 | options = fromOptionsDictionaryOrDefault(options, 'validateFilename', null); 103 | 104 | return options; 105 | }; 106 | 107 | module.exports = optionsFromArguments; 108 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | // S3Adapter 2 | // 3 | // Stores Parse files in AWS S3. 
4 | 5 | const { 6 | S3Client, 7 | CreateBucketCommand, 8 | PutObjectCommand, 9 | DeleteObjectCommand, 10 | GetObjectCommand, 11 | HeadBucketCommand, 12 | } = require('@aws-sdk/client-s3'); 13 | const { getSignedUrl } = require('@aws-sdk/s3-request-presigner'); 14 | const optionsFromArguments = require('./lib/optionsFromArguments'); 15 | 16 | const awsCredentialsDeprecationNotice = function awsCredentialsDeprecationNotice() { 17 | // eslint-disable-next-line no-console 18 | console.warn( 19 | 'Passing AWS credentials to this adapter is now DEPRECATED and will be removed in a future version', 20 | 'See: https://github.com/parse-server-modules/parse-server-s3-adapter#aws-credentials for details' 21 | ); 22 | }; 23 | 24 | const serialize = obj => { 25 | const str = []; 26 | Object.keys(obj).forEach(key => { 27 | if (obj[key]) { 28 | str.push(`${encodeURIComponent(key)}=${encodeURIComponent(obj[key])}`); 29 | } 30 | }); 31 | return str.join('&'); 32 | }; 33 | 34 | async function buildDirectAccessUrl(baseUrl, baseUrlFileKey, presignedUrl, config, filename) { 35 | let urlBase; 36 | if (typeof baseUrl === 'function') { 37 | urlBase = await baseUrl(config, filename); 38 | } else { 39 | urlBase = baseUrl; 40 | } 41 | let directAccessUrl = `${urlBase}/${baseUrlFileKey}`; 42 | 43 | if (presignedUrl) { 44 | directAccessUrl += presignedUrl.substring(presignedUrl.indexOf('?')); 45 | } 46 | 47 | return directAccessUrl; 48 | } 49 | 50 | function responseToBuffer(response) { 51 | return new Promise((resolve, reject) => { 52 | const chunks = []; 53 | response.Body.on('data', chunk => chunks.push(chunk)); 54 | response.Body.on('end', () => resolve(Buffer.concat(chunks))); 55 | response.Body.on('error', reject); 56 | }); 57 | } 58 | 59 | class S3Adapter { 60 | // Creates an S3 session. 
61 | // Providing AWS access, secret keys and bucket are mandatory 62 | // Region will use sane defaults if omitted 63 | constructor(...args) { 64 | const options = optionsFromArguments(args); 65 | this._region = options.region; 66 | this._bucket = options.bucket; 67 | this._bucketPrefix = options.bucketPrefix; 68 | this._directAccess = options.directAccess; 69 | this._fileAcl = options.fileAcl; 70 | this._baseUrl = options.baseUrl; 71 | this._baseUrlDirect = options.baseUrlDirect; 72 | this._signatureVersion = options.signatureVersion; 73 | this._globalCacheControl = options.globalCacheControl; 74 | this._presignedUrl = options.presignedUrl; 75 | this._presignedUrlExpires = parseInt(options.presignedUrlExpires, 10); 76 | this._encryption = options.ServerSideEncryption; 77 | this._generateKey = options.generateKey; 78 | this._endpoint = options.s3overrides?.endpoint; 79 | // Optional FilesAdaptor method 80 | this.validateFilename = options.validateFilename; 81 | 82 | const s3Options = { 83 | params: { Bucket: this._bucket }, 84 | region: this._region, 85 | signatureVersion: this._signatureVersion, 86 | globalCacheControl: this._globalCacheControl, 87 | }; 88 | 89 | if (options.accessKey && options.secretKey) { 90 | awsCredentialsDeprecationNotice(); 91 | s3Options.credentials = { 92 | accessKeyId: options.accessKey, 93 | secretAccessKey: options.secretKey, 94 | }; 95 | } else if (options.credentials) { 96 | s3Options.credentials = options.credentials; 97 | } 98 | 99 | if (options.accessKey && options.secretKey) { 100 | awsCredentialsDeprecationNotice(); 101 | s3Options.accessKeyId = options.accessKey; 102 | s3Options.secretAccessKey = options.secretKey; 103 | } 104 | 105 | Object.assign(s3Options, options.s3overrides); 106 | 107 | this._s3Client = new S3Client(s3Options); 108 | this._hasBucket = false; 109 | } 110 | 111 | async createBucket() { 112 | if (this._hasBucket) { 113 | return; 114 | } 115 | 116 | try { 117 | // Check if the bucket exists 118 | await 
this._s3Client.send(new HeadBucketCommand({ Bucket: this._bucket })); 119 | this._hasBucket = true; 120 | } catch (error) { 121 | if (error.name !== 'NotFound') { 122 | // If the error is something other than "NotFound", rethrow it 123 | throw error; 124 | } 125 | 126 | // If the bucket does not exist, attempt to create it 127 | try { 128 | await this._s3Client.send(new CreateBucketCommand({ Bucket: this._bucket })); 129 | this._hasBucket = true; 130 | } catch (creationError) { 131 | // Handle specific errors during bucket creation 132 | if (creationError.name === 'BucketAlreadyExists' || creationError.name === 'BucketAlreadyOwnedByYou') { 133 | this._hasBucket = true; 134 | } else { 135 | throw creationError; 136 | } 137 | } 138 | } 139 | } 140 | 141 | // For a given config object, filename, and data, store a file in S3 142 | // Returns a promise containing the S3 object creation response 143 | async createFile(filename, data, contentType, options = {}) { 144 | const params = { 145 | Bucket: this._bucket, 146 | Key: this._bucketPrefix + filename, 147 | Body: data, 148 | }; 149 | 150 | if (this._generateKey instanceof Function) { 151 | params.Key = this._bucketPrefix + this._generateKey(filename); 152 | } 153 | if (this._fileAcl) { 154 | if (this._fileAcl === 'none') { 155 | delete params.ACL; 156 | } else { 157 | params.ACL = this._fileAcl; 158 | } 159 | } else if (this._directAccess) { 160 | params.ACL = 'public-read'; 161 | } 162 | if (contentType) { 163 | params.ContentType = contentType; 164 | } 165 | if (this._globalCacheControl) { 166 | params.CacheControl = this._globalCacheControl; 167 | } 168 | if (this._encryption === 'AES256' || this._encryption === 'aws:kms') { 169 | params.ServerSideEncryption = this._encryption; 170 | } 171 | if (options.metadata && typeof options.metadata === 'object') { 172 | params.Metadata = options.metadata; 173 | } 174 | if (options.tags && typeof options.tags === 'object') { 175 | const serializedTags = 
serialize(options.tags); 176 | params.Tagging = serializedTags; 177 | } 178 | await this.createBucket(); 179 | const command = new PutObjectCommand(params); 180 | const response = await this._s3Client.send(command); 181 | const endpoint = this._endpoint || `https://${this._bucket}.s3.${this._region}.amazonaws.com`; 182 | const location = `${endpoint}/${params.Key}`; 183 | 184 | return Object.assign(response || {}, { Location: location }); 185 | } 186 | 187 | async deleteFile(filename) { 188 | const params = { 189 | Bucket: this._bucket, 190 | Key: this._bucketPrefix + filename, 191 | }; 192 | await this.createBucket(); 193 | const command = new DeleteObjectCommand(params); 194 | const response = await this._s3Client.send(command); 195 | return response; 196 | } 197 | 198 | // Search for and return a file if found by filename 199 | // Returns a promise that succeeds with the buffer result from S3 200 | async getFileData(filename) { 201 | const params = { 202 | Bucket: this._bucket, 203 | Key: this._bucketPrefix + filename, 204 | }; 205 | await this.createBucket(); 206 | const command = new GetObjectCommand(params); 207 | const response = await this._s3Client.send(command); 208 | if (response && !response.Body) { 209 | throw new Error(response); 210 | } 211 | 212 | const buffer = await responseToBuffer(response); 213 | return buffer; 214 | } 215 | 216 | // Exposed only for testing purposes 217 | getFileSignedUrl(client, command, options) { 218 | return getSignedUrl(client, command, options); 219 | } 220 | 221 | // Generates and returns the location of a file stored in S3 for the given request and filename 222 | // The location is the direct S3 link if the option is set, 223 | // otherwise we serve the file through parse-server 224 | async getFileLocation(config, filename) { 225 | const fileName = filename.split('/').map(encodeURIComponent).join('/'); 226 | if (!this._directAccess) { 227 | return `${config.mount}/files/${config.applicationId}/${fileName}`; 228 | } 229 
| 230 | const fileKey = `${this._bucketPrefix}${fileName}`; 231 | 232 | let presignedUrl = ''; 233 | if (this._presignedUrl) { 234 | const params = { Bucket: this._bucket, Key: fileKey }; 235 | const options = this._presignedUrlExpires ? { expiresIn: this._presignedUrlExpires } : {}; 236 | 237 | const command = new GetObjectCommand(params); 238 | presignedUrl = await this.getFileSignedUrl(this._s3Client, command, options); 239 | 240 | if (!this._baseUrl) { 241 | return presignedUrl; 242 | } 243 | } 244 | 245 | if (!this._baseUrl) { 246 | return `https://${this._bucket}.s3.amazonaws.com/${fileKey}`; 247 | } 248 | 249 | const baseUrlFileKey = this._baseUrlDirect ? fileName : fileKey; 250 | return await buildDirectAccessUrl(this._baseUrl, baseUrlFileKey, presignedUrl, config, filename); 251 | } 252 | 253 | async handleFileStream(filename, req, res) { 254 | const params = { 255 | Bucket: this._bucket, 256 | Key: this._bucketPrefix + filename, 257 | Range: req.get('Range'), 258 | }; 259 | 260 | await this.createBucket(); 261 | const command = new GetObjectCommand(params); 262 | const data = await this._s3Client.send(command); 263 | if (data && !data.Body) { 264 | throw new Error('S3 object body is missing.'); 265 | } 266 | 267 | res.writeHead(206, { 268 | 'Accept-Ranges': data.AcceptRanges, 269 | 'Content-Length': data.ContentLength, 270 | 'Content-Range': data.ContentRange, 271 | 'Content-Type': data.ContentType, 272 | }); 273 | data.Body.on('data', chunk => res.write(chunk)); 274 | data.Body.on('end', () => res.end()); 275 | data.Body.on('error', e => { 276 | res.status(404); 277 | res.send(e.message); 278 | }); 279 | return responseToBuffer(data); 280 | } 281 | } 282 | 283 | module.exports = S3Adapter; 284 | module.exports.default = S3Adapter; 285 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # 
[4.2.0](https://github.com/parse-community/parse-server-s3-adapter/compare/4.1.1...4.2.0) (2025-06-12) 2 | 3 | 4 | ### Features 5 | 6 | * Add support for option `baseUrl` set to async function ([#285](https://github.com/parse-community/parse-server-s3-adapter/issues/285)) ([6eae9e1](https://github.com/parse-community/parse-server-s3-adapter/commit/6eae9e1f4f4e496e4a53983464639e6d2a303f93)) 7 | 8 | ## [4.1.1](https://github.com/parse-community/parse-server-s3-adapter/compare/4.1.0...4.1.1) (2025-06-04) 9 | 10 | 11 | ### Bug Fixes 12 | 13 | * Security upgrade cookie and parse-server ([#274](https://github.com/parse-community/parse-server-s3-adapter/issues/274)) ([4b6aadc](https://github.com/parse-community/parse-server-s3-adapter/commit/4b6aadc7244dd91d79818c7c6cf752333ee79776)) 14 | 15 | # [4.1.0](https://github.com/parse-community/parse-server-s3-adapter/compare/4.0.0...4.1.0) (2025-01-03) 16 | 17 | 18 | ### Features 19 | 20 | * Upgrade @aws-sdk/client-s3 from 3.668.0 to 3.709.0 ([#235](https://github.com/parse-community/parse-server-s3-adapter/issues/235)) ([a344626](https://github.com/parse-community/parse-server-s3-adapter/commit/a344626b37946a8bed680740a6752c5903389509)) 21 | 22 | # [4.0.0](https://github.com/parse-community/parse-server-s3-adapter/compare/3.0.0...4.0.0) (2025-01-01) 23 | 24 | 25 | ### Features 26 | 27 | * Migrate S3 Client from AWS SDK v2 to v3 ([#231](https://github.com/parse-community/parse-server-s3-adapter/issues/231)) ([8ac6014](https://github.com/parse-community/parse-server-s3-adapter/commit/8ac6014d2945e211c1c6cddd3675037c49729cca)) 28 | 29 | 30 | ### BREAKING CHANGES 31 | 32 | * The AWS S3 credentials are now set under `s3overrides.credentials` instead of directly under the `s3overrides` key; see the [migration guide](https://github.com/parse-community/parse-server-s3-adapter?#migration-guide-from-3x-to-4x) for more details. 
([8ac6014](8ac6014)) 33 | 34 | # [3.0.0](https://github.com/parse-community/parse-server-s3-adapter/compare/2.2.0...3.0.0) (2024-10-22) 35 | 36 | 37 | ### Features 38 | 39 | * Add support for Node 20, 22; remove support for Node 14, 16 ([#226](https://github.com/parse-community/parse-server-s3-adapter/issues/226)) ([da5a94f](https://github.com/parse-community/parse-server-s3-adapter/commit/da5a94fa180ba57dfae33659e18db4704582e8e6)) 40 | 41 | 42 | ### BREAKING CHANGES 43 | 44 | * Removes support for Node 14, 16. ([da5a94f](da5a94f)) 45 | 46 | # [2.2.0](https://github.com/parse-community/parse-server-s3-adapter/compare/2.1.0...2.2.0) (2023-05-15) 47 | 48 | 49 | ### Features 50 | 51 | * Upgrade aws-sdk from 2.1362.0 to 2.1363.0 ([#192](https://github.com/parse-community/parse-server-s3-adapter/issues/192)) ([3c3a953](https://github.com/parse-community/parse-server-s3-adapter/commit/3c3a953ebbe9654c05893ab127b80b7913818008)) 52 | 53 | # [2.1.0](https://github.com/parse-community/parse-server-s3-adapter/compare/2.0.2...2.1.0) (2023-05-12) 54 | 55 | 56 | ### Features 57 | 58 | * Add option to generate pre-signed URL with expiration time ([#180](https://github.com/parse-community/parse-server-s3-adapter/issues/180)) ([d92363d](https://github.com/parse-community/parse-server-s3-adapter/commit/d92363d68a609b1db089bf83028b4f6780c9491c)) 59 | 60 | ## [2.0.2](https://github.com/parse-community/parse-server-s3-adapter/compare/2.0.1...2.0.2) (2023-04-21) 61 | 62 | 63 | ### Bug Fixes 64 | 65 | * Remove development dependencies from production ([#190](https://github.com/parse-community/parse-server-s3-adapter/issues/190)) ([73b17e4](https://github.com/parse-community/parse-server-s3-adapter/commit/73b17e40f124212020cf72e700976f8d3cbb22d5)) 66 | 67 | ## [2.0.1](https://github.com/parse-community/parse-server-s3-adapter/compare/2.0.0...2.0.1) (2023-04-21) 68 | 69 | 70 | ### Bug Fixes 71 | 72 | * Security upgrade xml2js and aws-sdk 
([#181](https://github.com/parse-community/parse-server-s3-adapter/issues/181)) ([66fad32](https://github.com/parse-community/parse-server-s3-adapter/commit/66fad32dd94b5280f29a12fbdf24d9427eb8c2eb)) 73 | 74 | # [2.0.0](https://github.com/parse-community/parse-server-s3-adapter/compare/1.6.3...2.0.0) (2023-04-21) 75 | 76 | 77 | ### Features 78 | 79 | * Add support for Node 16, 18, remove support for Node 12, 15 ([#189](https://github.com/parse-community/parse-server-s3-adapter/issues/189)) ([993534c](https://github.com/parse-community/parse-server-s3-adapter/commit/993534c57cc7009363a740bbbb04a0e4e56c7f0c)) 80 | 81 | 82 | ### BREAKING CHANGES 83 | 84 | * Removes support for Node 12 and 15 which have reached their End-of-Life date and are not officially maintained anymore. ([993534c](993534c)) 85 | 86 | ## [1.6.3](https://github.com/parse-community/parse-server-s3-adapter/compare/1.6.2...1.6.3) (2023-04-21) 87 | 88 | 89 | ### Bug Fixes 90 | 91 | * upgrade aws-sdk from 2.906.0 to 2.907.0 ([#166](https://github.com/parse-community/parse-server-s3-adapter/issues/166)) ([e224f0a](https://github.com/parse-community/parse-server-s3-adapter/commit/e224f0aa3388b03307e06700aa0dbe9251fae1a9)) 92 | 93 | ## 1.6.2 94 | [Full Changelog](https://github.com/parse-community/parse-server-s3-adapter/compare/1.6.1...1.6.2) 95 | 96 | ### Breaking Changes 97 | none 98 | ### Notable Changes 99 | none 100 | ### Other Changes 101 | - Upgrade to AWS SDK 2.905.0 (Antonio Davi Macedo Coelho de Castro) [#163](https://github.com/parse-community/parse-server-s3-adapter/pull/163) 102 | ___ 103 | 104 | ## 1.6.1 105 | [Full Changelog](https://github.com/parse-community/parse-server-s3-adapter/compare/1.6.0...1.6.1) 106 | ### Breaking Changes 107 | none 108 | ### Notable Changes 109 | none 110 | ### Other Changes 111 | - Upgraded to AWS SDK 2.879.0 (Manuel Trezza) [#132](https://github.com/parse-community/parse-server-s3-adapter/pull/132) 112 | ___ 113 | ## 1.6.0 114 | [Full 
Changelog](https://github.com/parse-community/parse-server-s3-adapter/compare/1.5.0...1.6.0) 115 | - NEW: Support passing baseUrl param as a function [#106](https://github.com/parse-community/parse-server-s3-adapter/pull/106). Thanks to [uzaysan](https://github.com/uzaysan) 116 | 117 | ## 1.5.0 118 | [Full Changelog](https://github.com/parse-community/parse-server-s3-adapter/compare/1.4.0...1.5.0) 119 | - NEW: Add file ACL override parameter [#90](https://github.com/parse-community/parse-server-s3-adapter/pull/90). Thanks to [Manuel](https://github.com/mtrezza) 120 | - NEW: Added support for metadata and tagging files [#83](https://github.com/parse-community/parse-server-s3-adapter/pull/83). Thanks to [stevestencil](https://github.com/stevestencil) 121 | 122 | ## 1.4.0 123 | [Full Changelog](https://github.com/parse-community/parse-server-s3-adapter/compare/1.3.0...1.4.0) 124 | - NEW: Support endpoint in S3Overrides [#79](https://github.com/parse-community/parse-server-s3-adapter/pull/79). Thanks to [Kyle Barron](https://github.com/kylebarron) 125 | - NEW: Support filename validation and AWS directories [#76](https://github.com/parse-community/parse-server-s3-adapter/pull/76). Thanks to [Mike Patnode](https://github.com/mpatnode) 126 | 127 | ## 1.3.0 128 | [Full Changelog](https://github.com/parse-community/parse-server-s3-adapter/compare/1.2.3...1.3.0) 129 | - CHANGE: Conform to FilesAdapter Interface [#73](https://github.com/parse-community/parse-server-s3-adapter/pull/73). Thanks to [Diamond Lewis](https://github.com/dplewis) 130 | - CHANGE: Add airbnb style guide to linter [#72](https://github.com/parse-community/parse-server-s3-adapter/pull/72). Thanks to [Diamond Lewis](https://github.com/dplewis) 131 | - NEW: Support byte range requests [#71](https://github.com/parse-community/parse-server-s3-adapter/pull/71). 
Thanks to [Diamond Lewis](https://github.com/dplewis) 132 | 133 | ## 1.2.3 134 | [Full Changelog](https://github.com/parse-community/parse-server-s3-adapter/compare/1.2.2...1.2.3) 135 | - Another attempt at getting travis/npm working together [#69](https://github.com/parse-community/parse-server-s3-adapter/pull/69) 136 | 137 | ## 1.2.2 138 | [Full Changelog](https://github.com/parse-server-modules/parse-server-s3-adapter/compare/v1.0.6...1.2.2) 139 | - Dependency Security Updates 140 | - Fix tests [#69](https://github.com/parse-community/parse-server-s3-adapter/pull/68) thanks to [davimacedo](https://github.com/davimacedo) 141 | 142 | ## [v1.0.6](https://github.com/parse-server-modules/parse-server-s3-adapter/tree/v1.0.6) (2016-12-6) 143 | 144 | [Full Changelog](https://github.com/parse-server-modules/parse-server-s3-adapter/compare/v1.0.5...v1.0.6) 145 | 146 | **Closed issues:** 147 | 148 | - commit bb933cc breaks adapter for me [\#31](https://github.com/parse-server-modules/parse-server-s3-adapter/issues/31) 149 | - getFileLocation does not URI encode filename in directAccess cases [\#28](https://github.com/parse-server-modules/parse-server-s3-adapter/issues/28) 150 | - Ability to Resize Images [\#27](https://github.com/parse-server-modules/parse-server-s3-adapter/issues/27) 151 | 152 | **Merged pull requests:** 153 | 154 | - Add lint to project. 
[\#34](https://github.com/parse-server-modules/parse-server-s3-adapter/pull/34) ([acinader](https://github.com/acinader)) 155 | - Handle immutable configuration [\#33](https://github.com/parse-server-modules/parse-server-s3-adapter/pull/33) ([acinader](https://github.com/acinader)) 156 | - Revert "also using base url as endpoint in order to use aws s3 compat… [\#32](https://github.com/parse-server-modules/parse-server-s3-adapter/pull/32) ([acinader](https://github.com/acinader)) 157 | - Encode File URI [\#30](https://github.com/parse-server-modules/parse-server-s3-adapter/pull/30) ([davimacedo](https://github.com/davimacedo)) 158 | - Add s3overrides option format [\#24](https://github.com/parse-server-modules/parse-server-s3-adapter/pull/24) ([viawest-davidsix](https://github.com/viawest-davidsix)) 159 | - Use baseUrl as S3 endpoint [\#23](https://github.com/parse-server-modules/parse-server-s3-adapter/pull/23) ([dpoetzsch](https://github.com/dpoetzsch)) 160 | - v1.0.5 - changelog [\#22](https://github.com/parse-server-modules/parse-server-s3-adapter/pull/22) ([acinader](https://github.com/acinader)) 161 | 162 | ## [v1.0.5](https://github.com/parse-server-modules/parse-server-s3-adapter/tree/v1.0.5) (2016-08-16) 163 | [Full Changelog](https://github.com/parse-server-modules/parse-server-s3-adapter/compare/v1.0.4...v1.0.5) 164 | 165 | **Closed issues:** 166 | 167 | - Files are not deleted on AWS S3 after being deleted on Parse-Dashboard. [\#17](https://github.com/parse-server-modules/parse-server-s3-adapter/issues/17) 168 | - Use AWS SDK & CLI standard configuration [\#14](https://github.com/parse-server-modules/parse-server-s3-adapter/issues/14) 169 | 170 | **Merged pull requests:** 171 | 172 | - 24 hours in seconds is 86400, not 86400000 [\#21](https://github.com/parse-server-modules/parse-server-s3-adapter/pull/21) ([joeyslack](https://github.com/joeyslack)) 173 | - Fix bug that put credentials on the wrong object. 
[\#19](https://github.com/parse-server-modules/parse-server-s3-adapter/pull/19) ([acinader](https://github.com/acinader)) 174 | - Use default AWS credential provider. [\#15](https://github.com/parse-server-modules/parse-server-s3-adapter/pull/15) ([acinader](https://github.com/acinader)) 175 | - Add an optional global cache control for all s3 uploaded files. [\#13](https://github.com/parse-server-modules/parse-server-s3-adapter/pull/13) ([KBog](https://github.com/KBog)) 176 | 177 | ## [v1.0.4](https://github.com/parse-server-modules/parse-server-s3-adapter/tree/v1.0.4) (2016-07-18) 178 | [Full Changelog](https://github.com/parse-server-modules/parse-server-s3-adapter/compare/v1.0.3...v1.0.4) 179 | 180 | **Merged pull requests:** 181 | 182 | - get signature version from environment or default [\#12](https://github.com/parse-server-modules/parse-server-s3-adapter/pull/12) ([ststroppel](https://github.com/ststroppel)) 183 | - Updates changelog [\#11](https://github.com/parse-server-modules/parse-server-s3-adapter/pull/11) ([flovilmart](https://github.com/flovilmart)) 184 | 185 | ## [v1.0.3](https://github.com/parse-server-modules/parse-server-s3-adapter/tree/v1.0.3) (2016-05-24) 186 | [Full Changelog](https://github.com/parse-server-modules/parse-server-s3-adapter/compare/v1.0.2...v1.0.3) 187 | 188 | **Merged pull requests:** 189 | 190 | - Updates changelog and version [\#10](https://github.com/parse-server-modules/parse-server-s3-adapter/pull/10) ([flovilmart](https://github.com/flovilmart)) 191 | - Adding ability to ignore bucketPrefix for public URLs [\#9](https://github.com/parse-server-modules/parse-server-s3-adapter/pull/9) ([blacha](https://github.com/blacha)) 192 | 193 | ## [v1.0.2](https://github.com/parse-server-modules/parse-server-s3-adapter/tree/v1.0.2) (2016-04-25) 194 | [Full Changelog](https://github.com/parse-server-modules/parse-server-s3-adapter/compare/v1.0.1...v1.0.2) 195 | 196 | **Closed issues:** 197 | 198 | - Is it possible to save to a 
customized folder dynamically? [\#4](https://github.com/parse-server-modules/parse-server-s3-adapter/issues/4) 199 | - The S3 file adapter seems not working [\#2](https://github.com/parse-server-modules/parse-server-s3-adapter/issues/2) 200 | 201 | **Merged pull requests:** 202 | 203 | - Adds option to specify an alternate baseUrl \(e.g. CloudFront\) [\#6](https://github.com/parse-server-modules/parse-server-s3-adapter/pull/6) ([flavordaaave](https://github.com/flavordaaave)) 204 | 205 | ## [v1.0.1](https://github.com/parse-server-modules/parse-server-s3-adapter/tree/v1.0.1) (2016-03-31) 206 | **Merged pull requests:** 207 | 208 | - 1.0.1 [\#3](https://github.com/parse-server-modules/parse-server-s3-adapter/pull/3) ([flovilmart](https://github.com/flovilmart)) 209 | - Fixed bug whereby region was ignored. [\#1](https://github.com/parse-server-modules/parse-server-s3-adapter/pull/1) ([jsuresh](https://github.com/jsuresh)) 210 | 211 | 212 | 213 | \* *This Change Log was automatically generated by [github_changelog_generator](https://github.com/skywinder/Github-Changelog-Generator)* 214 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Parse Server S3 File Adapter 2 | 3 | [![Build Status](https://github.com/parse-community/parse-server-s3-adapter/actions/workflows/ci.yml/badge.svg)](https://github.com/parse-community/parse-server-s3-adapter/actions/workflows/ci.yml) 4 | [![Snyk Badge](https://snyk.io/test/github/parse-community/parse-server-s3-adapter/badge.svg)](https://snyk.io/test/github/parse-community/parse-server-s3-adapter) 5 | [![Coverage](https://img.shields.io/codecov/c/github/parse-community/parse-server-s3-adapter/master.svg)](https://codecov.io/github/parse-community/parse-server-s3-adapter?branch=master) 6 | 
[![auto-release](https://img.shields.io/badge/%F0%9F%9A%80-auto--release-9e34eb.svg)](https://github.com/parse-community/parse-server-s3-adapter/releases) 7 | 8 | [![Parse Server](https://img.shields.io/badge/Parse_Server-7.0-169CEE.svg?style=flat&logo=data:image/svg%2bxml;base64,PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iVVRGLTgiPz48c3ZnIGlkPSJ1dWlkLTg2MzQ2MDY1LTNjNjQtNDBlYy1hNmQ0LWUyNzZmM2E0Y2U5MiIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIiB2aWV3Qm94PSIwIDAgMTExMy42NiAxMDk5LjQyIj48ZyBpZD0idXVpZC04MWRmNWUyZC04YWQyLTQwMmEtYTNlZS1hYjE2YTQ5NjhhNjciPjxwYXRoIGQ9Ik00ODUuMDMsNzg1LjE0Yy04MC4zMSwwLTE2MC42MS0uMDktMjQwLjkyLjA3LTE5LjY5LjA0LTM4Ljk2LDIuODUtNTYuODksMTEuODYtMzAuNjEsMTUuMzktNDYuMjQsNDAuODYtNTAuNTEsNzQuMTctMS43OCwxMy44Ny0xLjA3LDI3LjUzLDIuNCw0MS4wNyw5Ljg4LDM4LjYxLDQwLjk3LDYzLDgwLjc3LDYzLjYxLDE0LjQ4LjIyLDI4LjYtMS4xMyw0Mi4xOC02LjU3LDIyLjIxLTguODksMzcuNjgtMjQuNjksNDcuNTUtNDYuMjIsNS43LTEyLjQ0LDguNzgtMjUuNiwxMC4wMy0zOS4yMS43LTcuNjUsMS4zNC04LjM5LDkuMDctOC40LDM5LjExLS4wNiw3OC4yMi0uMDYsMTE3LjMzLDAsNy44MS4wMSw4LjcuNzcsOC4yNSw4LjYxLTEuNSwyNS45LTYuMjYsNTEuMTktMTQuOTUsNzUuNjgtOS44OSwyNy44OC0yNC41Miw1Mi45Ni00NC44OCw3NC40OS0xMi4wNiwxMi43NS0yNS44NiwyMy41LTQxLDMyLjM2LTI3LjYxLDE2LjE3LTU3LjU2LDI1LjU0LTg5LjIxLDI5LjYzLTE2LjAzLDIuMDctMzIuMTMsMy41Ni00OC4zMiwyLjk5LTUxLjA1LTEuODEtOTguMTktMTUuMzItMTM4LjkyLTQ3LjM1LTI5LjE4LTIyLjk0LTUwLjIzLTUxLjkxLTYzLjE2LTg2LjczQzQuNDksOTQwLjAzLjIsOTE0LjAyLDAsODg3LjEyYy0uMi0yNy4zOSwzLjIzLTU0LjA2LDEyLjA0LTgwLjAxLDE2LjE1LTQ3LjU1LDQ2LjA0LTg0LjIyLDg4LjM3LTExMC44NSwzMy41LTIxLjA4LDcwLjM1LTMyLjQxLDEwOS41MS0zNi43NiwxOC45My0yLjEsMzcuOTEtMi43OCw1Ni45NS0yLjc4LDE0Ni4wMS4wNiwyOTIuMDItLjE0LDQzOC4wMy4xNCw0MC43OC4wOCw3OS44OC03LjIsMTE3LjEzLTIzLjY0LDUxLjQ0LTIyLjcsOTEuNi01OC4yNSwxMTkuNzUtMTA3LjA4LDE5LjE3LTMzLjI3LDI5Ljk3LTY5LjE0LDMzLjU2LTEwNy4zNSw0LjI0LTQ1LjEyLS42My04OS4xNy0xNi44LTEzMS40Ni0yOS4xNS03Ni4xOS04My4xMS0xMjUuOTUtMTYxLjc0LTE0OC41NS0zMC42OC04LjgxLTYyLjExLTExLjExLTkzLjc0LTkuMDMtNTAuMzEsMy4zMS05Ni41MiwxOC45LTEzNy4wOCw0OS40MS0yNi45OCwyMC4zLTQ4Ljg5LDQ1LjI3LTY1LjkxLDc0LjQ3LTI
zLjY0LDQwLjU2LTM2LjIsODQuNTgtNDEuMzYsMTMxLTIuMDUsMTguNDItMi45OSwzNi44NS0yLjkzLDU1LjM4LjEzLDM4LjA3LjA0LDc2LjEzLjA0LDExNC4yLDAsMi4zNS4xLDQuNy0uMDgsNy4wNC0uMzYsNC44Ny0xLjIzLDUuNjktNi4yMiw2LjA4LTEuODIuMTQtMy42NS4wNy01LjQ3LjA3LTM3LjU1LDAtNzUuMDksMC0xMTIuNjQsMC0xLjU2LDAtMy4xMy4wNS00LjY5LS4wNC01Ljk2LS4zMi02Ljc1LTEuMDgtNy4xMS02LjgyLS4wNi0xLjA0LS4wNC0yLjA5LS4wNC0zLjEzLjAyLTQ1LjYzLS44NC05MS4yOC4yOC0xMzYuODgsMS44MS03My44NSwxNi43My0xNDQuODQsNTAuNTQtMjExLjE0LDIxLjE3LTQxLjUxLDQ4LjY0LTc4LjQsODMuMi0xMDkuNzEsNDEuMzktMzcuNDksODguOTYtNjQuMjcsMTQyLjM5LTgwLjcxLDMwLjU1LTkuNCw2MS43NC0xNS4zNSw5My41My0xNy42NSw4MC4yMS01Ljc5LDE1Ny4wNSw2Ljg1LDIyOC42LDQ0Ljg3LDYzLjExLDMzLjU0LDExMi4wMSw4MS44OCwxNDYuNTUsMTQ0LjU1LDI0LjczLDQ0Ljg3LDM5LjE3LDkyLjk2LDQ1LjU3LDE0My43MSwzLjcxLDI5LjM4LDQuMjIsNTguODcsMi4yOSw4OC4yMS00LjU0LDY5LjI2LTI1LjQxLDEzMy4zOS02NC41LDE5MS4xMS0zNS41MSw1Mi40Mi04MS43Miw5Mi44OC0xMzcuNjgsMTIyLjM4LTQ1LjQ5LDIzLjk4LTkzLjg4LDM4LjY1LTE0NC43NSw0NS4zNC0xOS4zOCwyLjU1LTM4Ljg3LDMuNzQtNTguNDYsMy43LTc0LjA1LS4xNi0xNDguMS0uMDYtMjIyLjE0LS4wNloiIHN0eWxlPSJmaWxsOiNlMGUwZTA7Ii8+PC9nPjwvc3ZnPg==)](https://github.com/parse-community/parse-server/releases) 9 | [![Node Version](https://img.shields.io/badge/nodejs-18,_20,_22-green.svg?logo=node.js&style=flat)](https://nodejs.org) 10 | 11 | [![npm latest version](https://img.shields.io/npm/v/@parse/s3-files-adapter.svg)](https://www.npmjs.com/package/@parse/s3-files-adapter) 12 | 13 | --- 14 | 15 | The official AWS S3 file storage adapter for Parse Server. See [Parse Server S3 File Adapter Configuration](https://docs.parseplatform.org/parse-server/guide/#configuring-s3adapter) for more details. 
16 | 17 | --- 18 | 19 | - [Getting Started](#getting-started) 20 | - [Installation](#installation) 21 | - [Compatibility](#compatibility) 22 | - [Parse Server](#parse-server) 23 | - [Node.js](#nodejs) 24 | - [AWS Credentials](#aws-credentials) 25 | - [Deprecated Configuration](#deprecated-configuration) 26 | - [Usage with Parse Server](#usage-with-parse-server) 27 | - [Parameters](#parameters) 28 | - [Using a Config File](#using-a-config-file) 29 | - [Using Environment Variables](#using-environment-variables) 30 | - [Passing as an Instance](#passing-as-an-instance) 31 | - [Adding Metadata and Tags](#adding-metadata-and-tags) 32 | - [Compatibility with other Storage Providers](#compatibility-with-other-storage-providers) 33 | - [Digital Ocean Spaces](#digital-ocean-spaces) 34 | - [Migration Guide from 3.x to 4.x](#migration-guide-from-3x-to-4x) 35 | - [AWS IAM Permissions](#aws-iam-permissions) 36 | - [Passing S3 Credentials](#passing-s3-credentials) 37 | 38 | 39 | # Getting Started 40 | 41 | ## Installation 42 | 43 | `npm install --save @parse/s3-files-adapter` 44 | 45 | ## Compatibility 46 | 47 | ### Parse Server 48 | 49 | Parse Server S3 Adapter is compatible with the following versions of Parse Server. 50 | 51 | | Parse Server Version | End-of-Life | Compatible | 52 | |----------------------|---------------|------------| 53 | | <=5 | December 2023 | ❌ No | 54 | | 6 | December 2024 | ❌ No | 55 | | <7.3.0 | December 2025 | ❌ No | 56 | | >=7.3.0 | December 2025 | ✅ Yes | 57 | 58 | ### Node.js 59 | 60 | Parse Server S3 Adapter is continuously tested with the most recent releases of Node.js to ensure compatibility. We follow the [Node.js Long Term Support plan](https://github.com/nodejs/Release) and only test against versions that are officially supported and have not reached their end-of-life date. 
61 | 62 | | Node.js Version | End-of-Life | Compatible | 63 | |-----------------|-------------|------------| 64 | | 18 | April 2025 | ✅ Yes | 65 | | 20 | April 2026 | ✅ Yes | 66 | | 22 | April 2027 | ✅ Yes | 67 | 68 | ## AWS Credentials 69 | 70 | ⚠️ The ability to explicitly pass credentials to this adapter is deprecated and will be removed in a future release. 71 | 72 | You may already be compatible with this change. If you have not explicitly set an `accessKey` and `secretKey` and you have configured the environment variables `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY`, then you're all set and this will continue to work as is. 73 | 74 | If you explicitly configured the environment variables `S3_ACCESS_KEY` and 75 | `S3_SECRET_KEY` 76 | 77 | *OR* 78 | 79 | If you explicitly configured the `accessKey` and `secretKey` in your adapter configuration, then you'll need to... 80 | 81 | For non-AWS hosts: 82 | 83 | * Run `aws configure` in a terminal which will step you through configuring credentials for the AWS SDK and CLI 84 | 85 | For an AWS host: 86 | 87 | * Ensure that the role that your host is running as has permissions for your S3 bucket 88 | 89 | *Then* 90 | 91 | * remove the `accessKey` and `secretKey` from your configuration 92 | 93 | If for some reason you really need to be able to set the key and secret explicitly, you can still do it using `s3overrides` as described below and setting `accessKeyId` and `secretAccessKey` in the `s3overrides` object. 94 | 95 | ## Deprecated Configuration 96 | 97 | Although it is not recommended, AWS credentials can be explicitly configured through an options 98 | object, constructor string arguments or environment variables ([see below](#using-a-config-file)). 99 | This option is provided for backward compatibility and will be removed in a future major version of this adapter. 100 | 101 | The preferred method is to use the default AWS credentials pattern. 
If no AWS credentials are explicitly configured, the AWS SDK will look for credentials in the standard locations used by all AWS SDKs and the AWS CLI. More info can be found in [the docs](http://docs.aws.amazon.com/cli/latest/userguide/cli-chap-getting-started.html#config-settings-and-precedence). For more information on AWS best practices, see [IAM Best Practices User Guide](http://docs.aws.amazon.com/IAM/latest/UserGuide/best-practices.html). 102 | 103 | # Usage with Parse Server 104 | 105 | ## Parameters 106 | 107 | *(This list is still incomplete and in the works, in the meantime find more descriptions in the chapters below.)* 108 | 109 | | Parameter | Optional | Default value | Environment variable | Description | 110 | |-----------------------|----------|---------------|--------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| 111 | | `fileAcl` | yes | `undefined` | S3_FILE_ACL | Sets the [Canned ACL](https://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl) of the file when storing it in the S3 bucket. Setting this parameter overrides the file ACL that would otherwise depend on the `directAccess` parameter. Setting the value `'none'` causes any ACL parameter to be removed that would otherwise be set. | 112 | | `presignedUrl` | yes | `false` | S3_PRESIGNED_URL | If `true` a [presigned URL](https://docs.aws.amazon.com/AmazonS3/latest/dev/ShareObjectPreSignedURL.html) is returned when requesting the URL of file. The URL is only valid for a specified duration, see parameter `presignedUrlExpires`. 
| 113 | | `presignedUrlExpires` | yes | `undefined` | S3_PRESIGNED_URL_EXPIRES | Sets the duration in seconds after which the [presigned URL](https://docs.aws.amazon.com/AmazonS3/latest/dev/ShareObjectPreSignedURL.html) of the file expires. If no value is set, the AWS S3 SDK default [Expires](https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#getSignedUrl-property) value applies. This parameter requires `presignedUrl` to be `true`. | 114 | 115 | ## Using a Config File 116 | 117 | ``` 118 | { 119 | "appId": 'my_app_id', 120 | "masterKey": 'my_master_key', 121 | // other options 122 | "filesAdapter": { 123 | "module": "@parse/s3-files-adapter", 124 | "options": { 125 | "bucket": "my_bucket", 126 | // optional: 127 | "region": 'us-east-1', // default value 128 | "bucketPrefix": '', // default value 129 | "directAccess": false, // default value 130 | "fileAcl": null, // default value 131 | "baseUrl": null, // string, function or async function 132 | "baseUrlDirect": false, // default value 133 | "signatureVersion": 'v4', // default value 134 | "globalCacheControl": null, // default value. Or 'public, max-age=86400' for 24 hrs Cache-Control 135 | "presignedUrl": false, // Optional. If true a presigned URL is returned when requesting the URL of file. The URL is only valid for a specified duration, see parameter `presignedUrlExpires`. Default is false. 136 | "presignedUrlExpires": null, // Optional. Sets the duration in seconds after which the presigned URL of the file expires. Defaults to the AWS S3 SDK default Expires value. 137 | "ServerSideEncryption": 'AES256|aws:kms', //AES256 or aws:kms, or if you do not pass this, encryption won't be done 138 | "validateFilename": null, // Default to parse-server FilesAdapter::validateFilename. 
139 | "generateKey": null // Will default to Parse.FilesController.preserveFileName 140 | } 141 | } 142 | } 143 | ``` 144 | ***Note*** By default Parse.FilesController.preserveFileName will prefix all filenames with a random hex code. You will want to disable that if you enable it here or wish to use S3 "directories". 145 | 146 | ## Using Environment Variables 147 | 148 | Set your environment variables: 149 | 150 | ``` 151 | S3_BUCKET=bucketName 152 | ``` 153 | 154 | The following optional configuration can be set by environment variable too: 155 | 156 | ``` 157 | S3_SIGNATURE_VERSION=v4 158 | ``` 159 | 160 | And update your config / options 161 | 162 | ``` 163 | { 164 | "appId": 'my_app_id', 165 | "masterKey": 'my_master_key', 166 | // other options 167 | "filesAdapter": "@parse/s3-files-adapter" 168 | } 169 | ``` 170 | 171 | 172 | ## Passing as an Instance 173 | ``` 174 | var S3Adapter = require('@parse/s3-files-adapter'); 175 | 176 | var s3Adapter = new S3Adapter( 177 | 'accessKey', 178 | 'secretKey', 179 | 'bucket', 180 | { 181 | region: 'us-east-1', 182 | bucketPrefix: '', 183 | directAccess: false, 184 | baseUrl: 'http://images.example.com', 185 | signatureVersion: 'v4', 186 | globalCacheControl: 'public, max-age=86400', // 24 hrs Cache-Control. 
187 | presignedUrl: false, 188 | presignedUrlExpires: 900, 189 | validateFilename: (filename) => { 190 | if (filename.length > 1024) { 191 | return 'Filename too long.'; 192 | } 193 | return null; // Return null on success 194 | }, 195 | generateKey: (filename) => { 196 | return `${Date.now()}_${filename}`; // unique prefix for every filename 197 | } 198 | } 199 | ); 200 | 201 | var api = new ParseServer({ 202 | appId: 'my_app', 203 | masterKey: 'master_key', 204 | filesAdapter: s3Adapter 205 | }) 206 | ``` 207 | **Note:** there are a few ways you can pass arguments: 208 | 209 | ``` 210 | S3Adapter("bucket") 211 | S3Adapter("bucket", options) 212 | S3Adapter("key", "secret", "bucket") -- Deprecated, see notice above 213 | S3Adapter("key", "secret", "bucket", options) -- Deprecated, see notice above 214 | S3Adapter(options) // where options must contain bucket. 215 | S3Adapter(options, s3overrides) 216 | ``` 217 | If you use the last form, `s3overrides` are the parameters passed to [AWS.S3](http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#constructor-property). 218 | 219 | In this form, if you set `s3overrides.params`, you must set at least `s3overrides.params.Bucket`. 220 | 221 | or with an options hash 222 | 223 | ``` 224 | var S3Adapter = require('@parse/s3-files-adapter'); 225 | 226 | var s3Options = { 227 | "bucket": "my_bucket", 228 | // optional: 229 | "region": 'us-east-1', // default value 230 | "bucketPrefix": '', // default value 231 | "directAccess": false, // default value 232 | "baseUrl": null, // string, function or async function 233 | "signatureVersion": 'v4', // default value 234 | "globalCacheControl": null, // default value. Or 'public, max-age=86400' for 24 hrs Cache-Control 235 | "presignedUrl": false, // default value 236 | "presignedUrlExpires": 900, // default value (900 seconds) 237 | "validateFilename": () => null, // Anything goes! 
238 | "generateKey": (filename) => filename, // Ensure Parse.FilesController.preserveFileName is true! 239 | } 240 | 241 | var s3Adapter = new S3Adapter(s3Options); 242 | 243 | var api = new ParseServer({ 244 | appId: 'my_app', 245 | masterKey: 'master_key', 246 | filesAdapter: s3Adapter 247 | }) 248 | ``` 249 | 250 | ## Adding Metadata and Tags 251 | 252 | Use the optional options argument to add [Metadata](https://docs.aws.amazon.com/AmazonS3/latest/user-guide/add-object-metadata.html) and/or [Tags](https://docs.aws.amazon.com/AmazonS3/latest/user-guide/add-object-tags.html) to S3 objects 253 | 254 | ``` 255 | 256 | 257 | const S3Adapter = require('@parse/s3-files-adapter'); 258 | 259 | const s3Options = {}; // Add correct options 260 | const s3Adapter = new S3Adapter(s3Options); 261 | 262 | const filename = 'Fictional_Characters.txt'; 263 | const data = 'That\'s All Folks!'; 264 | const contentType = 'text/plain'; 265 | const tags = { 266 | createdBy: 'Elmer Fudd', 267 | owner: 'Popeye' 268 | }; 269 | const metadata = { 270 | source: 'Mickey Mouse' 271 | }; 272 | const options = { tags, metadata }; 273 | s3Adapter.createFile(filename, data, contentType, options); 274 | 275 | ``` 276 | 277 | **Note:** This adapter will **automatically** add the "x-amz-meta-" prefix to the beginning of metadata tags as stated in [S3 Documentation](https://docs.aws.amazon.com/AmazonS3/latest/user-guide/add-object-metadata.html). 
278 | 279 | 280 | # Compatibility with other Storage Providers 281 | 282 | ## Digital Ocean Spaces 283 | 284 | ```js 285 | var S3Adapter = require("@parse/s3-files-adapter"); 286 | var AWS = require("aws-sdk"); 287 | 288 | //Configure Digital Ocean Spaces EndPoint 289 | var s3Options = { 290 | bucket: process.env.SPACES_BUCKET_NAME, 291 | baseUrl: process.env.SPACES_BASE_URL, 292 | region: process.env.SPACES_REGION, 293 | directAccess: true, 294 | globalCacheControl: "public, max-age=31536000", 295 | presignedUrl: false, 296 | presignedUrlExpires: 900, 297 | bucketPrefix: process.env.SPACES_BUCKET_PREFIX, 298 | s3overrides: { 299 | accessKeyId: process.env.SPACES_ACCESS_KEY, 300 | secretAccessKey: process.env.SPACES_SECRET_KEY, 301 | endpoint: process.env.SPACES_ENDPOINT 302 | } 303 | }; 304 | 305 | var s3Adapter = new S3Adapter(s3Options); 306 | 307 | var api = new ParseServer({ 308 | databaseURI: process.env.DATABASE_URI || "mongodb://localhost:27017/dev", 309 | cloud: process.env.CLOUD_CODE_MAIN || __dirname + "/cloud/main.js", 310 | appId: process.env.APP_ID || "myAppId", 311 | masterKey: process.env.MASTER_KEY || "", 312 | serverURL: process.env.SERVER_URL || "http://localhost:1337/parse", 313 | logLevel: process.env.LOG_LEVEL || "info", 314 | allowClientClassCreation: false, 315 | filesAdapter: s3Adapter 316 | }); 317 | ``` 318 | 319 | 320 | # Migration Guide from 3.x to 4.x 321 | 322 | Due to the deprecation of the AWS SDK v2, Parse Server S3 Adapter 4.x adopts the AWS SDK v3. When upgrading from Parse Server S3 Adapter 3.x to 4.x, consider the following changes: 323 | 324 | ## AWS IAM Permissions 325 | 326 | In version 4.x, when uploading a file, the adapter will automatically create the specified S3 bucket, if it doesn't exist yet. To find out whether the bucket already exists, it will send a `HEAD` request to AWS S3 to list the existing bucket. 
This request requires the AWS IAM permission `s3:ListBucket` on the bucket resource, for example: 327 | 328 | ```json 329 | { 330 | "Effect": "Allow", 331 | "Action": [ 332 | "s3:ListBucket" 333 | ], 334 | "Resource": "arn:aws:s3:::" 335 | } 336 | ``` 337 | > [!NOTE] 338 | > The specified resource needs to be the bucket ARN itself, no `/*` at the end, because it's a bucket-level permission, not object-level. 339 | 340 | ## Passing S3 Credentials 341 | 342 | In version 4.x the S3 credentials are passed differently: 343 | 344 | *Parse Server S3 Adapter 3.x:* 345 | 346 | ```js 347 | const options = { 348 | bucket: '', 349 | s3overrides: { 350 | accessKeyId: '', 351 | secretAccessKey: '' 352 | } 353 | }; 354 | ``` 355 | 356 | *Parse Server S3 Adapter 4.x:* 357 | 358 | ```js 359 | const options = { 360 | bucket: '', 361 | s3overrides: { 362 | credentials: { 363 | accessKeyId: '', 364 | secretAccessKey: '' 365 | } 366 | } 367 | }; 368 | ``` 369 | 370 | Alternatively, the credentials can be set on the root object: 371 | 372 | ```js 373 | const options = { 374 | bucket: '', 375 | credentials: { 376 | accessKeyId: '', 377 | secretAccessKey: '' 378 | } 379 | }; 380 | ``` 381 | 382 | > [!NOTE] 383 | > It is best practice to not store credentials as environment variables, as they can be easily retrieved on a compromised machine. 
For Parse Server running in an AWS environment, use more secure alternatives like AWS Secrets Manager, or AWS Credential Identity Provider to access shared credentials: 384 | > 385 | > ```js 386 | > import { fromIni } from 'aws-sdk/credential-providers'; 387 | > 388 | > const options = { 389 | > bucket: '', 390 | > s3overrides: { 391 | > credentials: fromIni({ profile: '' }) 392 | > } 393 | > }; 394 | > ``` 395 | -------------------------------------------------------------------------------- /spec/test.spec.js: -------------------------------------------------------------------------------- 1 | const { Readable } = require('stream'); 2 | const config = require('config'); 3 | const S3Adapter = require('../index'); 4 | const optionsFromArguments = require('../lib/optionsFromArguments'); 5 | const { GetObjectCommand, PutObjectCommand, HeadBucketCommand, CreateBucketCommand } = require('@aws-sdk/client-s3'); 6 | const { getMockS3Adapter } = require('./mocks/s3adapter'); 7 | const rewire = require('rewire'); 8 | 9 | 10 | describe('S3Adapter tests', () => { 11 | beforeEach(() => { 12 | delete process.env.S3_BUCKET; 13 | delete process.env.S3_REGION; 14 | spyOn(console, 'warn').and.returnValue(); 15 | }); 16 | 17 | it('should throw when not initialized properly', () => { 18 | expect(() => { 19 | new S3Adapter(); 20 | }).toThrow(new Error("S3Adapter requires option 'bucket' or env. variable S3_BUCKET")); 21 | 22 | expect(() => { 23 | new S3Adapter('accessKey', 'secretKey', {}); 24 | }).toThrow(new Error("Failed to configure S3Adapter. Arguments don't make sense")); 25 | 26 | expect(() => { 27 | new S3Adapter({ accessKey: 'accessKey', secretKey: 'secretKey' }); 28 | }).toThrow(new Error("S3Adapter requires option 'bucket' or env. 
variable S3_BUCKET")); 29 | }); 30 | 31 | it('should not throw when initialized properly', () => { 32 | expect(() => { 33 | new S3Adapter('bucket'); 34 | }).not.toThrow(); 35 | 36 | expect(() => { 37 | new S3Adapter({ bucket: 'bucket' }); 38 | }).not.toThrow(); 39 | 40 | expect(() => { 41 | new S3Adapter({}, { params: { Bucket: 'bucket' } }); 42 | }).not.toThrow(); 43 | }); 44 | 45 | it('should accept environment for required', () => { 46 | const TEST_BUCKET = 'testBucket'; 47 | process.env.S3_BUCKET = TEST_BUCKET; 48 | const s3 = new S3Adapter(); 49 | expect(s3._bucket).toBe(TEST_BUCKET); 50 | }); 51 | 52 | describe('bucket operations', () => { 53 | let s3, s3ClientMock; 54 | beforeEach(() => { 55 | const options = { 56 | bucket: 'bucket-1', 57 | bucketPrefix: 'test/', 58 | }; 59 | s3ClientMock = jasmine.createSpyObj('S3Client', ['send']); 60 | s3ClientMock.send.and.returnValue(Promise.resolve()); 61 | 62 | s3 = new S3Adapter(options); 63 | s3._s3Client = s3ClientMock; 64 | }); 65 | 66 | it('should return early if _hasBucket is true', async () => { 67 | s3._hasBucket = true; 68 | 69 | await s3.createBucket(); 70 | 71 | expect(s3ClientMock.send).not.toHaveBeenCalled(); 72 | }); 73 | 74 | it('should set _hasBucket to true if bucket exists', async () => { 75 | s3ClientMock.send.and.returnValue(Promise.resolve({})); 76 | 77 | await s3.createBucket(); 78 | 79 | expect(s3ClientMock.send).toHaveBeenCalledWith(jasmine.any(HeadBucketCommand)); 80 | expect(s3._hasBucket).toBe(true); 81 | }); 82 | 83 | it('should attempt to create bucket if NotFound error occurs', async () => { 84 | const notFoundError = { name: 'NotFound' }; 85 | s3ClientMock.send.and.returnValues( 86 | Promise.reject(notFoundError), 87 | Promise.resolve({}) 88 | ); 89 | 90 | await s3.createBucket(); 91 | 92 | expect(s3ClientMock.send).toHaveBeenCalledTimes(2); 93 | expect(s3ClientMock.send).toHaveBeenCalledWith(jasmine.any(HeadBucketCommand)); 94 | 
expect(s3ClientMock.send).toHaveBeenCalledWith(jasmine.any(CreateBucketCommand)); 95 | expect(s3._hasBucket).toBe(true); 96 | }); 97 | 98 | it('should handle BucketAlreadyExists error during creation', async () => { 99 | const notFoundError = { name: 'NotFound' }; 100 | const bucketExistsError = { name: 'BucketAlreadyExists' }; 101 | s3ClientMock.send.and.returnValues( 102 | Promise.reject(notFoundError), 103 | Promise.reject(bucketExistsError) 104 | ); 105 | 106 | await s3.createBucket(); 107 | 108 | expect(s3ClientMock.send).toHaveBeenCalledTimes(2); 109 | expect(s3ClientMock.send).toHaveBeenCalledWith(jasmine.any(HeadBucketCommand)); 110 | expect(s3ClientMock.send).toHaveBeenCalledWith(jasmine.any(CreateBucketCommand)); 111 | expect(s3._hasBucket).toBe(true); 112 | }); 113 | 114 | it('should handle BucketAlreadyOwnedByYou error during creation', async () => { 115 | const notFoundError = { name: 'NotFound' }; 116 | const bucketOwnedError = { name: 'BucketAlreadyOwnedByYou' }; 117 | s3ClientMock.send.and.returnValues( 118 | Promise.reject(notFoundError), 119 | Promise.reject(bucketOwnedError) 120 | ); 121 | 122 | await s3.createBucket(); 123 | 124 | expect(s3ClientMock.send).toHaveBeenCalledTimes(2); 125 | expect(s3ClientMock.send).toHaveBeenCalledWith(jasmine.any(HeadBucketCommand)); 126 | expect(s3ClientMock.send).toHaveBeenCalledWith(jasmine.any(CreateBucketCommand)); 127 | expect(s3._hasBucket).toBe(true); 128 | }); 129 | 130 | it('should throw non-NotFound errors during check', async () => { 131 | const otherError = { name: 'SomeOtherError' }; 132 | s3ClientMock.send.and.returnValue(Promise.reject(otherError)); 133 | 134 | await expectAsync(s3.createBucket()) 135 | .toBeRejectedWith(otherError); 136 | expect(s3._hasBucket).toBe(false); 137 | }); 138 | 139 | it('should throw unexpected errors during creation', async () => { 140 | const notFoundError = { name: 'NotFound' }; 141 | const creationError = { name: 'CreationError' }; 142 | 
s3ClientMock.send.and.returnValues( 143 | Promise.reject(notFoundError), 144 | Promise.reject(creationError) 145 | ); 146 | 147 | await expectAsync(s3.createBucket()) 148 | .toBeRejectedWith(creationError); 149 | expect(s3._hasBucket).toBe(false); 150 | }); 151 | }) 152 | 153 | describe('configured with immutable values', () => { 154 | describe('not initialized properly', () => { 155 | it('should fail with two string arguments', () => { 156 | expect(() => { 157 | new S3Adapter(config.get('accessKey'), config.get('secretKey'), {}); 158 | }).toThrow(new Error('Failed to configure S3Adapter. Arguments don\'t make sense')); 159 | }); 160 | 161 | it('should fail when passed an object without a bucket', () => { 162 | expect(() => { 163 | new S3Adapter(config.get('insufficientOptions')); 164 | }).toThrow(new Error("S3Adapter requires option 'bucket' or env. variable S3_BUCKET")); 165 | }); 166 | }); 167 | 168 | describe('should not throw when initialized properly', () => { 169 | it('should accept a string bucket', () => { 170 | expect(() => { 171 | new S3Adapter(config.get('bucket')); 172 | }).not.toThrow(); 173 | }); 174 | 175 | it('should accept an object with a bucket', () => { 176 | expect(() => { 177 | new S3Adapter(config.get('objectWithBucket')); 178 | }).not.toThrow(); 179 | }); 180 | 181 | it('should accept a second argument of object with a params object with a bucket', () => { 182 | expect(() => { 183 | new S3Adapter(config.get('emptyObject'), config.get('paramsObjectWBucket')); 184 | }).not.toThrow(); 185 | }); 186 | 187 | it('should accept environment over default', () => { 188 | const TEST_REGION = 'test'; 189 | process.env.S3_REGION = TEST_REGION; 190 | const s3 = new S3Adapter(config.get('bucket')); 191 | expect(s3._region).toBe(TEST_REGION); 192 | }); 193 | }); 194 | }); 195 | 196 | describe('to find the right arg in the right place', () => { 197 | it('should accept just bucket as first string arg', () => { 198 | const args = ['bucket']; 199 | const 
options = optionsFromArguments(args); 200 | expect(options.bucket).toEqual('bucket'); 201 | }); 202 | 203 | it('should accept bucket and options', () => { 204 | const confObj = { bucketPrefix: 'test/' }; 205 | const args = ['bucket', confObj]; 206 | const options = optionsFromArguments(args); 207 | expect(options.bucket).toEqual('bucket'); 208 | expect(options.bucketPrefix).toEqual('test/'); 209 | }); 210 | 211 | it('should accept key, secret, and bucket as args', () => { 212 | const args = ['key', 'secret', 'bucket']; 213 | const options = optionsFromArguments(args); 214 | expect(options.accessKey).toEqual('key'); 215 | expect(options.secretKey).toEqual('secret'); 216 | expect(options.bucket).toEqual('bucket'); 217 | }); 218 | 219 | it('should accept key, secret, bucket, and options object as args', () => { 220 | const confObj = { bucketPrefix: 'test/' }; 221 | const args = ['key', 'secret', 'bucket', confObj]; 222 | const options = optionsFromArguments(args); 223 | expect(options.accessKey).toEqual('key'); 224 | expect(options.secretKey).toEqual('secret'); 225 | expect(options.bucket).toEqual('bucket'); 226 | expect(options.bucketPrefix).toEqual('test/'); 227 | }); 228 | 229 | it('should use credentials when provided', async () => { 230 | const mockCredentials = { 231 | accessKeyId: 'mockAccessKeyId', 232 | secretAccessKey: 'mockSecretAccessKey', 233 | sessionToken: 'mockSessionToken', 234 | }; 235 | 236 | const options = { 237 | bucket: 'bucket-1', 238 | credentials: mockCredentials 239 | }; 240 | 241 | const adapter = new S3Adapter(options); 242 | const credentials = await adapter._s3Client.config.credentials(); 243 | 244 | expect(credentials.accessKeyId).toEqual(mockCredentials.accessKeyId); 245 | expect(credentials.secretAccessKey).toEqual(mockCredentials.secretAccessKey); 246 | expect(credentials.sessionToken).toEqual(mockCredentials.sessionToken); 247 | }); 248 | 249 | it('should accept options and overrides as an option in args', () => { 250 | const 
confObj = { 251 | bucketPrefix: 'test/', 252 | bucket: 'bucket-1', 253 | secretKey: 'secret-1', 254 | accessKey: 'key-1', 255 | s3overrides: { 256 | secretAccessKey: 'secret-2', 257 | accessKeyId: 'key-2', 258 | params: { Bucket: 'bucket-2' }, 259 | }, 260 | }; 261 | const s3 = new S3Adapter(confObj); 262 | expect(s3._s3Client.config.accessKeyId).toEqual('key-2'); 263 | expect(s3._s3Client.config.secretAccessKey).toEqual('secret-2'); 264 | expect(s3._s3Client.config.params.Bucket).toEqual('bucket-2'); 265 | expect(s3._bucketPrefix).toEqual('test/'); 266 | }); 267 | 268 | it('should accept endpoint as an override option in args', async () => { 269 | const otherEndpoint = 'https://example.com:8080/path?foo=bar'; 270 | const confObj = { 271 | bucketPrefix: 'test/', 272 | bucket: 'bucket-1', 273 | secretKey: 'secret-1', 274 | accessKey: 'key-1', 275 | s3overrides: { endpoint: otherEndpoint }, 276 | }; 277 | const s3 = new S3Adapter(confObj); 278 | expect(s3._endpoint).toEqual(otherEndpoint); 279 | const endpointFromConfig = await s3._s3Client.config.endpoint(); 280 | expect(endpointFromConfig.protocol).toEqual('https:'); 281 | expect(endpointFromConfig.path).toEqual('/path'); 282 | expect(endpointFromConfig.port).toEqual(8080); 283 | expect(endpointFromConfig.hostname).toEqual('example.com'); 284 | expect(endpointFromConfig.query.foo).toEqual('bar'); 285 | }); 286 | 287 | it("should have undefined endpoint if no custom endpoint is provided", async () => { 288 | const confObj = { 289 | bucketPrefix: 'test/', 290 | bucket: 'bucket-1', 291 | secretKey: 'secret-1', 292 | accessKey: 'key-1', 293 | }; 294 | const s3 = new S3Adapter(confObj); 295 | const endpoint = await s3._s3Client.config.endpoint?.(); 296 | expect(endpoint).toBeUndefined(); 297 | }); 298 | 299 | it('should accept options and overrides as args', () => { 300 | const confObj = { 301 | bucketPrefix: 'test/', 302 | bucket: 'bucket-1', 303 | secretKey: 'secret-1', 304 | accessKey: 'key-1', 305 | }; 306 | const 
overridesObj = { 307 | secretAccessKey: 'secret-2', 308 | accessKeyId: 'key-2', 309 | params: { Bucket: 'bucket-2' }, 310 | }; 311 | const s3 = new S3Adapter(confObj, overridesObj); 312 | expect(s3._s3Client.config.accessKeyId).toEqual('key-2'); 313 | expect(s3._s3Client.config.secretAccessKey).toEqual('secret-2'); 314 | expect(s3._s3Client.config.params.Bucket).toEqual('bucket-2'); 315 | expect(s3._bucketPrefix).toEqual('test/'); 316 | }); 317 | 318 | it('should accept overrides without params', () => { 319 | const confObj = { 320 | bucketPrefix: 'test/', 321 | bucket: 'bucket-1', 322 | secretKey: 'secret-1', 323 | accessKey: 'key-1', 324 | }; 325 | const overridesObj = { secretAccessKey: 'secret-2' }; 326 | const s3 = new S3Adapter(confObj, overridesObj); 327 | expect(s3._s3Client.config.accessKeyId).toEqual('key-1'); 328 | expect(s3._s3Client.config.secretAccessKey).toEqual('secret-2'); 329 | expect(s3._s3Client.config.params.Bucket).toEqual('bucket-1'); 330 | expect(s3._bucketPrefix).toEqual('test/'); 331 | }); 332 | }); 333 | 334 | describe('getFileStream', () => { 335 | it('should handle range bytes', () => { 336 | const s3 = new S3Adapter('accessKey', 'secretKey', 'my-bucket'); 337 | const s3ClientMock = jasmine.createSpyObj('S3Client', ['send']); 338 | const stream = new Readable(); 339 | stream.push('hello world'); 340 | stream.push(null); 341 | s3ClientMock.send.and.returnValue(Promise.resolve({ Body: stream })); 342 | s3._s3Client = s3ClientMock; 343 | 344 | const req = { 345 | get: () => 'bytes=0-1', 346 | }; 347 | const resp = { 348 | writeHead: jasmine.createSpy('writeHead'), 349 | write: jasmine.createSpy('write'), 350 | end: jasmine.createSpy('end'), 351 | }; 352 | s3.handleFileStream('test.mov', req, resp).then(data => { 353 | expect(data.toString('utf8')).toBe('hello world'); 354 | expect(resp.writeHead).toHaveBeenCalled(); 355 | expect(resp.write).toHaveBeenCalled(); 356 | expect(resp.end).toHaveBeenCalled(); 357 | 
expect(s3ClientMock.send).toHaveBeenCalledTimes(2); 358 | 359 | const commands = s3ClientMock.send.calls.all(); 360 | expect(commands[0].args[0]).toBeInstanceOf(HeadBucketCommand); 361 | const commandArg = commands[1].args[0]; 362 | expect(commandArg).toBeInstanceOf(GetObjectCommand); 363 | expect(commandArg.input.Range).toBe('bytes=0-1'); 364 | }); 365 | }); 366 | 367 | it('should handle range bytes error', () => { 368 | const s3 = new S3Adapter('accessKey', 'secretKey', 'my-bucket'); 369 | const s3ClientMock = jasmine.createSpyObj('S3Client', ['send']); 370 | s3ClientMock.send.and.returnValue(Promise.reject('FileNotFound')); 371 | s3._s3Client = s3ClientMock; 372 | 373 | const req = { 374 | get: () => 'bytes=0-1', 375 | }; 376 | const resp = { 377 | writeHead: jasmine.createSpy('writeHead'), 378 | write: jasmine.createSpy('write'), 379 | end: jasmine.createSpy('end'), 380 | }; 381 | s3.handleFileStream('test.mov', req, resp).catch(error => { 382 | expect(error).toBe('FileNotFound'); 383 | expect(resp.writeHead).not.toHaveBeenCalled(); 384 | expect(resp.write).not.toHaveBeenCalled(); 385 | expect(resp.end).not.toHaveBeenCalled(); 386 | }); 387 | }); 388 | 389 | it('should handle range bytes no data', () => { 390 | const s3 = new S3Adapter('accessKey', 'secretKey', 'my-bucket'); 391 | const s3ClientMock = jasmine.createSpyObj('S3Client', ['send']); 392 | s3ClientMock.send.and.returnValue(Promise.resolve({})); 393 | s3._s3Client = s3ClientMock; 394 | 395 | const req = { 396 | get: () => 'bytes=0-1', 397 | }; 398 | const resp = { 399 | writeHead: jasmine.createSpy('writeHead'), 400 | write: jasmine.createSpy('write'), 401 | end: jasmine.createSpy('end'), 402 | }; 403 | s3.handleFileStream('test.mov', req, resp).catch(error => { 404 | expect(error.message).toBe('S3 object body is missing.'); 405 | expect(resp.writeHead).not.toHaveBeenCalled(); 406 | expect(resp.write).not.toHaveBeenCalled(); 407 | expect(resp.end).not.toHaveBeenCalled(); 408 | }); 409 | }); 410 | 411 
| it('should handle stream errors', async () => { 412 | const s3 = new S3Adapter('accessKey', 'secretKey', 'my-bucket'); 413 | const s3ClientMock = jasmine.createSpyObj('S3Client', ['send']); 414 | 415 | const mockStream = { 416 | on: (event, callback) => { 417 | if (event === 'error') { 418 | callback(new Error('Mock S3 Body error')); 419 | } 420 | }, 421 | }; 422 | 423 | s3ClientMock.send.and.returnValue(Promise.resolve({ 424 | Body: mockStream, 425 | AcceptRanges: 'bytes', 426 | ContentLength: 1024, 427 | ContentRange: 'bytes 0-1024/2048', 428 | ContentType: 'application/octet-stream', 429 | })); 430 | s3._s3Client = s3ClientMock; 431 | 432 | const mockReq = { 433 | get: () => 'bytes=0-1024', 434 | }; 435 | const mockRes = { 436 | status: jasmine.createSpy('status'), 437 | send: jasmine.createSpy('send'), 438 | writeHead: jasmine.createSpy('writeHead'), 439 | write: jasmine.createSpy('write'), 440 | end: jasmine.createSpy('end'), 441 | }; 442 | 443 | s3.handleFileStream('test.mov', mockReq, mockRes).catch(() => { 444 | expect(mockRes.status).toHaveBeenCalledWith(404); 445 | expect(mockRes.send).toHaveBeenCalledWith('Mock S3 Body error'); 446 | }); 447 | }); 448 | }); 449 | 450 | describe('getFileLocation with directAccess', () => { 451 | const testConfig = { 452 | mount: 'http://my.server.com/parse', 453 | applicationId: 'xxxx', 454 | }; 455 | let options; 456 | 457 | beforeEach(() => { 458 | options = { 459 | directAccess: true, 460 | bucketPrefix: 'foo/bar/', 461 | baseUrl: 'http://example.com/files', 462 | }; 463 | }); 464 | 465 | it('should get using the baseUrl', async () => { 466 | const s3 = new S3Adapter('accessKey', 'secretKey', 'my-bucket', options); 467 | await expectAsync(s3.getFileLocation(testConfig, 'test.png')).toBeResolvedTo( 468 | 'http://example.com/files/foo/bar/test.png' 469 | ); 470 | }); 471 | 472 | it('should get direct to baseUrl', async () => { 473 | options.baseUrlDirect = true; 474 | const s3 = new S3Adapter('accessKey', 'secretKey', 
'my-bucket', options); 475 | await expectAsync(s3.getFileLocation(testConfig, 'test.png')).toBeResolvedTo( 476 | 'http://example.com/files/test.png' 477 | ); 478 | }); 479 | 480 | it('should get without directAccess', async () => { 481 | options.directAccess = false; 482 | const s3 = new S3Adapter('accessKey', 'secretKey', 'my-bucket', options); 483 | await expectAsync(s3.getFileLocation(testConfig, 'test.png')).toBeResolvedTo( 484 | 'http://my.server.com/parse/files/xxxx/test.png' 485 | ); 486 | }); 487 | 488 | it('should go directly to amazon', async () => { 489 | delete options.baseUrl; 490 | const s3 = new S3Adapter('accessKey', 'secretKey', 'my-bucket', options); 491 | await expectAsync(s3.getFileLocation(testConfig, 'test.png')).toBeResolvedTo( 492 | 'https://my-bucket.s3.amazonaws.com/foo/bar/test.png' 493 | ); 494 | }); 495 | }); 496 | describe('getFileLocation with baseUrl', () => { 497 | const testConfig = { 498 | mount: 'http://my.server.com/parse', 499 | applicationId: 'xxxx', 500 | }; 501 | let options; 502 | 503 | beforeEach(() => { 504 | options = { 505 | directAccess: true, 506 | bucketPrefix: 'foo/bar/', 507 | baseUrl: (fileconfig, filename) => { 508 | if (filename.length > 12) { 509 | return 'http://example.com/files'; 510 | } 511 | return 'http://example.com/files'; 512 | }, 513 | }; 514 | }); 515 | 516 | it('should get using the baseUrl', async () => { 517 | const s3 = new S3Adapter('accessKey', 'secretKey', 'my-bucket', options); 518 | await expectAsync(s3.getFileLocation(testConfig, 'test.png')).toBeResolvedTo( 519 | 'http://example.com/files/foo/bar/test.png' 520 | ); 521 | }); 522 | 523 | it('should get direct to baseUrl', async () => { 524 | options.baseUrlDirect = true; 525 | const s3 = new S3Adapter('accessKey', 'secretKey', 'my-bucket', options); 526 | await expectAsync(s3.getFileLocation(testConfig, 'test.png')).toBeResolvedTo( 527 | 'http://example.com/files/test.png' 528 | ); 529 | }); 530 | 531 | it('should get without directAccess', 
async () => {
    options.directAccess = false;
    const s3 = new S3Adapter('accessKey', 'secretKey', 'my-bucket', options);
    // Without direct access the location points back at the Parse mount.
    await expectAsync(s3.getFileLocation(testConfig, 'test.png')).toBeResolvedTo(
      'http://my.server.com/parse/files/xxxx/test.png'
    );
  });

  it('should go directly to amazon', async () => {
    // No baseUrl -> fall back to the bucket's S3 URL.
    delete options.baseUrl;
    const s3 = new S3Adapter('accessKey', 'secretKey', 'my-bucket', options);
    await expectAsync(s3.getFileLocation(testConfig, 'test.png')).toBeResolvedTo(
      'https://my-bucket.s3.amazonaws.com/foo/bar/test.png'
    );
  });
});

// getFileLocation behaviour when the presignedUrl option is toggled.
describe('getFileLocation with presignedUrl', () => {
  const testConfig = {
    mount: 'http://my.server.com/parse',
    applicationId: 'xxxx',
  };
  let options;

  beforeEach(() => {
    options = {
      presignedUrl: false,
      directAccess: true,
      bucketPrefix: 'foo/bar/',
      baseUrl: 'http://example.com/files',
    };
  });

  it('should get using the baseUrl', async () => {
    const s3 = new S3Adapter('accessKey', 'secretKey', 'my-bucket', options);
    await expectAsync(s3.getFileLocation(testConfig, 'test.png')).toBeResolvedTo(
      'http://example.com/files/foo/bar/test.png'
    );
  });

  it('when use presigned URL should use S3 \'getObject\' operation', async () => {
    options.presignedUrl = true;
    const s3 = getMockS3Adapter(options);

    // Monkey-patch the signing step and capture the command object it
    // receives so we can assert on its type below.
    let getSignedUrlCommand = '';
    s3.getFileSignedUrl = (_, command) => {
      getSignedUrlCommand = command;
    };

    await s3.getFileLocation(testConfig, 'test.png');
    expect(getSignedUrlCommand).toBeInstanceOf(GetObjectCommand);
  });

  it('should get using the baseUrl and amazon using presigned URL', async () => {
    options.presignedUrl = true;
    const s3 = new S3Adapter('accessKey', 'secretKey', 'my-bucket', options);

    const fileLocation =
await s3.getFileLocation(testConfig, 'test.png'); 588 | expect(fileLocation).toMatch(/^http:\/\/example.com\/files\/foo\/bar\/test.png\?/); 589 | expect(fileLocation).toMatch( 590 | /X-Amz-Credential=accessKey%2F\d{8}%2F\w{2}-\w{1,9}-\d%2Fs3%2Faws4_request/ 591 | ); 592 | expect(fileLocation).toMatch(/X-Amz-Date=\d{8}T\d{6}Z/); 593 | expect(fileLocation).toMatch(/X-Amz-Signature=.{64}/); 594 | expect(fileLocation).toMatch(/X-Amz-Expires=\d{1,6}/); 595 | expect(fileLocation).toContain('X-Amz-Algorithm=AWS4-HMAC-SHA256'); 596 | expect(fileLocation).toContain('X-Amz-SignedHeaders=host'); 597 | }); 598 | 599 | it('should get direct to baseUrl', async () => { 600 | options.baseUrlDirect = true; 601 | const s3 = new S3Adapter('accessKey', 'secretKey', 'my-bucket', options); 602 | await expectAsync(s3.getFileLocation(testConfig, 'test.png')).toBeResolvedTo( 603 | 'http://example.com/files/test.png' 604 | ); 605 | }); 606 | 607 | it('should get without directAccess', async () => { 608 | options.directAccess = false; 609 | const s3 = new S3Adapter('accessKey', 'secretKey', 'my-bucket', options); 610 | await expectAsync(s3.getFileLocation(testConfig, 'test.png')).toBeResolvedTo( 611 | 'http://my.server.com/parse/files/xxxx/test.png' 612 | ); 613 | }); 614 | 615 | it('should go directly to amazon', async () => { 616 | delete options.baseUrl; 617 | const s3 = new S3Adapter('accessKey', 'secretKey', 'my-bucket', options); 618 | await expectAsync(s3.getFileLocation(testConfig, 'test.png')).toBeResolvedTo( 619 | 'https://my-bucket.s3.amazonaws.com/foo/bar/test.png' 620 | ); 621 | }); 622 | 623 | it('should go directly to amazon using presigned URL', async () => { 624 | delete options.baseUrl; 625 | options.presignedUrl = true; 626 | const s3 = new S3Adapter('accessKey', 'secretKey', 'my-bucket', options); 627 | 628 | const fileLocation = await s3.getFileLocation(testConfig, 'test.png'); 629 | expect(fileLocation).toMatch( 630 | 
/^https:\/\/my-bucket.s3.us-east-1.amazonaws.com\/foo\/bar\/test.png\?/ 631 | ); 632 | expect(fileLocation).toMatch( 633 | /X-Amz-Credential=accessKey%2F\d{8}%2Fus-east-1%2Fs3%2Faws4_request/ 634 | ); 635 | expect(fileLocation).toMatch(/X-Amz-Date=\d{8}T\d{6}Z/); 636 | expect(fileLocation).toMatch(/X-Amz-Signature=.{64}/); 637 | expect(fileLocation).toMatch(/X-Amz-Expires=\d{1,6}/); 638 | expect(fileLocation).toContain('X-Amz-Algorithm=AWS4-HMAC-SHA256'); 639 | expect(fileLocation).toContain('X-Amz-SignedHeaders=host'); 640 | }); 641 | }); 642 | 643 | describe('getFileLocation with async baseUrl', () => { 644 | const testConfig = { 645 | mount: 'http://example.com/parse', 646 | applicationId: 'xxxx', 647 | }; 648 | let options; 649 | 650 | beforeEach(() => { 651 | options = { 652 | directAccess: true, 653 | bucketPrefix: 'foo/bar/', 654 | baseUrl: async () => { 655 | return await Promise.resolve('http://example.com/files'); 656 | }, 657 | }; 658 | }); 659 | 660 | it('should await async baseUrl', async () => { 661 | const s3 = new S3Adapter('accessKey', 'secretKey', 'my-bucket', options); 662 | await expectAsync(s3.getFileLocation(testConfig, 'test.png')).toBeResolvedTo( 663 | 'http://example.com/files/foo/bar/test.png' 664 | ); 665 | }); 666 | 667 | it('should direct to async baseUrl when baseUrlDirect', async () => { 668 | options.baseUrlDirect = true; 669 | const s3 = new S3Adapter('accessKey', 'secretKey', 'my-bucket', options); 670 | await expectAsync(s3.getFileLocation(testConfig, 'test.png')).toBeResolvedTo( 671 | 'http://example.com/files/test.png' 672 | ); 673 | }); 674 | }); 675 | 676 | describe('validateFilename', () => { 677 | let options; 678 | 679 | beforeEach(() => { 680 | options = { 681 | validateFilename: null, 682 | }; 683 | }); 684 | 685 | it('should be null by default', () => { 686 | const s3 = new S3Adapter('accessKey', 'secretKey', 'my-bucket', options); 687 | expect(s3.validateFilename === null).toBe(true); 688 | }); 689 | 690 | it('should 
not allow directories when overridden', () => {
    // Custom validator: reject any filename containing a path separator.
    options.validateFilename = filename => {
      if (filename.indexOf('/') !== -1) {
        return new Parse.Error(
          Parse.Error.INVALID_FILE_NAME,
          'Filename contains invalid characters.'
        );
      }
      return null; // null signals the filename is valid
    };
    const s3 = new S3Adapter('accessKey', 'secretKey', 'my-bucket', options);
    expect(s3.validateFilename('foo/bar') instanceof Parse.Error).toBe(true);
  });
});

// The generateKey hook rewrites the object key before upload.
describe('generateKey', () => {
  let options;
  // Promises created inside the tests are collected here and settled in
  // afterAll — NOTE(review): failures inside .then() surface at suite
  // teardown rather than in the owning test.
  const promises = [];

  beforeEach(() => {
    options = {
      bucketPrefix: 'test/',
      // Insert a `<timestamp>_` prefix before the last path component.
      generateKey: filename => {
        let key = '';
        const lastSlash = filename.lastIndexOf('/');
        const prefix = `${Date.now()}_`;
        if (lastSlash > 0) {
          // put the prefix before the last component of the filename
          key +=
            filename.substring(0, lastSlash + 1) + prefix + filename.substring(lastSlash + 1);
        } else {
          key += prefix + filename;
        }
        return key;
      },
    };
  });

  it('should return a file with a date stamp inserted in the path', () => {
    const s3 = getMockS3Adapter(options);
    const fileName = 'randomFileName.txt';
    const response = s3.createFile(fileName, 'hello world', 'text/utf8').then(value => {
      const url = new URL(value.Location);
      // index > 13 — presumably skips past "/test/<timestamp>_"; confirm.
      expect(url.pathname.indexOf(fileName) > 13).toBe(true);
    });
    promises.push(response);
  });

  it('should do nothing when null', () => {
    // A null generateKey leaves the key as bucketPrefix + filename.
    options.generateKey = null;
    const s3 = getMockS3Adapter(options);
    const fileName = 'foo/randomFileName.txt';
    const response = s3.createFile(fileName, 'hello world', 'text/utf8').then(value => {
      const url = new URL(value.Location);
      expect(url.pathname.substring(1)).toEqual(options.bucketPrefix + fileName);
    });
    promises.push(response);
  });

  it('should add unique timestamp to
the file name after the last directory when there is a path', () => { 750 | const s3 = getMockS3Adapter(options); 751 | const fileName = 'foo/randomFileName.txt'; 752 | const response = s3.createFile(fileName, 'hello world', 'text/utf8').then(value => { 753 | const url = new URL(value.Location); 754 | expect(url.pathname.indexOf('foo/')).toEqual(6); 755 | expect(url.pathname.indexOf('random') > 13).toBe(true); 756 | }); 757 | promises.push(response); 758 | }); 759 | 760 | afterAll(() => Promise.all(promises)); 761 | }); 762 | 763 | describe('createFile', () => { 764 | let options, s3ClientMock; 765 | beforeEach(() => { 766 | options = { 767 | bucket: 'bucket-1', 768 | bucketPrefix: 'test/', 769 | }; 770 | s3ClientMock = jasmine.createSpyObj('S3Client', ['send']); 771 | s3ClientMock.send.and.returnValue(Promise.resolve()); 772 | }); 773 | 774 | it('should save a file with right command', async () => { 775 | const s3 = new S3Adapter(options); 776 | s3._s3Client = s3ClientMock; 777 | 778 | await s3.createFile('file.txt', 'hello world', 'text/utf8', {}); 779 | 780 | expect(s3ClientMock.send).toHaveBeenCalledTimes(2); 781 | expect(s3ClientMock.send).toHaveBeenCalledWith(jasmine.any(HeadBucketCommand)); 782 | expect(s3ClientMock.send).toHaveBeenCalledWith(jasmine.any(PutObjectCommand)); 783 | }); 784 | 785 | it('should save a file with metadata added', async () => { 786 | const s3 = new S3Adapter(options); 787 | s3._s3Client = s3ClientMock; 788 | const metadata = { foo: 'bar' }; 789 | 790 | await s3.createFile('file.txt', 'hello world', 'text/utf8', { metadata }); 791 | expect(s3ClientMock.send).toHaveBeenCalledTimes(2); 792 | const commands = s3ClientMock.send.calls.all(); 793 | expect(commands[0].args[0]).toBeInstanceOf(HeadBucketCommand); 794 | const commandArg = commands[1].args[0]; 795 | expect(commandArg).toBeInstanceOf(PutObjectCommand); 796 | expect(commandArg.input.Metadata).toEqual({ foo: 'bar' }); 797 | }); 798 | 799 | it('should save a file with tags added', 
async () => {
    const s3 = new S3Adapter(options);
    s3._s3Client = s3ClientMock;
    const tags = { foo: 'bar', baz: 'bin' };

    await s3.createFile('file.txt', 'hello world', 'text/utf8', { tags });
    expect(s3ClientMock.send).toHaveBeenCalledTimes(2);
    const commands = s3ClientMock.send.calls.all();
    expect(commands[0].args[0]).toBeInstanceOf(HeadBucketCommand);
    const commandArg = commands[1].args[0];
    expect(commandArg).toBeInstanceOf(PutObjectCommand);
    // Tags are serialized into a URL-encoded Tagging string on the command.
    expect(commandArg.input.Tagging).toBe('foo=bar&baz=bin');
  });

  it('should save a file with proper ACL with direct access', async () => {
    // directAccess defaults the object ACL to public-read.
    options.directAccess = true;
    const s3 = new S3Adapter(options);
    s3._s3Client = s3ClientMock;

    await s3.createFile('file.txt', 'hello world', 'text/utf8', {});
    expect(s3ClientMock.send).toHaveBeenCalledTimes(2);
    const commands = s3ClientMock.send.calls.all();
    expect(commands[0].args[0]).toBeInstanceOf(HeadBucketCommand);
    const commandArg = commands[1].args[0];
    expect(commandArg).toBeInstanceOf(PutObjectCommand);
    expect(commandArg.input.ACL).toBe('public-read');
  });

  it('should save a file with proper ACL without direct access', async () => {
    // No directAccess -> no ACL is set on the upload.
    const s3 = new S3Adapter(options);
    s3._s3Client = s3ClientMock;

    await s3.createFile('file.txt', 'hello world', 'text/utf8', {});
    expect(s3ClientMock.send).toHaveBeenCalledTimes(2);
    const commands = s3ClientMock.send.calls.all();
    expect(commands[0].args[0]).toBeInstanceOf(HeadBucketCommand);
    const commandArg = commands[1].args[0];
    expect(commandArg).toBeInstanceOf(PutObjectCommand);
    expect(commandArg.input.ACL).toBeUndefined();
  });

  it('should save a file and override ACL with direct access', async () => {
    // fileAcl takes precedence over the directAccess default.
    options.directAccess = true;
    options.fileAcl = 'private';
    const s3 = new S3Adapter(options);
s3._s3Client = s3ClientMock;

    await s3.createFile('file.txt', 'hello world', 'text/utf8', {});
    expect(s3ClientMock.send).toHaveBeenCalledTimes(2);
    const commands = s3ClientMock.send.calls.all();
    expect(commands[0].args[0]).toBeInstanceOf(HeadBucketCommand);
    const commandArg = commands[1].args[0];
    expect(commandArg).toBeInstanceOf(PutObjectCommand);
    expect(commandArg.input.ACL).toBe('private');
  });

  it('should save a file and remove ACL with direct access', async () => {
    // Create adapter
    options.directAccess = true;
    // fileAcl 'none' strips the ACL entirely (asserted undefined below).
    options.fileAcl = 'none';
    const s3 = new S3Adapter(options);
    s3._s3Client = s3ClientMock;

    await s3.createFile('file.txt', 'hello world', 'text/utf8', {});
    expect(s3ClientMock.send).toHaveBeenCalledTimes(2);
    const commands = s3ClientMock.send.calls.all();
    expect(commands[0].args[0]).toBeInstanceOf(HeadBucketCommand);
    const commandArg = commands[1].args[0];
    expect(commandArg).toBeInstanceOf(PutObjectCommand);
    expect(commandArg.input.ACL).toBeUndefined();
  });
});

// Range-request streaming through the adapter's handleFileStream.
describe('handleFileStream', () => {
  const filename = 'file.txt';
  let s3;

  beforeAll(async () => {
    // Upload a known fixture once; deleted again in afterAll.
    s3 = getMockS3Adapter({ bucketPrefix: 'test-prefix/' });
    const testFileContent = 'hello world!
This is a test file for S3 streaming.';
    await s3.createFile(filename, testFileContent, 'text/plain', {});
  });

  afterAll(async () => {
    await s3.deleteFile(filename);
  });

  it('should get stream bytes correctly', async () => {
    // Minimal Express-like request: only the Range header is consulted.
    const req = {
      get: jasmine.createSpy('get').and.callFake(header => {
        if (header === 'Range') { return 'bytes=0-10'; }
        return null;
      }),
    };
    const res = {
      writeHead: jasmine.createSpy('writeHead'),
      write: jasmine.createSpy('write'),
      end: jasmine.createSpy('end'),
    };
    const data = await s3.handleFileStream(filename, req, res);

    // bytes=0-10 is inclusive: the first 11 bytes, i.e. 'hello world'.
    expect(data.toString('utf8')).toBe('hello world');
    expect(res.writeHead).toHaveBeenCalled();
    expect(res.write).toHaveBeenCalled();
    expect(res.end).toHaveBeenCalled();
  });
});

// Credential resolution: the S3Client constructor is replaced (via rewire)
// with a spy that just records the config it was built with.
describe('credentials', () => {
  let s3ClientMock, S3Adapter;

  beforeEach(() => {
    S3Adapter = rewire("../index");

    s3ClientMock = jasmine.createSpy("S3Client").and.callFake(function (config) {
      this.config = config;
    });

    S3Adapter.__set__("S3Client", s3ClientMock);
  });

  it('should use direct credentials', async () => {
    // accessKey/secretKey options are mapped into an SDK credentials object.
    const options = {
      bucket: 'bucket-1',
      accessKey: 'access-key',
      secretKey: 'secret-key'
    };
    const s3 = new S3Adapter(options);

    expect(s3._s3Client.config.credentials).toEqual({
      accessKeyId: 'access-key',
      secretAccessKey: 'secret-key'
    });
  });

  it('should use credentials', async () => {
    const options = {
      bucket: 'bucket-1',
      credentials: {
        accessKeyId: 'access-key',
        secretAccessKey: 'secret-key'
      }
    };
    const s3 = new S3Adapter(options);

    expect(s3._s3Client.config.credentials).toEqual({
      accessKeyId: 'access-key',
      secretAccessKey: 'secret-key'
    });
});

  it('should use s3overrides credentials', async () => {
    // Credentials nested under s3overrides reach the client config too.
    const options = {
      bucket: 'bucket-1',
      s3overrides: {
        credentials: {
          accessKeyId: 'access-key',
          secretAccessKey: 'secret-key'
        }
      }
    };
    const s3 = new S3Adapter(options);

    expect(s3._s3Client.config.credentials).toEqual({
      accessKeyId: 'access-key',
      secretAccessKey: 'secret-key'
    });
  });

  it('should handle custom credential provider', async () => {
    // Identity check (toBe, not toEqual): the very same provider object
    // must be handed through to the client untouched.
    const customCredentials = {
      getCredentials: () => Promise.resolve({
        accessKeyId: 'custom-key',
        secretAccessKey: 'custom-secret'
      })
    };
    const options = {
      bucket: 'bucket-1',
      credentials: customCredentials
    };
    const s3 = new S3Adapter(options);

    expect(s3._s3Client.config.credentials).toBe(customCredentials);
  });
});
});
--------------------------------------------------------------------------------