├── .npmignore ├── .gitignore ├── examples ├── simple │ ├── package.json │ └── index.js └── pouchdb │ ├── package.json │ ├── README.md │ ├── pouchdb-s3leveldown.js │ └── index.js ├── .github └── workflows │ ├── publish.yml │ └── nodejs-test.yml ├── s3leveldown.d.ts ├── LICENSE ├── CHANGELOG.md ├── package.json ├── .jshintrc ├── test.js ├── README.md └── s3leveldown.js /.npmignore: -------------------------------------------------------------------------------- 1 | examples 2 | test.js 3 | .*/**/* 4 | .* 5 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | node_modules 3 | examples/**/package-lock.json 4 | -------------------------------------------------------------------------------- /examples/simple/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "simple-example", 3 | "version": "1.0.0", 4 | "description": "Simple s3leveldown example", 5 | "main": "index.js", 6 | "author": "Loune", 7 | "license": "MIT", 8 | "private": true, 9 | "dependencies": { 10 | "@aws-sdk/client-s3": "^3.329.0", 11 | "levelup": "^5.1.1", 12 | "s3leveldown": "^3.0.0" 13 | }, 14 | "scripts": { 15 | "start": "node index.js" 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /examples/pouchdb/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "pouchdb-example", 3 | "version": "2.0.0", 4 | "description": "s3leveldown example usage with PouchDB", 5 | "main": "index.js", 6 | "author": "Loune", 7 | "license": "MIT", 8 | "private": true, 9 | "dependencies": { 10 | "@aws-sdk/client-s3": "^3.326.0", 11 | "levelup": "^5.1.1", 12 | "pouchdb": "^8.0.1", 13 | "pouchdb-adapter-leveldb-core": "^8.0.1", 14 | "s3leveldown": "^3.0.0" 15 | }, 16 | "scripts": { 17 | "start": "node index.js" 18 | } 
19 | } 20 | -------------------------------------------------------------------------------- /examples/pouchdb/README.md: -------------------------------------------------------------------------------- 1 | # S3LevelDOWN PouchDB example 2 | 3 | This is an example of using S3LevelDown with [PouchDB](https://github.com/pouchdb/pouchdb) allowing you to use S3 as a backend to PouchDB. 4 | 5 | WARNING: Concurrent writes are not supported and will result in database corruption. See this [blog post](https://loune.net/2017/04/using-aws-s3-as-a-database-with-pouchdb/) for more information. 6 | 7 | ## Running 8 | 9 | Set `S3_TEST_BUCKET` to your test S3 bucket. 10 | 11 | ```bash 12 | $ S3_TEST_BUCKET=mybucket npm start 13 | ``` 14 | -------------------------------------------------------------------------------- /.github/workflows/publish.yml: -------------------------------------------------------------------------------- 1 | name: Publish Package to npmjs 2 | on: 3 | release: 4 | types: [published] 5 | 6 | permissions: 7 | id-token: write # Required for OIDC 8 | contents: read 9 | 10 | jobs: 11 | build: 12 | runs-on: ubuntu-latest 13 | permissions: 14 | contents: read 15 | id-token: write 16 | steps: 17 | - uses: actions/checkout@v5 18 | # Setup .npmrc file to publish to npm 19 | - uses: actions/setup-node@v4 20 | with: 21 | node-version: '24.x' 22 | registry-url: 'https://registry.npmjs.org' 23 | - run: npm ci 24 | - run: npm publish --provenance --access public 25 | -------------------------------------------------------------------------------- /examples/pouchdb/pouchdb-s3leveldown.js: -------------------------------------------------------------------------------- 1 | const CoreLevelPouch = require('pouchdb-adapter-leveldb-core'); 2 | const assign = require('pouchdb-utils').assign; 3 | const S3LevelDown = require('s3leveldown'); 4 | 5 | function S3LevelDownPouch(opts, callback) { 6 | var _opts = assign({ 7 | db: (bucket) => new S3LevelDown(bucket) 8 | }, opts); 9 | 10 
| CoreLevelPouch.call(this, _opts, callback); 11 | } 12 | 13 | // overrides for normal LevelDB behavior on Node 14 | S3LevelDownPouch.valid = function () { 15 | return true; 16 | }; 17 | S3LevelDownPouch.use_prefix = false; 18 | 19 | module.exports = function (PouchDB) { 20 | PouchDB.adapter('s3leveldown', S3LevelDownPouch, true); 21 | } 22 | -------------------------------------------------------------------------------- /s3leveldown.d.ts: -------------------------------------------------------------------------------- 1 | import { S3Client } from '@aws-sdk/client-s3'; 2 | import { AbstractLevelDOWN } from 'abstract-leveldown'; 3 | 4 | export interface S3LevelDown extends AbstractLevelDOWN {} 5 | 6 | interface S3LevelDownConstructor { 7 | /** 8 | * Create a S3LevelDown object to pass into levelup. E.g. `levelup(new S3LevelDOWN('mybucket')`. 9 | * @param {string} location Name of the S3 bucket with optional sub-folder. Example `mybucket` or `mybucket/folder`. 10 | * @param {S3} [s3] Optional S3 Client. 
11 | */ 12 | new (location: string, s3?: S3Client): S3LevelDown; 13 | } 14 | 15 | declare var S3LevelDown: S3LevelDownConstructor; 16 | 17 | export default S3LevelDown; 18 | -------------------------------------------------------------------------------- /examples/simple/index.js: -------------------------------------------------------------------------------- 1 | const levelup = require('levelup'); 2 | const s3leveldown = require('s3leveldown'); 3 | 4 | // AWS.config.update({ region:'ap-southeast-2' }); 5 | 6 | if (!process.env.S3_TEST_BUCKET) { 7 | console.error("Please set the S3_TEST_BUCKET environment variable to run the test"); 8 | process.exit(1); 9 | return; 10 | } 11 | 12 | (async () => { 13 | // create DB 14 | const db = levelup(new s3leveldown(process.env.S3_TEST_BUCKET)); 15 | 16 | // put items 17 | await db.batch() 18 | .put('name', 'Pikachu') 19 | .put('dob', 'February 27, 1996') 20 | .put('occupation', 'Pokemon') 21 | .write(); 22 | 23 | // read items 24 | await db.createReadStream() 25 | .on('data', data => { console.log('data', `${data.key.toString()}=${data.value.toString()}`); }) 26 | .on('close', () => { console.log('done!') }); 27 | })(); 28 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright 2019-2021 Loune Lam 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
6 | 7 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 8 | -------------------------------------------------------------------------------- /examples/pouchdb/index.js: -------------------------------------------------------------------------------- 1 | const PouchDB = require('pouchdb'); 2 | 3 | PouchDB.plugin(require('./pouchdb-s3leveldown')); 4 | 5 | if (!process.env.S3_TEST_BUCKET) { 6 | console.error("Please set the S3_TEST_BUCKET environment variable to run the test"); 7 | process.exit(1); 8 | return; 9 | } 10 | 11 | const db = new PouchDB(process.env.S3_TEST_BUCKET, { adapter: 's3leveldown' }); 12 | 13 | function addTodo(text) { 14 | const todo = { 15 | _id: `todo:${text}`, 16 | title: text, 17 | completed: false 18 | }; 19 | db.put(todo, (err, result) => { 20 | if (!err) { 21 | console.log('Successfully posted a todo!'); 22 | } 23 | else { 24 | console.log(err); 25 | } 26 | }); 27 | } 28 | 29 | function showTodos() { 30 | db.allDocs({include_docs: true, descending: true}, (err, doc) => { 31 | if (!err) { 32 | console.log(doc.rows); 33 | } 34 | else { 35 | console.log(err); 36 | } 37 | }); 38 | } 39 | 40 | addTodo('shopping'); 41 | addTodo('isolate'); 42 | addTodo('exercise'); 43 | 44 | showTodos(); 45 | -------------------------------------------------------------------------------- /.github/workflows/nodejs-test.yml: -------------------------------------------------------------------------------- 1 | # This workflow will do a clean install of node dependencies, build the source code and run tests across different versions of node 2 | # For more 
information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions 3 | 4 | name: Integration Test 5 | 6 | on: 7 | push: 8 | branches: [ master ] 9 | pull_request: 10 | branches: [ master ] 11 | 12 | jobs: 13 | build: 14 | runs-on: ubuntu-latest 15 | 16 | strategy: 17 | matrix: 18 | node-version: [24.x] 19 | 20 | steps: 21 | - uses: actions/checkout@v5 22 | - name: Use Node.js ${{ matrix.node-version }} 23 | uses: actions/setup-node@v4 24 | with: 25 | node-version: ${{ matrix.node-version }} 26 | - run: npm ci 27 | - run: S3_TEST_BUCKET=${{ secrets.TEST_BUCKET }} npm test 28 | env: 29 | CI: true 30 | AWS_ACCESS_KEY_ID: ${{ secrets.TEST_AWS_ACCESS_KEY_ID }} 31 | AWS_SECRET_ACCESS_KEY: ${{ secrets.TEST_AWS_SECRET_ACCESS_KEY }} 32 | AWS_REGION: ap-southeast-2 33 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | ## [3.0.0] - 2023-04-05 4 | 5 | ### Changed 6 | 7 | - When initialising S3LevelDown, new is always required. 8 | - Update dependencies 9 | - Node.js 14 or higher required. 10 | - `levelup` `^5.1.1` required. 11 | - `@aws-sdk/client-s3` (AWS SDK v3) required. (replaces `aws-sdk`) 12 | - Use ESNext syntax. 13 | 14 | ## [2.2.2] - 2021-11-07 15 | 16 | ### Changed 17 | 18 | - Move `levelup` to peer dependency and support `^5.1.1`. 19 | 20 | ## [2.2.1] - 2021-02-14 21 | 22 | ### Changed 23 | 24 | - Improve TypeScript definition. 25 | 26 | ## [2.2.0] - 2021-02-05 27 | 28 | ### Added 29 | 30 | - TypeScript types. 31 | 32 | ### Changed 33 | 34 | - Update dependencies. 35 | 36 | ## [2.1.0] - 2020-04-05 37 | 38 | ### Changed 39 | 40 | - Update `abstract-leveldown`. 41 | 42 | ### Fixed 43 | 44 | - S3 client now passed to constructor in function (#2). 45 | 46 | ## [2.0.0] - 2019-04-27 47 | 48 | ### Changed 49 | 50 | - Support LevelDOWN `4.0.1`. 
51 | - Support new `abstract-leveldown` `6.0.3` tests. 52 | - Constructor now takes `location` and `s3` parameters. 53 | - `open` now creates the S3 bucket if `createIfMissing` is `true` 54 | 55 | ## [1.0.0] - 2017-02-05 56 | 57 | - Initial Release 58 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "s3leveldown", 3 | "version": "3.0.1", 4 | "description": "An implementation of LevelDOWN that uses Amazon S3. Turn your S3 bucket into a DB", 5 | "homepage": "https://github.com/loune/s3leveldown", 6 | "main": "s3leveldown.js", 7 | "engines": { 8 | "node": ">=14" 9 | }, 10 | "scripts": { 11 | "test": "yarn lint && node ./test.js", 12 | "lint": "./node_modules/.bin/jshint s3leveldown.js" 13 | }, 14 | "author": "Loune Lam (https://loune.net/)", 15 | "keywords": [ 16 | "level", 17 | "leveldb", 18 | "leveldown", 19 | "levelup", 20 | "S3", 21 | "AWS" 22 | ], 23 | "repository": { 24 | "type": "git", 25 | "url": "https://github.com/loune/s3leveldown.git" 26 | }, 27 | "license": "MIT", 28 | "dependencies": { 29 | "@types/abstract-leveldown": "^7.2.5", 30 | "abstract-leveldown": "^7.2.0", 31 | "debug": "^4.4.3", 32 | "ltgt": "^2.2.1" 33 | }, 34 | "peerDependencies": { 35 | "@aws-sdk/client-s3": "^3.891.0", 36 | "levelup": "^5.1.1" 37 | }, 38 | "devDependencies": { 39 | "@aws-sdk/client-s3": "^3.891.0", 40 | "jshint": "^2.13.6", 41 | "levelup": "^5.1.1", 42 | "tape": "^5.9.0" 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /.jshintrc: -------------------------------------------------------------------------------- 1 | { 2 | "predef": [ ] 3 | , "bitwise": false 4 | , "camelcase": false 5 | , "curly": false 6 | , "eqeqeq": false 7 | , "forin": false 8 | , "immed": false 9 | , "latedef": false 10 | , "newcap": true 11 | , "noarg": true 12 | , "noempty": true 13 | , "nonew": true 14 | , 
"plusplus": false 15 | , "quotmark": true 16 | , "regexp": false 17 | , "undef": true 18 | , "unused": true 19 | , "strict": false 20 | , "trailing": true 21 | , "maxlen": 120 22 | , "asi": true 23 | , "boss": true 24 | , "debug": true 25 | , "eqnull": true 26 | , "evil": true 27 | , "expr": true 28 | , "funcscope": false 29 | , "globalstrict": false 30 | , "iterator": false 31 | , "lastsemic": true 32 | , "laxbreak": true 33 | , "laxcomma": true 34 | , "loopfunc": true 35 | , "multistr": false 36 | , "onecase": false 37 | , "proto": false 38 | , "regexdash": false 39 | , "scripturl": true 40 | , "smarttabs": false 41 | , "shadow": false 42 | , "sub": true 43 | , "supernew": false 44 | , "validthis": true 45 | , "browser": true 46 | , "couch": false 47 | , "devel": false 48 | , "dojo": false 49 | , "mootools": false 50 | , "node": true 51 | , "nonstandard": true 52 | , "prototypejs": false 53 | , "rhino": false 54 | , "worker": true 55 | , "wsh": false 56 | , "nomen": false 57 | , "onevar": true 58 | , "passfail": false 59 | , "esversion": 11 60 | } -------------------------------------------------------------------------------- /test.js: -------------------------------------------------------------------------------- 1 | var test = require('tape') 2 | var suite = require('abstract-leveldown/test') 3 | var S3LevelDown = require('./s3leveldown') 4 | 5 | if (!process.env.S3_TEST_BUCKET) { 6 | console.error("Please set the S3_TEST_BUCKET environment variable to run the test") 7 | process.exit(1) 8 | return 9 | } 10 | 11 | var prefix = "/__leveldown_test-" + Date.now(); 12 | 13 | var bucketTestIndex = 0; 14 | var testCommon = suite.common({ 15 | test: test, 16 | factory: function () { 17 | return new S3LevelDown(process.env.S3_TEST_BUCKET + prefix + "-" + (++bucketTestIndex)) 18 | }, 19 | snapshots: false, 20 | seek: false, 21 | bufferKeys: false, 22 | createIfMissing: false, 23 | errorIfExists: false 24 | }) 25 | 26 | suite(testCommon) 27 | 28 | // custom tests 29 | 
30 | var db 31 | 32 | test('setUp common', testCommon.setUp) 33 | 34 | test('setUp #1', function (t) { 35 | db = testCommon.factory() 36 | db.open(function () { 37 | db.batch() 38 | .put('foo', 'bar') 39 | .put('foo2', 'bar2') 40 | .put('foo3', 'bar3') 41 | .write(function() { t.end(); }) 42 | }) 43 | }) 44 | 45 | test('lazy iterator next delete next', function (t) { 46 | var iterator = db.iterator() 47 | iterator.next(function (err, key, value) { 48 | t.equal(err, null, 'no errors') 49 | t.equal(key.toString(), 'foo', 'correct key') 50 | db.del('foo2', function () { 51 | iterator.next(function (err, key, value) { 52 | t.error(err) 53 | t.equal(key.toString(), "foo3", 'correct key') 54 | iterator.end(t.end.bind(t)) 55 | }) 56 | }) 57 | }) 58 | }) 59 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # S3LevelDown 2 | 3 | An [abstract-leveldown](https://github.com/Level/abstract-leveldown) compliant implementation of [LevelDOWN](https://github.com/Level/leveldown) that uses [Amazon S3](https://aws.amazon.com/s3/) as a backing store. S3 is actually a giant key-value store on the cloud, even though it is marketed as a file store. Use this database with the [LevelUP](https://github.com/Level/levelup/) API. 4 | 5 | To use this optimally, please read "Performance considerations" and "Warning about concurrency" sections below. 6 | 7 | You could also use this as an alternative API to read/write S3. The API simpler to use when compared to the AWS SDK! 8 | 9 | ## Installation 10 | 11 | Install `s3leveldown` and peer dependencies `levelup` and `@aws-sdk/client-s3` with `yarn` or `npm`. 12 | 13 | ```bash 14 | $ npm install s3leveldown @aws-sdk/client-s3 levelup 15 | ``` 16 | 17 | ## Documentation 18 | 19 | See the [LevelUP API](https://github.com/Level/levelup#api) for high level usage. 
20 | 21 | ### `new S3LevelDown(location [, s3])` 22 | 23 | Constructor of `s3leveldown` backing store. Use with `levelup`. 24 | 25 | Arguments: 26 | * `location` name of the S3 bucket with optional sub-folder. Example `mybucket` or `mybucket/folder`. 27 | * `s3` Optional `S3Client` from `@aws-sdk/client-s3`. A default client will be used if not specified. 28 | 29 | ## Example 30 | 31 | Please refer to the [AWS SDK docs to set up your API credentials](https://docs.aws.amazon.com/sdk-for-javascript/v3/developer-guide/setting-credentials-node.html) before using. 32 | 33 | ### Using Promises 34 | 35 | ```js 36 | const levelup = require('levelup'); 37 | const S3LevelDown = require('s3leveldown'); 38 | 39 | (async () => { 40 | // create DB 41 | const db = levelup(new S3LevelDown('mybucket')); 42 | 43 | // put items 44 | await db.batch() 45 | .put('name', 'Pikachu') 46 | .put('dob', 'February 27, 1996') 47 | .put('occupation', 'Pokemon') 48 | .write(); 49 | 50 | // read items 51 | await db.createReadStream() 52 | .on('data', data => { console.log('data', `${data.key.toString()}=${data.value.toString()}`); }) 53 | .on('close', () => { console.log('done!') }); 54 | })(); 55 | ``` 56 | 57 | ### Using Callbacks 58 | 59 | ```js 60 | const levelup = require('levelup'); 61 | const S3LevelDown = require('s3leveldown'); 62 | 63 | const db = levelup(new S3LevelDown('my_bucket')); 64 | 65 | db.batch() 66 | .put('name', 'Pikachu') 67 | .put('dob', 'February 27, 1996') 68 | .put('occupation', 'Pokemon') 69 | .write(function () { 70 | db.readStream() 71 | .on('data', console.log) 72 | .on('close', function () { console.log('Pika pi!') }) 73 | }); 74 | ``` 75 | 76 | ### Example with min.io 77 | 78 | You could also use s3leveldown with S3 compatible servers such as [MinIO](https://github.com/minio/minio).
79 | 80 | ```js 81 | const levelup = require('levelup'); 82 | const S3LevelDown = require('s3leveldown'); 83 | const { S3Client } = require('@aws-sdk/client-s3'); 84 | 85 | const s3 = new S3Client({ 86 | region: 'us-east-1', 87 | endpoint: 'http://127.0.0.1:9000', 88 | forcePathStyle: true, 89 | credentials: { 90 | accessKeyId: 'YOUR-ACCESSKEYID', 91 | secretAccessKey: 'YOUR-SECRETACCESSKEY' 92 | } 93 | }); 94 | const db = levelup(new S3LevelDown('my_bucket', s3)); 95 | ``` 96 | 97 | ### [Example with PouchDB](./examples/pouchdb) 98 | 99 | ## Sub folders 100 | 101 | You can create your Level DB in a sub-folder in your S3 bucket, just use `my_bucket/sub_folder` when passing the location. 102 | 103 | ## Performance considerations 104 | 105 | There are a few performance caveats due to the limited API provided by the AWS S3 API: 106 | 107 | * When iterating, getting values is expensive. A separate S3 API call is made to get the value of each key. If you don't need the value, pass `{ values: false }` in the options. Each S3 API call can return 1000 keys, so if there are 3000 results, 3 calls are made to list the keys, and if getting values as well, another 3000 API calls are made. 108 | 109 | * Avoid iterating large datasets when passing `{ reverse: true }`. Since the S3 API calls do not allow retrieving keys in reverse order, the entire result set needs to be stored in memory and reversed. If your database is large ( >5k keys ), be sure to provide start (`gt`, `gte`) and end (`lt`, `lte`), or the entire database will need to be fetched. 110 | 111 | * By default when iterating, 1000 keys will be returned. If you only want 10 keys for example, set `{ limit: 10 }` and the S3 API call will only request 10 keys. Note that if you have `{ reverse: true }`, this optimisation does not apply as we need to fetch everything from start to end and reverse it in memory.
To override the default number of keys to return in a single API call, you can set the `s3ListObjectMaxKeys` option when creating the iterator. The maximum accepted by the S3 API is 1000. 112 | 113 | * Specify the AWS region of the bucket to improve performance, by passing `new S3Client({ region: 'ap-southeast-2' })` as the second constructor argument, or by setting the `AWS_REGION` environment variable; replace `ap-southeast-2` with your region. 114 | 115 | ## Warning about concurrency 116 | 117 | Individual operations (`put` `get` `del`) are atomic as guaranteed by S3, but the implementation of `batch` is not atomic. Two concurrent batch calls will have their operations interwoven. Don't use any plugins which require this to be atomic or you will end up with your database corrupted! However, if you can guarantee that only one process will write the S3 bucket at a time, then this should not be an issue. Ideally, you want to avoid race conditions where two processes are writing to the same key at the same time. In those cases the last write wins. 118 | 119 | Iterator snapshots are not supported. When iterating through a list of keys and values, you may get the changes, similar to dirty reads. 120 | 121 | ## Tests and debug 122 | 123 | S3LevelDown uses [debug](https://github.com/visionmedia/debug). To see debug messages, set the environment variable `DEBUG=S3LevelDown`. 124 | 125 | To run the test suite, you need to set a S3 bucket to the environment variable `S3_TEST_BUCKET`.
Also be sure to [set your AWS credentials](http://docs.aws.amazon.com/cli/latest/userguide/cli-chap-getting-started.html) 126 | 127 | ```bash 128 | $ S3_TEST_BUCKET=my-test-bucket npm run test 129 | ``` 130 | 131 | ## License 132 | 133 | MIT 134 | -------------------------------------------------------------------------------- /s3leveldown.js: -------------------------------------------------------------------------------- 1 | const AbstractLevelDOWN = require('abstract-leveldown').AbstractLevelDOWN 2 | , AbstractIterator = require('abstract-leveldown').AbstractIterator 3 | , ltgt = require('ltgt') 4 | , debug = require('debug')('S3LevelDown') 5 | , AWS = require('@aws-sdk/client-s3') 6 | 7 | const staticS3 = new AWS.S3Client({ apiVersion: '2006-03-01' }) 8 | 9 | function lt(value) { 10 | return ltgt.compare(value, this._finish) < 0 11 | } 12 | 13 | function lte(value) { 14 | return ltgt.compare(value, this._finish) <= 0 15 | } 16 | 17 | function getStartAfterKey(key) { 18 | const keyMinusOneNum = (key.charCodeAt(key.length - 1) - 1) 19 | const keyMinusOne = keyMinusOneNum >= 0 ? 
(String.fromCharCode(keyMinusOneNum) + '\uFFFF') : '' 20 | return key.substring(0, key.length - 1) + keyMinusOne 21 | } 22 | 23 | function nullEmptyUndefined(v) { 24 | return typeof v === 'undefined' || v === null || v === '' 25 | } 26 | 27 | class S3Iterator extends AbstractIterator { 28 | constructor(db, options) { 29 | super(db) 30 | const self = this 31 | self._limit = options.limit 32 | 33 | if (self._limit === -1) 34 | self._limit = Infinity 35 | 36 | self.keyAsBuffer = options.keyAsBuffer !== false 37 | self.valueAsBuffer = options.valueAsBuffer !== false 38 | self.fetchValues = options.values 39 | self._reverse = options.reverse 40 | self._options = options 41 | self._done = 0 42 | self.bucket = db.bucket 43 | self.db = db 44 | self.s3ListObjectMaxKeys = options.s3ListObjectMaxKeys || 1000 45 | if (!self._reverse && self._limit < self.s3ListObjectMaxKeys) { 46 | self.s3ListObjectMaxKeys = self._limit 47 | } 48 | 49 | self._start = ltgt.lowerBound(options) 50 | self._finish = ltgt.upperBound(options) 51 | if (!nullEmptyUndefined(self._finish)) { 52 | if (ltgt.upperBoundInclusive(options)) 53 | self._test = lte 54 | 55 | else 56 | self._test = lt 57 | } 58 | 59 | if (!nullEmptyUndefined(self._start)) 60 | self.startAfter = ltgt.lowerBoundInclusive(options) ? 
getStartAfterKey(self._start) : self._start 61 | 62 | debug('new iterator %o', self._options) 63 | } 64 | 65 | _next(callback) { 66 | const self = this 67 | 68 | if (self._done++ >= self._limit || 69 | (self.data && self.dataUpto == self.data.length && !self.s3nextContinuationToken)) 70 | return setImmediate(callback) 71 | 72 | if (!self.data || self.dataUpto == self.data.length) { 73 | listObjects() 74 | } else { 75 | fireCallback() 76 | } 77 | 78 | function listObjects() { 79 | const params = { 80 | Bucket: self.bucket, 81 | MaxKeys: self.s3ListObjectMaxKeys 82 | } 83 | 84 | if (self.db.folderPrefix !== '') { 85 | params.Prefix = self.db.folderPrefix 86 | } 87 | 88 | if (self.s3nextContinuationToken) { 89 | params.ContinuationToken = self.s3nextContinuationToken 90 | debug('listObjectsV2 ContinuationToken %s', params.ContinuationToken) 91 | } 92 | else if (typeof self.startAfter !== 'undefined') { 93 | params.StartAfter = self.db.folderPrefix + self.startAfter 94 | } 95 | 96 | self.db.s3.send(new AWS.ListObjectsV2Command(params), function (err, data) { 97 | if (err) { 98 | debug('listObjectsV2 error %s', err.message) 99 | callback(err) 100 | } else { 101 | if (data.KeyCount === 0) { 102 | debug('listObjectsV2 empty') 103 | return setImmediate(callback) 104 | } 105 | 106 | debug('listObjectsV2 %d keys', data.KeyCount) 107 | 108 | if (self.data && self.dataUpto === 0) { 109 | self.data = self.data.concat(data.Contents) 110 | } else { 111 | self.data = data.Contents 112 | } 113 | 114 | self.dataUpto = 0 115 | self.s3nextContinuationToken = data.NextContinuationToken 116 | 117 | if (self._reverse && self.s3nextContinuationToken && 118 | data.Contents.every(function (x) { 119 | return self._test(x.Key.substring(self.db.folderPrefix.length, x.Key.length)) 120 | })) { 121 | listObjects() 122 | } else { 123 | fireCallback() 124 | } 125 | } 126 | }) 127 | } 128 | 129 | function fireCallback() { 130 | let index, key 131 | for (; ;) { 132 | index = (!self._reverse) ? 
self.dataUpto : (self.data.length - 1 - self.dataUpto) 133 | const awskey = self.data[index].Key 134 | key = awskey.substring(self.db.folderPrefix.length, awskey.length) 135 | debug('iterator data index %d: %s', index, key) 136 | self.dataUpto++ 137 | 138 | if (self._test(key)) { 139 | break 140 | } 141 | 142 | if (!self._reverse || self.dataUpto === self.data.length) { 143 | return setImmediate(callback) 144 | } 145 | } 146 | 147 | if (self.fetchValues) { 148 | if (self.data[index].Size === 0) 149 | getCallback(null, '') 150 | else 151 | self.db._get(key, { asBuffer: self.valueAsBuffer }, getCallback) 152 | } 153 | else 154 | getCallback() 155 | 156 | function getCallback(err, value) { 157 | debug('iterator data getCallback %s = %s', key, value) 158 | if (err) { 159 | if (err.message == 'NotFound') { 160 | // collection changed while we were iterating, skip this key 161 | return setImmediate(function () { 162 | self._next(callback) 163 | }) 164 | } 165 | return setImmediate(function () { 166 | callback(err) 167 | }) 168 | } 169 | 170 | if (self.keyAsBuffer && !(key instanceof Buffer)) 171 | key = Buffer.from(key) 172 | if (!self.keyAsBuffer && (key instanceof Buffer)) 173 | key = key.toString('utf8') 174 | 175 | if (self.fetchValues) { 176 | if (self.valueAsBuffer && !(value instanceof Buffer)) 177 | value = Buffer.from(value) 178 | if (!self.valueAsBuffer && (value instanceof Buffer)) 179 | value = value.toString('utf8') 180 | } 181 | 182 | setImmediate(function () { 183 | debug('_next result %s=%s', key, value) 184 | callback(null, key, value) 185 | }) 186 | } 187 | } 188 | } 189 | 190 | _test() { return true } 191 | } 192 | 193 | 194 | class S3LevelDown extends AbstractLevelDOWN { 195 | constructor(location, s3) { 196 | super() 197 | if (typeof location !== 'string') { 198 | throw new Error('constructor requires a location string argument') 199 | } 200 | 201 | this.s3 = s3 || staticS3 202 | 203 | if (location.indexOf('/') !== -1) { 204 | this.folderPrefix = 
location.substring(location.indexOf('/') + 1, location.length) + '/' 205 | this.bucket = location.substring(0, location.indexOf('/')) 206 | } else { 207 | this.folderPrefix = '' 208 | this.bucket = location 209 | } 210 | 211 | debug('db init %s %s', this.bucket, this.folderPrefix) 212 | } 213 | 214 | _open(options, callback) { 215 | this.s3.send(new AWS.HeadBucketCommand({ Bucket: this.bucket }), (err) => { 216 | if (err) { 217 | // error, bucket is not found 218 | if (options.createIfMissing && err['$metadata'].httpStatusCode === 404) { 219 | // try to create it 220 | this.s3.send(new AWS.CreateBucketCommand({ Bucket: this.bucket }), (err) => { 221 | if (err) { 222 | setImmediate(() => callback(err)) 223 | } else { 224 | setImmediate(callback) 225 | } 226 | }) 227 | } else { 228 | setImmediate(() => callback(new Error(`Bucket ${this.bucket} does not exists or is inaccessible`))) 229 | } 230 | } else { 231 | setImmediate(callback) 232 | } 233 | }) 234 | } 235 | 236 | _put(key, value, options, callback) { 237 | if (nullEmptyUndefined(value)) 238 | value = Buffer.from('') 239 | 240 | if (!(value instanceof Buffer || value instanceof String)) 241 | value = String(value) 242 | 243 | this.s3.send(new AWS.PutObjectCommand({ 244 | Bucket: this.bucket, 245 | Key: this.folderPrefix + key, 246 | Body: value 247 | }), function (err) { 248 | if (err) { 249 | debug('Error s3 upload: %s %s', key, err.message) 250 | callback(err) 251 | } else { 252 | debug('Successful s3 upload: %s', key) 253 | callback() 254 | } 255 | }) 256 | } 257 | 258 | _get(key, options, callback) { 259 | this.s3.send(new AWS.GetObjectCommand({ 260 | Bucket: this.bucket, 261 | Key: this.folderPrefix + key 262 | }), async function (err, data) { 263 | if (err) { 264 | debug('Error s3 getObject: %s %s', key, err.message) 265 | if (err.Code === 'NoSuchKey') { 266 | callback(new Error('NotFound')) 267 | } else { 268 | callback(err) 269 | } 270 | } else { 271 | let value 272 | try { 273 | debug('s3 getObject 
callback as %s: %s', options.asBuffer ? 'buf' : 'string', key) 274 | if (options && options.asBuffer) { 275 | const byteArray = await data.Body?.transformToByteArray() 276 | value = Buffer.from(byteArray.buffer, byteArray.byteOffset, byteArray.byteLength) 277 | } else { 278 | value = await data.Body?.transformToString('utf8') 279 | } 280 | } catch (err) { 281 | callback(err, null) 282 | return 283 | } 284 | callback(null, value) 285 | } 286 | }) 287 | } 288 | 289 | _del(key, options, callback) { 290 | this.s3.send(new AWS.DeleteObjectCommand({ 291 | Bucket: this.bucket, 292 | Key: this.folderPrefix + key 293 | }), function (err) { 294 | if (err) { 295 | debug('Error s3 delete: %s %s', key, err.message) 296 | callback(err) 297 | } else { 298 | debug('Successful s3 delete: %s', key) 299 | callback() 300 | } 301 | }) 302 | } 303 | 304 | _batch(array, options, callback) { 305 | const len = array.length, self = this 306 | let i = 0; 307 | 308 | function act(action, cb) { 309 | if (!action) { 310 | return setImmediate(cb) 311 | } 312 | 313 | const key = (action.key instanceof Buffer) ? action.key : String(action.key) 314 | const value = action.value 315 | 316 | if (action.type === 'put') { 317 | self._put(key, value, null, cb) 318 | } else if (action.type === 'del') { 319 | self._del(key, null, cb) 320 | } 321 | } 322 | 323 | function actCallback(err) { 324 | if (err) { 325 | return setImmediate(function () { callback(err) }) 326 | } 327 | 328 | if (++i >= len) { 329 | return setImmediate(callback) 330 | } 331 | 332 | act(array[i], actCallback) 333 | } 334 | 335 | act(array[i], actCallback) 336 | } 337 | 338 | _iterator(options) { 339 | return new S3Iterator(this, options) 340 | } 341 | } 342 | 343 | module.exports = S3LevelDown 344 | --------------------------------------------------------------------------------