├── .github └── workflows │ └── transform-pouch.yml ├── .gitignore ├── LICENSE ├── README.md ├── index.js ├── package-lock.json ├── package.json └── test.js /.github/workflows/transform-pouch.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Transform Pouch CI 3 | 4 | on: [pull_request, push] 5 | 6 | jobs: 7 | build: 8 | runs-on: ubuntu-latest 9 | strategy: 10 | matrix: 11 | node: [12, 14, 16] 12 | couchdb: ["2.3", "3.1"] 13 | steps: 14 | - uses: actions/checkout@v2 15 | - uses: actions/setup-node@v2 16 | with: 17 | node-version: ${{ matrix.node }} 18 | - uses: actions/cache@v2 19 | with: 20 | path: ~/.npm 21 | key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} 22 | restore-keys: | 23 | ${{ runner.os }}-node- 24 | - run: npm ci 25 | # install chromium manually, for mochify 26 | - run: node node_modules/puppeteer/install.js 27 | # setup couchdb 28 | - uses: iamssen/couchdb-github-action@master 29 | with: 30 | couchdb-version: ${{ matrix.couchdb }} 31 | - run: npm i -g add-cors-to-couchdb 32 | - run: add-cors-to-couchdb -u admin -p password 33 | # run tests 34 | - run: npm test 35 | env: 36 | COUCH_URL: http://admin:password@localhost:5984 37 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | .DS_Store 3 | *~ 4 | .nyc_output 5 | test/test-bundle.js 6 | npm-debug.log 7 | dist 8 | my_gateway/ 9 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 
9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. 
For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. 
Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. 
You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 
123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. 
In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. 
We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright [yyyy] [name of copyright owner] 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 203 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Transform Pouch 2 | ===== 3 | 4 | [![.github/workflows/transform-pouch.yml](https://github.com/pouchdb-community/transform-pouch/actions/workflows/transform-pouch.yml/badge.svg)](https://github.com/pouchdb-community/transform-pouch/actions/workflows/transform-pouch.yml) 5 | 6 | Apply a *transform function* to documents before and after they are stored in the database. These functions are triggered invisibly for every `get()`, `put()`, `post()`, `bulkDocs()`, `bulkGet()`, `allDocs()`, `changes()`, and also to documents added via replication. 7 | 8 | This allows you to: 9 | 10 | * Encrypt and decrypt sensitive document fields 11 | * Compress and uncompress large content (e.g. to avoid hitting [browser storage limits](http://pouchdb.com/faq.html#data_limits)) 12 | * Remove or modify documents before storage (e.g. to massage data from CouchDB) 13 | 14 | *__Note:__ This plugin was formerly known as `filter-pouch`, but was renamed to be less confusing. 
The `filter()` API is still supported, but deprecated.* 15 | 16 | ## Usage 17 | 18 | Just npm install it: 19 | 20 | ``` 21 | npm install transform-pouch 22 | ``` 23 | 24 | And then attach it to the `PouchDB` object: 25 | 26 | ```js 27 | var PouchDB = require('pouchdb'); 28 | PouchDB.plugin(require('transform-pouch')); 29 | ``` 30 | 31 | You can also use `npm run build` to compile browser-ready bundles. 32 | 33 | ## API 34 | 35 | When you create a new PouchDB, you need to configure the transform functions: 36 | 37 | ```js 38 | var pouch = new PouchDB('mydb'); 39 | pouch.transform({ 40 | incoming: function (doc) { 41 | // do something to the document before storage 42 | return doc; 43 | }, 44 | outgoing: function (doc) { 45 | // do something to the document after retrieval 46 | return doc; 47 | } 48 | }); 49 | ``` 50 | 51 | You can also use Promises: 52 | 53 | ```js 54 | var pouch = new PouchDB('mydb'); 55 | pouch.transform({ 56 | incoming: function (doc) { 57 | return Promise.resolve(doc); 58 | }, 59 | outgoing: function (doc) { 60 | return Promise.resolve(doc); 61 | } 62 | }); 63 | ``` 64 | 65 | Notes: 66 | 67 | * You can provide an `incoming` function, an `outgoing` function, or both. 68 | * Your transform function **must** return the document itself, or a new document (or a promise for such). 69 | * `incoming` functions apply to `put()`, `post()`, `bulkDocs()`, and incoming replications. 70 | * `outgoing` functions apply to `get()`, `allDocs()`, `bulkGet()`, `changes()`, `query()`, and outgoing replications. 71 | * The `incoming`/`outgoing` methods can be async or sync – just return a Promise for a doc, or the doc itself. 72 | 73 | ## Example: Encryption 74 | 75 | **Update!** Check out [crypto-pouch](https://github.com/calvinmetcalf/crypto-pouch), which is based on this plugin, and runs in both the browser and Node. The instructions below will only work in Node. 
76 | 77 | Using the Node.js crypto library, let's first set up our encrypt/decrypt functions: 78 | 79 | ```js 80 | var crypto = require('crypto'); 81 | 82 | var cipher = crypto.createCipher('aes-256-cbc', 'password'); 83 | var decipher = crypto.createDecipher('aes-256-cbc', 'password'); 84 | 85 | function encrypt(text) { 86 | var crypted = cipher.update(text, 'utf8', 'base64'); 87 | return crypted + cipher.final('base64'); 88 | } 89 | 90 | function decrypt(text) { 91 | var dec = decipher.update(text, 'base64', 'utf8'); 92 | return dec + decipher.final('utf8'); 93 | } 94 | ``` 95 | 96 | Obviously you would want to change the `'password'` to be something only the user knows! 97 | 98 | Next, let's set up our transforms: 99 | 100 | ```js 101 | pouch.transform({ 102 | incoming: function (doc) { 103 | Object.keys(doc).forEach(function (field) { 104 | if (field !== '_id' && field !== '_rev' && field !== '_revisions') { 105 | doc[field] = encrypt(doc[field]); 106 | } 107 | }); 108 | return doc; 109 | }, 110 | outgoing: function (doc) { 111 | Object.keys(doc).forEach(function (field) { 112 | if (field !== '_id' && field !== '_rev' && field !== '_revisions') { 113 | doc[field] = decrypt(doc[field]); 114 | } 115 | }); 116 | return doc; 117 | } 118 | }); 119 | ``` 120 | 121 | (`transform-pouch` will automatically ignore deleted documents, so you don't need to handle that case.) 122 | 123 | Now, the documents are encrypted whenever they're stored in the database. If you want to verify, try opening them with a `Pouch` where you haven't set up any `transforms`. 
You'll see documents like: 124 | 125 | ```js 126 | { 127 | secret: 'YrAtAEbvp0bPLil8EpbNeA==', 128 | _id: 'doc', 129 | _rev: '1-bfc37cd00225f68671fe3187c054f9e3' 130 | } 131 | ``` 132 | 133 | whereas privileged users will see: 134 | 135 | ```js 136 | { 137 | secret: 'my super secret text!', 138 | _id: 'doc', 139 | _rev: '1-bfc37cd00225f68671fe3187c054f9e3' 140 | } 141 | ``` 142 | 143 | This works for remote CouchDB databases as well. In fact, only the encrypted data is sent over the wire, so it's ideal for protecting sensitive information. 144 | 145 | ## Note on query() 146 | 147 | Since the remote CouchDB doesn't have access to the untransformed document, map/reduce functions that are executed directly against CouchDB will be applied to the transformed version. PouchDB doesn't have this limitation, because everything is local. 148 | 149 | So for instance, if you try to `emit()` an encrypted field in your map function: 150 | 151 | ```js 152 | function (doc) { 153 | emit(doc.secret, 'shhhhh'); 154 | } 155 | ``` 156 | 157 | ... the emitted key will be encrypted when you `query()` the remote database, but decrypted when you `query()` a local database. So be aware that the `query()` functionality is not exactly the same. 158 | 159 | ## Building 160 | 161 | You can build transform-pouch for the browser with `npm run build`: 162 | 163 | npm install 164 | npm run build 165 | 166 | This will place browser bundles, minified and unminified, in the `dist/` folder. 167 | 168 | ## Testing 169 | 170 | You can run the test suite with `npm test`. 
171 | 172 | To run tests in Node specifically, using LevelDB: 173 | 174 | npm run test:node 175 | 176 | You can also run tests in a headless browser with [mochify](https://www.npmjs.com/package/mochify): 177 | 178 | npm run test:browser 179 | 180 | You can also check for code coverage using: 181 | 182 | npm run coverage 183 | 184 | You can run single test using options from [mocha](https://www.npmjs.com/package/mocha): 185 | 186 | ``` 187 | TEST_DB=local npm run test:node -- --reporter spec --grep search_phrase 188 | ``` 189 | 190 | The `TEST_DB` environment variable specifies the database that PouchDB should use. 191 | You may specify either `local` (which uses LevelDB) or `http` (which uses the `$COUCH_URL` environment variable to connect to a CouchDB installation.) 192 | 193 | # License 194 | 195 | [Apache-2.0](./LICENSE) 196 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const wrappers = require('pouchdb-wrappers') 4 | 5 | // determine if a document key is an internal field like _rev 6 | function isntInternalKey (key) { 7 | return key[0] !== '_' 8 | } 9 | 10 | // determine if a document should be transformed 11 | function isUntransformable (doc) { 12 | // do not transform local documents 13 | if (typeof doc._id === 'string' && (/^_local/).test(doc._id)) { 14 | return true 15 | } 16 | // do not transform document tombstones 17 | if (doc._deleted) { 18 | return Object.keys(doc).filter(isntInternalKey).length === 0 19 | } 20 | 21 | return false 22 | } 23 | 24 | module.exports = { 25 | transform, 26 | // api.filter provided for backwards compat with the old "filter-pouch" 27 | filter: transform 28 | } 29 | 30 | function transform (config) { 31 | const db = this 32 | 33 | // create incoming handler, which transforms documents before write 34 | const incoming = function (doc) { 35 | if (!isUntransformable(doc) && 
config.incoming) { 36 | return config.incoming(doc) 37 | } 38 | return doc 39 | } 40 | 41 | // create outgoing handler, which transforms documents after read 42 | const outgoing = function (doc) { 43 | if (!isUntransformable(doc) && config.outgoing) { 44 | return config.outgoing(doc) 45 | } 46 | return doc 47 | } 48 | 49 | const handlers = { 50 | async get (orig, ...args) { 51 | const response = await orig(...args) 52 | 53 | if (Array.isArray(response)) { 54 | // open_revs style, it's a list of docs 55 | await Promise.all(response.map(async (row) => { 56 | if (row.ok) { 57 | row.ok = await outgoing(row.ok) 58 | } 59 | })) 60 | return response 61 | } else { 62 | // response is just one doc 63 | return outgoing(response) 64 | } 65 | }, 66 | 67 | async bulkDocs (orig, docs, ...args) { 68 | if (docs.docs) { 69 | // docs can be an object and not just a list 70 | docs.docs = await Promise.all(docs.docs.map(incoming)) 71 | } else { 72 | // docs is just a list 73 | docs = await Promise.all(docs.map(incoming)) 74 | } 75 | return orig(docs, ...args) 76 | }, 77 | 78 | async allDocs (orig, ...args) { 79 | const response = await orig(...args) 80 | 81 | await Promise.all(response.rows.map(async (row) => { 82 | // run docs through outgoing handler if include_docs was true 83 | if (row.doc) { 84 | row.doc = await outgoing(row.doc) 85 | } 86 | })) 87 | return response 88 | }, 89 | 90 | async bulkGet (orig, ...args) { 91 | const mapDoc = async (doc) => { 92 | // only run the outgoing handler if the doc exists ("ok") 93 | // istanbul ignore else 94 | if (doc.ok) { 95 | return { ok: await outgoing(doc.ok) } 96 | } else { 97 | return doc 98 | } 99 | } 100 | const mapResult = async (result) => { 101 | const { id, docs } = result 102 | // istanbul ignore else 103 | if (id && docs && Array.isArray(docs)) { 104 | // only modify docs if everything looks ok 105 | return { id, docs: await Promise.all(docs.map(mapDoc)) } 106 | } else { 107 | // result wasn't ok so we return it unmodified 108 | 
return result 109 | } 110 | } 111 | let { results, ...res } = await orig(...args) 112 | results = await Promise.all(results.map(mapResult)) 113 | return { results, ...res } 114 | }, 115 | 116 | changes (orig, ...args) { 117 | async function modifyChange (change) { 118 | // transform a change only if it includes a doc 119 | if (change.doc) { 120 | change.doc = await outgoing(change.doc) 121 | return change 122 | } 123 | return change 124 | } 125 | 126 | async function modifyChanges (res) { 127 | // transform the response only if it contains results 128 | if (res.results) { 129 | res.results = await Promise.all(res.results.map(modifyChange)) 130 | return res 131 | } 132 | return res 133 | } 134 | 135 | const changes = orig(...args) 136 | const { on: origOn, then: origThen } = changes 137 | 138 | return Object.assign(changes, { 139 | // wrap all listeners, but specifically those for 'change' and 'complete' 140 | on (event, listener) { 141 | const origListener = listener 142 | if (event === 'change') { 143 | listener = async (change) => { 144 | origListener(await modifyChange(change)) 145 | } 146 | } else if (event === 'complete') { 147 | // the 'complete' event returns all relevant changes, 148 | // so we submit them all for transformation 149 | listener = async (res) => { 150 | origListener(await modifyChanges(res)) 151 | } 152 | } 153 | return origOn.call(changes, event, listener) 154 | }, 155 | 156 | // `.changes` can be awaited. it then returns all relevant changes 157 | // which we pass to our handler for possible transformation 158 | then (resolve, reject) { 159 | return origThen.call(changes, modifyChanges).then(resolve, reject) 160 | } 161 | }) 162 | } 163 | } 164 | 165 | if (db.type() === 'http') { 166 | // when using its http adapter, pouchdb uses the adapter's `._put` method, 167 | // rather than `._bulkDocs`, 168 | // so we have to wrap `.put` in addition to `.bulkDocs`. 
169 | handlers.put = async function (orig, doc, ...args) { 170 | doc = await incoming(doc) 171 | return orig(doc, ...args) 172 | } 173 | // when using its http adapter, pouchdb cannot intercept query results with `.get` 174 | // so we must wrap the `.query` method directly to transform query results. 175 | handlers.query = async function (orig, ...args) { 176 | const response = await orig(...args) 177 | await Promise.all(response.rows.map(async (row) => { 178 | // modify result rows if they contain a doc 179 | if (row.doc) { 180 | row.doc = await outgoing(row.doc) 181 | } 182 | // because js passes objects by reference, 183 | // there is no need to return anything after updating the row object. 184 | })) 185 | return response 186 | } 187 | } 188 | 189 | wrappers.install(db, handlers) 190 | } 191 | 192 | /* istanbul ignore next */ 193 | if (typeof window !== 'undefined' && window.PouchDB) { 194 | window.PouchDB.plugin(exports) 195 | } 196 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "transform-pouch", 3 | "version": "2.0.0", 4 | "description": "Transform Pouch - a PouchDB plugin for transforming documents", 5 | "main": "index.js", 6 | "repository": { 7 | "type": "git", 8 | "url": "git://github.com/pouchdb-community/transform-pouch.git" 9 | }, 10 | "keywords": [ 11 | "pouch", 12 | "pouchdb", 13 | "plugin", 14 | "transform", 15 | "couch", 16 | "couchdb" 17 | ], 18 | "author": "Nolan Lawson ", 19 | "license": "Apache-2.0", 20 | "bugs": { 21 | "url": "https://github.com/pouchdb-community/transform-pouch/issues" 22 | }, 23 | "scripts": { 24 | "test": "run-s test:*", 25 | "test:lint": "standard", 26 | "test:deps": "dependency-check --no-unused --no-dev .", 27 | "test:node": "mocha test.js", 28 | "test:browser": "mochify --transform [ envify ] test.js", 29 | "build": "run-s build:main build:min", 30 | "build:main": "mkdir 
-p dist && rollup index.js --format umd --file dist/pouchdb.transform-pouch.js", 31 | "build:min": "uglifyjs dist/pouchdb.transform-pouch.js -mc > dist/pouchdb.transform-pouch.min.js", 32 | "coverage": "nyc npm test" 33 | }, 34 | "dependencies": { 35 | "pouchdb-wrappers": "^5.0.0" 36 | }, 37 | "devDependencies": { 38 | "babelify": "^10.0.0", 39 | "dependency-check": "^4.1.0", 40 | "envify": "^4.1.0", 41 | "mocha": "^8.3.2", 42 | "mochify": "^8.1.0", 43 | "npm-run-all": "^4.1.5", 44 | "nyc": "^15.1.0", 45 | "pouchdb": "^7.2.2", 46 | "rollup": "^2.53.0", 47 | "standard": "^16.0.3", 48 | "uglify-js": "^3.13.10", 49 | "uglifyjs": "^2.4.11" 50 | }, 51 | "files": [ 52 | "index.js" 53 | ] 54 | } 55 | -------------------------------------------------------------------------------- /test.js: -------------------------------------------------------------------------------- 1 | /* global describe, it, beforeEach, afterEach, emit, atob, btoa */ 2 | 'use strict' 3 | 4 | const assert = require('assert').strict 5 | const PouchDB = require('pouchdb') 6 | PouchDB.plugin(require('.')) 7 | 8 | const COUCH_URL = process.env.COUCH_URL || 'http://localhost:5984' 9 | const DB_NAME = 'testdb' 10 | const LOCAL_DB = DB_NAME 11 | const REMOTE_DB = COUCH_URL + '/' + DB_NAME 12 | let TEST_DBS 13 | 14 | // allow specifying a specific adapter (local or http) to use 15 | if (process.env.TEST_DB === 'local') { 16 | TEST_DBS = [LOCAL_DB] 17 | } else if (process.env.TEST_DB === 'http') { 18 | TEST_DBS = [REMOTE_DB] 19 | } else { 20 | TEST_DBS = [LOCAL_DB, REMOTE_DB] 21 | } 22 | 23 | // run tests for local and http adapters 24 | TEST_DBS.forEach(function (db) { 25 | const dbType = /^http/.test(db) ? 
'http' : 'local' 26 | tests(db, dbType) 27 | }) 28 | 29 | function tests (dbName, dbType) { 30 | describe(dbType + ': basic tests', function () { 31 | this.timeout(30000) 32 | 33 | let db 34 | 35 | beforeEach(function () { 36 | db = new PouchDB(dbName) 37 | }) 38 | afterEach(function () { 39 | return db.destroy() 40 | }) 41 | 42 | it('transforms on PUT', async function () { 43 | db.transform({ 44 | incoming: function (doc) { 45 | doc.foo = 'baz' 46 | return doc 47 | } 48 | }) 49 | 50 | await db.put({ _id: 'foo' }) 51 | const doc = await db.get('foo') 52 | 53 | assert.equal(doc._id, 'foo') 54 | assert.equal(doc.foo, 'baz') 55 | }) 56 | 57 | it('transforms on PUT, with a promise', async function () { 58 | db.transform({ 59 | incoming: async function (doc) { 60 | doc.foo = 'baz' 61 | return doc 62 | } 63 | }) 64 | 65 | await db.put({ _id: 'foo' }) 66 | const doc = await db.get('foo') 67 | 68 | assert.equal(doc._id, 'foo') 69 | assert.equal(doc.foo, 'baz') 70 | }) 71 | 72 | it('transforms on POST', async function () { 73 | db.transform({ 74 | incoming: function (doc) { 75 | doc.foo = 'baz' 76 | return doc 77 | } 78 | }) 79 | const res = await db.post({}) 80 | const doc = await db.get(res.id) 81 | assert.equal(typeof doc._id, 'string') 82 | assert.equal(doc.foo, 'baz') 83 | }) 84 | 85 | it('transforms on POST, with a promise', async function () { 86 | db.transform({ 87 | incoming: async function (doc) { 88 | doc.foo = 'baz' 89 | return doc 90 | } 91 | }) 92 | const res = await db.post({}) 93 | const doc = await db.get(res.id) 94 | assert.equal(typeof doc._id, 'string') 95 | assert.equal(doc.foo, 'baz') 96 | }) 97 | 98 | it('transforms on GET', async function () { 99 | db.transform({ 100 | outgoing: function (doc) { 101 | doc.foo = 'baz' 102 | return doc 103 | } 104 | }) 105 | await db.put({ _id: 'foo' }) 106 | const doc = await db.get('foo') 107 | assert.equal(doc._id, 'foo') 108 | assert.equal(doc.foo, 'baz') 109 | }) 110 | 111 | it('transforms on GET, with a promise', 
async function () { 112 | db.transform({ 113 | outgoing: async function (doc) { 114 | doc.foo = 'baz' 115 | return doc 116 | } 117 | }) 118 | await db.put({ _id: 'foo' }) 119 | const doc = await db.get('foo') 120 | assert.equal(doc._id, 'foo') 121 | assert.equal(doc.foo, 'baz') 122 | }) 123 | 124 | it('skips local docs', async function () { 125 | db.transform({ 126 | outgoing: function (doc) { 127 | doc.foo = 'baz' 128 | return doc 129 | } 130 | }) 131 | await db.put({ _id: '_local/foo' }) 132 | const doc = await db.get('_local/foo') 133 | assert.equal(doc._id, '_local/foo') 134 | assert(!('foo' in doc)) 135 | }) 136 | 137 | it('skips local docs, incoming', async function () { 138 | db.transform({ 139 | incoming: function (doc) { 140 | doc.foo = 'baz' 141 | return doc 142 | } 143 | }) 144 | await db.put({ _id: '_local/foo' }) 145 | const doc = await db.get('_local/foo') 146 | assert.equal(doc._id, '_local/foo') 147 | assert(!('foo' in doc)) 148 | }) 149 | 150 | it('skips local docs, post', async function () { 151 | db.transform({ 152 | outgoing: function (doc) { 153 | doc.foo = 'baz' 154 | return doc 155 | } 156 | }) 157 | await db.post({ _id: '_local/foo' }) 158 | const doc = await db.get('_local/foo') 159 | assert.equal(doc._id, '_local/foo') 160 | assert(!('foo' in doc)) 161 | }) 162 | 163 | it('skips local docs, bulkDocs', async function () { 164 | db.transform({ 165 | outgoing: function (doc) { 166 | doc.foo = 'baz' 167 | return doc 168 | } 169 | }) 170 | await db.bulkDocs([{ _id: '_local/foo' }]) 171 | const doc = await db.get('_local/foo') 172 | assert.equal(doc._id, '_local/foo') 173 | assert(!('foo' in doc)) 174 | }) 175 | 176 | it('skips deleted docs', async function () { 177 | await db.put({ _id: 'foo', foo: {} }) 178 | const doc = await db.get('foo') 179 | let transformCalledOnDelete = false 180 | 181 | db.transform({ 182 | incoming: function (doc) { 183 | transformCalledOnDelete = true 184 | return doc 185 | } 186 | }) 187 | 188 | await db.remove(doc) 
189 | assert.equal(transformCalledOnDelete, false) 190 | }) 191 | 192 | it('transforms deleted docs with custom properties', async function () { 193 | await db.put({ _id: 'foo', foo: {} }) 194 | const doc = await db.get('foo') 195 | let transformCalledOnDelete = false 196 | 197 | db.transform({ 198 | incoming: function (doc) { 199 | transformCalledOnDelete = true 200 | return doc 201 | } 202 | }) 203 | 204 | doc.foo = 'baz' 205 | doc._deleted = true 206 | await db.put(doc) 207 | assert.equal(transformCalledOnDelete, true) 208 | }) 209 | 210 | it('handles sync errors', async function () { 211 | db.transform({ 212 | incoming: function (doc) { 213 | doc.foo.baz = 'baz' 214 | return doc 215 | } 216 | }) 217 | 218 | let res, err 219 | try { 220 | res = await db.put({ _id: 'foo' }) 221 | } catch (error) { 222 | err = error 223 | } 224 | assert.equal(res, undefined) 225 | assert.notEqual(err, undefined) 226 | }) 227 | 228 | it('handles async errors', async function () { 229 | db.transform({ 230 | incoming: function () { 231 | return Promise.reject(new Error('flunking you')) 232 | } 233 | }) 234 | 235 | let res, err 236 | try { 237 | res = await db.put({ _id: 'foo' }) 238 | } catch (error) { 239 | err = error 240 | } 241 | assert.equal(res, undefined) 242 | assert.notEqual(err, undefined) 243 | }) 244 | 245 | it('handles cancel', function () { 246 | db.transform() 247 | const syncHandler = db.sync('my_gateway', {}) 248 | return syncHandler.cancel() 249 | }) 250 | 251 | it('transforms on GET with options', async function () { 252 | db.transform({ 253 | outgoing: function (doc) { 254 | doc.foo = 'baz' 255 | return doc 256 | } 257 | }) 258 | await db.put({ _id: 'foo' }) 259 | const doc = await db.get('foo', {}) 260 | assert.equal(doc._id, 'foo') 261 | assert.equal(doc.foo, 'baz') 262 | }) 263 | 264 | it('transforms on GET with missing open_revs', async function () { 265 | db.transform({ 266 | outgoing: function (doc) { 267 | doc.foo = 'baz' 268 | return doc 269 | } 270 | }) 
271 | await db.put({ _id: 'foo' }) 272 | const docs = await db.get('foo', { revs: true, open_revs: ['1-DNE'] }) 273 | assert.equal(docs.length, 1) 274 | assert.equal(docs[0].missing, '1-DNE') 275 | }) 276 | 277 | it('transforms on GET with missing and non-missing open_revs', async function () { 278 | db.transform({ 279 | outgoing: function (doc) { 280 | doc.foo = 'baz' 281 | return doc 282 | } 283 | }) 284 | const res = await db.put({ _id: 'foo' }) 285 | const rev = res.rev 286 | const docs = await db.get('foo', { revs: true, open_revs: ['1-DNE', rev] }) 287 | assert.equal(docs.length, 2) 288 | const okRes = docs[0].ok ? docs[0] : docs[1] 289 | const missingRes = docs[0].ok ? docs[1] : docs[0] 290 | assert.equal(missingRes.missing, '1-DNE') 291 | assert.equal(okRes.ok._rev, rev) 292 | }) 293 | 294 | it('transforms on GET, not found', async function () { 295 | db.transform({ 296 | outgoing: function (doc) { 297 | doc.foo = 'baz' 298 | return doc 299 | } 300 | }) 301 | await db.put({ _id: 'foo' }) 302 | let doc, err 303 | try { 304 | doc = await db.get('quux') 305 | } catch (error) { 306 | err = error 307 | } 308 | assert.equal(doc, undefined) 309 | assert.notEqual(err, undefined) 310 | }) 311 | 312 | it('transforms on bulkGet()', async function () { 313 | db.transform({ 314 | outgoing: async function (doc) { 315 | doc.foo = 'baz' 316 | return doc 317 | } 318 | }) 319 | 320 | await db.bulkDocs([{ _id: 'toto' }, { _id: 'lala' }]) 321 | const docs = await db.bulkGet({ docs: [{ id: 'toto' }, { id: 'lala' }] }) 322 | 323 | assert.equal(docs.results[0].docs[0].ok.foo, 'baz') 324 | assert.equal(docs.results[1].docs[0].ok.foo, 'baz') 325 | }) 326 | 327 | it('transforms on bulk_docs', async function () { 328 | db.transform({ 329 | incoming: function (doc) { 330 | doc.foo = doc._id + '_baz' 331 | return doc 332 | } 333 | }) 334 | const res = await db.bulkDocs([{ _id: 'toto' }, { _id: 'lala' }]) 335 | const doc0 = await db.get(res[0].id) 336 | assert.equal(doc0.foo, 
'toto_baz') 337 | const doc1 = await db.get(res[1].id) 338 | assert.equal(doc1.foo, 'lala_baz') 339 | }) 340 | 341 | it('transforms on bulk_docs, new_edits=false 1', async function () { 342 | db.transform({ 343 | incoming: function (doc) { 344 | doc.foo = doc._id + '_baz' 345 | return doc 346 | } 347 | }) 348 | const docsA = [{ 349 | _id: 'selenium-global', 350 | _rev: '5-3b6e1f9846c7aa2ae80ba871cd8bf084', 351 | _deleted: true, 352 | _revisions: { 353 | start: 5, 354 | ids: [ 355 | '3b6e1f9846c7aa2ae80ba871cd8bf084', 356 | '84870906995eb23f6375900296226df6' 357 | ] 358 | } 359 | }] 360 | const docsB = [{ 361 | _id: 'selenium-global', 362 | _rev: '4-84870906995eb23f6375900296226df6', 363 | _revisions: { 364 | start: 4, 365 | ids: [ 366 | '84870906995eb23f6375900296226df6', 367 | '941073451900f1d92a9a39dde8938339' 368 | ] 369 | } 370 | }] 371 | const docsC = [ 372 | { 373 | _id: 'selenium-global', 374 | _rev: '3-8b3a09799ad70999277f0859f0aa1add', 375 | _revisions: { 376 | start: 3, 377 | ids: [ 378 | '8b3a09799ad70999277f0859f0aa1add', 379 | '10ade0f791a6b0dab76dde12d3ffce74' 380 | ] 381 | } 382 | }, 383 | { 384 | _id: 'selenium-global', 385 | _rev: '2-61cb022c4e5f3a702a969e6ac17fea79', 386 | _revisions: { 387 | start: 2, 388 | ids: [ 389 | '61cb022c4e5f3a702a969e6ac17fea79', 390 | '54f0c85a4a6329bd8885470aef5104d7' 391 | ] 392 | } 393 | }, 394 | { 395 | _id: 'selenium-global', 396 | _rev: '12-787d8aa4043f18d8a8747708afcce370', 397 | _revisions: { 398 | start: 12, 399 | ids: [ 400 | '787d8aa4043f18d8a8747708afcce370', 401 | '9d02f7a6634530eafdcc36df0cab54ff', 402 | '328c111479b9aae37cb0c6c38545059b', 403 | 'c9902a757278d99e60dd1571113687c5', 404 | '7c8b0e3a8c6191317664ffafe2a6f40a', 405 | 'e3f4590f30f77ecfafa638235a4d4e24', 406 | '80a589649d8c86e7408d1745edac0484', 407 | 'f7893b80dbeef9566a99c2d879477cf7', 408 | '67b0eb503ba35fd34c5acab77cf9552e', 409 | '5b6eeae4b4edf20a2e5b87a333cb9c5c', 410 | '2913efa5e4a43a53dca80b66bba9b7dc', 411 | 
'1c0833f56ec15a816a8b2901b7a48176' 412 | ] 413 | } 414 | } 415 | ] 416 | 417 | let results = await db.bulkDocs({ docs: docsA, new_edits: false }) 418 | results.forEach(function (result) { 419 | assert(!('error' in result), 'no doc update coflict') 420 | }) 421 | 422 | results = await db.bulkDocs({ docs: docsB, new_edits: false }) 423 | results.forEach(function (result) { 424 | assert(!('error' in result), 'no doc update coflict') 425 | }) 426 | 427 | results = await db.bulkDocs({ docs: docsC, new_edits: false }) 428 | results.forEach(function (result) { 429 | assert(!('error' in result), 'no doc update coflict') 430 | }) 431 | }) 432 | 433 | it('transforms on bulk_docs, new_edits=false 2', async function () { 434 | db.transform({ 435 | incoming: function (doc) { 436 | doc.foo = doc._id + '_baz' 437 | return doc 438 | } 439 | }) 440 | const docsA = [{ 441 | _id: 'selenium-global', 442 | _rev: '5-3b6e1f9846c7aa2ae80ba871cd8bf084', 443 | _deleted: true, 444 | _revisions: { 445 | start: 5, 446 | ids: [ 447 | '3b6e1f9846c7aa2ae80ba871cd8bf084', 448 | '84870906995eb23f6375900296226df6' 449 | ] 450 | } 451 | }] 452 | const docsB = [{ 453 | _id: 'selenium-global', 454 | _rev: '4-84870906995eb23f6375900296226df6', 455 | _revisions: { 456 | start: 4, 457 | ids: [ 458 | '84870906995eb23f6375900296226df6', 459 | '941073451900f1d92a9a39dde8938339' 460 | ] 461 | } 462 | }] 463 | const docsC = [ 464 | { 465 | _id: 'selenium-global', 466 | _rev: '3-8b3a09799ad70999277f0859f0aa1add', 467 | _revisions: { 468 | start: 3, 469 | ids: [ 470 | '8b3a09799ad70999277f0859f0aa1add', 471 | '10ade0f791a6b0dab76dde12d3ffce74' 472 | ] 473 | } 474 | }, 475 | { 476 | _id: 'selenium-global', 477 | _rev: '2-61cb022c4e5f3a702a969e6ac17fea79', 478 | _revisions: { 479 | start: 2, 480 | ids: [ 481 | '61cb022c4e5f3a702a969e6ac17fea79', 482 | '54f0c85a4a6329bd8885470aef5104d7' 483 | ] 484 | } 485 | }, 486 | { 487 | _id: 'selenium-global', 488 | _rev: '12-787d8aa4043f18d8a8747708afcce370', 489 | 
_revisions: { 490 | start: 12, 491 | ids: [ 492 | '787d8aa4043f18d8a8747708afcce370', 493 | '9d02f7a6634530eafdcc36df0cab54ff', 494 | '328c111479b9aae37cb0c6c38545059b', 495 | 'c9902a757278d99e60dd1571113687c5', 496 | '7c8b0e3a8c6191317664ffafe2a6f40a', 497 | 'e3f4590f30f77ecfafa638235a4d4e24', 498 | '80a589649d8c86e7408d1745edac0484', 499 | 'f7893b80dbeef9566a99c2d879477cf7', 500 | '67b0eb503ba35fd34c5acab77cf9552e', 501 | '5b6eeae4b4edf20a2e5b87a333cb9c5c', 502 | '2913efa5e4a43a53dca80b66bba9b7dc', 503 | '1c0833f56ec15a816a8b2901b7a48176' 504 | ] 505 | } 506 | } 507 | ] 508 | 509 | let results = await db.bulkDocs(docsA, { new_edits: false }) 510 | results.forEach(function (result) { 511 | assert(!('error' in result), 'no doc update coflict') 512 | }) 513 | 514 | results = await db.bulkDocs(docsB, { new_edits: false }) 515 | results.forEach(function (result) { 516 | assert(!('error' in result), 'no doc update coflict') 517 | }) 518 | 519 | results = await db.bulkDocs(docsC, { new_edits: false }) 520 | results.forEach(function (result) { 521 | assert(!('error' in result), 'no doc update coflict') 522 | }) 523 | }) 524 | 525 | it('transforms on bulk_docs, object style', async function () { 526 | db.transform({ 527 | incoming: function (doc) { 528 | doc.foo = doc._id + '_baz' 529 | return doc 530 | } 531 | }) 532 | const res = await db.bulkDocs({ docs: [{ _id: 'toto' }, { _id: 'lala' }] }) 533 | let doc = await db.get(res[0].id) 534 | assert.equal(doc.foo, 'toto_baz') 535 | doc = await db.get(res[1].id) 536 | assert.equal(doc.foo, 'lala_baz') 537 | }) 538 | 539 | it('transforms on all_docs, incoming', async function () { 540 | db.transform({ 541 | incoming: function (doc) { 542 | doc.foo = doc._id + '_baz' 543 | return doc 544 | } 545 | }) 546 | await db.bulkDocs({ docs: [{ _id: 'toto' }, { _id: 'lala' }] }) 547 | const res = await db.allDocs({ include_docs: true }) 548 | assert.equal(res.rows.length, 2) 549 | assert.equal(res.rows[0].doc.foo, 'lala_baz') 550 | 
assert.equal(res.rows[1].doc.foo, 'toto_baz') 551 | }) 552 | 553 | it('transforms on all_docs, outgoing', async function () { 554 | db.transform({ 555 | outgoing: function (doc) { 556 | doc.foo = doc._id + '_baz' 557 | return doc 558 | } 559 | }) 560 | await db.bulkDocs({ docs: [{ _id: 'toto' }, { _id: 'lala' }] }) 561 | const res = await db.allDocs({ include_docs: true }) 562 | assert.equal(res.rows.length, 2) 563 | assert.equal(res.rows[0].doc.foo, 'lala_baz') 564 | assert.equal(res.rows[1].doc.foo, 'toto_baz') 565 | }) 566 | 567 | it('transforms on all_docs no opts, outgoing', async function () { 568 | db.transform({ 569 | outgoing: function (doc) { 570 | doc.foo = doc._id + '_baz' 571 | return doc 572 | } 573 | }) 574 | await db.bulkDocs({ docs: [{ _id: 'toto' }, { _id: 'lala' }] }) 575 | const res = await db.allDocs() 576 | assert.equal(res.rows.length, 2) 577 | assert(!('doc' in res.rows[0])) 578 | assert(!('doc' in res.rows[1])) 579 | }) 580 | 581 | it('transforms on query, incoming', async function () { 582 | db.transform({ 583 | incoming: function (doc) { 584 | doc.foo = doc._id + '_baz' 585 | return doc 586 | } 587 | }) 588 | 589 | const ddoc = { 590 | _id: '_design/index', 591 | views: { 592 | index: { 593 | map: function (doc) { 594 | emit(doc._id) 595 | }.toString() 596 | } 597 | } 598 | } 599 | 600 | await db.bulkDocs({ docs: [{ _id: 'toto' }, { _id: 'lala' }, ddoc] }) 601 | const res = await db.query('index', { include_docs: true }) 602 | assert.equal(res.rows.length, 2) 603 | assert.equal(res.rows[0].doc.foo, 'lala_baz') 604 | assert.equal(res.rows[1].doc.foo, 'toto_baz') 605 | }) 606 | 607 | it('transforms on query, outgoing', async function () { 608 | db.transform({ 609 | outgoing: function (doc) { 610 | doc.foo = doc._id + '_baz' 611 | return doc 612 | } 613 | }) 614 | const ddoc = { 615 | _id: '_design/index', 616 | views: { 617 | index: { 618 | map: function (doc) { 619 | emit(doc._id) 620 | }.toString() 621 | } 622 | } 623 | } 624 | await 
db.bulkDocs({ docs: [{ _id: 'toto' }, { _id: 'lala' }, ddoc] }) 625 | const res = await db.query('index', { include_docs: true }) 626 | assert.equal(res.rows.length, 2) 627 | assert.equal(res.rows[0].doc.foo, 'lala_baz') 628 | assert.equal(res.rows[1].doc.foo, 'toto_baz') 629 | }) 630 | 631 | it('transforms on query no opts, outgoing', async function () { 632 | db.transform({ 633 | outgoing: function (doc) { 634 | doc.foo = doc._id + '_baz' 635 | return doc 636 | } 637 | }) 638 | const ddoc = { 639 | _id: '_design/index', 640 | views: { 641 | index: { 642 | map: function (doc) { 643 | emit(doc._id) 644 | }.toString() 645 | } 646 | } 647 | } 648 | await db.bulkDocs({ docs: [{ _id: 'toto' }, { _id: 'lala' }, ddoc] }) 649 | const res = await db.query('index') 650 | assert.equal(res.rows.length, 2) 651 | assert(!('doc' in res.rows[0])) 652 | assert(!('doc' in res.rows[1])) 653 | }) 654 | 655 | it('transforms ingoing and outgoing', async function () { 656 | db.transform({ 657 | ingoing: function (doc) { 658 | doc.foo = doc.foo.toUpperCase() 659 | return doc 660 | }, 661 | outgoing: function (doc) { 662 | doc.foo = doc.foo.toLowerCase() 663 | return doc 664 | } 665 | }) 666 | await db.put({ _id: 'doc', foo: 'bar' }) 667 | const doc = await db.get('doc') 668 | assert.equal(doc.foo, 'bar') 669 | }) 670 | }) 671 | 672 | describe(dbType + ': advanced tests', function () { 673 | this.timeout(30000) 674 | 675 | let db 676 | 677 | beforeEach(function () { 678 | db = new PouchDB(dbName) 679 | return db 680 | }) 681 | afterEach(function () { 682 | return db.destroy() 683 | }) 684 | 685 | let encrypt 686 | let decrypt 687 | if (typeof process !== 'undefined' && !process.browser) { 688 | const crypto = require('crypto') 689 | 690 | encrypt = function (text) { 691 | // eslint-disable-next-line 692 | const cipher = crypto.createCipher('aes-256-cbc', 'password') 693 | const crypted = cipher.update(text, 'utf8', 'base64') 694 | return crypted + cipher.final('base64') 695 | } 696 | 697 
| decrypt = function (text) { 698 | // eslint-disable-next-line 699 | const decipher = crypto.createDecipher('aes-256-cbc', 'password') 700 | const dec = decipher.update(text, 'base64', 'utf8') 701 | return dec + decipher.final('utf8') 702 | } 703 | } else { // browser 704 | encrypt = btoa 705 | decrypt = atob 706 | } 707 | 708 | function transform (db) { 709 | db.transform({ 710 | incoming: function (doc) { 711 | // designDocs should be ignored 712 | // the !doc._id check is for a db.post (without an id) 713 | if (!doc._id || (doc._id && !doc._id.startsWith('_design'))) { 714 | Object.keys(doc).forEach(function (field) { 715 | if (field !== '_id' && field !== '_rev') { 716 | doc[field] = encrypt(doc[field]) 717 | } 718 | }) 719 | } 720 | return doc 721 | }, 722 | outgoing: function (doc) { 723 | // designDocs should be ignored 724 | if (doc._id && doc._id.startsWith('_design')) { 725 | return doc 726 | } 727 | 728 | Object.keys(doc).forEach(function (field) { 729 | if (field !== '_id' && field !== '_rev') { 730 | doc[field] = decrypt(doc[field]) 731 | } 732 | }) 733 | return doc 734 | } 735 | }) 736 | } 737 | 738 | it('test encryption/decryption with puts', async function () { 739 | transform(db) 740 | const id = 'doc' 741 | const secret = 'my super secret text!' 742 | await db.put({ _id: id, secret }) 743 | // check that it gets decrypted 744 | let doc = await db.get(id) 745 | assert.equal(doc.secret, secret) 746 | // check that it's encrypted 747 | const db2 = new PouchDB(dbName) 748 | doc = await db2.get(id) 749 | assert.equal(doc.secret, encrypt(secret)) 750 | }) 751 | 752 | it('test encryption/decryption with posts', async function () { 753 | transform(db) 754 | const res = await db.post({ secret: 'my super secret text!' 
}) 755 | const id = res.id 756 | let doc = await db.get(id) 757 | assert.equal(doc.secret, 'my super secret text!') 758 | doc = await new PouchDB(dbName).get(id) 759 | assert.equal(doc.secret, encrypt('my super secret text!')) 760 | }) 761 | 762 | it('test encryption/decryption with bulkdocs/alldocs', async function () { 763 | const id = 'doc' 764 | const secret = 'my super secret text!' 765 | transform(db) 766 | await db.bulkDocs([{ _id: id, secret }]) 767 | let res = await db.allDocs({ keys: [id], include_docs: true }) 768 | assert.equal(res.rows.length, 1) 769 | assert.equal(res.rows[0].doc.secret, secret) 770 | res = await new PouchDB(dbName).allDocs({ keys: [id], include_docs: true }) 771 | assert.equal(res.rows.length, 1) 772 | assert.equal(res.rows[0].doc.secret, encrypt(secret)) 773 | }) 774 | 775 | it('test encryption/decryption with bulkdocs/query', async function () { 776 | transform(db) 777 | 778 | const ddoc = { 779 | _id: '_design/index', 780 | views: { 781 | index: { 782 | map: function (doc) { 783 | emit(doc._id) 784 | }.toString() 785 | } 786 | } 787 | } 788 | 789 | await db.bulkDocs([{ _id: 'doc', secret: 'my super secret text!' }, ddoc]) 790 | let res = await db.query('index', { keys: ['doc'], include_docs: true }) 791 | assert.equal(res.rows.length, 1) 792 | assert.equal(res.rows[0].doc.secret, 'my super secret text!') 793 | res = await new PouchDB(dbName).query('index', { keys: ['doc'], include_docs: true }) 794 | assert.equal(res.rows.length, 1) 795 | assert.equal(res.rows[0].doc.secret, encrypt('my super secret text!')) 796 | }) 797 | 798 | it('test encryption/decryption with bulkdocs/changes complete', async function () { 799 | transform(db) 800 | 801 | function changesCompletePromise (db, opts) { 802 | return new Promise(function (resolve, reject) { 803 | db.changes(opts).on('complete', resolve).on('error', reject) 804 | }) 805 | } 806 | 807 | await db.bulkDocs([{ _id: 'doc', secret: 'my super secret text!' 
}]) 808 | let res = await changesCompletePromise(db, { include_docs: true }) 809 | assert.equal(res.results.length, 1) 810 | assert.equal(res.results[0].doc.secret, 'my super secret text!') 811 | res = await changesCompletePromise(new PouchDB(dbName), { include_docs: true }) 812 | assert.equal(res.results.length, 1) 813 | assert.equal(res.results[0].doc.secret, encrypt('my super secret text!')) 814 | }) 815 | 816 | it('test encryption/decryption with bulkdocs/changes single change', async function () { 817 | transform(db) 818 | 819 | function changesCompletePromise (db, opts) { 820 | return new Promise(function (resolve, reject) { 821 | db.changes(opts).on('change', resolve).on('error', reject) 822 | }) 823 | } 824 | 825 | await db.bulkDocs([{ _id: 'doc', secret: 'my super secret text!' }]) 826 | let res = await changesCompletePromise(db, { include_docs: true }) 827 | assert.equal(res.doc.secret, 'my super secret text!') 828 | res = await changesCompletePromise(new PouchDB(dbName), { include_docs: true }) 829 | assert.equal(res.doc.secret, encrypt('my super secret text!')) 830 | }) 831 | 832 | it('test encryption/decryption with bulkdocs/changes complete, promise style', async function () { 833 | transform(db) 834 | const id = 'doc' 835 | const secret = 'my super secret text!' 836 | 837 | function changesCompletePromise (db, opts) { return db.changes(opts) } 838 | 839 | await db.bulkDocs([{ _id: id, secret }]) 840 | let res = await changesCompletePromise(db, { include_docs: true }) 841 | assert.equal(res.results.length, 1) 842 | assert.equal(res.results[0].doc.secret, secret) 843 | res = await changesCompletePromise(new PouchDB(dbName), { include_docs: true }) 844 | assert.equal(res.results.length, 1) 845 | assert.equal(res.results[0].doc.secret, encrypt(secret)) 846 | }) 847 | 848 | it('test encryption/decryption with bulkdocs/changes complete, no docs', async function () { 849 | transform(db) 850 | const id = 'doc' 851 | const secret = 'my super secret text!' 
852 | 853 | function changesCompletePromise (db, opts) { return db.changes(opts) } 854 | 855 | await db.bulkDocs([{ _id: id, secret }]) 856 | let res = await changesCompletePromise(db, {}) 857 | assert.equal(res.results.length, 1) 858 | assert(!('doc' in res.results[0])) 859 | res = await changesCompletePromise(new PouchDB(dbName), {}) 860 | assert.equal(res.results.length, 1) 861 | assert(!('doc' in res.results[0])) 862 | }) 863 | 864 | it('makes sure that the .changes wrapper returns the value (#43)', async function () { 865 | db.transform({}) 866 | const documentId = 'some-id' 867 | 868 | await db.put({ 869 | _id: '_design/some_view', 870 | views: { 871 | some_view: { 872 | map: function (doc) { 873 | emit(doc.id, doc.value) 874 | }.toString(), 875 | reduce: '_sum' 876 | } 877 | } 878 | }) 879 | await db.put({ _id: documentId, value: 5 }) 880 | const response = await db.query('some_view') 881 | assert.equal(response.rows.length, 1) 882 | }) 883 | 884 | // only works locally, since the remote Couch can't see the 885 | // unencrypted field 886 | if (dbType === 'local') { 887 | it('test encryption/decryption with map/reduce', async function () { 888 | transform(db) 889 | const id = 'doc' 890 | const secret = 'my super secret text!' 
891 | const mapFun = { 892 | map: function (doc) { 893 | emit(doc.secret) 894 | } 895 | } 896 | await db.put({ _id: id, secret }) 897 | let res = await db.query(mapFun) 898 | assert.equal(res.rows.length, 1) 899 | assert.equal(res.rows[0].key, secret) 900 | res = await new PouchDB(dbName).query(mapFun) 901 | assert.equal(res.rows.length, 1) 902 | assert.equal(res.rows[0].key, encrypt(secret)) 903 | }) 904 | } 905 | }) 906 | 907 | describe(dbType + ': replication tests', function () { 908 | this.timeout(30000) 909 | 910 | let db 911 | let remote 912 | 913 | // Utility function - complex test incoming 914 | const defer = function () { 915 | let resolve, reject 916 | const promise = new Promise(function () { 917 | resolve = arguments[0] 918 | reject = arguments[1] 919 | }) 920 | return { 921 | resolve: resolve, 922 | reject: reject, 923 | promise: promise 924 | } 925 | } 926 | 927 | beforeEach(function () { 928 | db = new PouchDB(dbName) 929 | remote = new PouchDB(dbName + '_other') 930 | }) 931 | 932 | afterEach(async function () { 933 | await db.destroy() 934 | await remote.destroy() 935 | }) 936 | 937 | it('test replication transforms incoming', async function () { 938 | db.transform({ 939 | incoming: function (doc) { 940 | doc.foo = 'baz' 941 | return doc 942 | } 943 | }) 944 | 945 | await remote.put({ _id: 'doc' }) 946 | 947 | await new Promise(function (resolve, reject) { 948 | remote.replicate.to(db).on('complete', resolve).on('error', reject) 949 | }) 950 | 951 | const doc = await db.get('doc') 952 | assert.equal(doc.foo, 'baz') 953 | }) 954 | 955 | it('test replication transforms outgoing', async function () { 956 | db.transform({ 957 | outgoing: function (doc) { 958 | doc.foo = 'baz' 959 | return doc 960 | } 961 | }) 962 | 963 | await db.put({ _id: 'doc' }) 964 | 965 | await new Promise(function (resolve, reject) { 966 | db.replicate.to(remote).on('complete', resolve).on('error', reject) 967 | }) 968 | 969 | const doc = await remote.get('doc') 970 | 
assert.equal(doc.foo, 'baz') 971 | }) 972 | 973 | it('test live replication transforms', async function () { 974 | // We need to wait until the "incoming" change has happened. 975 | // We'll use a re-assignable deferred object so we can wait multiple times 976 | let d 977 | 978 | db.transform({ 979 | incoming: function (doc) { 980 | doc.foo = 'baz' 981 | // Resolve anything that's waiting for the incoming handler to finish 982 | setTimeout(function () { 983 | d.resolve() 984 | }, 100) 985 | return doc 986 | }, 987 | outgoing: function (doc) { 988 | doc.boo = 'lal' 989 | // Resolve anything that's waiting for the outgoing handler to finish 990 | setTimeout(function () { 991 | d.resolve() 992 | }, 100) 993 | return doc 994 | } 995 | }) 996 | 997 | // Ongoing live replication 998 | const syncHandler = remote.sync(db, { live: true }) 999 | 1000 | // Wait to give replication a chance 1001 | await new Promise((resolve) => setTimeout(resolve, 500)) 1002 | 1003 | // Reset the incoming listener 1004 | d = defer() 1005 | await remote.put({ _id: 'doc' }) 1006 | // Wait for the incoming listener - everything's updated 1007 | await d.promise 1008 | 1009 | let doc = await db.get('doc') 1010 | assert('boo' in doc) 1011 | assert('foo' in doc) 1012 | assert.equal(doc.foo, 'baz') 1013 | 1014 | doc = await remote.get('doc') 1015 | // Reset the incoming listener 1016 | d = defer() 1017 | await remote.put({ _id: 'doc', _rev: doc._rev, moo: 'bar' }) 1018 | // Wait for the incoming listener - everything's updated 1019 | await d.promise 1020 | 1021 | doc = await db.get('doc') 1022 | assert('moo' in doc) 1023 | assert('foo' in doc) 1024 | assert('boo' in doc) 1025 | assert.equal(doc.foo, 'baz') 1026 | assert.equal(doc.moo, 'bar') 1027 | assert.equal(doc.boo, 'lal') 1028 | 1029 | await syncHandler.cancel() 1030 | }) 1031 | }) 1032 | } 1033 | --------------------------------------------------------------------------------