├── .gitignore ├── test ├── fixtures │ ├── animaldb_expected.json.gz │ ├── test2.log │ ├── incomplete_changes.log │ ├── attachment.backup │ ├── test.log │ ├── animaldb_all_docs_4.json │ ├── animaldb_all_docs_2.json │ ├── animaldb_all_docs_3.json │ ├── animaldb_old_shallow.json │ ├── animaldb_expected_shallow.json │ ├── animaldb_all_docs_1.json │ ├── animaldb_expected.json │ └── animaldb_resumed_blank.json ├── ci_largee2e.js ├── ci_encryption.js ├── logfilesummary.js ├── cliutils.js ├── ci_event.js ├── ci_error.js ├── logfilegetbatches.js ├── hooks.js ├── ci_concurrent_backups.js ├── attachmentMappings.js ├── ci_compression.js ├── ci_e2e.js ├── liner.js ├── ci_basic.js ├── restore.js ├── config.js ├── ci_resume.js ├── test_process.js └── compare.js ├── .whitesource ├── .github ├── dco.yml ├── dependabot.yml ├── ISSUE_TEMPLATE.md └── PULL_REQUEST_TEMPLATE.md ├── .npmrc-jenkins ├── examples ├── cos-sdk │ ├── package.json │ ├── README.md │ ├── cos-restore-stream.js │ ├── cos-restore-file.js │ ├── cos-backup-stream.js │ └── cos-backup-file.js ├── cos-s3 │ ├── package.json │ ├── s3-restore-stream.js │ ├── README.md │ ├── s3-backup-stream.js │ ├── s3-backup-file.js │ └── s3-restore-file.js └── README.md ├── .npmignore ├── SECURITY.md ├── package.json ├── DCO1.1.txt ├── includes ├── logfilegetbatches.js ├── cliutils.js ├── logfilesummary.js ├── attachmentMappings.js ├── allDocsGenerator.js ├── restore.js ├── error.js ├── spoolchanges.js ├── config.js ├── parser.js └── request.js ├── eslint.config.mjs ├── bin ├── couchrestore.bin.js └── couchbackup.bin.js ├── AI_CODE_POLICY.md ├── CONTRIBUTING.md └── test-network └── conditions.js /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | node_modules 3 | npm-debug.log 4 | backup.txt 5 | out 6 | -------------------------------------------------------------------------------- /test/fixtures/animaldb_expected.json.gz: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/couchbackup/HEAD/test/fixtures/animaldb_expected.json.gz -------------------------------------------------------------------------------- /.whitesource: -------------------------------------------------------------------------------- 1 | { 2 | "settingsInheritedFrom": "whitesource-config/whitesource-config@master", 3 | "issueSettings": { 4 | "issueRepoName": "sdks" 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /test/fixtures/test2.log: -------------------------------------------------------------------------------- 1 | :t batch0 [{"id":"1"},{"id":"2"}] 2 | :t batch1 [{"id":"3"},{"id":"4"}] 3 | :t batch2 [{"id":"5"},{"id":"6"}] 4 | :changes_complete 1-abcetc 5 | -------------------------------------------------------------------------------- /.github/dco.yml: -------------------------------------------------------------------------------- 1 | # This enables DCO bot - https://github.com/probot/dco for more details. 
2 | # Exclude org members from DCO sign-off on commits 3 | require: 4 | members: false 5 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "npm" 4 | directory: "/" 5 | open-pull-requests-limit: 10 6 | schedule: 7 | interval: "daily" 8 | -------------------------------------------------------------------------------- /.npmrc-jenkins: -------------------------------------------------------------------------------- 1 | ; NB registry scopes are assumed to have scheme stripped and have a trailing slash 2 | ${NPMRC_REGISTRY}:username=${NPMRC_USER} 3 | ${NPMRC_REGISTRY}:email=${NPMRC_EMAIL} 4 | ${NPMRC_REGISTRY}:_authToken=${NPMRC_TOKEN} 5 | -------------------------------------------------------------------------------- /test/fixtures/incomplete_changes.log: -------------------------------------------------------------------------------- 1 | :t batch0 [{"id":"1"},{"id":"2"},{"id":"3"},{"id":"4"},{"id":"5"}] 2 | :t batch1 [{"id":"6"},{"id":"7"},{"id":"8"},{"id":"9"},{"id":"10"}] 3 | :t batch2 [{"id":"11"},{"id":"12"},{"id":"13"},{"id":"14"},{"id":"15"}] 4 | -------------------------------------------------------------------------------- /examples/cos-sdk/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "couchbackup-examples-cos-sdk", 3 | "version": "0.0.1", 4 | "description": "Examples of using CouchBackup as a library", 5 | "dependencies": { 6 | "@cloudant/couchbackup": "^2.11.6", 7 | "ibm-cos-sdk": "^1.14.1", 8 | "tmp": "^0.2.3", 9 | "verror": "^1.10.1", 10 | "yargs": "^17.7.2" 11 | }, 12 | "license": "Apache-2.0" 13 | } 14 | -------------------------------------------------------------------------------- /test/fixtures/attachment.backup: -------------------------------------------------------------------------------- 
1 | {"name":"@cloudant/couchbackup","version":"2.10.3-SNAPSHOT","mode":"full","attachments":true} 2 | [{"_attachments":{"att.txt":{"contentType":"text/plain","data":"TXkgYXR0YWNobWVudCBkYXRh","digest":"md5-mbpDbGREDlAqGyaN6zP1DA==","revpos":2}},"_id":"d1","_rev":"2-dfe86ede5cf44dd4c3d1436c32db4be2","_revisions":{"ids":["dfe86ede5cf44dd4c3d1436c32db4be2","967a00dff5e02add41819138abb3284d"],"start":2}}] 3 | -------------------------------------------------------------------------------- /test/fixtures/test.log: -------------------------------------------------------------------------------- 1 | # this is a comment 2 | :t batch0 [{"id":"1"},{"id":"2"},{"id":"3"},{"id":"4"},{"id":"5"}] 3 | :t batch1 [{"id":"6"},{"id":"7"},{"id":"8"},{"id":"9"},{"id":"10"}] 4 | :t batch2 [{"id":"11"},{"id":"12"},{"id":"13"},{"id":"14"},{"id":"15"}] 5 | :d batch0 6 | :t batch3 [{"id":"16"},{"id":"17"},{"id":"18"},{"id":"19"},{"id":"20"}] 7 | :t batch4 [{"id":"21"},{"id":"22"}] 8 | :d batch3 9 | :changes_complete 10 | :d batch2 11 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | # https://docs.npmjs.com/misc/developers#keeping-files-out-of-your-package 2 | 3 | # build tool config 4 | Jenkinsfile 5 | .travis.yml 6 | 7 | # code examples 8 | examples 9 | 10 | # docs 11 | CONTRIBUTING.md 12 | DCO1.1.txt 13 | 14 | # github config 15 | .github 16 | 17 | # linters 18 | .eslintrc* 19 | 20 | # npm 21 | .npmignore 22 | 23 | # tests 24 | citest 25 | test 26 | toxytests 27 | 28 | # test logs 29 | backup.txt 30 | npm-debug.log 31 | out 32 | -------------------------------------------------------------------------------- /examples/cos-s3/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "couchbackup-examples", 3 | "version": "0.0.2", 4 | "description": "Examples of using CouchBackup as a library", 5 | 
"dependencies": { 6 | "@cloudant/couchbackup": "^2.9.16", 7 | "@aws-sdk/client-s3": "^3.499.0", 8 | "@aws-sdk/credential-providers": "^3.499.0", 9 | "@aws-sdk/lib-storage": "^3.499.0", 10 | "verror": "^1.10.1", 11 | "yargs": "^17.7.2", 12 | "tmp": "^0.2.3" 13 | }, 14 | "license": "Apache-2.0" 15 | } 16 | 17 | 18 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | ## Supported Versions 4 | 5 | | Version | Supported | 6 | | ------- | ------------------ | 7 | | latest | :white_check_mark: | 8 | 9 | ## Reporting a Vulnerability 10 | 11 | If you believe you have found a vulnerability please report it responsibly. IBM has documented the process for reporting vulnerabilities at https://www.ibm.com/trust/security-psirt. 12 | 13 | ## Vulnerabilities in dependencies 14 | 15 | This repo is regularly scanned for known CVEs in dependencies and updates to versions with remediations are applied and released as soon as possible. 16 | Reporting known CVEs in dependencies in this repository is not necessary and will not lead to a faster resolution. 17 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | Please [read these guidelines](http://ibm.biz/cdt-issue-guide) before opening an issue. 2 | 3 | 4 | 5 | ## Bug Description 6 | 7 | ### 1. Steps to reproduce and the simplest code sample possible to demonstrate the issue 8 | 12 | 13 | ### 2. What you expected to happen 14 | 15 | ### 3. 
What actually happened 16 | 17 | ## Environment details 18 | 24 | -------------------------------------------------------------------------------- /test/fixtures/animaldb_all_docs_4.json: -------------------------------------------------------------------------------- 1 | { 2 | "total_rows": 11, 3 | "offset": 0, 4 | "rows": [ 5 | { 6 | "id": "snipe", 7 | "key": "snipe", 8 | "value": { 9 | "rev": "1-cb497ff296134b6e8c311963d1d41ea6" 10 | }, 11 | "doc": { 12 | "_id": "snipe", 13 | "_rev": "1-cb497ff296134b6e8c311963d1d41ea6", 14 | "min_length": 0.25, 15 | "min_weight": 0.08, 16 | "latin_name": "Gallinago gallinago", 17 | "diet": "omnivore", 18 | "max_length": 0.27, 19 | "wiki_page": "http://en.wikipedia.org/wiki/Common_Snipe", 20 | "class": "bird", 21 | "max_weight": 0.14 22 | } 23 | }, 24 | { 25 | "id": "zebra", 26 | "key": "zebra", 27 | "value": { 28 | "rev": "1-154a9574f7aa96df26a6752fc10b7759" 29 | }, 30 | "doc": { 31 | "_id": "zebra", 32 | "_rev": "1-154a9574f7aa96df26a6752fc10b7759", 33 | "min_length": 2, 34 | "min_weight": 175, 35 | "diet": "herbivore", 36 | "max_length": 2.5, 37 | "wiki_page": "http://en.wikipedia.org/wiki/Plains_zebra", 38 | "class": "mammal", 39 | "max_weight": 387 40 | } 41 | } 42 | ] 43 | } 44 | -------------------------------------------------------------------------------- /test/ci_largee2e.js: -------------------------------------------------------------------------------- 1 | // Copyright © 2017, 2023 IBM Corp. All rights reserved. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | /* global describe it */ 16 | 17 | const u = require('./citestutils.js'); 18 | 19 | [{ useApi: true }, { useApi: false }].forEach(function(params) { 20 | describe(u.scenario('#slowest End to end backup and restore', params), function() { 21 | // 10 GB is about the largest the CI can handle before getting very upset 22 | // about how long things are taking 23 | it('should backup and restore largedb10g', async function() { 24 | u.setTimeout(this, 350 * 60); 25 | return u.testDirectBackupAndRestore(params, 'largedb10g', this.dbName); 26 | }); 27 | }); 28 | }); 29 | -------------------------------------------------------------------------------- /test/ci_encryption.js: -------------------------------------------------------------------------------- 1 | // Copyright © 2017, 2023 IBM Corp. All rights reserved. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | 15 | /* global describe it */ 16 | 17 | const u = require('./citestutils.js'); 18 | 19 | describe('Encryption tests', function() { 20 | // Note CLI only to use openssl command 21 | const p = { useApi: false, encryption: true }; 22 | 23 | it('should backup and restore animaldb via an encrypted file', async function() { 24 | // Allow up to 60 s for backup and restore of animaldb 25 | u.setTimeout(this, 60); 26 | const encryptedBackup = `./${this.fileName}`; 27 | return u.testBackupAndRestoreViaFile(p, 'animaldb', encryptedBackup, this.dbName).then(() => { 28 | return u.assertEncryptedFile(encryptedBackup); 29 | }); 30 | }); 31 | }); 32 | -------------------------------------------------------------------------------- /test/logfilesummary.js: -------------------------------------------------------------------------------- 1 | // Copyright © 2017, 2023 IBM Corp. All rights reserved. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | 15 | /* global describe it */ 16 | 17 | const assert = require('assert'); 18 | const logFileSummary = require('../includes/logfilesummary.js'); 19 | 20 | describe('#unit Fetching summary from the log file', function() { 21 | it('should fetch a summary correctly', async function() { 22 | const summary = await logFileSummary('./test/fixtures/test.log'); 23 | assert.ok(summary); 24 | assert.strictEqual(summary.changesComplete, true); 25 | assert.ok(summary.batches instanceof Map); 26 | assert.strictEqual(summary.batches.size, 2); 27 | assert.deepStrictEqual(summary.batches.get(1), true); 28 | assert.deepStrictEqual(summary.batches.get(4), true); 29 | }); 30 | }); 31 | -------------------------------------------------------------------------------- /test/cliutils.js: -------------------------------------------------------------------------------- 1 | // Copyright © 2025 IBM Corp. All rights reserved. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | 15 | /* global describe it */ 16 | 17 | const cliutils = require('../includes/cliutils.js'); 18 | const assert = require('assert'); 19 | 20 | describe('#unit Check URL handling', function() { 21 | it('should encode database names', async function() { 22 | const server = 'http://foo.example'; 23 | const dbName = 'a_$()+/-'; 24 | const expectedEncodedDbName = 'a_%24()%2B%2F-'; 25 | const encodedDbName = encodeURIComponent(dbName); 26 | assert.strictEqual(encodedDbName, 'a_%24()%2B%2F-', 27 | `The encoded DB name was ${encodedDbName} but should match the expected ${expectedEncodedDbName}`); 28 | const expectedUrl = `${server}/${expectedEncodedDbName}`; 29 | const url = cliutils.databaseUrl(server, dbName); 30 | assert.strictEqual(url, expectedUrl, 31 | `The url was ${url} but should be ${expectedUrl}` 32 | ); 33 | }); 34 | }); 35 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@cloudant/couchbackup", 3 | "version": "2.11.13-SNAPSHOT", 4 | "description": "CouchBackup - command-line backup utility for Cloudant/CouchDB", 5 | "homepage": "https://github.com/IBM/couchbackup", 6 | "repository": { 7 | "type": "git", 8 | "url": "git+https://github.com/IBM/couchbackup.git" 9 | }, 10 | "keywords": [ 11 | "CouchDB", 12 | "Cloudant", 13 | "couch", 14 | "backup", 15 | "command-line" 16 | ], 17 | "bugs": { 18 | "url": "https://github.com/IBM/couchbackup/issues", 19 | "email": "cldtsdks@us.ibm.com" 20 | }, 21 | "license": "Apache-2.0", 22 | "engines": { 23 | "node": "^20 || ^22 || ^24" 24 | }, 25 | "dependencies": { 26 | "@ibm-cloud/cloudant": "0.12.13", 27 | "commander": "14.0.2", 28 | "debug": "4.4.3" 29 | }, 30 | "peerDependencies": { 31 | "ibm-cloud-sdk-core": "^5.4.5", 32 | "axios": "^1.13.2" 33 | }, 34 | "main": "app.js", 35 | "bin": { 36 | "couchbackup": "bin/couchbackup.bin.js", 37 | "couchrestore": 
"bin/couchrestore.bin.js" 38 | }, 39 | "devDependencies": { 40 | "eslint": "9.39.2", 41 | "eslint-plugin-header": "3.1.1", 42 | "eslint-plugin-import": "2.32.0", 43 | "http-proxy": "1.18.1", 44 | "mocha": "11.7.5", 45 | "neostandard": "0.12.2", 46 | "nock": "13.5.6", 47 | "tail": "2.2.6", 48 | "uuid": "13.0.0" 49 | }, 50 | "scripts": { 51 | "lint": "eslint .", 52 | "unit": "mocha --grep \"#unit\"", 53 | "test": "npm run lint && npm run unit" 54 | }, 55 | "files": [ 56 | "/app.js", 57 | "/bin", 58 | "/includes" 59 | ] 60 | } 61 | -------------------------------------------------------------------------------- /test/ci_event.js: -------------------------------------------------------------------------------- 1 | // Copyright © 2017, 2023 IBM Corp. All rights reserved. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | /* global describe it */ 16 | 17 | const u = require('./citestutils.js'); 18 | 19 | describe('Event tests', function() { 20 | it('should get a finished event when using stdout', async function() { 21 | u.setTimeout(this, 40); 22 | // Use the API so we can get events, pass eventEmitter so we get the emitter back 23 | const params = { useApi: true, useStdOut: true }; 24 | // All API backups now set an event listener for finished and it is part of the backup 25 | // promise, so if the backup passes the finished event fired. 
26 | return u.testBackup(params, 'animaldb', process.stdout); 27 | }); 28 | 29 | it('should get a finished event when using file output', async function() { 30 | u.setTimeout(this, 40); 31 | // Use the API so we can get events, pass eventEmitter so we get the emitter back 32 | const params = { useApi: true }; 33 | const actualBackup = `./${this.fileName}`; 34 | return u.testBackupToFile(params, 'animaldb', actualBackup); 35 | }); 36 | }); 37 | -------------------------------------------------------------------------------- /DCO1.1.txt: -------------------------------------------------------------------------------- 1 | Developer Certificate of Origin 2 | Version 1.1 3 | 4 | Copyright (C) 2004, 2006 The Linux Foundation and its contributors. 5 | 1 Letterman Drive 6 | Suite D4700 7 | San Francisco, CA, 94129 8 | 9 | Everyone is permitted to copy and distribute verbatim copies of this 10 | license document, but changing it is not allowed. 11 | 12 | 13 | Developer's Certificate of Origin 1.1 14 | 15 | By making a contribution to this project, I certify that: 16 | 17 | (a) The contribution was created in whole or in part by me and I 18 | have the right to submit it under the open source license 19 | indicated in the file; or 20 | 21 | (b) The contribution is based upon previous work that, to the best 22 | of my knowledge, is covered under an appropriate open source 23 | license and I have the right under that license to submit that 24 | work with modifications, whether created in whole or in part 25 | by me, under the same open source license (unless I am 26 | permitted to submit under a different license), as indicated 27 | in the file; or 28 | 29 | (c) The contribution was provided directly to me by some other 30 | person who certified (a), (b) or (c) and I have not modified 31 | it. 
32 | 33 | (d) I understand and agree that this project and the contribution 34 | are public and that a record of the contribution (including all 35 | personal information I submit with it, including my sign-off) is 36 | maintained indefinitely and may be redistributed consistent with 37 | this project or the open source license(s) involved. 38 | -------------------------------------------------------------------------------- /includes/logfilegetbatches.js: -------------------------------------------------------------------------------- 1 | // Copyright © 2017, 2023 IBM Corp. All rights reserved. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | const fs = require('node:fs'); 16 | const { LogMapper } = require('./backupMappings.js'); 17 | const { Liner } = require('./liner.js'); 18 | const { FilterStream, MappingStream } = require('./transforms.js'); 19 | 20 | /** 21 | * Return an array of streams that when pipelined will produce 22 | * pending backup batches from a log file. 
23 | * 24 | * @param {string} log - log file name 25 | * @param {Map} batches - a log summary batches Map of pending batch numbers 26 | * @returns a log summary object 27 | */ 28 | module.exports = function(log, batches) { 29 | const logMapper = new LogMapper(); 30 | return [ 31 | fs.createReadStream(log), // log file 32 | new Liner(), // split it into lines 33 | new MappingStream(logMapper.logLineToBackupBatch), // parse line to a backup batch 34 | new FilterStream((metadata) => { 35 | // delete returns true if the key exists, false otherwise 36 | return batches.delete(metadata.batch); 37 | }) // filter out already done batches 38 | ]; 39 | }; 40 | -------------------------------------------------------------------------------- /test/fixtures/animaldb_all_docs_2.json: -------------------------------------------------------------------------------- 1 | { 2 | "total_rows": 11, 3 | "offset": 0, 4 | "rows": [ 5 | { 6 | "id": "elephant", 7 | "key": "elephant", 8 | "value": { 9 | "rev": "1-4698ce7bbcdfb6644c1719e205a4130d" 10 | }, 11 | "doc": { 12 | "_id": "elephant", 13 | "_rev": "1-4698ce7bbcdfb6644c1719e205a4130d", 14 | "min_length": 3.2, 15 | "min_weight": 4700, 16 | "diet": "herbivore", 17 | "max_length": 4, 18 | "wiki_page": "http://en.wikipedia.org/wiki/African_elephant", 19 | "class": "mammal", 20 | "max_weight": 6050 21 | } 22 | }, 23 | { 24 | "id": "giraffe", 25 | "key": "giraffe", 26 | "value": { 27 | "rev": "1-5874d944abf1261c2a244a92b14c2f18" 28 | }, 29 | "doc": { 30 | "_id": "giraffe", 31 | "_rev": "1-5874d944abf1261c2a244a92b14c2f18", 32 | "min_length": 5, 33 | "min_weight": 830, 34 | "diet": "herbivore", 35 | "max_length": 6, 36 | "wiki_page": "http://en.wikipedia.org/wiki/Giraffe", 37 | "class": "mammal", 38 | "max_weight": 1600 39 | } 40 | }, 41 | { 42 | "id": "kookaburra", 43 | "key": "kookaburra", 44 | "value": { 45 | "rev": "1-0266ecfcdf2f0be7cbfa696dc7f0088e" 46 | }, 47 | "doc": { 48 | "_id": "kookaburra", 49 | "_rev": 
"1-0266ecfcdf2f0be7cbfa696dc7f0088e", 50 | "min_length": 0.28, 51 | "latin_name": "Dacelo novaeguineae", 52 | "diet": "carnivore", 53 | "max_length": 0.42, 54 | "wiki_page": "http://en.wikipedia.org/wiki/Kookaburra", 55 | "class": "bird" 56 | } 57 | } 58 | ] 59 | } 60 | -------------------------------------------------------------------------------- /examples/README.md: -------------------------------------------------------------------------------- 1 | # CouchBackup Examples 2 | 3 | This folder contains example Node.js scripts which use the `couchbackup` library. 4 | 5 | These scripts are for inspiration and demonstration. 6 | They are not a supported part of couchbackup and should not be considered production ready. 7 | 8 | See README.md files in the appropriate SDK folders ([cos-s3/README.md](cos-s3/README.md), [cos-sdk/README.md](cos-sdk/README.md)). 9 | 10 | ## Current examples 11 | 12 | ### AWS S3 SDK 13 | 14 | 1. [cos-s3/s3-backup-file.js](cos-s3/s3-backup-file.js) -- backup a database (Cloudant or CouchDB) to an S3-API compatible store using a intermediate file on disk to store the backup before upload. 15 | 2. [cos-s3/s3-backup-stream.js](cos-s3/s3-backup-stream.js) -- backup a database (Cloudant or CouchDB) to an S3-API compatible store by streaming the backup data directly from CouchDB or Cloudant into an object. 16 | 17 | ### IBM Cloud Object Store SDK 18 | 19 | #### Backup Scripts 20 | 3. [cos-sdk/cos-backup-file.js](cos-sdk/cos-backup-file.js) -- backup a database (Cloudant) to an IBM Cloud Object Store using a intermediate file on disk to store the backup before upload. 21 | 4. [cos-sdk/cos-backup-stream.js](cos-sdk/cos-backup-stream.js) -- backup a database (Cloudant) to an IBM Cloud Object Store by streaming the backup data directly from Cloudant into an object. 22 | 23 | #### Restore Scripts 24 | 5. 
[cos-sdk/cos-restore-file.js](cos-sdk/cos-restore-file.js) -- restore a Cloudant or CouchDB database from an IBM Cloud Object Store backup using an intermediary file on disk. 25 | 6. [cos-sdk/cos-restore-stream.js](cos-sdk/cos-restore-stream.js) -- restore a Cloudant or CouchDB database from an IBM Cloud Object Store backup via direct streaming without intermediate files. -------------------------------------------------------------------------------- /test/fixtures/animaldb_all_docs_3.json: -------------------------------------------------------------------------------- 1 | { 2 | "total_rows": 11, 3 | "offset": 0, 4 | "rows": [ 5 | { 6 | "id": "llama", 7 | "key": "llama", 8 | "value": { 9 | "rev": "1-342255201b0d698e70f9828b7002e30a" 10 | }, 11 | "doc": { 12 | "_id": "llama", 13 | "_rev": "1-342255201b0d698e70f9828b7002e30a", 14 | "min_length": 1.7, 15 | "min_weight": 130, 16 | "latin_name": "Lama glama", 17 | "diet": "herbivore", 18 | "max_length": 1.8, 19 | "wiki_page": "http://en.wikipedia.org/wiki/Llama", 20 | "class": "mammal", 21 | "max_weight": 200 22 | } 23 | }, 24 | { 25 | "id": "panda", 26 | "key": "panda", 27 | "value": { 28 | "rev": "1-45e8c0010dd80b7e60d3a50f5bcf348f" 29 | }, 30 | "doc": { 31 | "_id": "panda", 32 | "_rev": "1-45e8c0010dd80b7e60d3a50f5bcf348f", 33 | "min_length": 1.2, 34 | "min_weight": 75, 35 | "diet": "carnivore", 36 | "max_length": 1.8, 37 | "wiki_page": "http://en.wikipedia.org/wiki/Panda", 38 | "class": "mammal", 39 | "max_weight": 115 40 | } 41 | }, 42 | { 43 | "id": "snipe", 44 | "key": "snipe", 45 | "value": { 46 | "rev": "1-cb497ff296134b6e8c311963d1d41ea6" 47 | }, 48 | "doc": { 49 | "_id": "snipe", 50 | "_rev": "1-cb497ff296134b6e8c311963d1d41ea6", 51 | "min_length": 0.25, 52 | "min_weight": 0.08, 53 | "latin_name": "Gallinago gallinago", 54 | "diet": "omnivore", 55 | "max_length": 0.27, 56 | "wiki_page": "http://en.wikipedia.org/wiki/Common_Snipe", 57 | "class": "bird", 58 | "max_weight": 0.14 59 | } 60 | } 61 | ] 62 | } 63 
| -------------------------------------------------------------------------------- /test/ci_error.js: -------------------------------------------------------------------------------- 1 | // Copyright © 2017, 2024 IBM Corp. All rights reserved. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | /* global describe it */ 16 | 17 | const assert = require('assert'); 18 | const { mkdtemp, open, rm } = require('fs/promises'); 19 | const u = require('./citestutils.js'); 20 | 21 | describe('Write error tests', function() { 22 | it('calls callback with error set when stream is not writeable', async function() { 23 | u.setTimeout(this, 10); 24 | // Make a temp directory 25 | const dirname = await mkdtemp('test_backup_'); 26 | const filename = dirname + '/test.backup'; 27 | // Create a backup file 28 | const file = await open(filename, 'w'); 29 | // Use a read stream instead of a write stream 30 | const backupStream = await file.createReadStream(); 31 | const params = { useApi: true }; 32 | // try to do backup and check err was set in callback 33 | return assert.rejects(u.testBackup(params, 'animaldb', backupStream), { name: 'TypeError', message: 'dest.write is not a function' }) 34 | .finally(async () => { 35 | // Destroy the read stream we didn't use 36 | backupStream.destroy(); 37 | // cleanup temp dir 38 | await rm(dirname, { recursive: true }); 39 | }); 40 | }); 41 | }); 42 | 
-------------------------------------------------------------------------------- /test/logfilegetbatches.js: -------------------------------------------------------------------------------- 1 | // Copyright © 2017, 2023 IBM Corp. All rights reserved. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | /* global describe it */ 16 | 17 | const assert = require('assert'); 18 | const { Writable } = require('node:stream'); 19 | const { pipeline } = require('node:stream/promises'); 20 | const logFileGetBatches = require('../includes/logfilegetbatches.js'); 21 | 22 | describe('#unit Fetching batches from a log file', function() { 23 | it('should fetch multiple batches correctly', async function() { 24 | const output = []; 25 | // Test to get batches 1 and 4 26 | const summaryBatches = new Map().set(1, true).set(4, true); 27 | // Make a pipeline from the logFileGetBatches source 28 | await pipeline(...logFileGetBatches('./test/fixtures/test.log', summaryBatches), new Writable({ 29 | objectMode: true, 30 | write: (chunk, encoding, callback) => { 31 | output.push(chunk); 32 | callback(); 33 | } 34 | })); 35 | 36 | // Output array should contain 2 backup batch objects 37 | // one for batch 1 and one for batch 4 38 | const expected = [ 39 | { command: 't', batch: 1, docs: [{ id: '6' }, { id: '7' }, { id: '8' }, { id: '9' }, { id: '10' }] }, 40 | { command: 't', batch: 4, docs: [{ id: '21' }, { id: '22' }] } 41 | ]; 42 | 
assert.deepStrictEqual(output, expected); 43 | }); 44 | }); 45 | -------------------------------------------------------------------------------- /eslint.config.mjs: -------------------------------------------------------------------------------- 1 | import header from 'eslint-plugin-header'; 2 | import importPlugin from 'eslint-plugin-import'; 3 | import neostandard, { resolveIgnoresFromGitignore } from 'neostandard'; 4 | 5 | // Disable schema checking for eslint-plugin-header 6 | header.rules.header.meta.schema = false; 7 | 8 | // Export the linting config 9 | export default [ 10 | // Standard rules with semi 11 | ...neostandard({ 12 | ignores: resolveIgnoresFromGitignore(), 13 | languageOptions: { 14 | ecmaVersion: 2022, 15 | }, 16 | semi: true, 17 | }), 18 | // Customizations 19 | { 20 | rules: { 21 | 'handle-callback-err': 'off', 22 | strict: ['error', 'global'], 23 | '@stylistic/space-before-function-paren': ['error', { 24 | anonymous: 'never', 25 | named: 'never', 26 | asyncArrow: 'always', 27 | }], 28 | } 29 | }, 30 | // Header plugin 31 | { 32 | plugins: { 33 | header 34 | }, 35 | ignores: ['eslint.config.mjs'], 36 | rules: { 37 | 'header/header': [2, 'line', [ 38 | { pattern: '^\\ Copyright © 20\\d\\d(?:, 20\\d\\d)? IBM Corp\\. All rights reserved\\.$' }, 39 | '', 40 | ' Licensed under the Apache License, Version 2.0 (the "License");', 41 | ' you may not use this file except in compliance with the License.', 42 | ' You may obtain a copy of the License at', 43 | '', 44 | ' http://www.apache.org/licenses/LICENSE-2.0', 45 | '', 46 | ' Unless required by applicable law or agreed to in writing, software', 47 | ' distributed under the License is distributed on an "AS IS" BASIS,', 48 | ' WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.', 49 | ' See the License for the specific language governing permissions and', 50 | ' limitations under the License.' 
51 | ]] 52 | } 53 | }, 54 | // Import plugin 55 | { 56 | ...importPlugin.flatConfigs.recommended, 57 | languageOptions: { 58 | ecmaVersion: 2022, 59 | }, 60 | } 61 | ]; 62 | -------------------------------------------------------------------------------- /includes/cliutils.js: -------------------------------------------------------------------------------- 1 | // Copyright © 2017, 2018 IBM Corp. All rights reserved. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | /** 16 | * Utility methods for the command line interface. 17 | * @module cliutils 18 | * @see module:cliutils 19 | */ 20 | 21 | const url = require('url'); 22 | const error = require('./error.js'); 23 | 24 | module.exports = { 25 | 26 | /** 27 | * Combine a base URL and a database name, ensuring at least single slash 28 | * between root and database name. This allows users to have Couch behind 29 | * proxies that mount Couch's / endpoint at some other mount point. 30 | * @param {string} root - root URL 31 | * @param {string} databaseName - database name 32 | * @return concatenated URL. 
33 | * 34 | * @private 35 | */ 36 | databaseUrl: function databaseUrl(root, databaseName) { 37 | if (!root.endsWith('/')) { 38 | root = root + '/'; 39 | } 40 | try { 41 | return new url.URL(encodeURIComponent(databaseName), root).toString(); 42 | } catch (err) { 43 | throw error.wrapPossibleInvalidUrlError(err); 44 | } 45 | }, 46 | 47 | /** 48 | * Generate CLI argument usage text. 49 | * 50 | * @param {string} description - argument description. 51 | * @param {string} defaultValue - default argument value. 52 | * 53 | * @private 54 | */ 55 | getUsage: function getUsage(description, defaultValue) { 56 | return `${description} ${defaultValue !== undefined ? ` (default: ${defaultValue})` : ''}`; 57 | } 58 | }; 59 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | 4 | ## Checklist 5 | 6 | - [ ] Added tests for code changes _or_ test/build only changes 7 | - [ ] Updated the change log file (`CHANGES.md`) _or_ test/build only changes 8 | - [ ] Completed the PR template below: 9 | 10 | ## Description 11 | 28 | 29 | ## Approach 30 | 31 | 37 | 38 | ## Schema & API Changes 39 | 40 | 51 | 52 | ## Security and Privacy 53 | 54 | 65 | 66 | ## Testing 67 | 68 | 91 | 92 | ## Monitoring and Logging 93 | 102 | -------------------------------------------------------------------------------- /bin/couchrestore.bin.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | // Copyright © 2017, 2024 IBM Corp. All rights reserved. 3 | // 4 | // Licensed under the Apache License, Version 2.0 (the "License"); 5 | // you may not use this file except in compliance with the License. 
6 | // You may obtain a copy of the License at 7 | // 8 | // http://www.apache.org/licenses/LICENSE-2.0 9 | // 10 | // Unless required by applicable law or agreed to in writing, software 11 | // distributed under the License is distributed on an "AS IS" BASIS, 12 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | // See the License for the specific language governing permissions and 14 | // limitations under the License. 15 | 16 | const error = require('../includes/error.js'); 17 | const cliutils = require('../includes/cliutils.js'); 18 | const couchbackup = require('../app.js'); 19 | const parser = require('../includes/parser.js'); 20 | const debug = require('debug'); 21 | const restoreDebug = debug('couchbackup:restore'); 22 | const restoreBatchDebug = debug('couchbackup:restore:batch'); 23 | 24 | restoreDebug.enabled = true; 25 | 26 | try { 27 | const program = parser.parseRestoreArgs(); 28 | const databaseUrl = cliutils.databaseUrl(program.url, program.db); 29 | const opts = { 30 | bufferSize: program.bufferSize, 31 | parallelism: program.parallelism, 32 | requestTimeout: program.requestTimeout, 33 | iamApiKey: program.iamApiKey, 34 | iamTokenUrl: program.iamTokenUrl, 35 | attachments: program.attachments 36 | }; 37 | 38 | // log configuration to console 39 | console.error('='.repeat(80)); 40 | console.error('Performing restore on ' + databaseUrl.replace(/\/\/.+@/g, '//****:****@') + ' using configuration:'); 41 | console.error(JSON.stringify(opts, null, 2).replace(/"iamApiKey": "[^"]+"/, '"iamApiKey": "****"')); 42 | console.error('='.repeat(80)); 43 | 44 | restoreBatchDebug.enabled = !program.quiet; 45 | 46 | couchbackup.restore( 47 | process.stdin, // restore from stdin 48 | databaseUrl, 49 | opts, 50 | error.terminationCallback 51 | ).on('restored', function(obj) { 52 | restoreBatchDebug('Restored batch ID:', obj.batch, 'Total document revisions restored:', obj.total, 'Time:', obj.time); 53 | }).on('finished', function(obj) { 
54 | restoreDebug('finished', obj); 55 | }); 56 | } catch (err) { 57 | error.terminationCallback(err); 58 | } 59 | -------------------------------------------------------------------------------- /includes/logfilesummary.js: -------------------------------------------------------------------------------- 1 | // Copyright © 2017, 2023 IBM Corp. All rights reserved. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | const { createReadStream } = require('node:fs'); 16 | const { Writable } = require('node:stream'); 17 | const { pipeline } = require('node:stream/promises'); 18 | const { Liner } = require('./liner.js'); 19 | const { LogMapper } = require('./backupMappings.js'); 20 | const { MappingStream } = require('./transforms.js'); 21 | 22 | /** 23 | * Generate a list of remaining batches from a download file. 24 | * Creates a summary containing a changesComplete boolean for 25 | * if the :changes_complete log file entry was found and a map 26 | * of pending batch numbers that have yet to be backed up 27 | * (i.e. the difference of :t and :d log file entries). 
28 | * 29 | * @param {string} log - log file name 30 | * @returns a log summary object 31 | */ 32 | module.exports = async function(log) { 33 | const logMapper = new LogMapper(); 34 | const state = { changesComplete: false, batches: new Map() }; 35 | 36 | await pipeline( 37 | createReadStream(log), // read the log file 38 | new Liner(), // split it into lines 39 | new MappingStream(logMapper.logLineToMetadata), // parse line to metadata 40 | new Writable({ 41 | objectMode: true, 42 | write: (metadata, encoding, callback) => { 43 | switch (metadata.command) { 44 | case 't': 45 | state.batches.set(metadata.batch, true); 46 | break; 47 | case 'd': 48 | state.batches.delete(metadata.batch); 49 | break; 50 | case 'changes_complete': 51 | state.changesComplete = true; 52 | break; 53 | default: 54 | break; 55 | } 56 | callback(); 57 | } 58 | }) // Save the done batch number in an array 59 | ); 60 | return state; 61 | }; 62 | -------------------------------------------------------------------------------- /includes/attachmentMappings.js: -------------------------------------------------------------------------------- 1 | // Copyright © 2024 IBM Corp. All rights reserved. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | 15 | const debug = require('debug'); 16 | const mappingDebug = debug('couchbackup:mappings'); 17 | 18 | /** 19 | * The cloudant-node-sdk helpfully automatically converts the base64 encoded 20 | * inline attachments into Buffer so the binary data can be used by consuming 21 | * applications without the need to decode b64. 22 | * However, in the case of couchbackup we actually want the b64 data so that 23 | * we can write it in the inline attachment format to the backup file. 24 | * This class provides the mappings between Buffer and Base64 binary data. 25 | */ 26 | class Attachments { 27 | encode(backupBatch) { 28 | backupBatch.docs.map(doc => { 29 | if (doc._attachments) { 30 | Object.entries(doc._attachments).forEach(([k, attachment]) => { 31 | mappingDebug(`Preparing attachment ${k} for backup.`); 32 | // Attachment data is a Buffer 33 | // Base64 encode the attachment data for the backup file 34 | attachment.data = attachment.data.toString('base64'); 35 | return [k, attachment]; 36 | }); 37 | } 38 | return doc; 39 | }); 40 | return backupBatch; 41 | } 42 | 43 | decode(restoreBatch) { 44 | restoreBatch.docs.map(doc => { 45 | if (doc._attachments) { 46 | Object.entries(doc._attachments).forEach(([k, attachment]) => { 47 | mappingDebug(`Preparing attachment ${k} for restore.`); 48 | // Attachment data is a Base64 string 49 | // Base64 decode the attachment data into a Buffer 50 | attachment.data = Buffer.from(attachment.data, 'base64'); 51 | return [k, attachment]; 52 | }); 53 | } 54 | return doc; 55 | }); 56 | return restoreBatch; 57 | } 58 | } 59 | 60 | module.exports = { 61 | Attachments 62 | }; 63 | -------------------------------------------------------------------------------- /test/hooks.js: -------------------------------------------------------------------------------- 1 | // Copyright © 2017, 2023 IBM Corp. All rights reserved. 
2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | /* global beforeEach afterEach */ 16 | 17 | const uuid = require('uuid').v4; 18 | const fs = require('fs'); 19 | const { newSimpleClient } = require('../includes/request.js'); 20 | 21 | const sharedClient = makeSharedClient(); 22 | function makeSharedClient() { 23 | const url = (process.env.COUCH_BACKEND_URL) ? process.env.COUCH_BACKEND_URL : 'https://no-couch-backend-url-set.test'; 24 | const opts = {}; 25 | opts.iamApiKey = process.env.COUCHBACKUP_TEST_IAM_API_KEY || null; 26 | opts.iamTokenUrl = process.env.CLOUDANT_IAM_TOKEN_URL || null; 27 | return newSimpleClient(url, opts).service; 28 | } 29 | 30 | // Mocha hooks that will be at the root context so run for all tests 31 | 32 | beforeEach('Create test database', async function() { 33 | // Don't run hook for unit tests, just for CI 34 | if (!this.currentTest.fullTitle().includes('#unit')) { 35 | // Allow 10 seconds to create the DB 36 | this.timeout(10 * 1000); 37 | const unique = uuid(); 38 | this.fileName = `${unique}`; 39 | this.dbName = 'couchbackup_test_' + unique; 40 | return sharedClient.putDatabase({ db: this.dbName }); 41 | } 42 | }); 43 | 44 | afterEach('Delete test database', async function() { 45 | // Don't run hook for unit tests, just for CI 46 | if (!this.currentTest.fullTitle().includes('#unit')) { 47 | // Allow 10 seconds to delete the DB 48 | this.timeout(10 * 1000); 49 | 
deleteIfExists(this.fileName); 50 | deleteIfExists(`${this.fileName}.log`); 51 | return sharedClient.deleteDatabase({ db: this.dbName }); 52 | } 53 | }); 54 | 55 | function deleteIfExists(fileName) { 56 | fs.unlink(fileName, function(err) { 57 | if (err) { 58 | if (err.code !== 'ENOENT') { 59 | console.error(`${err.code} ${err.message}`); 60 | } 61 | } 62 | }); 63 | } 64 | 65 | module.exports = { 66 | sharedClient 67 | }; 68 | -------------------------------------------------------------------------------- /includes/allDocsGenerator.js: -------------------------------------------------------------------------------- 1 | // Copyright © 2023, 2024 IBM Corp. All rights reserved. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | const debug = require('debug')('couchbackup:alldocsgenerator'); 16 | const { BackupError } = require('./error.js'); 17 | 18 | /** 19 | * Async generator function for paginating _all_docs for shallow backups. 
20 | * 21 | * @param {object} dbClient - object for connection to source database containing name, service and url 22 | * @param {object} options - backup configuration 23 | * @yields {object} a "done" type backup batch {command: d, batch: #, docs: [{_id: id, ...}, ...]} 24 | */ 25 | module.exports = async function * (dbClient, options = {}) { 26 | let batch = 0; 27 | let lastPage = false; 28 | let startKey = null; 29 | const opts = { db: dbClient.dbName, limit: options.bufferSize, includeDocs: true }; 30 | if (options.attachments === true) { 31 | opts.attachments = true; 32 | } 33 | do { 34 | if (startKey) opts.startKey = startKey; 35 | yield dbClient.service.postAllDocs(opts).then(response => { 36 | if (!(response.result && response.result.rows)) { 37 | throw new BackupError('AllDocsError', 'Invalid all docs response'); 38 | } 39 | debug(`Got page from start key '${startKey}'`); 40 | const docs = response.result.rows; 41 | debug(`Received ${docs.length} docs`); 42 | lastPage = docs.length < opts.limit; 43 | if (docs.length > 0) { 44 | const lastKey = docs[docs.length - 1].id; 45 | debug(`Received up to key ${lastKey}`); 46 | // To avoid double fetching a document solely for the purposes of getting 47 | // the next ID to use as a startKey for the next page we instead use the 48 | // last ID of the current page and append the lowest unicode sort 49 | // character. 50 | startKey = `${lastKey}\0`; 51 | } 52 | return { command: 'd', batch: batch++, docs: docs.map(doc => { return doc.doc; }) }; 53 | }); 54 | } while (!lastPage); 55 | }; 56 | -------------------------------------------------------------------------------- /test/ci_concurrent_backups.js: -------------------------------------------------------------------------------- 1 | // Copyright © 2018, 2025 IBM Corp. All rights reserved. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 
5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | /* global describe it */ 16 | 17 | const assert = require('node:assert'); 18 | const fs = require('node:fs'); 19 | const { once } = require('node:events'); 20 | const uuid = require('uuid').v4; 21 | const u = require('./citestutils.js'); 22 | const { Liner } = require('../includes/liner.js'); 23 | 24 | const params = { useApi: true }; 25 | 26 | describe(u.scenario('Concurrent database backups', params), function() { 27 | it('should run concurrent API database backups correctly #slower', async function() { 28 | // Allow up to 900 s to backup and compare (it should be much faster)! 29 | u.setTimeout(this, 900); 30 | 31 | const checkForEmptyBatches = async function(fileName) { 32 | assert.ok(await fs.createReadStream(fileName) // backup file 33 | .pipe(new Liner(true)) // split to lines 34 | .map(linerLine => JSON.parse(linerLine.line)) // parse JSON 35 | .filter(parsedJson => Array.isArray(parsedJson)) // we want batches so filter to arrays 36 | // Note: Empty batch arrays indicate that the running backup is 37 | // incorrectly sharing a log file with another ongoing backup job. 
38 | .every(batch => batch.length > 0), 39 | `Backup file ${fileName} contains empty batches.`); 40 | }; 41 | 42 | const backupPromise = async function() { 43 | const actualBackup = `./${uuid()}`; 44 | const output = fs.createWriteStream(actualBackup); 45 | return once(output, 'open').then(() => { 46 | return u.testBackup(params, 'largedb1g', output); 47 | }).then(() => { 48 | return checkForEmptyBatches(actualBackup); 49 | }); 50 | }; 51 | 52 | // [1] Run 'largedb1g' database backup 53 | const backup1 = backupPromise(); 54 | 55 | // [2] Run 'largedb1g' database backup 56 | const backup2 = backupPromise(); 57 | 58 | return Promise.all([backup1, backup2]); 59 | }); 60 | }); 61 | -------------------------------------------------------------------------------- /test/attachmentMappings.js: -------------------------------------------------------------------------------- 1 | // Copyright © 2024 IBM Corp. All rights reserved. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | 15 | /* global describe it */ 16 | 17 | const assert = require('node:assert'); 18 | const { Attachments } = require('../includes/attachmentMappings.js'); 19 | 20 | describe('#unit attachment mappings', function() { 21 | // Test data 22 | const stringData = 'My attachment data'; 23 | const bufferData = Buffer.from(stringData); 24 | const b64Data = bufferData.toString('base64'); 25 | const docTempate = { 26 | _attachments: { 27 | 'att.txt': { 28 | contentType: 'text/plain', 29 | revpos: 2 30 | } 31 | }, 32 | _id: 'd1', 33 | _rev: '2-1c7820dce2c9543d9417323a047e2896', 34 | _revisions: { ids: ['1c7820dce2c9543d9417323a047e2896', '967a00dff5e02add41819138abb3284d'], start: 2 } 35 | }; 36 | 37 | describe('encode', function() { 38 | it('should correctly convert a Buffer to Base64', function() { 39 | const docWithBufferAttachment = { ...docTempate, ...{ _attachments: { 'att.txt': { data: bufferData } } } }; 40 | const docWithBase64Attachment = { ...docTempate, ...{ _attachments: { 'att.txt': { data: b64Data } } } }; 41 | const bufferBatch = { docs: [docWithBufferAttachment] }; 42 | const b64Batch = { docs: [docWithBase64Attachment] }; 43 | const actualOutput = new Attachments().encode(bufferBatch); 44 | assert.deepStrictEqual(actualOutput, b64Batch); 45 | }); 46 | }); 47 | 48 | describe('decode', function() { 49 | it('should correctly convert Base64 to a Buffer', function() { 50 | const docWithBufferAttachment = { ...docTempate, ...{ _attachments: { 'att.txt': { data: bufferData } } } }; 51 | const docWithBase64Attachment = { ...docTempate, ...{ _attachments: { 'att.txt': { data: b64Data } } } }; 52 | const bufferBatch = { docs: [docWithBufferAttachment] }; 53 | const b64Batch = { docs: [docWithBase64Attachment] }; 54 | const actualOutput = new Attachments().decode(b64Batch); 55 | assert.deepStrictEqual(actualOutput, bufferBatch); 56 | }); 57 | }); 58 | }); 59 | -------------------------------------------------------------------------------- /AI_CODE_POLICY.md: 
-------------------------------------------------------------------------------- 1 | # Policy on submitting AI-generated code to IBM Open Source software projects (v.1.0) 2 | 3 | Although IBM does not generally prohibit AI-generated code (“AI Code”) from being contributed to IBM- maintained Open Source software projects (“IBM Projects”), any such contributions should comply with this policy (“Policy”). This Policy applies to contributions to IBM Projects made by IBMers and non-IBMers alike. 4 | 5 | AI Code can create potential copyright infringement issues for an IBM Project if it is substantially similar to code used to train the generative AI model (“Training Code”) that outputs the AI Code. As a result, IBM asks that contributors comply with the following guidance when deciding whether it is appropriate to submit a piece of AI Code to an IBM Project: 6 | 7 | 1. Only use a generative AI tool (“AI Tool”) with functionality specifically designed to mitigate the risk of AI Code that is substantially similar to Training Code, either by trying to (a) filter AI Code that is substantially similar to Training Code or (b) identify AI Code that is substantially similar to the Training Code such that the contributor can identify and comply with the Training Code’s license, including its notice and attribution requirements. You must ensure the mitigation functionality is enabled during your use of the AI Tool. 8 | 2. Ensure that the terms and conditions of the generative AI tool (“AI Tool”) (a) allow its AI Code to be used for external Open Source development and (b) are consistent with submitting the AI Code under the IBM Project’s Developer Certificate of Origin (DCO) or Contributor License Agreement (CLA), including enabling it to be used by the IBM Project under its Open Source license. 9 | 3. 
Review and understand the terms of the DCO or CLA specific to the IBM Project and ensure that contribution of the AI Code complies with such terms or other applicable project guidance. If the AI Tool indicates the AI Code is substantially similar to Training Code, comply with the license requirements of the Training Code’s license (including notice and attribution terms) when contributing the AI Code to the IBM Project, and only submit if its license is compatible with the IBM Project license. 10 | 4. As a best practice, if your contribution to this project contains AI Code, indicate so with a pair of inline comment tags that note the beginning and end of the AI Code content that identify the AI Tool used (including version if applicable). For example, if you modified a file and your contribution contains AI Code, include a comment before the AI-generated content such as “// Begin modifications with assistance from AI Tool [X v.Y] ” paired with a comment after the content “// End modifications with assistance from AI Tool [X v.Y].” If you have substantially altered the file using an AI tool, we recommend adding a comment in the file header (“// this file has been modified with the assistance of AI Tool [X v.Y]”). 11 | -------------------------------------------------------------------------------- /bin/couchbackup.bin.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | // Copyright © 2017, 2024 IBM Corp. All rights reserved. 3 | // 4 | // Licensed under the Apache License, Version 2.0 (the "License"); 5 | // you may not use this file except in compliance with the License. 
6 | // You may obtain a copy of the License at 7 | // 8 | // http://www.apache.org/licenses/LICENSE-2.0 9 | // 10 | // Unless required by applicable law or agreed to in writing, software 11 | // distributed under the License is distributed on an "AS IS" BASIS, 12 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | // See the License for the specific language governing permissions and 14 | // limitations under the License. 15 | 16 | const error = require('../includes/error.js'); 17 | const fs = require('fs'); 18 | const cliutils = require('../includes/cliutils.js'); 19 | const couchbackup = require('../app.js'); 20 | const parser = require('../includes/parser.js'); 21 | const debug = require('debug'); 22 | const backupDebug = debug('couchbackup:backup'); 23 | const backupBatchDebug = debug('couchbackup:backup:batch'); 24 | 25 | backupDebug.enabled = true; 26 | 27 | try { 28 | const program = parser.parseBackupArgs(); 29 | const databaseUrl = cliutils.databaseUrl(program.url, program.db); 30 | 31 | const opts = { 32 | bufferSize: program.bufferSize, 33 | log: program.log, 34 | mode: program.mode, 35 | parallelism: program.parallelism, 36 | requestTimeout: program.requestTimeout, 37 | resume: program.resume, 38 | iamApiKey: program.iamApiKey, 39 | iamTokenUrl: program.iamTokenUrl, 40 | attachments: program.attachments 41 | }; 42 | 43 | // log configuration to console 44 | console.error('='.repeat(80)); 45 | console.error('Performing backup on ' + databaseUrl.replace(/\/\/.+@/g, '//****:****@') + ' using configuration:'); 46 | console.error(JSON.stringify(opts, null, 2).replace(/"iamApiKey": "[^"]+"/, '"iamApiKey": "****"')); 47 | console.error('='.repeat(80)); 48 | 49 | backupBatchDebug.enabled = !program.quiet; 50 | 51 | let ws = process.stdout; 52 | 53 | // open output file 54 | if (program.output) { 55 | let flags = 'w'; 56 | if (program.log && program.resume) { 57 | flags = 'a'; 58 | } 59 | const fd = fs.openSync(program.output, 
flags); 60 | ws = fs.createWriteStream(null, { fd }); 61 | } 62 | 63 | backupDebug('Fetching all database changes...'); 64 | 65 | couchbackup.backup( 66 | databaseUrl, 67 | ws, 68 | opts, 69 | error.terminationCallback 70 | ).on('changes', function(batch) { 71 | backupBatchDebug('Total batches received:', batch + 1); 72 | }).on('written', function(obj) { 73 | backupBatchDebug('Written batch ID:', obj.batch, 'Total document revisions written:', obj.total, 'Time:', obj.time); 74 | }).on('finished', function(obj) { 75 | backupDebug('Finished - Total document revisions written:', obj.total); 76 | }); 77 | } catch (err) { 78 | error.terminationCallback(err); 79 | } 80 | -------------------------------------------------------------------------------- /test/ci_compression.js: -------------------------------------------------------------------------------- 1 | // Copyright © 2017, 2024 IBM Corp. All rights reserved. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | 15 | /* global describe it */ 16 | 17 | const fs = require('fs'); 18 | const { once } = require('node:events'); 19 | const u = require('./citestutils.js'); 20 | 21 | [{ useApi: true }, { useApi: false }].forEach(function(params) { 22 | describe(u.scenario('Compression tests', params), function() { 23 | const p = u.p(params, { compression: true }); 24 | 25 | it('should backup animaldb to a compressed file', async function() { 26 | // Allow up to 60 s for backup of animaldb 27 | u.setTimeout(this, 60); 28 | const compressedBackup = `./${this.fileName}`; 29 | const output = fs.createWriteStream(compressedBackup); 30 | return once(output, 'open') 31 | .then(() => { 32 | return u.testBackup(p, 'animaldb', output); 33 | }) 34 | .then(() => { 35 | return u.assertGzipFile(compressedBackup); 36 | }); 37 | }); 38 | 39 | it('should restore animaldb from a compressed file', async function() { 40 | // Allow up to 60 s for backup of animaldb 41 | u.setTimeout(this, 60); 42 | const input = fs.createReadStream('./test/fixtures/animaldb_expected.json.gz'); 43 | return once(input, 'open') 44 | .then(() => { 45 | return u.testRestore(p, input, this.dbName); 46 | }) 47 | .then(() => { 48 | return u.dbCompare('animaldb', this.dbName); 49 | }); 50 | }); 51 | 52 | it('should backup and restore animaldb via a compressed file', async function() { 53 | // Allow up to 60 s for backup and restore of animaldb 54 | u.setTimeout(this, 60); 55 | const compressedBackup = `./${this.fileName}`; 56 | return u.testBackupAndRestoreViaFile(p, 'animaldb', compressedBackup, this.dbName).then(() => { 57 | return u.assertGzipFile(compressedBackup); 58 | }); 59 | }); 60 | 61 | it('should backup and restore animaldb via a compressed stream', async function() { 62 | // Allow up to 60 s for backup and restore of animaldb 63 | u.setTimeout(this, 60); 64 | return u.testDirectBackupAndRestore(p, 'animaldb', this.dbName); 65 | }); 66 | 67 | it('should backup and restore largedb2g via a compressed file 
#slower', async function() { 68 | // Takes ~ 25 min using CLI, but sometimes over an hour with API 69 | u.setTimeout(this, 180 * 60); 70 | const compressedBackup = `./${this.fileName}`; 71 | params.compression = true; 72 | return u.testBackupAndRestoreViaFile(p, 'largedb2g', compressedBackup, this.dbName); 73 | }); 74 | }); 75 | }); 76 | -------------------------------------------------------------------------------- /test/ci_e2e.js: -------------------------------------------------------------------------------- 1 | // Copyright © 2017, 2024 IBM Corp. All rights reserved. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | 15 | /* global describe it */ 16 | 17 | delete require.cache[require.resolve('./citestutils.js')]; 18 | const u = require('./citestutils.js'); 19 | const client = require('./hooks.js').sharedClient; 20 | const { Writable } = require('node:stream'); 21 | const { pipeline } = require('node:stream/promises'); 22 | const assert = require('node:assert'); 23 | 24 | [{ useApi: true }, { useApi: false }].forEach(function(params) { 25 | describe(u.scenario('End to end backup and restore', params), function() { 26 | it('should backup and restore animaldb', async function() { 27 | // Allow up to 60 s for backup and restore of animaldb 28 | u.setTimeout(this, 60); 29 | return u.testDirectBackupAndRestore(params, 'animaldb', this.dbName); 30 | }); 31 | 32 | it('should backup and restore largedb1g #slow', async function() { 33 | // Allow up to 30 m for backup and restore of largedb1g 34 | // This is a long time but when many builds run in parallel it can take a 35 | // while to get this done. 36 | u.setTimeout(this, 30 * 60); 37 | return u.testDirectBackupAndRestore(params, 'largedb1g', this.dbName); 38 | }); 39 | 40 | it('should restore and backup attachment', async function() { 41 | // Allow up to 60 s 42 | u.setTimeout(this, 60); 43 | const p = u.p(params, { opts: { attachments: true } }); 44 | const expectedBackupFile = './test/fixtures/attachment.backup'; 45 | const actualBackup = `./${this.fileName}`; 46 | const actualRestoredAttachmentChunks = []; 47 | return u.testRestoreFromFile(p, expectedBackupFile, this.dbName) 48 | .then(() => { 49 | return u.testBackupToFile(p, this.dbName, actualBackup); 50 | }).then(() => { 51 | return u.backupFileCompare(actualBackup, expectedBackupFile); 52 | }).then(() => { 53 | return client.getAttachment({ 54 | db: this.dbName, 55 | docId: 'd1', 56 | attachmentName: 'att.txt' 57 | }); 58 | }).then(response => { 59 | return pipeline( 60 | response.result, new Writable({ 61 | write(chunk, encoding, callback) { 62 | 
actualRestoredAttachmentChunks.push(chunk); 63 | callback(); 64 | } 65 | })); 66 | }).then(() => { 67 | const actualRestoredAttachment = Buffer.concat(actualRestoredAttachmentChunks).toString('utf8'); 68 | assert.strictEqual(actualRestoredAttachment, 'My attachment data'); 69 | }); 70 | }); 71 | }); 72 | }); 73 | -------------------------------------------------------------------------------- /includes/restore.js: -------------------------------------------------------------------------------- 1 | // Copyright © 2017, 2024 IBM Corp. All rights reserved. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | const debug = require('debug')('couchbackup:restore'); 16 | const { Attachments } = require('./attachmentMappings.js'); 17 | const { Liner } = require('./liner.js'); 18 | const { Restore } = require('./restoreMappings.js'); 19 | const { BatchingStream, MappingStream } = require('./transforms.js'); 20 | const { Writable } = require('node:stream'); 21 | const { pipeline } = require('node:stream/promises'); 22 | 23 | /** 24 | * Function for performing a restore. 
25 | * 26 | * @param {object} dbClient - object for connection to source database containing name, service and url 27 | * @param {object} options - restore configuration 28 | * @param {Readable} readstream - the backup file content 29 | * @param {EventEmitter} ee - the user facing EventEmitter 30 | * @returns a promise that resolves when the restore is complete or rejects if it errors 31 | */ 32 | module.exports = function(dbClient, options, readstream, ee) { 33 | const restore = new Restore(dbClient, options); 34 | const start = new Date().getTime(); // restore start time 35 | let total = 0; // the total restored 36 | 37 | const output = new Writable({ 38 | objectMode: true, 39 | write: (restoreBatch, encoding, cb) => { 40 | debug(' restored ', restoreBatch.documents); 41 | total += restoreBatch.documents; 42 | const totalRunningTimeSec = (new Date().getTime() - start) / 1000; 43 | try { 44 | ee.emit('restored', { ...restoreBatch, total, time: totalRunningTimeSec }); 45 | } finally { 46 | cb(); 47 | } 48 | } 49 | }); 50 | 51 | const batchPreparationStreams = [ 52 | readstream, // the backup file 53 | new Liner(true), // line by line (for Node.js 24 compatibility santize unicode line separators) 54 | new MappingStream(restore.backupLineToDocsArray), // convert line to a docs array 55 | new BatchingStream(options.bufferSize, true), // make new arrays of the correct buffer size 56 | new MappingStream(restore.docsToRestoreBatch) // make a restore batch 57 | ]; 58 | const mappingStreams = []; 59 | const restoreStreams = [ 60 | new MappingStream(restore.pendingToRestored, options.parallelism), // do the restore at the desired level of concurrency 61 | output // emit restored events 62 | ]; 63 | 64 | if (options.attachments) { 65 | mappingStreams.push( 66 | new MappingStream(new Attachments().decode, options.parallelism) 67 | ); 68 | } 69 | 70 | return pipeline( 71 | ...batchPreparationStreams, 72 | ...mappingStreams, 73 | ...restoreStreams 74 | ).then(() => { 75 | 
return { total }; 76 | }); 77 | }; 78 | -------------------------------------------------------------------------------- /test/liner.js: -------------------------------------------------------------------------------- 1 | // Copyright © 2023 IBM Corp. All rights reserved. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | /* global beforeEach describe it */ 16 | 17 | const assert = require('node:assert'); 18 | const fs = require('node:fs'); 19 | const { versions } = require('node:process'); 20 | const { Readable, Writable } = require('node:stream'); 21 | const { pipeline } = require('node:stream/promises'); 22 | const { Liner } = require('../includes/liner.js'); 23 | 24 | describe('#unit liner', function() { 25 | // Use a liner to make the line objects 26 | let liner; 27 | let destination; 28 | let output; 29 | 30 | beforeEach('set up liner and sink', function() { 31 | liner = new Liner(); 32 | output = []; 33 | destination = new Writable({ 34 | objectMode: true, 35 | write: (chunk, encoding, callback) => { 36 | output.push(chunk); 37 | callback(); 38 | } 39 | }); 40 | }); 41 | 42 | it('should split to the correct number of lines', async function() { 43 | await pipeline(fs.createReadStream('./test/fixtures/test.log'), liner, destination); 44 | assert.strictEqual(output.length, 10); 45 | }); 46 | 47 | it('should count lines correctly', async function() { 48 | await 
pipeline(fs.createReadStream('./test/fixtures/test.log'), liner, destination); 49 | assert.strictEqual(liner.lineNumber, 10); 50 | }); 51 | 52 | it('should stream line numbers correctly', async function() { 53 | const input = Array.from({ length: 10000 }, (_, i) => `A test line with a number ${i}`); 54 | const inputLines = input.map(e => `${e}\n`); 55 | const expected = input.map((e, i) => { return { lineNumber: i + 1, line: e }; }); 56 | await pipeline(inputLines, liner, destination); 57 | assert.deepStrictEqual(output, expected); 58 | }); 59 | 60 | it('should split on unicode separators if not sanitizing', async function() { 61 | // This test will only split on /u2028 and /u2029 in Node.js >=24 62 | const nodeMajorVersion = parseInt(versions.node.split('.', 2)[0]); 63 | const expectedLines = nodeMajorVersion >= 24 ? ['foo', 'bar', 'foo', 'bar', 'foo'] : ['foo', 'bar', 'foo\u2028bar\u2029foo']; 64 | const input = 'foo\nbar\nfoo\u2028bar\u2029foo'; 65 | const expected = expectedLines.map((e, i) => { return { lineNumber: i + 1, line: e }; }); 66 | await pipeline(Readable.from(input), liner, destination); 67 | assert.deepStrictEqual(output, expected); 68 | }); 69 | 70 | it('should sanitize unicode separators when enabled', async function() { 71 | const expected = ['foo', 'bar', 'foo\\u2028bar\\u2029foo'].map((e, i) => { return { lineNumber: i + 1, line: e }; }); 72 | const input = 'foo\nbar\nfoo\u2028bar\u2029foo'; 73 | await pipeline(Readable.from(input), new Liner(true), destination); 74 | assert.deepStrictEqual(output, expected); 75 | }); 76 | }); 77 | -------------------------------------------------------------------------------- /includes/error.js: -------------------------------------------------------------------------------- 1 | // Copyright © 2017, 2024 IBM Corp. All rights reserved. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 
5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | // fatal errors 16 | const codes = { 17 | Error: 1, 18 | InvalidOption: 2, 19 | DatabaseNotFound: 10, 20 | Unauthorized: 11, 21 | Forbidden: 12, 22 | DatabaseNotEmpty: 13, 23 | NoLogFileName: 20, 24 | LogDoesNotExist: 21, 25 | IncompleteChangesInLogFile: 22, 26 | LogFileExists: 23, 27 | SpoolChangesError: 30, 28 | HTTPFatalError: 40, 29 | BulkGetError: 50, 30 | AttachmentsNotEnabledError: 60, 31 | AttachmentsMetadataAbsent: 61 32 | }; 33 | 34 | class BackupError extends Error { 35 | constructor(name, message) { 36 | super(message); 37 | this.name = name; 38 | } 39 | } 40 | 41 | class OptionError extends BackupError { 42 | constructor(message) { 43 | super('InvalidOption', message); 44 | } 45 | } 46 | 47 | class HTTPError extends BackupError { 48 | constructor(responseError, name) { 49 | // Special case some names for more useful error messages 50 | switch (responseError.status) { 51 | case 401: 52 | name = 'Unauthorized'; 53 | break; 54 | case 403: 55 | name = 'Forbidden'; 56 | break; 57 | default: 58 | name = name || 'HTTPFatalError'; 59 | } 60 | super(name, responseError.message); 61 | } 62 | } 63 | 64 | /** 65 | * A function for converting between error types and improving error messages. 66 | * 67 | * Cases: 68 | * - BackupError - return as is. 69 | * - response "like" errors - convert to HTTPError. 70 | * - ERR_INVALID_URL - convert to OptionError. 71 | * - Error (general case) - augment with additional statusText 72 | * or description if available. 
73 | * 74 | * @param {Error} e 75 | * @returns {Error} the modified error 76 | */ 77 | function convertError(e) { 78 | if (e instanceof BackupError) { 79 | // If it's already a BackupError just pass it on 80 | return e; 81 | } else if (e && e.status && e.status >= 400) { 82 | return new HTTPError(e); 83 | } else if (e.code === 'ERR_INVALID_URL') { 84 | // Wrap ERR_INVALID_URL in our own InvalidOption 85 | return new OptionError(e.message); 86 | } else { 87 | // For errors that don't have a status code, we are likely looking at a cxn 88 | // error. 89 | // Try to augment the message with more detail (core puts the code in statusText) 90 | if (e && e.statusText) { 91 | e.message = `${e.message} ${e.statusText}`; 92 | } 93 | if (e && e.description) { 94 | e.message = `${e.message} ${e.description}`; 95 | } 96 | return e; 97 | } 98 | } 99 | 100 | module.exports = { 101 | BackupError, 102 | OptionError, 103 | HTTPError, 104 | convertError, 105 | terminationCallback: function terminationCallback(err, data) { 106 | if (err) { 107 | console.error(`ERROR: ${err.message}`); 108 | process.exitCode = codes[err.name] || 1; 109 | process.exit(); 110 | } 111 | } 112 | }; 113 | -------------------------------------------------------------------------------- /test/fixtures/animaldb_old_shallow.json: -------------------------------------------------------------------------------- 1 | [{"_id":"_design/views101","views":{"latin_name_jssum":{"map":"function(doc) {\n if(doc.latin_name){\n emit(doc.latin_name, doc.latin_name.length);\n }\n}","reduce":"function (key, values, rereduce){\n return sum(values);\n}"},"latin_name":{"map":"function(doc) {\n if(doc.latin_name){\n emit(doc.latin_name, doc.latin_name.length);\n }\n}"},"diet_sum":{"map":"function(doc) {\n if(doc.diet){\n emit(doc.diet, 1);\n }\n}","reduce":"_sum"},"diet_count":{"map":"function(doc) {\n if(doc.diet && doc.latin_name){\n emit(doc.diet, doc.latin_name);\n 
}\n}","reduce":"_count"},"complex_count":{"map":"function(doc){\n if(doc.class && doc.diet){\n emit([doc.class, doc.diet], 1);\n }\n}","reduce":"_count"},"diet":{"map":"function(doc) {\n if(doc.diet){\n emit(doc.diet, 1);\n }\n}"},"complex_latin_name_count":{"map":"function(doc){\n if(doc.latin_name){\n emit([doc.class, doc.diet, doc.latin_name], doc.latin_name.length)\n }\n}","reduce":"_count"},"diet_jscount":{"map":"function(doc) {\n if(doc.diet){\n emit(doc.diet, 1);\n }\n}","reduce":"function (key, values, rereduce){\n return values.length;\n}"},"latin_name_count":{"map":"function(doc) {\n if(doc.latin_name){\n emit(doc.latin_name, doc.latin_name.length);\n }\n}","reduce":"_count"},"latin_name_sum":{"map":"function(doc) {\n if(doc.latin_name){\n emit(doc.latin_name, doc.latin_name.length);\n }\n}","reduce":"_sum"}},"indexes":{"animals":{"index":"function(doc){\n index(\"default\", doc._id);\n if(doc.min_length){\n index(\"min_length\", doc.min_length, {\"store\": \"yes\"});\n }\n if(doc.diet){\n index(\"diet\", doc.diet, {\"store\": \"yes\"});\n }\n if (doc.latin_name){\n index(\"latin_name\", doc.latin_name, {\"store\": \"yes\"});\n }\n if (doc['class']){\n index(\"class\", doc['class'], {\"store\": \"yes\"});\n }\n}"}}},{"_id":"aardvark","min_weight":40,"max_weight":65,"min_length":1,"max_length":2.2,"latin_name":"Orycteropus afer","wiki_page":"http://en.wikipedia.org/wiki/Aardvark","class":"mammal","diet":"omnivore"},{"_id":"badger","wiki_page":"http://en.wikipedia.org/wiki/Badger","min_weight":7,"max_weight":30,"min_length":0.6,"max_length":0.9,"latin_name":"Meles 
meles","class":"mammal","diet":"omnivore"},{"_id":"elephant","wiki_page":"http://en.wikipedia.org/wiki/African_elephant","min_weight":4700,"max_weight":6050,"min_length":3.2,"max_length":4,"class":"mammal","diet":"herbivore"},{"_id":"giraffe","min_weight":830,"min_length":5,"max_weight":1600,"max_length":6,"wiki_page":"http://en.wikipedia.org/wiki/Giraffe","class":"mammal","diet":"herbivore"},{"_id":"kookaburra","min_length":0.28,"max_length":0.42,"wiki_page":"http://en.wikipedia.org/wiki/Kookaburra","class":"bird","diet":"carnivore","latin_name":"Dacelo novaeguineae"},{"_id":"lemur","wiki_page":"http://en.wikipedia.org/wiki/Ring-tailed_lemur","min_weight":2.2,"max_weight":2.2,"min_length":0.95,"max_length":1.1,"class":"mammal","diet":"omnivore"},{"_id":"llama","min_weight":130,"max_weight":200,"min_length":1.7,"max_length":1.8,"latin_name":"Lama glama","wiki_page":"http://en.wikipedia.org/wiki/Llama","class":"mammal","diet":"herbivore"},{"_id":"panda","wiki_page":"http://en.wikipedia.org/wiki/Panda","min_weight":75,"max_weight":115,"min_length":1.2,"max_length":1.8,"class":"mammal","diet":"carnivore"},{"_id":"snipe","min_weight":0.08,"max_weight":0.14,"min_length":0.25,"max_length":0.27,"latin_name":"Gallinago gallinago","wiki_page":"http://en.wikipedia.org/wiki/Common_Snipe","class":"bird","diet":"omnivore"},{"_id":"zebra","wiki_page":"http://en.wikipedia.org/wiki/Plains_zebra","min_length":2,"max_length":2.5,"min_weight":175,"max_weight":387,"class":"mammal","diet":"herbivore"}] 2 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | ## Issues 4 | 5 | Please [read these guidelines](http://ibm.biz/cdt-issue-guide) before opening an issue. 6 | If you still need to open an issue then we ask that you complete the template as 7 | fully as possible. 

## Pull requests

We welcome pull requests, but ask contributors to keep in mind the following:

* Only PRs with the template completed will be accepted
* We will not accept PRs for user specific functionality

### Developer Certificate of Origin

In order for us to accept pull-requests, the contributor must sign-off a
[Developer Certificate of Origin (DCO)](DCO1.1.txt). This clarifies the
intellectual property license granted with any contribution. It is for your
protection as a Contributor as well as the protection of IBM and its customers;
it does not change your rights to use your own Contributions for any other purpose.

Please read the agreement and acknowledge it by signing-off your commit.

### AI-generated code policy

Before submitting your pull request, please ensure you've reviewed and adhere to our [AI policy](AI_CODE_POLICY.md).

## General information

### Output and debugging

The [`debug` package](https://www.npmjs.com/package/debug) is used to control
the output and debug statements.

The `DEBUG` environment variable controls the debugging.
* `couchbackup:backup` and `couchbackup:restore` are enabled by default and
produce the CLI stderr output statements.
* `couchbackup` - all debug statements
* `couchbackup:<module>` - to enable the debug statements for a given module

### Code Style

This project uses [semi-standard](https://github.com/Flet/semistandard).
If you `npm install`, you'll get a local [eslint](http://eslint.org/)
configured with our settings which your editor will hopefully pick up.
51 | 52 | ## Requirements 53 | 54 | Node.js and npm, other dependencies will be installed automatically via `npm` 55 | and the `package.json` `dependencies` and `devDependencies`. 56 | 57 | ### Setup 58 | 59 | 1. Clone or fork this repository. 60 | 2. Code 61 | 3. To install the dependencies run: 62 | ```sh 63 | npm install 64 | ``` 65 | 4. To use the local copy instead of `couchbackup` run: 66 | ```sh 67 | ./bin/couchbackup.bin.js 68 | ``` 69 | 70 | ## Testing 71 | 72 | ### Unit tests 73 | 74 | Unit tests are in the `test` folder and are run using the command: 75 | 76 | ```sh 77 | npm test 78 | ``` 79 | 80 | Unit tests should be tagged with `#unit` so that they can be run separately from 81 | the integration tests. 82 | 83 | ### Integration tests 84 | 85 | Integration tests are in files prefixed `ci_` in the `test` folder. 86 | These tests invoke `couchbackup` and `couchrestore` to work with real databases. 87 | The integration tests require credentials to create databases for restoration and 88 | to download the database comparison tool so whilst they do run as part of the 89 | Jenkins CI they cannot be run for all dev environments. 90 | 91 | Internal developers with credentials and the compare tool can test the CI 92 | locally by using these environment variables for example: 93 | ``` 94 | export COUCH_URL=https://... 95 | export COUCH_BACKEND_URL=$COUCH_URL 96 | ``` 97 | 98 | and then run the non-slow integration tests by issuing the command: 99 | ```sh 100 | ./node_modules/mocha/bin/mocha -i -g '#unit|#slow' 101 | ``` 102 | -------------------------------------------------------------------------------- /includes/spoolchanges.js: -------------------------------------------------------------------------------- 1 | // Copyright © 2017, 2024 IBM Corp. All rights reserved. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 
5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | const { createWriteStream } = require('node:fs'); 16 | const { pipeline } = require('node:stream/promises'); 17 | const { BackupError } = require('./error.js'); 18 | const { BatchingStream, DelegateWritable, MappingStream } = require('./transforms.js'); 19 | const debug = require('debug')('couchbackup:spoolchanges'); 20 | const { ChangesFollower } = require('@ibm-cloud/cloudant'); 21 | 22 | /** 23 | * Write log file for all changes from a database, ready for downloading 24 | * in batches. 25 | * 26 | * @param {object} dbClient - object for connection to source database containing name, service and url 27 | * @param {string} log - path to log file to use 28 | * @param {function} eeFn - event emitter function to call after each write 29 | * @param {number} bufferSize - the number of changes per batch/log line 30 | * @param {number} tolerance - changes follower error tolerance 31 | */ 32 | module.exports = function(dbClient, log, eeFn, bufferSize = 500, tolerance = 600000) { 33 | let lastSeq; 34 | let batch = 0; 35 | let totalBuffer = 0; 36 | 37 | class LogWriter extends DelegateWritable { 38 | constructor(log) { 39 | super('logFileChangesWriter', // name for debug 40 | createWriteStream(log, { flags: 'a' }), // log file write stream (append mode) 41 | () => { 42 | debug('finished streaming database changes'); 43 | return ':changes_complete ' + lastSeq + '\n'; 44 | }, // Function to write complete last chunk 45 | mapBackupBatchToPendingLogLine, // map the changes batch to a log line 46 | eeFn // postWrite 
function to emit the 'batch' event 47 | ); 48 | } 49 | } 50 | 51 | // Map a batch of changes to document IDs, checking for errors 52 | const mapChangesToIds = function(changesBatch) { 53 | return changesBatch.map((changeResultItem) => { 54 | if (changeResultItem.changes && changeResultItem.changes.length > 0) { 55 | if (changeResultItem.seq) { 56 | lastSeq = changeResultItem.seq; 57 | } 58 | // Extract the document ID from the change 59 | return { id: changeResultItem.id }; 60 | } else { 61 | throw new BackupError('SpoolChangesError', `Received invalid change: ${JSON.stringify(changeResultItem)}`); 62 | } 63 | }); 64 | }; 65 | 66 | const mapChangesBatchToBackupBatch = function(changesBatch) { 67 | return { command: 't', batch: batch++, docs: mapChangesToIds(changesBatch) }; 68 | }; 69 | 70 | const mapBackupBatchToPendingLogLine = function(backupBatch) { 71 | totalBuffer += backupBatch.docs.length; 72 | debug('writing', backupBatch.docs.length, 'changes to the backup log file with total of', totalBuffer); 73 | return `:t batch${backupBatch.batch} ${JSON.stringify(backupBatch.docs)}\n`; 74 | }; 75 | 76 | const changesParams = { 77 | db: dbClient.dbName, 78 | seqInterval: bufferSize 79 | }; 80 | 81 | const changesFollower = new ChangesFollower(dbClient.service, changesParams, tolerance); 82 | return pipeline( 83 | changesFollower.startOneOff(), // stream of changes from the DB 84 | new BatchingStream(bufferSize), // group changes into bufferSize batches for mapping 85 | new MappingStream(mapChangesBatchToBackupBatch), // map a batch of ChangesResultItem to doc IDs 86 | new LogWriter(log) 87 | ); 88 | }; 89 | -------------------------------------------------------------------------------- /test/fixtures/animaldb_expected_shallow.json: -------------------------------------------------------------------------------- 1 | {"name":"@cloudant/couchbackup","version":"2.9.10","mode":"shallow","attachments":false} 2 | 
[{"_id":"_design/views101","_rev":"1-a918dd4f11704143b535f0ab3af4bf75","views":{"latin_name_jssum":{"map":"function(doc) {\n if(doc.latin_name){\n emit(doc.latin_name, doc.latin_name.length);\n }\n}","reduce":"function (key, values, rereduce){\n return sum(values);\n}"},"latin_name":{"map":"function(doc) {\n if(doc.latin_name){\n emit(doc.latin_name, doc.latin_name.length);\n }\n}"},"diet_sum":{"map":"function(doc) {\n if(doc.diet){\n emit(doc.diet, 1);\n }\n}","reduce":"_sum"},"diet_count":{"map":"function(doc) {\n if(doc.diet && doc.latin_name){\n emit(doc.diet, doc.latin_name);\n }\n}","reduce":"_count"},"complex_count":{"map":"function(doc){\n if(doc.class && doc.diet){\n emit([doc.class, doc.diet], 1);\n }\n}","reduce":"_count"},"diet":{"map":"function(doc) {\n if(doc.diet){\n emit(doc.diet, 1);\n }\n}"},"complex_latin_name_count":{"map":"function(doc){\n if(doc.latin_name){\n emit([doc.class, doc.diet, doc.latin_name], doc.latin_name.length)\n }\n}","reduce":"_count"},"diet_jscount":{"map":"function(doc) {\n if(doc.diet){\n emit(doc.diet, 1);\n }\n}","reduce":"function (key, values, rereduce){\n return values.length;\n}"},"latin_name_count":{"map":"function(doc) {\n if(doc.latin_name){\n emit(doc.latin_name, doc.latin_name.length);\n }\n}","reduce":"_count"},"latin_name_sum":{"map":"function(doc) {\n if(doc.latin_name){\n emit(doc.latin_name, doc.latin_name.length);\n }\n}","reduce":"_sum"}},"indexes":{"animals":{"index":"function(doc){\n index(\"default\", doc._id);\n if(doc.min_length){\n index(\"min_length\", doc.min_length, {\"store\": \"yes\"});\n }\n if(doc.diet){\n index(\"diet\", doc.diet, {\"store\": \"yes\"});\n }\n if (doc.latin_name){\n index(\"latin_name\", doc.latin_name, {\"store\": \"yes\"});\n }\n if (doc['class']){\n index(\"class\", doc['class'], {\"store\": \"yes\"});\n }\n}"}}},{"_id":"aardvark","_rev":"3-fe45a3e06244adbe7ba145e74e57aba5","min_weight":40,"max_weight":65,"min_length":1,"max_length":2.2,"latin_name":"Orycteropus 
afer","wiki_page":"http://en.wikipedia.org/wiki/Aardvark","class":"mammal","diet":"omnivore"},{"_id":"badger","_rev":"4-51aa94e4b0ef37271082033bba52b850","wiki_page":"http://en.wikipedia.org/wiki/Badger","min_weight":7,"max_weight":30,"min_length":0.6,"max_length":0.9,"latin_name":"Meles meles","class":"mammal","diet":"omnivore"},{"_id":"elephant","_rev":"3-f812228f45b5f4e496250556195372b2","wiki_page":"http://en.wikipedia.org/wiki/African_elephant","min_weight":4700,"max_weight":6050,"min_length":3.2,"max_length":4,"class":"mammal","diet":"herbivore"},{"_id":"giraffe","_rev":"3-7665c3e66315ff40616cceef62886bd8","min_weight":830,"min_length":5,"max_weight":1600,"max_length":6,"wiki_page":"http://en.wikipedia.org/wiki/Giraffe","class":"mammal","diet":"herbivore"},{"_id":"kookaburra","_rev":"4-6038cf35dfe1211f85484dec951142c7","min_length":0.28,"max_length":0.42,"wiki_page":"http://en.wikipedia.org/wiki/Kookaburra","class":"bird","diet":"carnivore","latin_name":"Dacelo novaeguineae"},{"_id":"lemur","_rev":"3-552d9dbf91fa914a07756e69b9ceaafa","wiki_page":"http://en.wikipedia.org/wiki/Ring-tailed_lemur","min_weight":2.2,"max_weight":2.2,"min_length":0.95,"max_length":1.1,"class":"mammal","diet":"omnivore"},{"_id":"llama","_rev":"4-631ea89ca94b23a3093c1ef7dfce10e0","min_weight":130,"max_weight":200,"min_length":1.7,"max_length":1.8,"latin_name":"Lama glama","wiki_page":"http://en.wikipedia.org/wiki/Llama","class":"mammal","diet":"herbivore"},{"_id":"panda","_rev":"2-f578490963b0bd266f6c5bbf92302977","wiki_page":"http://en.wikipedia.org/wiki/Panda","min_weight":75,"max_weight":115,"min_length":1.2,"max_length":1.8,"class":"mammal","diet":"carnivore"},{"_id":"snipe","_rev":"3-4b2fb3b7d6a226b13951612d6ca15a6b","min_weight":0.08,"max_weight":0.14,"min_length":0.25,"max_length":0.27,"latin_name":"Gallinago 
gallinago","wiki_page":"http://en.wikipedia.org/wiki/Common_Snipe","class":"bird","diet":"omnivore"},{"_id":"zebra","_rev":"3-750dac460a6cc41e6999f8943b8e603e","wiki_page":"http://en.wikipedia.org/wiki/Plains_zebra","min_length":2,"max_length":2.5,"min_weight":175,"max_weight":387,"class":"mammal","diet":"herbivore"}] 3 | -------------------------------------------------------------------------------- /test/fixtures/animaldb_all_docs_1.json: -------------------------------------------------------------------------------- 1 | { 2 | "total_rows": 11, 3 | "offset": 0, 4 | "rows": [ 5 | { 6 | "id": "_design/views101", 7 | "key": "_design/views101", 8 | "value": { 9 | "rev": "1-af8f4cfc28e685f171f52fcdde460be2" 10 | }, 11 | "doc": { 12 | "_id": "_design/views101", 13 | "_rev": "1-af8f4cfc28e685f171f52fcdde460be2", 14 | "indexes": { 15 | "animals": { 16 | "index": "function(doc){\n index(\"default\", doc._id);\n if(doc.min_length){\n index(\"min_length\", doc.min_length, {\"store\": \"yes\"});\n }\n if(doc.diet){\n index(\"diet\", doc.diet, {\"store\": \"yes\"});\n }\n if (doc.latin_name){\n index(\"latin_name\", doc.latin_name, {\"store\": \"yes\"});\n }\n if (doc['class']){\n index(\"class\", doc['class'], {\"store\": \"yes\"});\n }\n}" 17 | } 18 | }, 19 | "views": { 20 | "latin_name_jssum": { 21 | "map": "function(doc) {\n if(doc.latin_name){\n emit(doc.latin_name, doc.latin_name.length);\n }\n}", 22 | "reduce": "function (key, values, rereduce){\n return sum(values);\n}" 23 | }, 24 | "latin_name": { 25 | "map": "function(doc) {\n if(doc.latin_name){\n emit(doc.latin_name, doc.latin_name.length);\n }\n}" 26 | }, 27 | "diet_sum": { 28 | "map": "function(doc) {\n if(doc.diet){\n emit(doc.diet, 1);\n }\n}", 29 | "reduce": "_sum" 30 | }, 31 | "diet_count": { 32 | "map": "function(doc) {\n if(doc.diet && doc.latin_name){\n emit(doc.diet, doc.latin_name);\n }\n}", 33 | "reduce": "_count" 34 | }, 35 | "complex_count": { 36 | "map": "function(doc){\n if(doc.class && 
doc.diet){\n emit([doc.class, doc.diet], 1);\n }\n}", 37 | "reduce": "_count" 38 | }, 39 | "diet": { 40 | "map": "function(doc) {\n if(doc.diet){\n emit(doc.diet, 1);\n }\n}" 41 | }, 42 | "complex_latin_name_count": { 43 | "map": "function(doc){\n if(doc.latin_name){\n emit([doc.class, doc.diet, doc.latin_name], doc.latin_name.length)\n }\n}", 44 | "reduce": "_count" 45 | }, 46 | "diet_jscount": { 47 | "map": "function(doc) {\n if(doc.diet){\n emit(doc.diet, 1);\n }\n}", 48 | "reduce": "function (key, values, rereduce){\n return values.length;\n}" 49 | }, 50 | "latin_name_count": { 51 | "map": "function(doc) {\n if(doc.latin_name){\n emit(doc.latin_name, doc.latin_name.length);\n }\n}", 52 | "reduce": "_count" 53 | }, 54 | "latin_name_sum": { 55 | "map": "function(doc) {\n if(doc.latin_name){\n emit(doc.latin_name, doc.latin_name.length);\n }\n}", 56 | "reduce": "_sum" 57 | } 58 | } 59 | } 60 | }, 61 | { 62 | "id": "aardvark", 63 | "key": "aardvark", 64 | "value": { 65 | "rev": "1-be3290452e032dc34de9c0e9e8c1739e" 66 | }, 67 | "doc": { 68 | "_id": "aardvark", 69 | "_rev": "1-be3290452e032dc34de9c0e9e8c1739e", 70 | "min_length": 1, 71 | "min_weight": 40, 72 | "latin_name": "Orycteropus afer", 73 | "diet": "omnivore", 74 | "max_length": 2.2, 75 | "wiki_page": "http://en.wikipedia.org/wiki/Aardvark", 76 | "class": "mammal", 77 | "max_weight": 65 78 | } 79 | }, 80 | { 81 | "id": "badger", 82 | "key": "badger", 83 | "value": { 84 | "rev": "1-d4e5759e694a9659d1acaa98040e52fd" 85 | }, 86 | "doc": { 87 | "_id": "badger", 88 | "_rev": "1-d4e5759e694a9659d1acaa98040e52fd", 89 | "min_length": 0.6, 90 | "min_weight": 7, 91 | "latin_name": "Meles meles", 92 | "diet": "omnivore", 93 | "max_length": 0.9, 94 | "wiki_page": "http://en.wikipedia.org/wiki/Badger", 95 | "class": "mammal", 96 | "max_weight": 30 97 | } 98 | } 99 | ] 100 | } 101 | -------------------------------------------------------------------------------- /includes/config.js: 
-------------------------------------------------------------------------------- 1 | // Copyright © 2017, 2024 IBM Corp. All rights reserved. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | const { mkdtempSync } = require('node:fs'); 16 | const { tmpdir } = require('node:os'); 17 | const { join, normalize } = require('node:path'); 18 | 19 | /** 20 | Return API default settings. 21 | */ 22 | function apiDefaults() { 23 | return { 24 | attachments: false, 25 | parallelism: 5, 26 | bufferSize: 500, 27 | requestTimeout: 120000, 28 | log: join(mkdtempSync(join(tmpdir(), 'couchbackup-')), `${Date.now()}`), 29 | resume: false, 30 | mode: 'full' 31 | }; 32 | } 33 | 34 | /** 35 | Return CLI default settings. 36 | */ 37 | function cliDefaults() { 38 | const defaults = apiDefaults(); 39 | 40 | // add additional legacy settings 41 | defaults.db = 'test'; 42 | defaults.url = 'http://localhost:5984'; 43 | 44 | // add CLI only option 45 | defaults.quiet = false; 46 | 47 | return defaults; 48 | } 49 | 50 | /** 51 | Override settings **in-place** with environment variables. 
52 | */ 53 | function applyEnvironmentVariables(opts) { 54 | // if we have a custom CouchDB url 55 | if (typeof process.env.COUCH_URL !== 'undefined') { 56 | opts.url = process.env.COUCH_URL; 57 | } 58 | 59 | // if we have a specified databases 60 | if (typeof process.env.COUCH_DATABASE !== 'undefined') { 61 | opts.db = process.env.COUCH_DATABASE; 62 | } 63 | 64 | // if we have a specified buffer size 65 | if (typeof process.env.COUCH_BUFFER_SIZE !== 'undefined') { 66 | opts.bufferSize = parseInt(process.env.COUCH_BUFFER_SIZE); 67 | } 68 | 69 | // if we have a specified parallelism 70 | if (typeof process.env.COUCH_PARALLELISM !== 'undefined') { 71 | opts.parallelism = parseInt(process.env.COUCH_PARALLELISM); 72 | } 73 | 74 | // if we have a specified request timeout 75 | if (typeof process.env.COUCH_REQUEST_TIMEOUT !== 'undefined') { 76 | opts.requestTimeout = parseInt(process.env.COUCH_REQUEST_TIMEOUT); 77 | } 78 | 79 | // if we have a specified log file 80 | if (typeof process.env.COUCH_LOG !== 'undefined') { 81 | opts.log = normalize(process.env.COUCH_LOG); 82 | } 83 | 84 | // if we are instructed to resume 85 | if (typeof process.env.COUCH_RESUME !== 'undefined' && process.env.COUCH_RESUME === 'true') { 86 | opts.resume = true; 87 | } 88 | 89 | // if we are given an output filename 90 | if (typeof process.env.COUCH_OUTPUT !== 'undefined') { 91 | opts.output = normalize(process.env.COUCH_OUTPUT); 92 | } 93 | 94 | // if we only want a shallow copy 95 | if (typeof process.env.COUCH_MODE !== 'undefined' && process.env.COUCH_MODE === 'shallow') { 96 | opts.mode = 'shallow'; 97 | } 98 | 99 | // if we are instructed to be quiet 100 | if (typeof process.env.COUCH_QUIET !== 'undefined' && process.env.COUCH_QUIET === 'true') { 101 | opts.quiet = true; 102 | } 103 | 104 | // if we have a specified API key 105 | if (typeof process.env.CLOUDANT_IAM_API_KEY !== 'undefined') { 106 | opts.iamApiKey = process.env.CLOUDANT_IAM_API_KEY; 107 | } 108 | 109 | // if we have a 
specified IAM token endpoint 110 | if (typeof process.env.CLOUDANT_IAM_TOKEN_URL !== 'undefined') { 111 | opts.iamTokenUrl = process.env.CLOUDANT_IAM_TOKEN_URL; 112 | } 113 | 114 | // if we are instructed to be quiet 115 | if (typeof process.env.COUCH_ATTACHMENTS !== 'undefined' && process.env.COUCH_ATTACHMENTS === 'true') { 116 | opts.attachments = true; 117 | } 118 | } 119 | 120 | module.exports = { 121 | apiDefaults, 122 | cliDefaults, 123 | applyEnvironmentVariables 124 | }; 125 | -------------------------------------------------------------------------------- /test-network/conditions.js: -------------------------------------------------------------------------------- 1 | // Copyright © 2017 IBM Corp. All rights reserved. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | 15 | /* eslint space-before-function-paren: ["error", { "anonymous": "ignore" }] */ 16 | /* global after before describe */ 17 | 18 | const assert = require('assert'); 19 | const axios = require('axios'); 20 | const net = require('node:net'); 21 | 22 | const httpProxy = require('http-proxy'); 23 | 24 | // Import the common hooks 25 | require('../test/hooks.js'); 26 | 27 | const poisons = [ 28 | { 29 | name: 'normal' 30 | }, 31 | { 32 | name: 'bandwidth-limit-upstream', 33 | type: 'bandwidth', 34 | stream: 'upstream', // client -> server 35 | attributes: { rate: 512 } // 0.5 MB/s 36 | }, 37 | { 38 | name: 'bandwidth-limit-downstream', 39 | type: 'bandwidth', 40 | stream: 'downstream', // client <- server 41 | attributes: { rate: 512 } 42 | }, 43 | { 44 | name: 'latency', 45 | type: 'latency', 46 | attributes: { latency: 875, jitter: 625 }, // max: 1500, mix: 250 47 | toxicity: 0.6 // probability: 60% 48 | }, 49 | { 50 | name: 'slow-read', 51 | type: 'slicer', 52 | attributes: { average_size: 256, delay: 100 }, 53 | toxicity: 0.1 // probability: 10% 54 | } 55 | ]; 56 | 57 | const proxyURL = process.env.PROXY_URL + '/proxies/couchdb'; 58 | 59 | const waitForSocket = (port) => { 60 | return new Promise((resolve) => { 61 | const socket = new net.Socket(); 62 | const connect = () => socket.connect({ port }); 63 | let reConnect = false; 64 | 65 | socket.on('connect', async () => { 66 | if (reConnect !== false) { 67 | clearInterval(reConnect); 68 | reConnect = false; 69 | } 70 | socket.end(); 71 | resolve(socket); 72 | }); 73 | 74 | socket.on('error', () => { 75 | if (reConnect === false) { 76 | reConnect = setInterval(connect, 1000); 77 | } 78 | }); 79 | 80 | connect(); 81 | }); 82 | }; 83 | 84 | describe('unreliable network tests', function() { 85 | let proxy; 86 | before('add proxy', async function() { 87 | // wait up to 10 sec for both proxies to allocate ports. 
88 | this.timeout(10000); 89 | 90 | proxy = httpProxy.createProxyServer({ 91 | target: process.env.COUCH_BACKEND_URL, 92 | changeOrigin: true 93 | }).listen(8080); 94 | 95 | await waitForSocket(8080); 96 | 97 | const toxiProxy = { 98 | name: 'couchdb', 99 | listen: '127.0.0.1:8888', 100 | upstream: '127.0.0.1:8080', 101 | enabled: true 102 | }; 103 | const resp = await axios.post(process.env.PROXY_URL + '/proxies', toxiProxy); 104 | assert.equal(resp.status, 201, 'Should create proxy "couchdb".'); 105 | await waitForSocket(8888); 106 | }); 107 | 108 | after('remove proxy', async function() { 109 | const resp = await axios.delete(proxyURL); 110 | assert.equal(resp.status, 204, 'Should remove proxy "couchdb".'); 111 | // shutdown http proxy 112 | return new Promise((resolve) => { 113 | proxy.close(() => { 114 | resolve(); 115 | }); 116 | }); 117 | }); 118 | 119 | poisons.forEach(function(poison) { 120 | describe(`tests using poison '${poison.name}'`, function() { 121 | before(`add toxic ${poison.name}`, async function() { 122 | if (poison.name === 'normal') return; 123 | const resp = await axios.post(proxyURL + '/toxics', poison); 124 | assert.equal(resp.status, 200, `Should create toxic ${poison.name}`); 125 | }); 126 | 127 | after(`remove toxic ${poison.name}`, async function() { 128 | if (poison.name === 'normal') return; 129 | const resp = await axios.delete(proxyURL + '/toxics/' + poison.name); 130 | assert.equal(resp.status, 204, `Should remove toxic ${poison.name}`); 131 | }); 132 | 133 | delete require.cache[require.resolve('../test/ci_e2e.js')]; 134 | require('../test/ci_e2e.js'); 135 | }); 136 | }); 137 | }); 138 | -------------------------------------------------------------------------------- /test/ci_basic.js: -------------------------------------------------------------------------------- 1 | // Copyright © 2017, 2024 IBM Corp. All rights reserved. 
2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | /* global describe it */ 16 | 17 | const fs = require('fs'); 18 | const { once } = require('node:events'); 19 | const u = require('./citestutils.js'); 20 | 21 | [{ useApi: true }, { useApi: false }].forEach(function(params) { 22 | describe(u.scenario('Basic backup and restore', params), function() { 23 | it('should backup animaldb to a file correctly', async function() { 24 | // Allow up to 40 s to backup and compare (it should be much faster)! 25 | u.setTimeout(this, 40); 26 | const actualBackup = `./${this.fileName}`; 27 | // Create a file and backup to it 28 | const output = fs.createWriteStream(actualBackup); 29 | return once(output, 'open') 30 | .then(() => { 31 | return u.testBackup(params, 'animaldb', output); 32 | }).then(() => { 33 | return u.backupFileCompare(actualBackup, './test/fixtures/animaldb_expected.json'); 34 | }); 35 | }); 36 | 37 | it('should restore animaldb to a database correctly', async function() { 38 | // Allow up to 60 s to restore and compare (again it should be faster)! 
39 | u.setTimeout(this, 60); 40 | const input = fs.createReadStream('./test/fixtures/animaldb_expected.json'); 41 | const dbName = this.dbName; 42 | return once(input, 'open').then(() => { 43 | return u.testRestore(params, input, dbName); 44 | }).then(() => { 45 | return u.dbCompare('animaldb', dbName); 46 | }); 47 | }); 48 | 49 | it('should execute a shallow mode backup successfully', async function() { 50 | // Allow 30 s 51 | u.setTimeout(this, 30); 52 | const actualBackup = `./${this.fileName}`; 53 | const output = fs.createWriteStream(actualBackup); 54 | // Add the shallow mode option 55 | const p = u.p(params, { opts: { mode: 'shallow' } }); 56 | return once(output, 'open') 57 | .then(() => { 58 | return u.testBackup(p, 'animaldb', output); 59 | }).then(() => { 60 | return u.backupFileCompare(actualBackup, './test/fixtures/animaldb_expected_shallow.json'); 61 | }); 62 | }); 63 | 64 | describe(u.scenario('Buffer size tests', params), function() { 65 | it('should backup/restore animaldb with the same buffer size', async function() { 66 | // Allow up to 60 s for backup and restore of animaldb 67 | u.setTimeout(this, 60); 68 | const actualBackup = `./${this.fileName}`; 69 | const logFile = `./${this.fileName}` + '.log'; 70 | const p = u.p(params, { opts: { log: logFile, bufferSize: 1 } }); 71 | return u.testBackupAndRestoreViaFile(p, 'animaldb', actualBackup, this.dbName); 72 | }); 73 | 74 | it('should backup/restore animaldb with backup buffer > restore buffer', async function() { 75 | // Allow up to 60 s for backup and restore of animaldb 76 | u.setTimeout(this, 60); 77 | const actualBackup = `./${this.fileName}`; 78 | const logFile = `./${this.fileName}` + '.log'; 79 | const dbName = this.dbName; 80 | const p = u.p(params, { opts: { log: logFile, bufferSize: 2 } }); // backup 81 | const q = u.p(params, { opts: { bufferSize: 1 } }); // restore 82 | return u.testBackupToFile(p, 'animaldb', actualBackup).then(() => { 83 | return u.testRestoreFromFile(q, 
actualBackup, dbName); 84 | }).then(() => { 85 | return u.dbCompare('animaldb', dbName); 86 | }); 87 | }); 88 | 89 | it('should backup/restore animaldb with backup buffer < restore buffer', async function() { 90 | // Allow up to 60 s for backup and restore of animaldb 91 | u.setTimeout(this, 60); 92 | const actualBackup = `./${this.fileName}`; 93 | const logFile = `./${this.fileName}` + '.log'; 94 | const dbName = this.dbName; 95 | const p = u.p(params, { opts: { log: logFile, bufferSize: 1 } }); // backup 96 | const q = u.p(params, { opts: { bufferSize: 2 } }); // restore 97 | return u.testBackupToFile(p, 'animaldb', actualBackup).then(() => { 98 | return u.testRestoreFromFile(q, actualBackup, dbName); 99 | }).then(() => { 100 | return u.dbCompare('animaldb', dbName); 101 | }); 102 | }); 103 | }); 104 | }); 105 | }); 106 | -------------------------------------------------------------------------------- /test/restore.js: -------------------------------------------------------------------------------- 1 | // Copyright © 2017, 2023 IBM Corp. All rights reserved. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | 15 | /* global describe it beforeEach */ 16 | 17 | const assert = require('assert'); 18 | const { EventEmitter } = require('events'); 19 | const fs = require('fs'); 20 | const nock = require('nock'); 21 | const { newClient } = require('../includes/request.js'); 22 | const restorePipeline = require('../includes/restore.js'); 23 | 24 | const { convertError } = require('../includes/error.js'); 25 | const longTestTimeout = 3000; 26 | 27 | describe('#unit Check database restore', function() { 28 | const dbUrl = 'http://localhost:5984/animaldb'; 29 | const dbClient = newClient(dbUrl, { parallelism: 1 }); 30 | 31 | beforeEach('Reset nocks', function() { 32 | nock.cleanAll(); 33 | }); 34 | 35 | function getRestorePipeline(fileName = './test/fixtures/animaldb_expected.json') { 36 | return restorePipeline( 37 | dbClient, 38 | { bufferSize: 500, parallelism: 1 }, 39 | fs.createReadStream(fileName), 40 | new EventEmitter() 41 | ).then((summary) => { 42 | assert.ok(nock.isDone()); 43 | // Return the total for assertion 44 | return summary.total; 45 | }).catch((e) => { 46 | // Error conversion takes place in the top level functions 47 | // so to facilitate unit testing we just do the same conversion here. 
48 | throw convertError(e); 49 | }); 50 | } 51 | 52 | it('should complete successfully', async function() { 53 | nock(dbUrl) 54 | .post('/_bulk_docs') 55 | .reply(200, []); // success 56 | 57 | return getRestorePipeline().then((total) => { 58 | assert.strictEqual(total, 15); 59 | }); 60 | }); 61 | 62 | it('should terminate on a fatal error', async function() { 63 | nock(dbUrl) 64 | .post('/_bulk_docs') 65 | .reply(401, { error: 'Unauthorized' }); // fatal error 66 | 67 | return assert.rejects( 68 | getRestorePipeline(), 69 | (err) => { 70 | assert.strictEqual(err.name, 'Unauthorized'); 71 | assert.strictEqual(err.message, 'Access is denied due to invalid credentials.'); 72 | assert.ok(nock.isDone()); 73 | return true; 74 | } 75 | ); 76 | }); 77 | 78 | it('should retry on transient errors', async function() { 79 | nock(dbUrl) 80 | .post('/_bulk_docs') 81 | .reply(429, { error: 'Too Many Requests' }) // transient error 82 | .post('/_bulk_docs') 83 | .reply(500, { error: 'Internal Server Error' }) // transient error 84 | .post('/_bulk_docs') 85 | .reply(200, { ok: true }); // third time lucky success 86 | 87 | return getRestorePipeline().then((total) => { 88 | assert.strictEqual(total, 15); 89 | }); 90 | }).timeout(longTestTimeout); 91 | 92 | it('should fail after 3 transient errors', async function() { 93 | nock(dbUrl) 94 | .post('/_bulk_docs') 95 | .reply(429, { error: 'Too Many Requests' }) // transient error 96 | .post('/_bulk_docs') 97 | .reply(500, { error: 'Internal Server Error' }) // transient error 98 | .post('/_bulk_docs') 99 | .reply(503, { error: 'Service Unavailable' }); // Final transient error 100 | 101 | return assert.rejects( 102 | getRestorePipeline(), 103 | (err) => { 104 | assert.strictEqual(err.name, 'HTTPFatalError'); 105 | assert.strictEqual(err.message, `503 post ${dbUrl}/_bulk_docs - Error: Service Unavailable`); 106 | assert.ok(nock.isDone()); 107 | return true; 108 | } 109 | ); 110 | }).timeout(longTestTimeout); 111 | 112 | it('should 
restore shallow backups without rev info successfully', async function() { 113 | nock(dbUrl) 114 | .post('/_bulk_docs') 115 | .reply(200, [{ ok: true, id: 'foo', rev: '1-abc' }]); // success 116 | 117 | return getRestorePipeline('./test/fixtures/animaldb_old_shallow.json') 118 | .then((total) => { 119 | assert.strictEqual(total, 11); 120 | }); 121 | }); 122 | 123 | it('should get a batch error for non-empty array response with newEdits false', async function() { 124 | nock(dbUrl) 125 | .post('/_bulk_docs') 126 | .reply(200, [{ id: 'foo', error: 'foo', reason: 'bar' }]); 127 | 128 | return assert.rejects( 129 | getRestorePipeline(), 130 | (err) => { 131 | assert.strictEqual(err.name, 'Error'); 132 | assert.strictEqual(err.message, 'Error writing batch 0 with newEdits:false and 1 items'); 133 | assert.ok(nock.isDone()); 134 | return true; 135 | } 136 | ); 137 | }); 138 | }); 139 | -------------------------------------------------------------------------------- /examples/cos-sdk/README.md: -------------------------------------------------------------------------------- 1 | # CouchBackup IBM COS Examples 2 | 3 | This folder contains example Node.js scripts which use the `couchbackup` library and the IBM COS SDK. 4 | 5 | These scripts are for inspiration and demonstration. 6 | They are not a supported part of couchbackup and should not be considered production ready. 7 | 8 | ## Prerequisites 9 | 10 | ### Install the dependencies 11 | 12 | Use `npm install` in this folder to install the script 13 | dependencies. 14 | Note: this uses the latest release of couchbackup, not the 15 | checked out version. 
16 | 17 | ### IBM COS SDK configuration 18 | 19 | The scripts expect the following values: 20 | * shared credentials file `~/.bluemix/cos_credentials` or target file from `COS_CREDENTIALS_FILE` environment variable 21 | * `CLOUDANT_IAM_API_KEY` environment variable set to API key with permission to the Cloudant instance 22 | * (optional) `CLOUDANT_IAM_TOKEN_URL` environment variable set to the URL of token endpoint (defaults to `https://iam.cloud.ibm.com`) 23 | 24 | #### IBM COS 25 | 26 | When using IBM Cloud Object Storage create a service credential with __disabled__ `Include HMAC Credential` option. 27 | 28 | Copy the credentials into `~/.bluemix/cos_credentials` or generate it using the `ibmcloud` CLI tool: 29 | ```bash 30 | ibmcloud resource service-key-create --instance-name 31 | 32 | ibmcloud resource service-key --output JSON | jq '.[].credentials' > ~/.bluemix/cos_credentials 33 | ``` 34 | More info on generating the credentials: 35 | https://cloud.ibm.com/docs/cloud-object-storage?topic=cloud-object-storage-service-credentials 36 | 37 | #### Service Credentials file structure: 38 | ```json 39 | { 40 | "apikey": "", 41 | "endpoints": "https://control.cloud-object-storage.cloud.ibm.com/v2/endpoints", 42 | "iam_apikey_description": "Auto-generated for key crn:v1:...f9d5b", 43 | "iam_apikey_id": "ApiKey-6f...b1", 44 | "iam_apikey_name": "", 45 | "iam_role_crn": "...Writer", 46 | "iam_serviceid_crn": "crn:v1:...", 47 | "resource_instance_id": "crn:v1:..." 48 | } 49 | ``` 50 | 51 | #### IBM COS 52 | 53 | Run the scripts with the `--cos_url` option pointing to your COS instance S3 endpoint. 54 | 55 | Corresponding endpoint URLs can be found under the link found in the Service Credentials file or on the IBM Cloud UI (`endpoints` field). 56 | 57 | ## Usage 58 | 59 | ### Backup Scripts 60 | 61 | Run a backup script without arguments to receive help e.g. 
62 | 63 | ```bash 64 | node cos-backup-file.js 65 | ``` 66 | 67 | The source database and destination bucket are required options. 68 | The minimum needed to run the backup scripts are thus: 69 | 70 | ```bash 71 | node cos-backup-file.js -s 'https://~replaceWithYourUniqueHost~.cloudantnosqldb.appdomain.cloud/sourcedb' -b 'examplebucket' --cos_url 's3.eu-de.cloud-object-storage.appdomain.cloud' 72 | ``` 73 | 74 | The object created in the bucket for the backup file will be 75 | named according to a prefix (default `couchbackup`), DB name and timestamp e.g. 76 | 77 | `couchbackup-sourcedb-2024-01-25T09:45:11.730Z` 78 | 79 | ### Restore Scripts 80 | 81 | Run a restore script without arguments to receive help e.g. 82 | 83 | ```bash 84 | node cos-restore-file.js 85 | ``` 86 | 87 | The target database URL, source bucket, and backup object name are required options. 88 | The minimum needed to run the restore scripts are thus: 89 | 90 | ```bash 91 | node cos-restore-file.js -t 'https://~replaceWithYourUniqueHost~.cloudantnosqldb.appdomain.cloud/targetdb' -b 'examplebucket' -o 'couchbackup-sourcedb-2024-01-25T09:45:11.730Z' --cos_url 's3.eu-de.cloud-object-storage.appdomain.cloud' 92 | ``` 93 | 94 | ## Progress and debug 95 | 96 | To see detailed progress of the backup/restore and upload/download or additional debug information 97 | use the `DEBUG` environment variable with label `couchbackup-cos` e.g. 98 | 99 | ```bash 100 | DEBUG='couchbackup-cos' node cos-backup-file.js -s 'https://~replaceWithYourUniqueHost~.cloudantnosqldb.appdomain.cloud/sourcedb' -b 'couchbackup-example' --cos_url "s3.eu-de.cloud-object-storage.appdomain.cloud" 101 | ``` 102 | 103 | ``` 104 | couchbackup-cos Creating a new backup of https://~replaceWithYourUniqueHost~.cloudantnosqldb.appdomain.cloud/sourcedb at couchbackup-example/couchbackup-sourcedb-2025-05-27T13:04:51.321Z... 
+0ms 105 | couchbackup-cos couchbackup to file done; uploading to IBM COS S3 +2s 106 | couchbackup-cos Uploading from /var/folders/lf/0mhmct8912qbgxq_hyv8nr9m0000gn/T/tmp-6623-dC9cBol6Y2Qj to couchbackup-example/couchbackup-sourcedb-2025-05-27T13:04:51.321Z +0ms 107 | couchbackup-cos IBM COS S3 upload done +611ms 108 | couchbackup-cos Upload succeeded +0ms 109 | couchbackup-cos { 110 | couchbackup-cos ETag: '"937f4ad657897f7cf883bdad0a6dfb76"', 111 | couchbackup-cos Location: 'https://couchbackup-example.s3.eu-de.cloud-object-storage.appdomain.cloud/couchbackup-sourcedb-2025-05-27T13%3A04%3A51.321Z', 112 | couchbackup-cos key: 'couchbackup-sourcedb-2025-05-27T13:04:51.321Z', 113 | couchbackup-cos Key: 'couchbackup-sourcedb-2025-05-27T13:04:51.321Z', 114 | couchbackup-cos Bucket: 'couchbackup-example' 115 | couchbackup-cos } +0ms 116 | couchbackup-cos Backup successful! +2ms 117 | couchbackup-cos done. +1ms 118 | ``` -------------------------------------------------------------------------------- /test/config.js: -------------------------------------------------------------------------------- 1 | // Copyright © 2017, 2024 IBM Corp. All rights reserved. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | 15 | /* global describe it before after */ 16 | 17 | const assert = require('assert'); 18 | const applyEnvVars = require('../includes/config.js').applyEnvironmentVariables; 19 | 20 | describe('#unit Configuration', function() { 21 | let processEnvCopy; 22 | 23 | before('Save env', function() { 24 | // Copy env so we can reset it after the tests 25 | processEnvCopy = JSON.parse(JSON.stringify(process.env)); 26 | }); 27 | 28 | after('Reset env', function() { 29 | process.env = processEnvCopy; 30 | }); 31 | 32 | it('respects the COUCH_URL env variable', function() { 33 | process.env.COUCH_URL = 'http://user:pass@myurl.com'; 34 | const config = {}; 35 | applyEnvVars(config); 36 | assert.strictEqual(typeof config.url, 'string'); 37 | assert.strictEqual(config.url, process.env.COUCH_URL); 38 | }); 39 | 40 | it('respects the COUCH_DATABASE env variable', function() { 41 | process.env.COUCH_DATABASE = 'mydb'; 42 | const config = {}; 43 | applyEnvVars(config); 44 | assert.strictEqual(typeof config.db, 'string'); 45 | assert.strictEqual(config.db, process.env.COUCH_DATABASE); 46 | }); 47 | 48 | it('respects the COUCH_BUFFER_SIZE env variable', function() { 49 | process.env.COUCH_BUFFER_SIZE = '1000'; 50 | const config = {}; 51 | applyEnvVars(config); 52 | assert.strictEqual(typeof config.bufferSize, 'number'); 53 | assert.strictEqual(config.bufferSize, 1000); 54 | }); 55 | 56 | it('respects the COUCH_PARALLELISM env variable', function() { 57 | process.env.COUCH_PARALLELISM = '20'; 58 | const config = {}; 59 | applyEnvVars(config); 60 | assert.strictEqual(typeof config.parallelism, 'number'); 61 | assert.strictEqual(config.parallelism, 20); 62 | }); 63 | 64 | it('respects the COUCH_REQUEST_TIMEOUT env variable', function() { 65 | process.env.COUCH_REQUEST_TIMEOUT = '10000'; 66 | const config = {}; 67 | applyEnvVars(config); 68 | assert.strictEqual(typeof config.requestTimeout, 'number'); 69 | assert.strictEqual(config.requestTimeout, 10000); 70 | }); 71 | 72 | 
it('respects the CLOUDANT_IAM_API_KEY env variable', function() { 73 | const key = 'ABC123-ZYX987_cba789-xyz321'; 74 | process.env.CLOUDANT_IAM_API_KEY = key; 75 | const config = {}; 76 | applyEnvVars(config); 77 | assert.strictEqual(typeof config.iamApiKey, 'string'); 78 | assert.strictEqual(config.iamApiKey, key); 79 | }); 80 | 81 | it('respects the CLOUDANT_IAM_TOKEN_URL env variable', function() { 82 | const u = 'https://testhost.example:1234/identity/token'; 83 | process.env.CLOUDANT_IAM_TOKEN_URL = u; 84 | const config = {}; 85 | applyEnvVars(config); 86 | assert.strictEqual(typeof config.iamTokenUrl, 'string'); 87 | assert.strictEqual(config.iamTokenUrl, u); 88 | }); 89 | 90 | it('respects the COUCH_LOG env variable', function() { 91 | process.env.COUCH_LOG = 'my.log'; 92 | const config = {}; 93 | applyEnvVars(config); 94 | assert.strictEqual(typeof config.log, 'string'); 95 | assert.strictEqual(config.log, process.env.COUCH_LOG); 96 | }); 97 | 98 | it('respects the COUCH_RESUME env variable', function() { 99 | process.env.COUCH_RESUME = 'true'; 100 | const config = {}; 101 | applyEnvVars(config); 102 | assert.strictEqual(typeof config.resume, 'boolean'); 103 | assert.strictEqual(config.resume, true); 104 | }); 105 | 106 | it('respects the COUCH_OUTPUT env variable', function() { 107 | process.env.COUCH_OUTPUT = 'myfile.txt'; 108 | const config = {}; 109 | applyEnvVars(config); 110 | assert.strictEqual(typeof config.output, 'string'); 111 | assert.strictEqual(config.output, process.env.COUCH_OUTPUT); 112 | }); 113 | 114 | it('respects the COUCH_MODE env variable', function() { 115 | process.env.COUCH_MODE = 'shallow'; 116 | const config = {}; 117 | applyEnvVars(config); 118 | assert.strictEqual(typeof config.mode, 'string'); 119 | assert.strictEqual(config.mode, 'shallow'); 120 | }); 121 | 122 | it('respects the COUCH_QUIET env variable', function() { 123 | process.env.COUCH_QUIET = 'true'; 124 | const config = {}; 125 | applyEnvVars(config); 126 | 
assert.strictEqual(typeof config.quiet, 'boolean'); 127 | assert.strictEqual(config.quiet, true); 128 | }); 129 | 130 | it('respects the COUCH_ATTACHMENTS env variable', function() { 131 | process.env.COUCH_ATTACHMENTS = 'true'; 132 | const config = {}; 133 | applyEnvVars(config); 134 | assert.strictEqual(typeof config.attachments, 'boolean'); 135 | assert.strictEqual(config.attachments, true); 136 | }); 137 | }); 138 | -------------------------------------------------------------------------------- /test/ci_resume.js: -------------------------------------------------------------------------------- 1 | // Copyright © 2017, 2024 IBM Corp. All rights reserved. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | 15 | /* global describe it */ 16 | 17 | const assert = require('assert'); 18 | const fs = require('fs'); 19 | const { once } = require('node:events'); 20 | const u = require('./citestutils.js'); 21 | 22 | [{ useApi: true }, { useApi: false }].forEach(function(params) { 23 | describe(u.scenario('Resume tests', params), function() { 24 | it('should create a log file', async function() { 25 | // Allow up to 90 s for this test 26 | u.setTimeout(this, 60); 27 | 28 | const actualBackup = `./${this.fileName}`; 29 | const logFile = `./${this.fileName}` + '.log'; 30 | const p = u.p(params, { opts: { log: logFile } }); 31 | return u.testBackupToFile(p, 'animaldb', actualBackup).then(() => { 32 | assert.ok(fs.existsSync(logFile), 'The log file should exist.'); 33 | }); 34 | }); 35 | 36 | it('should restore resumed corrupted animaldb to a database correctly', async function() { 37 | // Allow up to 60 s to restore and compare (again it should be faster)! 38 | u.setTimeout(this, 60); 39 | const input = fs.createReadStream('./test/fixtures/animaldb_corrupted_resume.json'); 40 | const dbName = this.dbName; 41 | return once(input, 'open') 42 | .then(() => { 43 | return u.testRestore(params, input, dbName); 44 | }).then(() => { 45 | return u.dbCompare('animaldb', dbName); 46 | }); 47 | }); 48 | 49 | it('should throw error for restore of corrupted animaldb to a database', async function() { 50 | // Allow up to 60 s to restore and compare (again it should be faster)! 51 | u.setTimeout(this, 60); 52 | const input = fs.createReadStream('./test/fixtures/animaldb_corrupted.json'); 53 | const dbName = this.dbName; 54 | const p = u.p(params, { expectedRestoreError: { name: 'BackupFileJsonError', code: 1 } }); 55 | return once(input, 'open') 56 | .then(() => u.testRestore(p, input, dbName)); 57 | }); 58 | 59 | it('should restore older version of corrupted animaldb to a database correctly', async function() { 60 | // Allow up to 60 s to restore and compare (again it should be faster)! 
61 | u.setTimeout(this, 60); 62 | const input = fs.createReadStream('./test/fixtures/animaldb_corrupted_old.json'); 63 | const dbName = this.dbName; 64 | return once(input, 'open') 65 | .then(() => { 66 | return u.testRestore(params, input, dbName); 67 | }).then(() => { 68 | return u.dbCompare('animaldb', dbName); 69 | }); 70 | }); 71 | 72 | it('should restore resumed animaldb with blank line to a database correctly', async function() { 73 | // Allow up to 60 s to restore and compare (again it should be faster)! 74 | u.setTimeout(this, 60); 75 | const input = fs.createReadStream('./test/fixtures/animaldb_resumed_blank.json'); 76 | const dbName = this.dbName; 77 | return once(input, 'open') 78 | .then(() => { 79 | return u.testRestore(params, input, dbName); 80 | }).then(() => { 81 | return u.dbCompare('animaldb', dbName); 82 | }); 83 | }); 84 | }); 85 | }); 86 | 87 | describe('Resume tests', function() { 88 | // Currently cannot abort API backups, when we do this test should be run for 89 | // both API and CLI 90 | it('should correctly backup and restore backup10m', async function() { 91 | // Allow up to 90 s for this test 92 | u.setTimeout(this, 90); 93 | 94 | const actualBackup = `./${this.fileName}`; 95 | const logFile = `./${this.fileName}` + '.log'; 96 | // Use abort parameter to terminate the backup 97 | const p = u.p(params, { abort: true }, { opts: { log: logFile } }); 98 | const restoreDb = this.dbName; 99 | // Set the database doc count as fewer than this should be written during 100 | // resumed backup. 
101 | p.exclusiveMaxExpected = 5096; 102 | 103 | return u.testBackupAbortResumeRestore(p, 'backup10m', actualBackup, restoreDb); 104 | }); 105 | // Note --output is only valid for CLI usage, this test should only run for CLI 106 | const params = { useApi: false }; 107 | it('should correctly backup and restore backup10m using --output', async function() { 108 | // Allow up to 90 s for this test 109 | u.setTimeout(this, 90); 110 | 111 | const actualBackup = `./${this.fileName}`; 112 | const logFile = `./${this.fileName}` + '.log'; 113 | // Use abort parameter to terminate the backup 114 | const p = u.p(params, { abort: true }, { opts: { output: actualBackup, log: logFile } }); 115 | const restoreDb = this.dbName; 116 | // Set the database doc count as fewer than this should be written during 117 | // resumed backup. 118 | p.exclusiveMaxExpected = 5096; 119 | 120 | return await u.testBackupAbortResumeRestore(p, 'backup10m', actualBackup, restoreDb); 121 | }); 122 | }); 123 | -------------------------------------------------------------------------------- /examples/cos-sdk/cos-restore-stream.js: -------------------------------------------------------------------------------- 1 | // Copyright © 2025 IBM Corp. All rights reserved. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
// Small script which restores a Cloudant or CouchDB database from an IBM Cloud Object Storage (COS)
// bucket via direct stream rather than on-disk file

const IBM_COS = require('ibm-cos-sdk');
const VError = require('verror');
const couchbackup = require('@cloudant/couchbackup');
const debug = require('debug')('couchbackup-cos');
const url = require('url');

/*
  Main function, run from base of file.
*/
function main() {
  const argv = require('yargs')
    .usage('Usage: $0 [options]')
    .example('$0 -t https://~replaceWithYourUniqueHost~.cloudantnosqldb.appdomain.cloud/targetdb -b <bucket> -o <object> --cos_url <url>', 'Restore database from a bucket via direct streaming')
    .options({
      target: { alias: 't', nargs: 1, demandOption: true, describe: 'Target database URL' },
      bucket: { alias: 'b', nargs: 1, demandOption: true, describe: 'Source bucket containing backup' },
      object: { alias: 'o', nargs: 1, demandOption: true, describe: 'Backup Object name in IBM COS' },
      cos_url: { nargs: 1, demandOption: true, describe: 'IBM COS S3 endpoint URL' },
    })
    .help('h').alias('h', 'help')
    .epilog('Copyright (C) IBM 2025')
    .argv;

  const restoreBucket = argv.bucket;
  const objectKey = argv.object;
  const cosEndpoint = argv.cos_url;
  const targetUrl = argv.target;

  const cloudantApiKey = process.env.CLOUDANT_IAM_API_KEY;

  const config = {
    endpoint: cosEndpoint,
    credentials: new IBM_COS.SharedJSONFileCredentials(),
  };

  const COS = new IBM_COS.S3(config);

  objectAccessible(COS, restoreBucket, objectKey)
    .then(() => {
      return restoreFromCOS(COS, restoreBucket, objectKey, targetUrl, cloudantApiKey);
    })
    .then(() => {
      debug('Restore completed successfully');
      process.exit(0);
    })
    .catch((err) => {
      console.error('Restore failed:', err.message);
      process.exit(1);
    });
}

/**
 * Check if object is accessible in COS
 * @param {IBM_COS.S3} s3
 * @param {string} bucketName
 * @param {string} objectKey
 * @throws {VError} if the object cannot be headed (missing or no access)
 */
async function objectAccessible(s3, bucketName, objectKey) {
  const params = {
    Key: objectKey,
    Bucket: bucketName,
  };
  try {
    await s3.headObject(params).promise();
    debug(`Object '${objectKey}' is accessible`);
  } catch (reason) {
    debug(reason);
    throw new VError(reason, 'Object is not accessible');
  }
}

/**
 * Restore directly from a backup file on IBM COS S3 to a new and empty CouchDB or Cloudant database.
 * Uses direct streaming without intermediate files.
 *
 * @param {IBM_COS.S3} cosClient Object store client
 * @param {string} cosBucket Backup source bucket
 * @param {string} cosObjectKey Backup file name on IBM COS
 * @param {string} targetUrl URL of database
 * @param {string} cloudantApiKey IAM API key for Cloudant authentication
 * @returns {Promise<object>} couchbackup restore summary data
 */
async function restoreFromCOS(cosClient, cosBucket, cosObjectKey, targetUrl, cloudantApiKey) {
  debug(`Starting direct stream restore from ${cosBucket}/${cosObjectKey} to ${s(targetUrl)}`);

  const cosInputStream = cosClient.getObject({
    Bucket: cosBucket,
    Key: cosObjectKey
  }).createReadStream({
    highWaterMark: 16 * 1024 * 1024 // 16MB buffer
  });

  const restorePromise = new Promise((resolve, reject) => {
    // Reject the restore promise on a download failure. Previously this
    // handler threw, but a throw from inside an 'error' event handler escapes
    // every try/catch and crashes the process with an uncaught exception
    // instead of being reported through the catch below.
    cosInputStream.on('error', (err) => {
      debug('COS input stream error:', err);
      reject(new VError(err, 'Failed to read from COS object'));
    });

    const params = {
      iamApiKey: cloudantApiKey,
      ...(process.env.CLOUDANT_IAM_TOKEN_URL && { iamTokenUrl: process.env.CLOUDANT_IAM_TOKEN_URL }),
    };

    const restoreStream = couchbackup.restore(
      cosInputStream,
      targetUrl,
      params,
      (err, data) => {
        if (err) {
          reject(err);
        } else {
          resolve(data);
        }
      }
    );

    restoreStream.on('restored', progress => {
      debug('Restored batch:', progress.batch, 'Total document revisions written:', progress.total, 'Time:', progress.time);
    });
  });

  try {
    const result = await restorePromise;
    debug(`Couchbackup restore to ${s(targetUrl)} complete; restored ${result.total} documents`);
    return result;
  } catch (err) {
    debug(err);
    throw new VError(err, 'Couchbackup restore failed');
  }
}

/**
 * Remove credentials from a URL for safe logging
 * @param {string} originalUrl URL to sanitize
 */
function s(originalUrl) {
  const parts = new url.URL(originalUrl);
  return url.format(parts, { auth: false });
}

main();
// Small script which restores a Cloudant or CouchDB database from an S3 compatible
// bucket via direct stream rather than on-disk file

const { S3Client, GetObjectCommand, HeadObjectCommand } = require('@aws-sdk/client-s3');
const { fromIni } = require('@aws-sdk/credential-providers');
const VError = require('verror').VError;
const { restore } = require('@cloudant/couchbackup');
const debug = require('debug')('couchbackup-s3');
const url = require('url');

/*
  Main function, run from base of file.
*/
function main() {
  const argv = require('yargs')
    .usage('Usage: $0 [options]')
    .example('$0 -t https://~replaceWithYourUniqueHost~.cloudantnosqldb.appdomain.cloud/targetdb -b <bucket> -o <object> --s3url <url>', 'Restore database from a bucket via direct streaming')
    .options({
      target: { alias: 't', nargs: 1, demandOption: true, describe: 'Target database URL' },
      bucket: { alias: 'b', nargs: 1, demandOption: true, describe: 'Source bucket containing backup' },
      object: { alias: 'o', nargs: 1, demandOption: true, describe: 'Backup Object name in S3 instance' },
      // This key must be `s3url` (it was previously `cos_url`, a copy-paste
      // from the COS SDK example): the endpoint is read from `argv.s3url`
      // below and the option is documented as `--s3url` in the README, so a
      // `cos_url` key meant the endpoint argument was silently ignored.
      s3url: { nargs: 1, describe: 'S3 endpoint URL' },
      awsprofile: { nargs: 1, describe: 'The profile section to use in the ~/.aws/credentials file', default: 'default' }
    })
    .help('h').alias('h', 'help')
    .epilog('Copyright (C) IBM 2025')
    .argv;

  const cloudantURL = argv.target;
  const restoreBucket = argv.bucket;
  const restoreObject = argv.object;
  const s3Endpoint = argv.s3url;

  const awsProfile = argv.awsprofile;
  const cloudantApiKey = process.env.CLOUDANT_IAM_API_KEY;

  const awsOpts = {
    signatureVersion: 'v4',
    credentials: fromIni({ profile: awsProfile })
  };
  if (typeof s3Endpoint !== 'undefined') {
    awsOpts.endpoint = s3Endpoint;
  }
  const s3 = new S3Client(awsOpts);

  // Sanitize the target URL before logging (consistent with the other debug
  // lines) in case it embeds credentials.
  debug(`Restoring from ${restoreBucket}/${restoreObject} to ${s(cloudantURL)}`);

  objectAccessible(s3, restoreBucket, restoreObject)
    .then(() => {
      return restoreFromS3(s3, restoreBucket, restoreObject, cloudantURL, cloudantApiKey);
    })
    .then(() => {
      debug('Restore completed successfully');
      process.exit(0);
    })
    .catch((err) => {
      console.error('Restore failed:', err.message);
      process.exit(1);
    });
}

/**
 * Check if the backup object is accessible in the bucket.
 *
 * @param {S3Client} S3
 * @param {string} restoreBucket
 * @param {string} objectKey
 * @throws {VError} if the object cannot be headed (missing or no access)
 */
async function objectAccessible(S3, restoreBucket, objectKey) {
  try {
    await S3.send(new HeadObjectCommand({
      Bucket: restoreBucket,
      Key: objectKey
    }));
    debug(`Object '${objectKey}' is accessible`);
  } catch (reason) {
    debug(reason);
    throw new VError(reason, 'Object is not accessible');
  }
}

/**
 * Restore directly from a backup file on S3 to a new and empty CouchDB or Cloudant database.
 *
 * @param {S3Client} s3Client Object store client
 * @param {string} s3Bucket Backup source bucket
 * @param {string} s3Key Backup file name on S3
 * @param {string} targetUrl URL of database
 * @param {string} cloudantApiKey IAM API key for Cloudant authentication
 * @returns {Promise<object>} couchbackup restore summary data
 */
async function restoreFromS3(s3Client, s3Bucket, s3Key, targetUrl, cloudantApiKey) {
  debug(`Starting direct stream restore from ${s3Bucket}/${s3Key} to ${s(targetUrl)}`);
  const inputStream = await s3Client.send(new GetObjectCommand({
    Bucket: s3Bucket,
    Key: s3Key
  }));

  const restorePromise = new Promise((resolve, reject) => {
    const params = {
      iamApiKey: cloudantApiKey,
      ...(process.env.CLOUDANT_IAM_TOKEN_URL && { iamTokenUrl: process.env.CLOUDANT_IAM_TOKEN_URL }),
    };
    const restoreStream = restore(
      inputStream.Body,
      targetUrl,
      params,
      (err, done) => {
        if (err) {
          reject(err);
        } else {
          resolve(done);
        }
      }
    );
    restoreStream.on('restored', progress => {
      debug('Restored batch:', progress.batch, 'Total document revisions written:', progress.total, 'Time:', progress.time);
    });
  });

  try {
    const result = await restorePromise;
    debug(`Couchbackup restore to ${s(targetUrl)} complete; restored ${result.total} documents`);
    return result;
  } catch (err) {
    debug(err);
    throw new VError(err, 'Couchbackup restore failed');
  }
}

/**
 * Remove credentials from a URL for safe logging
 * @param {string} originalUrl URL to sanitize
 */
function s(originalUrl) {
  const parts = new url.URL(originalUrl);
  return url.format(parts, { auth: false });
}

main();
-------------------------------------------------------------------------------- 1 | // Copyright © 2023, 2025 IBM Corp. All rights reserved. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | /* global */ 16 | 17 | const { fork, spawn } = require('node:child_process'); 18 | const { once } = require('node:events'); 19 | const { Duplex } = require('node:stream'); 20 | const debug = require('debug'); 21 | const logProcess = debug('couchbackup:test:process'); 22 | 23 | class TestProcess { 24 | constructor(cmd, args, mode) { 25 | this.cmd = cmd; 26 | // Child process stdio [stdin, stdout, stderr, ...extra channels] 27 | const childProcessOptions = { stdio: [] }; 28 | switch (mode) { 29 | case 'readable': 30 | // Readable only, no writing to stdin so ignore it 31 | childProcessOptions.stdio = ['ignore', 'pipe', 'inherit']; 32 | break; 33 | case 'writable': 34 | // Writable only, no reading from stdout so ignore it 35 | childProcessOptions.stdio = ['pipe', 'ignore', 'inherit']; 36 | break; 37 | default: 38 | // Default Duplex mode pipe both stdin and stdout 39 | childProcessOptions.stdio = ['pipe', 'pipe', 'inherit']; 40 | break; 41 | } 42 | if (cmd.endsWith('.js')) { 43 | // Add Node fork ipc channel 44 | childProcessOptions.stdio.push('ipc'); 45 | logProcess(`Forking Node process for ${cmd} with stdio:[${childProcessOptions.stdio}]`); 46 | this.childProcess = fork(cmd, args, childProcessOptions); 47 | } else { 48 | 
logProcess(`Spawning process for ${cmd} with stdio:[${childProcessOptions.stdio}]`); 49 | this.childProcess = spawn(cmd, args, childProcessOptions); 50 | } 51 | 52 | this.childProcessPromise = once(this.childProcess, 'close').then(() => { 53 | const code = this.childProcess.exitCode; 54 | const signal = this.childProcess.signalCode; 55 | logProcess(`Test process ${cmd} closed with code ${code} and signal ${signal}`); 56 | if (code === 0) { 57 | logProcess(`Resolving process promise for ${cmd}`); 58 | return Promise.resolve(code); 59 | } else { 60 | const e = new Error(`Test child process ${cmd} exited with code ${code} and ${signal}. This may be normal for error case testing.`); 61 | e.code = code; 62 | e.signal = signal; 63 | logProcess(`Will reject process promise for ${cmd} with ${e}`); 64 | return Promise.reject(e); 65 | } 66 | }); 67 | 68 | switch (mode) { 69 | case 'readable': 70 | this.stream = this.childProcess.stdout; 71 | break; 72 | case 'writable': 73 | this.stream = this.childProcess.stdin; 74 | break; 75 | default: 76 | // Default is duplex 77 | this.stream = Duplex.from({ writable: this.childProcess.stdin, readable: this.childProcess.stdout }); 78 | } 79 | } 80 | } 81 | 82 | module.exports = { 83 | TestProcess, 84 | cliBackup: function(databaseName, params = {}) { 85 | const args = ['--db', databaseName]; 86 | if (params.opts) { 87 | if (params.opts.mode) { 88 | args.push('--mode'); 89 | args.push(params.opts.mode); 90 | } 91 | if (params.opts.output) { 92 | args.push('--output'); 93 | args.push(params.opts.output); 94 | } 95 | if (params.opts.log) { 96 | args.push('--log'); 97 | args.push(params.opts.log); 98 | } 99 | if (params.opts.resume) { 100 | args.push('--resume'); 101 | } 102 | if (params.opts.bufferSize) { 103 | args.push('--buffer-size'); 104 | args.push(params.opts.bufferSize); 105 | } 106 | if (params.opts.iamApiKey) { 107 | args.push('--iam-api-key'); 108 | args.push(params.opts.iamApiKey); 109 | } 110 | if (params.opts.attachments) { 
111 | args.push('--attachments'); 112 | } 113 | } 114 | return new TestProcess('./bin/couchbackup.bin.js', args, 'readable'); 115 | }, 116 | cliRestore: function(databaseName, params) { 117 | const args = ['--db', databaseName]; 118 | if (params.opts) { 119 | if (params.opts.bufferSize) { 120 | args.push('--buffer-size'); 121 | args.push(params.opts.bufferSize); 122 | } 123 | if (params.opts.parallelism) { 124 | args.push('--parallelism'); 125 | args.push(params.opts.parallelism); 126 | } 127 | if (params.opts.requestTimeout) { 128 | args.push('--request-timeout'); 129 | args.push(params.opts.requestTimeout); 130 | } 131 | if (params.opts.iamApiKey) { 132 | args.push('--iam-api-key'); 133 | args.push(params.opts.iamApiKey); 134 | } 135 | if (params.opts.attachments) { 136 | args.push('--attachments'); 137 | } 138 | } 139 | return new TestProcess('./bin/couchrestore.bin.js', args, 'writable'); 140 | }, 141 | cliGzip: function() { 142 | return new TestProcess('gzip', []); 143 | }, 144 | cliGunzip: function() { 145 | return new TestProcess('gunzip', []); 146 | }, 147 | cliEncrypt: function() { 148 | return new TestProcess('openssl', [ 149 | 'enc', 150 | '-base64', 151 | '-aes256', 152 | '-md', 'sha512', 153 | '-pbkdf2', 154 | '-iter', '100', 155 | '-pass', 'pass:12345']); 156 | }, 157 | cliDecrypt: function() { 158 | return new TestProcess('openssl', [ 159 | 'enc', 160 | '-d', 161 | '-base64', 162 | '-aes256', 163 | '-md', 'sha512', 164 | '-pbkdf2', 165 | '-iter', '100', 166 | '-pass', 'pass:12345']); 167 | } 168 | }; 169 | -------------------------------------------------------------------------------- /includes/parser.js: -------------------------------------------------------------------------------- 1 | // Copyright © 2017, 2025 IBM Corp. All rights reserved. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 
5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | const cliutils = require('./cliutils.js'); 16 | const config = require('./config.js'); 17 | const error = require('./error.js'); 18 | const path = require('path'); 19 | const pkg = require('../package.json'); 20 | 21 | function parseBackupArgs() { 22 | const { program } = require('commander'); 23 | 24 | // Option CLI defaults 25 | const defaults = config.cliDefaults(); 26 | 27 | // Options set by environment variables 28 | const envVarOptions = {}; 29 | config.applyEnvironmentVariables(envVarOptions); 30 | 31 | program 32 | .version(pkg.version) 33 | .description('Backup a CouchDB/Cloudant database to a backup text file.') 34 | .usage('[options...]') 35 | .option('-a, --attachments', 36 | cliutils.getUsage('*EXPERIMENTAL/UNSUPPORTED*: enable backup of attachments', defaults.attachments)) 37 | .option('-b, --buffer-size ', 38 | cliutils.getUsage('number of documents fetched at once', defaults.bufferSize), 39 | Number) 40 | .option('-d, --db ', 41 | cliutils.getUsage('name of the database to backup', defaults.db)) 42 | .option('-k, --iam-api-key ', 43 | cliutils.getUsage('IAM API key to access the Cloudant server')) 44 | .option('-l, --log ', 45 | cliutils.getUsage('file to store logging information during backup; invalid in "shallow" mode', 'a temporary file'), 46 | path.normalize) 47 | .option('-m, --mode ', 48 | cliutils.getUsage('"shallow" if only a superficial backup is done (ignoring conflicts and revision tokens), else "full" for complete backup', defaults.mode), 49 | (mode) => { return 
mode.toLowerCase(); }) 50 | .option('-o, --output ', 51 | cliutils.getUsage('file name to store the backup data', 'stdout'), 52 | path.normalize) 53 | .option('-p, --parallelism ', 54 | cliutils.getUsage('number of HTTP requests to perform in parallel when performing a backup; ignored in "shallow" mode', defaults.parallelism), 55 | Number) 56 | .option('-q, --quiet', 57 | cliutils.getUsage('suppress batch messages', defaults.quiet)) 58 | .option('-r, --resume', 59 | cliutils.getUsage('continue a previous backup from its last known position; invalid in "shallow" mode', defaults.resume)) 60 | .option('-t, --request-timeout ', 61 | cliutils.getUsage('milliseconds to wait for a response to a HTTP request before retrying the request', defaults.requestTimeout), 62 | Number) 63 | .option('-u, --url ', 64 | cliutils.getUsage('URL of the CouchDB/Cloudant server', defaults.url)) 65 | .parse(process.argv); 66 | 67 | // Remove defaults that don't apply when using shallow mode 68 | if (program.opts().mode === 'shallow' || envVarOptions.mode === 'shallow') { 69 | delete defaults.parallelism; 70 | delete defaults.log; 71 | delete defaults.resume; 72 | } 73 | 74 | // Apply the options in order so that the CLI overrides env vars and env variables 75 | // override defaults. 76 | const opts = Object.assign({}, defaults, envVarOptions, program.opts()); 77 | 78 | if (opts.resume && (opts.log === defaults.log)) { 79 | // If resuming and the log file arg is the newly generated tmp name from defaults then we know that --log wasn't specified. 80 | // We have to do this check here for the CLI case because of the default. 
81 | error.terminationCallback(new error.BackupError('NoLogFileName', 'To resume a backup, a log file must be specified')); 82 | } 83 | return opts; 84 | } 85 | 86 | function parseRestoreArgs() { 87 | const { program } = require('commander'); 88 | 89 | // Option CLI defaults 90 | const defaults = config.cliDefaults(); 91 | 92 | // Options set by environment variables 93 | const envVarOptions = {}; 94 | config.applyEnvironmentVariables(envVarOptions); 95 | 96 | program 97 | .version(pkg.version) 98 | .description('Restore a CouchDB/Cloudant database from a backup text file.') 99 | .usage('[options...]') 100 | .option('-a, --attachments', 101 | cliutils.getUsage('*EXPERIMENTAL/UNSUPPORTED*: enable restore of attachments', defaults.attachments)) 102 | .option('-b, --buffer-size ', 103 | cliutils.getUsage('number of documents restored at once', defaults.bufferSize), 104 | Number) 105 | .option('-d, --db ', 106 | cliutils.getUsage('name of the new, existing database to restore to', defaults.db)) 107 | .option('-k, --iam-api-key ', 108 | cliutils.getUsage('IAM API key to access the Cloudant server')) 109 | .option('-p, --parallelism ', 110 | cliutils.getUsage('number of HTTP requests to perform in parallel when restoring a backup', defaults.parallelism), 111 | Number) 112 | .option('-q, --quiet', 113 | cliutils.getUsage('suppress batch messages', defaults.quiet)) 114 | .option('-t, --request-timeout ', 115 | cliutils.getUsage('milliseconds to wait for a response to a HTTP request before retrying the request', defaults.requestTimeout), 116 | Number) 117 | .option('-u, --url ', 118 | cliutils.getUsage('URL of the CouchDB/Cloudant server', defaults.url)) 119 | .parse(process.argv); 120 | 121 | // Apply the options in order so that the CLI overrides env vars and env variables 122 | // override defaults. 
123 | const opts = Object.assign({}, defaults, envVarOptions, program.opts()); 124 | 125 | return opts; 126 | } 127 | 128 | module.exports = { 129 | parseBackupArgs, 130 | parseRestoreArgs 131 | }; 132 | -------------------------------------------------------------------------------- /examples/cos-s3/README.md: -------------------------------------------------------------------------------- 1 | # CouchBackup AWS S3 Examples 2 | 3 | This folder contains example Node.js scripts which use the `couchbackup` library and the AWS S3 SDK. 4 | 5 | These scripts are for inspiration and demonstration. 6 | They are not a supported part of couchbackup and should not be considered production ready. 7 | 8 | ## Prerequisites 9 | 10 | ### Install the dependencies 11 | 12 | Use `npm install` in this folder to install the script 13 | dependencies. 14 | Note: this uses the latest release of couchbackup, not the 15 | checked out version. 16 | 17 | ### AWS SDK configuration 18 | 19 | The scripts expect AWS ini files: 20 | * shared credentials file `~/.aws/credentials` or target file from `AWS_SHARED_CREDENTIALS_FILE` environment variable 21 | * shared configuration file `~/.aws/config` or target file from `AWS_CONFIG_FILE` environment variable 22 | * `CLOUDANT_IAM_API_KEY` environment variable set to API key with permission to the Cloudant instance 23 | * (optional) `CLOUDANT_IAM_TOKEN_URL` environment variable set to the URL of token endpoint (defaults to `https://iam.cloud.ibm.com`) 24 | 25 | #### IBM COS 26 | 27 | When using IBM Cloud Object Storage create a service credential with the `Include HMAC Credential` option enabled. 28 | 29 | The `access_key_id` and `secret_access_key` from the `cos_hmac_keys` entry in the generated credential are 30 | the ones required to make an AWS credentials file e.g. 
31 | ```ini 32 | [default] 33 | aws_access_key_id=paste access_key_id here 34 | aws_secret_access_key=paste secret_access_key here 35 | ``` 36 | 37 | #### AWS Configuration 38 | 39 | The AWS SDK requires a region to initialize so ensure the config file has one named e.g. 40 | ```ini 41 | [default] 42 | region=eu-west-2 43 | ``` 44 | 45 | #### AWS S3 46 | 47 | Run the scripts with the `--s3url` option pointing to your S3 instance endpoint. 48 | 49 | For IBM COS, corresponding endpoint URLs can be found under the link found in the Service Credentials file or on the IBM Cloud UI. 50 | 51 | ## Usage 52 | 53 | ### Backup Scripts 54 | 55 | Run a backup script without arguments to receive help e.g. 56 | 57 | ```bash 58 | node s3-backup-file.js 59 | ``` 60 | 61 | The source database and destination bucket are required options. 62 | The minimum needed to run the backup scripts are thus: 63 | 64 | ```bash 65 | node s3-backup-stream.js -s 'https://~replaceWithYourUniqueHost~.cloudantnosqldb.appdomain.cloud/sourcedb' -b 'examplebucket' 66 | ``` 67 | 68 | The object created in the bucket for the backup file will be 69 | named according to a prefix (default `couchbackup`), DB name and timestamp e.g. 70 | 71 | `couchbackup-sourcedb-2024-01-25T09:45:11.730Z` 72 | 73 | ### Restore Scripts 74 | 75 | Run a restore script without arguments to receive help e.g. 76 | 77 | ```bash 78 | node s3-restore-stream.js 79 | ``` 80 | 81 | The target database URL, source bucket, and backup object name are required options. 
82 | The minimum needed to run the restore scripts are thus: 83 | 84 | ```bash 85 | node s3-restore-stream.js -t 'https://~replaceWithYourUniqueHost~.cloudantnosqldb.appdomain.cloud/targetdb' -b 'examplebucket' -o 'couchbackup-sourcedb-2024-01-25T09:45:11.730Z' --s3url 's3.eu-de.cloud-object-storage.appdomain.cloud' 86 | ``` 87 | 88 | ## Progress and debug 89 | 90 | To see detailed progress of the backup/restore and upload/download or additional debug information 91 | use the `DEBUG` environment variable with label `couchbackup-s3` e.g. 92 | 93 | ```bash 94 | DEBUG='couchbackup-s3' node s3-backup-stream.js -s 'https://~replaceWithYourUniqueHost~.cloudantnosqldb.appdomain.cloud/sourcedb' -b 'couchbackup-example' --s3url "s3.eu-de.cloud-object-storage.appdomain.cloud" 95 | ``` 96 | 97 | ``` 98 | couchbackup-s3 Creating a new backup of https://~replaceWithYourUniqueHost~.cloudantnosqldb.appdomain.cloud/sourcedb at couchbackup-example/couchbackup-sourcedb-2025-05-27T13:04:51.321Z... +0ms 99 | couchbackup-s3 Setting up S3 upload to couchbackup-example/couchbackup-sourcedb-2025-05-27T13:04:51.321Z +686ms 100 | couchbackup-s3 Starting streaming data from https://~replaceWithYourUniqueHost~.cloudantnosqldb.appdomain.cloud/sourcedb +2ms 101 | couchbackup-s3 Couchbackup changes batch: 0 +136ms 102 | couchbackup-s3 Fetched batch: 0 Total document revisions written: 15 Time: 0.067 +34ms 103 | couchbackup-s3 couchbackup download from https://~replaceWithYourUniqueHost~.cloudantnosqldb.appdomain.cloud/sourcedb complete; backed up 15 +2ms 104 | couchbackup-s3 S3 upload progress: {"loaded":6879,"total":6879,"part":1,"Key":"couchbackup-sourcedb-2025-05-27T13:04:51.321Z","Bucket":"couchbackup-example"} +623ms 105 | couchbackup-s3 S3 upload done +1ms 106 | couchbackup-s3 Upload succeeded +0ms 107 | couchbackup-s3 done. 
+0ms 108 | ``` 109 | 110 | For restore operations: 111 | 112 | ```bash 113 | DEBUG='couchbackup-s3' node s3-restore-stream.js -t 'https://~replaceWithYourUniqueHost~.cloudantnosqldb.appdomain.cloud/targetdb' -b 'couchbackup-example' -o 'couchbackup-sourcedb-2025-05-27T13:04:51.321Z' --s3url "s3.eu-de.cloud-object-storage.appdomain.cloud" 114 | ``` 115 | 116 | ``` 117 | couchbackup-s3 Restoring from couchbackup-example/couchbackup-sourcedb-2025-05-27T13:04:51.321Z to https://~replaceWithYourUniqueHost~.cloudantnosqldb.appdomain.cloud/targetdb +0ms 118 | couchbackup-s3 Object 'couchbackup-sourcedb-2025-05-27T13:04:51.321Z' is accessible +245ms 119 | couchbackup-s3 Starting direct stream restore from couchbackup-example/couchbackup-sourcedb-2025-05-27T13:04:51.321Z to https://~replaceWithYourUniqueHost~.cloudantnosqldb.appdomain.cloud/targetdb +1ms 120 | couchbackup-s3 Restored batch: 0 Total document revisions written: 15 Time: 0.089 +156ms 121 | couchbackup-s3 Couchbackup restore to https://~replaceWithYourUniqueHost~.cloudantnosqldb.appdomain.cloud/targetdb complete; restored 15 documents +2ms 122 | couchbackup-s3 Restore completed successfully +0ms 123 | ``` 124 | 125 | ## Known issues 126 | 127 | The S3 SDK does not appear to apply back-pressure to a Node `stream.Readable`. As such in environments 128 | where the upload speed to S3 is significantly slower than either the speed of downloading from the database 129 | or reading the backup file then the scripts may fail. -------------------------------------------------------------------------------- /test/compare.js: -------------------------------------------------------------------------------- 1 | // Copyright © 2023, 2025 IBM Corp. All rights reserved. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 
5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | const assert = require('node:assert'); 16 | const { setInterval } = require('node:timers/promises'); 17 | const client = require('./hooks.js').sharedClient; 18 | /** 19 | * Compare 2 databases to check the contents match. 20 | * Since all docs is ordered the comparison can be 21 | * done 1 batch at a time to reduce the overhead. 22 | * 23 | * Batches are collected from all docs of each DB 24 | * in parallel before being compared for equality. 25 | * 26 | * If the batches are equal, then the leaf revisions 27 | * are fetched in parallel and compared. 28 | * 29 | * @param {*} database1 name of the "expected" database (i.e. the source) 30 | * @param {*} database2 name of the "actual" database (i.e. the target) 31 | * @returns Promise resolving to true if the contents match or rejecting with an assertion error 32 | */ 33 | const compare = async function(database1, database2) { 34 | const sourceDbInfoResponse = await client.getDatabaseInformation({ db: database1 }); 35 | const sourceDocCount = sourceDbInfoResponse.result.docCount; 36 | const sourceDocDelCount = sourceDbInfoResponse.result.docDelCount; 37 | // Assert the doc counts match, allowing multiple attempts for eventual constency to settle after the restore completes 38 | // Abort signal for document count check iterator 39 | const ac = new AbortController(); 40 | // Check once per second while doc counts are changing until the assertion passes. 41 | // If doc counts aren't changing then try up to 5 times for a change before failing. 
42 | let lastTargetDocCount = 0; 43 | let lastTargetDocDelCount = 0; 44 | let tryCount = 0; 45 | try { 46 | for await (const maxTries of setInterval(1000, 5, { signal: ac.signal })) { 47 | const resp = await client.getDatabaseInformation({ db: database2 }); 48 | const targetDocCount = resp.result.docCount; 49 | const targetDocDelCount = resp.result.docDelCount; 50 | try { 51 | assert.strictEqual(targetDocCount, sourceDocCount); 52 | assert.strictEqual(targetDocDelCount, sourceDocDelCount); 53 | // Assertion passed, break the loop 54 | break; 55 | } catch (e) { 56 | // Assertion failed, check if making progress 57 | if (targetDocCount > lastTargetDocCount || targetDocDelCount > lastTargetDocDelCount) { 58 | // Making progress, set new values and continue 59 | // assertion failure is suppressed 60 | lastTargetDocCount = targetDocCount; 61 | lastTargetDocDelCount = targetDocDelCount; 62 | tryCount = 0; 63 | } else { 64 | // Not making progress, suppress exception and try again 65 | if (++tryCount > maxTries) { 66 | throw e; 67 | } 68 | } 69 | } 70 | } 71 | } finally { 72 | // Clean up the interval iterator 73 | ac.abort(); 74 | } 75 | const limit = 2000; 76 | let startKey = '\u0000'; 77 | let count = 0; 78 | do { 79 | const allDocsOpts = { startKey, limit }; 80 | try { 81 | // Fetch batches in parallel from db1 and db2 82 | await Promise.all([client.postAllDocs({ db: database1, ...allDocsOpts }), client.postAllDocs({ db: database2, ...allDocsOpts })]) 83 | .then(results => { 84 | const db1Rows = results[0].result.rows; 85 | const db2Rows = results[1].result.rows; 86 | // Asserts that the IDs and winning revs match 87 | assert.deepStrictEqual(db2Rows, db1Rows); 88 | // extract the IDs (we use only one db because we already know the IDs are equal) 89 | return resultRowsToIds(db1Rows); 90 | }) 91 | .then(async docIDs => { 92 | // Post the id/fake rev list to revs diff to get all leaf revisions 93 | const documentRevisions = revsDiffBodyForIds(docIDs); 94 | const 
revsDiffResponses = await Promise.all([client.postRevsDiff({ db: database1, documentRevisions }), client.postRevsDiff({ db: database2, documentRevisions })]); 95 | // The responses are a map of doc IDs to a map of missing and possible ancestor arrays of rev IDs. 96 | // The missing will be our fake rev ID and possible ancestors should be all the leaf revisions. 97 | // We can assert these maps match to identify any discrepencies in the rev tree. 98 | assert.deepStrictEqual(revsDiffResponses[1].result, revsDiffResponses[0].result); 99 | // Return the original list of doc IDs to prepare for the next page 100 | return docIDs; 101 | }).then(docIds => { 102 | // Increment the counter 103 | count += docIds.length; 104 | if (docIds.length < limit) { 105 | // Last page 106 | // Set null to break the loop 107 | startKey = null; 108 | // Assert that we actually got all the docs 109 | assert.strictEqual(count, sourceDocCount); 110 | } else { 111 | // Set start key for next page 112 | startKey = docIds[limit - 1] + '\u0000'; 113 | } 114 | }); 115 | } catch (e) { 116 | return Promise.reject(e); 117 | } 118 | } while (startKey != null); 119 | return true; 120 | }; 121 | 122 | function resultRowsToIds(rows) { 123 | return rows.map(r => r.id); 124 | } 125 | 126 | function revsDiffBodyForIds(docIDs) { 127 | // Make a map of each doc ID to a fake revision 128 | // use a fake revision ID to fetch all leaf revisions 129 | const fakeRevisionId = '99999-a'; 130 | const documentRevisions = Object.create(null); 131 | docIDs.forEach(id => (documentRevisions[id] = [fakeRevisionId])); 132 | return documentRevisions; 133 | } 134 | 135 | module.exports = { 136 | compare 137 | }; 138 | -------------------------------------------------------------------------------- /examples/cos-sdk/cos-restore-file.js: -------------------------------------------------------------------------------- 1 | // Copyright © 2025 IBM Corp. All rights reserved. 
2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | // Small script which restores a Cloudant or CouchDB database from an IBM Cloud Object Storage (COS) 16 | // bucket using an intermediary file on disk 17 | 18 | const IBM_COS = require('ibm-cos-sdk'); 19 | const VError = require('verror'); 20 | const couchbackup = require('@cloudant/couchbackup'); 21 | const debug = require('debug')('couchbackup-cos'); 22 | const url = require('url'); 23 | const fs = require('fs'); 24 | const tmp = require('tmp'); 25 | const { pipeline } = require('stream/promises'); 26 | 27 | function main() { 28 | const argv = require('yargs') 29 | .usage('Usage: $0 [options]') 30 | .example('$0 -t https://~replaceWithYourUniqueHost~.cloudantnosqldb.appdomain.cloud/targetdb -b -o --cos_url ', 'Restore database from a bucket') 31 | .options({ 32 | target: { alias: 't', nargs: 1, demandOption: true, describe: 'Target database URL' }, 33 | bucket: { alias: 'b', nargs: 1, demandOption: true, describe: 'Source bucket containing backup' }, 34 | object: { alias: 'o', nargs: 1, demandOption: true, describe: 'Backup Object name in IBM COS' }, 35 | cos_url: { nargs: 1, demandOption: true, describe: 'IBM COS S3 endpoint URL' }, 36 | }) 37 | .help('h').alias('h', 'help') 38 | .epilog('Copyright (C) IBM 2025') 39 | .argv; 40 | 41 | const restoreBucket = argv.bucket; 42 | const objectKey = argv.object; 43 | const cosEndpoint = argv.cos_url; 44 | const 
targetUrl = argv.target; 45 | 46 | const cloudantApiKey = process.env.CLOUDANT_IAM_API_KEY; 47 | const restoreTmpFile = tmp.fileSync(); 48 | 49 | const config = { 50 | endpoint: cosEndpoint, 51 | credentials: new IBM_COS.SharedJSONFileCredentials(), 52 | }; 53 | const COS = new IBM_COS.S3(config); 54 | restoreProcess(COS, restoreBucket, objectKey, targetUrl, cloudantApiKey, restoreTmpFile.name) 55 | .then(() => { 56 | debug('Restore completed successfully'); 57 | }) 58 | .catch((err) => { 59 | console.error('Restore failed:', err.message); 60 | }); 61 | } 62 | 63 | /** 64 | * Main restore process 65 | */ 66 | async function restoreProcess(COS, restoreBucket, objectKey, targetUrl, cloudantApiKey, restoreTmpFilePath) { 67 | await objectAccessible(COS, restoreBucket, objectKey); 68 | 69 | await createRestoreFile(COS, restoreBucket, objectKey, restoreTmpFilePath); 70 | 71 | await restoreFromFile(restoreTmpFilePath, targetUrl, cloudantApiKey); 72 | } 73 | 74 | /** 75 | * Check if object is accessible in COS 76 | * @param {IBM_COS.S3} s3 77 | * @param {string} bucketName 78 | * @param {string} objectKey 79 | */ 80 | async function objectAccessible(s3, bucketName, objectKey) { 81 | const params = { 82 | Key: objectKey, 83 | Bucket: bucketName, 84 | }; 85 | try { 86 | await s3.headObject(params).promise(); 87 | debug(`Object '${objectKey}' is accessible`); 88 | } catch (reason) { 89 | debug(reason); 90 | throw new VError(reason, 'Object is not accessible'); 91 | } 92 | } 93 | 94 | /** 95 | * Download backup file from COS to local temporary file 96 | * @param {IBM_COS.S3} COS 97 | * @param {string} restoreBucket 98 | * @param {string} objectKey 99 | * @param {string} restoreTmpFilePath 100 | */ 101 | async function createRestoreFile(COS, restoreBucket, objectKey, restoreTmpFilePath) { 102 | debug(`Downloading from ${restoreBucket}/${objectKey} to ${restoreTmpFilePath}`); 103 | 104 | const inputStream = COS.getObject({ 105 | Bucket: restoreBucket, 106 | Key: objectKey 107 | 
}).createReadStream({ 108 | highWaterMark: 16 * 1024 * 1024 // 16MB buffer 109 | }); 110 | 111 | const outputStream = fs.createWriteStream(restoreTmpFilePath, { 112 | highWaterMark: 16 * 1024 * 1024 // 16MB buffer 113 | }); 114 | 115 | try { 116 | await pipeline(inputStream, outputStream); 117 | debug('Download completed successfully'); 118 | } catch (err) { 119 | debug(err); 120 | throw new VError(err, 'Failed to download backup file from COS'); 121 | } 122 | } 123 | 124 | /** 125 | * Restore from a local backup file to Cloudant database 126 | * @param {string} restoreFileName Path to backup file 127 | * @param {string} targetUrl URL of target database 128 | * @param {string} cloudantApiKey IAM API key for Cloudant 129 | */ 130 | async function restoreFromFile(restoreFileName, targetUrl, cloudantApiKey) { 131 | debug(`Starting restore from ${restoreFileName} to ${s(targetUrl)}`); 132 | 133 | const inputStream = fs.createReadStream(restoreFileName); 134 | 135 | // promisify restore 136 | const restorePromise = new Promise((resolve, reject) => { 137 | const params = { 138 | iamApiKey: cloudantApiKey, 139 | ...(process.env.CLOUDANT_IAM_TOKEN_URL && { iamTokenUrl: process.env.CLOUDANT_IAM_TOKEN_URL }), 140 | }; 141 | 142 | const restoreStream = couchbackup.restore( 143 | inputStream, 144 | targetUrl, 145 | params, 146 | (err, data) => { 147 | if (err) { 148 | reject(err); 149 | } else { 150 | resolve(data); 151 | } 152 | } 153 | ); 154 | 155 | restoreStream.on('restored', progress => { 156 | debug('Restored batch:', progress.batch, 'Total document revisions written:', progress.total, 'Time:', progress.time); 157 | }); 158 | }); 159 | 160 | try { 161 | const result = await restorePromise; 162 | debug(`Couchbackup restore to ${s(targetUrl)} complete; restored ${result.total} documents`); 163 | return result; 164 | } catch (err) { 165 | debug(err); 166 | throw new VError(err, 'Couchbackup restore failed'); 167 | } 168 | } 169 | 170 | /** 171 | * Remove credentials from a 
URL for safe logging 172 | * @param {string} originalUrl URL to sanitize 173 | */ 174 | function s(originalUrl) { 175 | const parts = new url.URL(originalUrl); 176 | return url.format(parts, { auth: false }); 177 | } 178 | 179 | main(); 180 | -------------------------------------------------------------------------------- /test/fixtures/animaldb_expected.json: -------------------------------------------------------------------------------- 1 | {"name":"@cloudant/couchbackup","version":"2.9.10","mode":"full","attachments":false} 2 | [{"_id":"_design/validation","_rev":"2-97e93126a6337d173f9b2810c0b9c0b6","_deleted":true,"_revisions":{"start":2,"ids":["97e93126a6337d173f9b2810c0b9c0b6","65b7a2bd321eab71f4985da5a3ce9f89"]}},{"_id":"llama","_rev":"4-631ea89ca94b23a3093c1ef7dfce10e0","min_weight":130,"max_weight":200,"min_length":1.7,"max_length":1.8,"latin_name":"Lama glama","wiki_page":"http://en.wikipedia.org/wiki/Llama","class":"mammal","diet":"herbivore","_revisions":{"start":4,"ids":["631ea89ca94b23a3093c1ef7dfce10e0","8d2acaafcc52df876498043cb6493966","b972aafbd51d5b98eb4d4b9f9443ca7e","967a00dff5e02add41819138abb3284d"]}},{"_id":"badger","_rev":"4-51aa94e4b0ef37271082033bba52b850","wiki_page":"http://en.wikipedia.org/wiki/Badger","min_weight":7,"max_weight":30,"min_length":0.6,"max_length":0.9,"latin_name":"Meles 
meles","class":"mammal","diet":"omnivore","_revisions":{"start":4,"ids":["51aa94e4b0ef37271082033bba52b850","f9fb951ca8dadec1459450156b2205cf","617a372bba833d7acf3ccf2e7dece15a","967a00dff5e02add41819138abb3284d"]}},{"_id":"lemur","_rev":"3-552d9dbf91fa914a07756e69b9ceaafa","wiki_page":"http://en.wikipedia.org/wiki/Ring-tailed_lemur","min_weight":2.2,"max_weight":2.2,"min_length":0.95,"max_length":1.1,"class":"mammal","diet":"omnivore","_revisions":{"start":3,"ids":["552d9dbf91fa914a07756e69b9ceaafa","01101b0b2629741cdaa186740155c091","967a00dff5e02add41819138abb3284d"]}},{"_id":"_design/views101","_rev":"1-a918dd4f11704143b535f0ab3af4bf75","views":{"latin_name_jssum":{"map":"function(doc) {\n if(doc.latin_name){\n emit(doc.latin_name, doc.latin_name.length);\n }\n}","reduce":"function (key, values, rereduce){\n return sum(values);\n}"},"latin_name":{"map":"function(doc) {\n if(doc.latin_name){\n emit(doc.latin_name, doc.latin_name.length);\n }\n}"},"diet_sum":{"map":"function(doc) {\n if(doc.diet){\n emit(doc.diet, 1);\n }\n}","reduce":"_sum"},"diet_count":{"map":"function(doc) {\n if(doc.diet && doc.latin_name){\n emit(doc.diet, doc.latin_name);\n }\n}","reduce":"_count"},"complex_count":{"map":"function(doc){\n if(doc.class && doc.diet){\n emit([doc.class, doc.diet], 1);\n }\n}","reduce":"_count"},"diet":{"map":"function(doc) {\n if(doc.diet){\n emit(doc.diet, 1);\n }\n}"},"complex_latin_name_count":{"map":"function(doc){\n if(doc.latin_name){\n emit([doc.class, doc.diet, doc.latin_name], doc.latin_name.length)\n }\n}","reduce":"_count"},"diet_jscount":{"map":"function(doc) {\n if(doc.diet){\n emit(doc.diet, 1);\n }\n}","reduce":"function (key, values, rereduce){\n return values.length;\n}"},"latin_name_count":{"map":"function(doc) {\n if(doc.latin_name){\n emit(doc.latin_name, doc.latin_name.length);\n }\n}","reduce":"_count"},"latin_name_sum":{"map":"function(doc) {\n if(doc.latin_name){\n emit(doc.latin_name, doc.latin_name.length);\n 
}\n}","reduce":"_sum"}},"indexes":{"animals":{"index":"function(doc){\n index(\"default\", doc._id);\n if(doc.min_length){\n index(\"min_length\", doc.min_length, {\"store\": \"yes\"});\n }\n if(doc.diet){\n index(\"diet\", doc.diet, {\"store\": \"yes\"});\n }\n if (doc.latin_name){\n index(\"latin_name\", doc.latin_name, {\"store\": \"yes\"});\n }\n if (doc['class']){\n index(\"class\", doc['class'], {\"store\": \"yes\"});\n }\n}"}},"_revisions":{"start":1,"ids":["a918dd4f11704143b535f0ab3af4bf75"]}},{"_id":"_design/views101","_rev":"13-7826307a6b395070429e83f261352a3b","_deleted":true,"_revisions":{"start":13,"ids":["7826307a6b395070429e83f261352a3b","dfa39d5771438be0671b9aeb9cfaf03f","7cfc8cb30e742f3f0c47f00a41641095","b3347b33df6707d3edc30d3d2c92514e","feaa975470f75021a04af0efc5892131","50b9a2143521785c5d5459601be442b0","448436f26eb65552b3a7f42b66735d40","2154f4f4b84c346dcb2ac1f9beb00f22","21efe72e81dbbc12bb9e3468444a19da","7683cb78d66ddebdba78d983936be969","243ffe5a29873491c8283907c0314943","125a0b4b63635c587c07397218c9232b","45b7e56b0761e3a817bb69b336997a90"]}},{"_id":"kookaburra","_rev":"4-6038cf35dfe1211f85484dec951142c7","min_length":0.28,"max_length":0.42,"wiki_page":"http://en.wikipedia.org/wiki/Kookaburra","class":"bird","diet":"carnivore","latin_name":"Dacelo 
novaeguineae","_revisions":{"start":4,"ids":["6038cf35dfe1211f85484dec951142c7","6152e66a832ae9fc684dd85a55231797","757760a36a1997504d5086f01a2ea862","967a00dff5e02add41819138abb3284d"]}},{"_id":"elephant","_rev":"3-f812228f45b5f4e496250556195372b2","wiki_page":"http://en.wikipedia.org/wiki/African_elephant","min_weight":4700,"max_weight":6050,"min_length":3.2,"max_length":4,"class":"mammal","diet":"herbivore","_revisions":{"start":3,"ids":["f812228f45b5f4e496250556195372b2","87fd00c631695ea23156464c318f7381","967a00dff5e02add41819138abb3284d"]}},{"_id":"cat","_rev":"2-eec205a9d413992850a6e32678485900","_deleted":true,"_revisions":{"start":2,"ids":["eec205a9d413992850a6e32678485900","967a00dff5e02add41819138abb3284d"]}},{"_id":"panda","_rev":"2-f578490963b0bd266f6c5bbf92302977","wiki_page":"http://en.wikipedia.org/wiki/Panda","min_weight":75,"max_weight":115,"min_length":1.2,"max_length":1.8,"class":"mammal","diet":"carnivore","_revisions":{"start":2,"ids":["f578490963b0bd266f6c5bbf92302977","064c3a3c68465eb86b1c320998bfd309"]}},{"_id":"870908b66ac0ed114512e6fb6d00260f","_rev":"2-eec205a9d413992850a6e32678485900","_deleted":true,"_revisions":{"start":2,"ids":["eec205a9d413992850a6e32678485900","967a00dff5e02add41819138abb3284d"]}},{"_id":"zebra","_rev":"3-750dac460a6cc41e6999f8943b8e603e","wiki_page":"http://en.wikipedia.org/wiki/Plains_zebra","min_length":2,"max_length":2.5,"min_weight":175,"max_weight":387,"class":"mammal","diet":"herbivore","_revisions":{"start":3,"ids":["750dac460a6cc41e6999f8943b8e603e","784f6b108c3be1bcbfc7eec4b9ad134c","967a00dff5e02add41819138abb3284d"]}},{"_id":"snipe","_rev":"3-4b2fb3b7d6a226b13951612d6ca15a6b","min_weight":0.08,"max_weight":0.14,"min_length":0.25,"max_length":0.27,"latin_name":"Gallinago 
gallinago","wiki_page":"http://en.wikipedia.org/wiki/Common_Snipe","class":"bird","diet":"omnivore","_revisions":{"start":3,"ids":["4b2fb3b7d6a226b13951612d6ca15a6b","babeb6a21b505a3fddb4f2555499ebea","967a00dff5e02add41819138abb3284d"]}},{"_id":"aardvark","_rev":"3-fe45a3e06244adbe7ba145e74e57aba5","min_weight":40,"max_weight":65,"min_length":1,"max_length":2.2,"latin_name":"Orycteropus afer","wiki_page":"http://en.wikipedia.org/wiki/Aardvark","class":"mammal","diet":"omnivore","_revisions":{"start":3,"ids":["fe45a3e06244adbe7ba145e74e57aba5","d06eb56cb789ce78ec85cb1af49ea8c2","967a00dff5e02add41819138abb3284d"]}},{"_id":"giraffe","_rev":"3-7665c3e66315ff40616cceef62886bd8","min_weight":830,"min_length":5,"max_weight":1600,"max_length":6,"wiki_page":"http://en.wikipedia.org/wiki/Giraffe","class":"mammal","diet":"herbivore","_revisions":{"start":3,"ids":["7665c3e66315ff40616cceef62886bd8","aaaf10d5a68cdf22d95a5482a0e95549","967a00dff5e02add41819138abb3284d"]}}] 3 | -------------------------------------------------------------------------------- /test/fixtures/animaldb_resumed_blank.json: -------------------------------------------------------------------------------- 1 | {"name":"@cloudant/couchbackup","version":"2.9.10","mode":"full","attachments":false} 2 | [{"_id":"_design/validation","_rev":"2-97e93126a6337d173f9b2810c0b9c0b6","_deleted":true,"_revisions":{"start":2,"ids":["97e93126a6337d173f9b2810c0b9c0b6","65b7a2bd321eab71f4985da5a3ce9f89"]}},{"_id":"llama","_rev":"4-631ea89ca94b23a3093c1ef7dfce10e0","min_weight":130,"max_weight":200,"min_length":1.7,"max_length":1.8,"latin_name":"Lama 
glama","wiki_page":"http://en.wikipedia.org/wiki/Llama","class":"mammal","diet":"herbivore","_revisions":{"start":4,"ids":["631ea89ca94b23a3093c1ef7dfce10e0","8d2acaafcc52df876498043cb6493966","b972aafbd51d5b98eb4d4b9f9443ca7e","967a00dff5e02add41819138abb3284d"]}},{"_id":"badger","_rev":"4-51aa94e4b0ef37271082033bba52b850","wiki_page":"http://en.wikipedia.org/wiki/Badger","min_weight":7,"max_weight":30,"min_length":0.6,"max_length":0.9,"latin_name":"Meles meles","class":"mammal","diet":"omnivore","_revisions":{"start":4,"ids":["51aa94e4b0ef37271082033bba52b850","f9fb951ca8dadec1459450156b2205cf","617a372bba833d7acf3ccf2e7dece15a","967a00dff5e02add41819138abb3284d"]}},{"_id":"lemur","_rev":"3-552d9dbf91fa914a07756e69b9ceaafa","wiki_page":"http://en.wikipedia.org/wiki/Ring-tailed_lemur","min_weight":2.2,"max_weight":2.2,"min_length":0.95,"max_length":1.1,"class":"mammal","diet":"omnivore","_revisions":{"start":3,"ids":["552d9dbf91fa914a07756e69b9ceaafa","01101b0b2629741cdaa186740155c091","967a00dff5e02add41819138abb3284d"]}},{"_id":"_design/views101","_rev":"1-a918dd4f11704143b535f0ab3af4bf75","views":{"latin_name_jssum":{"map":"function(doc) {\n if(doc.latin_name){\n emit(doc.latin_name, doc.latin_name.length);\n }\n}","reduce":"function (key, values, rereduce){\n return sum(values);\n}"},"latin_name":{"map":"function(doc) {\n if(doc.latin_name){\n emit(doc.latin_name, doc.latin_name.length);\n }\n}"},"diet_sum":{"map":"function(doc) {\n if(doc.diet){\n emit(doc.diet, 1);\n }\n}","reduce":"_sum"},"diet_count":{"map":"function(doc) {\n if(doc.diet && doc.latin_name){\n emit(doc.diet, doc.latin_name);\n }\n}","reduce":"_count"},"complex_count":{"map":"function(doc){\n if(doc.class && doc.diet){\n emit([doc.class, doc.diet], 1);\n }\n}","reduce":"_count"},"diet":{"map":"function(doc) {\n if(doc.diet){\n emit(doc.diet, 1);\n }\n}"},"complex_latin_name_count":{"map":"function(doc){\n if(doc.latin_name){\n emit([doc.class, doc.diet, doc.latin_name], 
doc.latin_name.length)\n }\n}","reduce":"_count"},"diet_jscount":{"map":"function(doc) {\n if(doc.diet){\n emit(doc.diet, 1);\n }\n}","reduce":"function (key, values, rereduce){\n return values.length;\n}"},"latin_name_count":{"map":"function(doc) {\n if(doc.latin_name){\n emit(doc.latin_name, doc.latin_name.length);\n }\n}","reduce":"_count"},"latin_name_sum":{"map":"function(doc) {\n if(doc.latin_name){\n emit(doc.latin_name, doc.latin_name.length);\n }\n}","reduce":"_sum"}},"indexes":{"animals":{"index":"function(doc){\n index(\"default\", doc._id);\n if(doc.min_length){\n index(\"min_length\", doc.min_length, {\"store\": \"yes\"});\n }\n if(doc.diet){\n index(\"diet\", doc.diet, {\"store\": \"yes\"});\n }\n if (doc.latin_name){\n index(\"latin_name\", doc.latin_name, {\"store\": \"yes\"});\n }\n if (doc['class']){\n index(\"class\", doc['class'], {\"store\": \"yes\"});\n }\n}"}},"_revisions":{"start":1,"ids":["a918dd4f11704143b535f0ab3af4bf75"]}},{"_id":"_design/views101","_rev":"13-7826307a6b395070429e83f261352a3b","_deleted":true,"_revisions":{"start":13,"ids":["7826307a6b395070429e83f261352a3b","dfa39d5771438be0671b9aeb9cfaf03f","7cfc8cb30e742f3f0c47f00a41641095","b3347b33df6707d3edc30d3d2c92514e","feaa975470f75021a04af0efc5892131","50b9a2143521785c5d5459601be442b0","448436f26eb65552b3a7f42b66735d40","2154f4f4b84c346dcb2ac1f9beb00f22","21efe72e81dbbc12bb9e3468444a19da","7683cb78d66ddebdba78d983936be969","243ffe5a29873491c8283907c0314943","125a0b4b63635c587c07397218c9232b","45b7e56b0761e3a817bb69b336997a90"]}}] 3 | {"marker":"@cloudant/couchbackup:resume"} 4 | [{"_id":"kookaburra","_rev":"4-6038cf35dfe1211f85484dec951142c7","min_length":0.28,"max_length":0.42,"wiki_page":"http://en.wikipedia.org/wiki/Kookaburra","class":"bird","diet":"carnivore","latin_name":"Dacelo 
novaeguineae","_revisions":{"start":4,"ids":["6038cf35dfe1211f85484dec951142c7","6152e66a832ae9fc684dd85a55231797","757760a36a1997504d5086f01a2ea862","967a00dff5e02add41819138abb3284d"]}},{"_id":"elephant","_rev":"3-f812228f45b5f4e496250556195372b2","wiki_page":"http://en.wikipedia.org/wiki/African_elephant","min_weight":4700,"max_weight":6050,"min_length":3.2,"max_length":4,"class":"mammal","diet":"herbivore","_revisions":{"start":3,"ids":["f812228f45b5f4e496250556195372b2","87fd00c631695ea23156464c318f7381","967a00dff5e02add41819138abb3284d"]}},{"_id":"cat","_rev":"2-eec205a9d413992850a6e32678485900","_deleted":true,"_revisions":{"start":2,"ids":["eec205a9d413992850a6e32678485900","967a00dff5e02add41819138abb3284d"]}},{"_id":"panda","_rev":"2-f578490963b0bd266f6c5bbf92302977","wiki_page":"http://en.wikipedia.org/wiki/Panda","min_weight":75,"max_weight":115,"min_length":1.2,"max_length":1.8,"class":"mammal","diet":"carnivore","_revisions":{"start":2,"ids":["f578490963b0bd266f6c5bbf92302977","064c3a3c68465eb86b1c320998bfd309"]}},{"_id":"870908b66ac0ed114512e6fb6d00260f","_rev":"2-eec205a9d413992850a6e32678485900","_deleted":true,"_revisions":{"start":2,"ids":["eec205a9d413992850a6e32678485900","967a00dff5e02add41819138abb3284d"]}},{"_id":"zebra","_rev":"3-750dac460a6cc41e6999f8943b8e603e","wiki_page":"http://en.wikipedia.org/wiki/Plains_zebra","min_length":2,"max_length":2.5,"min_weight":175,"max_weight":387,"class":"mammal","diet":"herbivore","_revisions":{"start":3,"ids":["750dac460a6cc41e6999f8943b8e603e","784f6b108c3be1bcbfc7eec4b9ad134c","967a00dff5e02add41819138abb3284d"]}},{"_id":"snipe","_rev":"3-4b2fb3b7d6a226b13951612d6ca15a6b","min_weight":0.08,"max_weight":0.14,"min_length":0.25,"max_length":0.27,"latin_name":"Gallinago 
gallinago","wiki_page":"http://en.wikipedia.org/wiki/Common_Snipe","class":"bird","diet":"omnivore","_revisions":{"start":3,"ids":["4b2fb3b7d6a226b13951612d6ca15a6b","babeb6a21b505a3fddb4f2555499ebea","967a00dff5e02add41819138abb3284d"]}},{"_id":"aardvark","_rev":"3-fe45a3e06244adbe7ba145e74e57aba5","min_weight":40,"max_weight":65,"min_length":1,"max_length":2.2,"latin_name":"Orycteropus afer","wiki_page":"http://en.wikipedia.org/wiki/Aardvark","class":"mammal","diet":"omnivore","_revisions":{"start":3,"ids":["fe45a3e06244adbe7ba145e74e57aba5","d06eb56cb789ce78ec85cb1af49ea8c2","967a00dff5e02add41819138abb3284d"]}},{"_id":"giraffe","_rev":"3-7665c3e66315ff40616cceef62886bd8","min_weight":830,"min_length":5,"max_weight":1600,"max_length":6,"wiki_page":"http://en.wikipedia.org/wiki/Giraffe","class":"mammal","diet":"herbivore","_revisions":{"start":3,"ids":["7665c3e66315ff40616cceef62886bd8","aaaf10d5a68cdf22d95a5482a0e95549","967a00dff5e02add41819138abb3284d"]}}] 5 | -------------------------------------------------------------------------------- /includes/request.js: -------------------------------------------------------------------------------- 1 | // Copyright © 2017, 2025 IBM Corp. All rights reserved. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 

const pkg = require('../package.json');
const { CloudantV1, CouchdbSessionAuthenticator } = require('@ibm-cloud/cloudant');
const { IamAuthenticator, NoAuthAuthenticator } = require('ibm-cloud-sdk-core');
const debug = require('debug')('couchbackup:request');

// User-Agent value identifying this tool and the Node runtime,
// applied to every request by the userAgentHelper interceptor below.
const userAgent = 'couchbackup-cloudant/' + pkg.version + ' (Node.js ' +
  process.version + ')';

// An interceptor function to help augment error bodies with a little
// extra information so we can continue to use consistent messaging
// after the upgrade to @ibm-cloud/cloudant
//
// Rewrites the error's message (or its response data's errors array)
// to include the HTTP status, method and request URL, then always
// re-rejects with the (possibly modified) error.
function errorHelper(err) {
  debug('Entering error helper interceptor');
  let method;
  let requestUrl;
  if (err.response) {
    debug('Error has a response');
    if (err.response.config.url) {
      debug('Getting request URL and method for error');
      requestUrl = err.response.config.url;
      method = err.response.config.method;
    }
    debug('Applying response error message with status, url, and method');
    // Override the status text with an improved message
    let errorMsg = `${err.response.status} ${method} ${requestUrl}`;
    if (err.response.data) {
      debug('Found response data');
      // Check if we have a JSON response and try to get the error/reason
      if (err.response.headers['content-type'] === 'application/json') {
        debug('Response data is JSON');
        // Append the 'errors' message if available
        if (err.response.data.errors && err.response.data.errors.length > 0) {
          const originalError = err.response.data.errors[0];
          originalError.message = `${errorMsg} - Error: ${originalError.message}`;
        }
      } else {
        errorMsg += err.response.data;
        // Set a new message for use by the node-sdk-core
        // We use the errors array because it gets processed
        // ahead of all other service errors.
        // NOTE(review): if err.response.data is a string primitive this
        // assignment is a silent no-op in sloppy mode — confirm data is
        // always an object on this branch.
        err.response.data.errors = [{ message: errorMsg }];
      }
    }
  } else if (err.request) {
    debug('Error did not include a response');
    if (!err.message.includes(err.config.url)) {
      debug('Augmenting request error message with URL and method');
      // Augment the message with the URL and method
      // but don't do it again if we already have the URL.
      err.message = `${err.message}: ${err.config.method} ${err.config.url}`;
    } else {
      debug('Request error message already augmented');
    }
  }
  return Promise.reject(err);
}

// Interceptor function to add the User-Agent header.
// An interceptor is used because setting UA in headers
// option during client initialization means it gets overwritten
// by the default value during a request.
// This interceptor is further along the chain and able to
// replace the default value.
function userAgentHelper(requestConfig) {
  requestConfig.headers['User-Agent'] = userAgent;
  return requestConfig;
}

// Build a CloudantV1 service client (without interceptors/retries) from a
// raw database URL, choosing an authenticator by precedence:
// IAM API key > URL userinfo (cookie session auth) > no auth.
// Returns { service, dbName, actUrl } where actUrl is the service root URL
// stripped of the database path segment and any credentials.
function newSimpleClient(rawUrl, opts) {
  const url = new URL(rawUrl);
  // Split the URL to separate service from database
  // Use origin as the "base" to remove auth elements
  const actUrl = new URL(url.pathname.substring(0, url.pathname.lastIndexOf('/')), url.origin);
  const dbName = decodeURIComponent(url.pathname.substring(url.pathname.lastIndexOf('/') + 1));
  let authenticator;
  // Default to cookieauth unless an IAM key is provided
  if (opts.iamApiKey) {
    const iamAuthOpts = { apikey: opts.iamApiKey };
    if (opts.iamTokenUrl) {
      // Optional custom IAM token service endpoint
      iamAuthOpts.url = opts.iamTokenUrl;
    }
    authenticator = new IamAuthenticator(iamAuthOpts);
  } else if (url.username) {
    authenticator = new CouchdbSessionAuthenticator({
      username: decodeURIComponent(url.username),
      password: decodeURIComponent(url.password)
    });
  } else {
    authenticator = new NoAuthAuthenticator();
  }
  const serviceOpts = {
    authenticator,
    timeout: opts.requestTimeout,
    // Axios performance options
    maxContentLength: -1
  };

  const service = new CloudantV1(serviceOpts);
  service.setServiceUrl(actUrl.toString());
  if (authenticator instanceof CouchdbSessionAuthenticator) {
    // Awkward workaround for known Couch issue with compression on _session requests
    // It is not feasible to disable compression on all requests with the amount of
    // data this lib needs to move, so override the property in the tokenManager instance.
    authenticator.tokenManager.requestWrapperInstance.compressRequestData = false;
  }
  return { service, dbName, actUrl };
}

// Build a fully configured client: a simple client plus the User-Agent and
// error-message interceptors (on both the service HTTP client and, when
// present, the authenticator's token manager) and retry support.
// Returns { service, dbName, url } where url is the service root as a string.
function newClient(rawUrl, opts) {
  const { service, dbName, actUrl } = newSimpleClient(rawUrl, opts);
  const authenticator = service.getAuthenticator();

  // Add interceptors
  // Request interceptor to set the User-Agent header
  // Response interceptor to put URLs in error messages
  // Add for the token manager if present
  if (authenticator.tokenManager && authenticator.tokenManager.requestWrapperInstance) {
    authenticator.tokenManager.requestWrapperInstance.axiosInstance.interceptors.request.use(userAgentHelper, null);
    authenticator.tokenManager.requestWrapperInstance.axiosInstance.interceptors.response.use(null, errorHelper);
  }
  // and add for the client
  service.getHttpClient().interceptors.request.use(userAgentHelper, null);
  service.getHttpClient().interceptors.response.use(null, errorHelper);

  // Configure retries
  // Note: this MUST happen last after all other interceptors have been registered
  const maxRetries = 2; // for 3 total attempts
  service.enableRetries({ maxRetries });

  return { service, dbName, url: actUrl.toString() };
}

module.exports = {
  newSimpleClient,
  newClient
};
newSimpleClient, 149 | newClient 150 | }; 151 | -------------------------------------------------------------------------------- /examples/cos-sdk/cos-backup-stream.js: -------------------------------------------------------------------------------- 1 | // Copyright © 2025 IBM Corp. All rights reserved. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | // Small script which backs up a Cloudant database to an IBM Cloud Object Storage (COS) 16 | // bucket via a stream rather than on-disk file, authenticating with IBM IAM 17 | // 18 | // The script generates the backup object name by combining together the path 19 | // part of the database URL and the current time. 20 | 21 | const stream = require('stream'); 22 | const IBM_COS = require('ibm-cos-sdk'); 23 | const VError = require('verror').VError; 24 | const couchbackup = require('@cloudant/couchbackup'); 25 | const debug = require('debug')('couchbackup-cos'); 26 | const url = require('url'); 27 | 28 | /* 29 | Main function, run from base of file. 
*/
function main() {
  const argv = require('yargs')
    .usage('Usage: $0 [options]')
    .example('$0 -s https://~replaceWithYourUniqueHost~.cloudantnosqldb.appdomain.cloud/sourcedb -b --cos_url ', 'Backup db to bucket using IAM authentication')
    .options({
      source: { alias: 's', nargs: 1, demandOption: true, describe: 'Source database URL' },
      bucket: { alias: 'b', nargs: 1, demandOption: true, describe: 'Destination bucket' },
      prefix: { alias: 'p', nargs: 1, describe: 'Prefix for backup object key', default: 'couchbackup' },
      cos_url: { nargs: 1, demandOption: true, describe: 'IBM COS S3 endpoint URL' }, // An endpoint from 'endpoints' list in cos_credentials
      shallow: { describe: 'Backup the documents winning revisions only', type: 'boolean' }
    })
    .help('h').alias('h', 'help')
    .epilog('Copyright (C) IBM 2025')
    .argv;

  const sourceUrl = argv.source;
  const backupBucket = argv.bucket;
  // Object name is derived from the database URL path, e.g. /a/b -> "a-b".
  const backupName = new url.URL(sourceUrl).pathname.split('/').filter(function(x) { return x; }).join('-');
  const backupKeyPrefix = `${argv.prefix}-${backupName}`;
  // Timestamp suffix keeps each backup object unique.
  const backupKey = `${backupKeyPrefix}-${new Date().toISOString()}`;
  const cosEndpoint = argv.cos_url;
  // IAM API key for the source Cloudant instance is read from the environment.
  const cloudantApiKey = process.env.CLOUDANT_IAM_API_KEY;
  const mode = argv.shallow ? 'shallow' : 'full';

  /*
   * Creds are from ~/.bluemix/cos_credentials, generated by ibmcloud CLI tool
   * See: https://cloud.ibm.com/docs/cloud-object-storage?topic=cloud-object-storage-service-credentials
   *
   * corresponding 'endpoint' can be found on IBM Cloud UI at the COS instance,
   * or under the 'endpoints' link provided in the generated file (~/.bluemix/cos_credentials)
   * */
  const config = {
    endpoint: cosEndpoint,
    credentials: new IBM_COS.SharedJSONFileCredentials(),
  };
  const COS = new IBM_COS.S3(config);
  debug(`Creating a new backup of ${sourceUrl} at ${backupBucket}/${backupKey}...`);
  bucketAccessible(COS, backupBucket)
    .then(() => {
      return backupToS3(sourceUrl, COS, backupBucket, backupKey, cloudantApiKey, mode);
    })
    .then(() => {
      debug('Backup successful!');
    })
    .catch((reason) => {
      debug(`Error: ${reason}`);
      // Non-zero exit so callers/schedulers can detect the failure.
      process.exit(1);
    });
}

/**
 * Return a promise that resolves if the bucket is available and
 * rejects if not.
 *
 * @param {IBM_COS.S3} s3 IBM COS S3 client object
 * @param {any} bucketName Bucket name
 * @returns Promise
 */
function bucketAccessible(s3, bucketName) {
  const params = {
    Bucket: bucketName
  };
  return s3.headBucket(params).promise()
    .then(() => { debug('Bucket is accessible'); })
    .catch((reason) => {
      console.error(reason);
      throw new VError(reason, 'Bucket is not accessible');
    });
}

/**
 * Backup directly from Cloudant to an object store object via a stream.
 *
 * @param {any} sourceUrl URL of database
 * @param {any} s3Client Object store client
 * @param {any} s3Bucket Backup destination bucket
 * @param {any} s3Key Backup destination key name (shouldn't exist)
 * @param {any} cloudantApiKey IAM API key used to authenticate to the source Cloudant
 * @param {any} mode couchbackup `shallow` or `full` mode
 * @returns Promise
 */
function backupToS3(sourceUrl, s3Client, s3Bucket, s3Key, cloudantApiKey, mode) {
  debug(`Setting up IBM COS upload to ${s3Bucket}/${s3Key}`);

  // A pass through stream that has couchbackup's output
  // written to it and it then read by the S3 upload client.
  // It has a 64MB highwater mark to allow for fairly
  // uneven network connectivity.
  const streamToUpload = new stream.PassThrough({ highWaterMark: 64 * 1024 * 1024 });

  // Set up IBM COS upload.
  const params = {
    Bucket: s3Bucket,
    Key: s3Key,
    Body: streamToUpload
  };
  const options = {
    partSize: 5 * 1024 * 1024, // max 5 MB part size (minimum size)
    queueSize: 5 // allow 5 parts at a time
  };
  const upload = s3Client.upload(params, options);
  upload.on('httpUploadProgress', (progress) => {
    debug(`IBM COS upload progress: ${JSON.stringify(progress)}`);
  });
  debug(`Starting streaming data from ${sourceUrl}`);
  debug(`Using couchbackup mode: ${mode}`);
  const couchbackupParams = {
    iamApiKey: cloudantApiKey,
    mode,
    // Optional override of the IAM token endpoint (e.g. non-production IAM).
    ...(process.env.CLOUDANT_IAM_TOKEN_URL && { iamTokenUrl: process.env.CLOUDANT_IAM_TOKEN_URL }),
  };
  const backupPromise = new Promise((resolve, reject) => couchbackup.backup(
    sourceUrl,
    streamToUpload,
    couchbackupParams,
    (err, done) => {
      if (err) {
        debug(err);
        reject(new VError(err, 'CouchBackup process failed'));
      } else {
        streamToUpload.end(); // must call end() to complete upload.
        resolve(done);
      }
    }
  )
    .on('changes', batch => debug('Couchbackup changes batch: ', batch))
    .on('written', progress => debug('Fetched batch:', progress.batch, 'Total document revisions written:', progress.total, 'Time:', progress.time))
  );
  // Resolve only when both the backup read and the COS upload have completed.
  return Promise.all([backupPromise, upload.promise()]);
}

main();
--------------------------------------------------------------------------------
/examples/cos-s3/s3-backup-stream.js:
--------------------------------------------------------------------------------
// Copyright © 2017, 2024 IBM Corp. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Small script which backs up a Cloudant or CouchDB database to an S3
// bucket via a stream rather than on-disk file.
//
// The script generates the backup object name by combining together the path
// part of the database URL and the current time.
20 | 21 | const { PassThrough } = require('node:stream'); 22 | const url = require('node:url'); 23 | 24 | const { backup } = require('@cloudant/couchbackup'); 25 | const { fromIni } = require('@aws-sdk/credential-providers'); 26 | const { HeadBucketCommand, S3Client } = require('@aws-sdk/client-s3'); 27 | const { Upload } = require('@aws-sdk/lib-storage'); 28 | const debug = require('debug')('s3-backup'); 29 | const VError = require('verror').VError; 30 | 31 | /* 32 | Main function, run from base of file. 33 | */ 34 | function main() { 35 | const argv = require('yargs') 36 | .usage('Usage: $0 [options]') 37 | .example('$0 -s https://user:pass@host/sourcedb -b ', 'Backup db to bucket') 38 | .options({ 39 | source: { alias: 's', nargs: 1, demandOption: true, describe: 'Source database URL' }, 40 | bucket: { alias: 'b', nargs: 1, demandOption: true, describe: 'Destination bucket' }, 41 | prefix: { alias: 'p', nargs: 1, describe: 'Prefix for backup object key', default: 'couchbackup' }, 42 | s3url: { nargs: 1, describe: 'S3 endpoint URL' }, 43 | awsprofile: { nargs: 1, describe: 'The profile section to use in the ~/.aws/credentials file', default: 'default' } 44 | }) 45 | .help('h').alias('h', 'help') 46 | .epilog('Copyright (C) IBM 2017, 2024') 47 | .argv; 48 | 49 | const sourceUrl = argv.source; 50 | const backupName = new url.URL(sourceUrl).pathname.split('/').filter(function(x) { return x; }).join('-'); 51 | const backupBucket = argv.bucket; 52 | const backupKeyPrefix = `${argv.prefix}-${backupName}`; 53 | const shallow = argv.shallow; 54 | 55 | const backupKey = `${backupKeyPrefix}-${new Date().toISOString()}`; 56 | 57 | const s3Endpoint = argv.s3url; 58 | const awsProfile = argv.awsprofile; 59 | 60 | // Creds are from ~/.aws/credentials, environment etc. (see S3 docs). 
61 | const awsOpts = { 62 | signatureVersion: 'v4', 63 | credentials: fromIni({ profile: awsProfile }) 64 | }; 65 | if (typeof s3Endpoint !== 'undefined') { 66 | awsOpts.endpoint = s3Endpoint; 67 | } 68 | const s3 = new S3Client(awsOpts); 69 | 70 | debug(`Creating a new backup of ${s(sourceUrl)} at ${backupBucket}/${backupKey}...`); 71 | bucketAccessible(s3, backupBucket) 72 | .then(() => { 73 | return backupToS3(sourceUrl, s3, backupBucket, backupKey, shallow); 74 | }) 75 | .then(() => { 76 | debug('done.'); 77 | }) 78 | .catch((reason) => { 79 | debug(`Error: ${reason}`); 80 | process.exit(1); 81 | }); 82 | } 83 | 84 | /** 85 | * Return a promise that resolves if the bucket is available and 86 | * rejects if not. 87 | * 88 | * @param {any} s3 S3 client object 89 | * @param {any} bucketName Bucket name 90 | * @returns Promise 91 | */ 92 | function bucketAccessible(s3, bucketName) { 93 | return s3.send(new HeadBucketCommand({ 94 | Bucket: bucketName 95 | })).catch(e => { throw new VError(e, 'S3 bucket not accessible'); }); 96 | } 97 | 98 | /** 99 | * Backup directly from Cloudant to an object store object via a stream. 100 | * 101 | * @param {any} sourceUrl URL of database 102 | * @param {any} s3Client Object store client 103 | * @param {any} s3Bucket Backup destination bucket 104 | * @param {any} s3Key Backup destination key name (shouldn't exist) 105 | * @param {any} shallow Whether to use the couchbackup `shallow` mode 106 | * @returns Promise 107 | */ 108 | function backupToS3(sourceUrl, s3Client, s3Bucket, s3Key, shallow) { 109 | debug(`Setting up S3 upload to ${s3Bucket}/${s3Key}`); 110 | 111 | // A pass through stream that has couchbackup's output 112 | // written to it and it then read by the S3 upload client. 113 | // No highWaterMark as we don't want to double-buffer, just connect two streams 114 | const streamToUpload = new PassThrough({ highWaterMark: 0 }); 115 | 116 | // Set up S3 upload. 
  let s3Promise;
  try {
    const upload = new Upload({
      client: s3Client,
      params: {
        Bucket: s3Bucket,
        Key: s3Key,
        Body: streamToUpload
      },
      queueSize: 5, // match the default couchbackup concurrency
      partSize: 1024 * 1024 * 64 // 64 MB part size
    });
    upload.on('httpUploadProgress', (progress) => {
      debug(`S3 upload progress: ${JSON.stringify(progress)}`);
    });
    // Return the promise for the completed upload
    s3Promise = upload.done().finally(() => {
      debug('S3 upload done');
    })
      .then(() => {
        debug('Upload succeeded');
      })
      .catch(err => {
        debug(err);
        throw new VError(err, 'Upload failed');
      });
  } catch (err) {
    // Upload construction can throw synchronously; surface it as a rejection
    // so the Promise.all below still settles.
    debug(err);
    s3Promise = Promise.reject(new VError(err, 'Upload could not start'));
  }

  debug(`Starting streaming data from ${s(sourceUrl)}`);

  const backupPromise = new Promise((resolve, reject) => {
    backup(
      sourceUrl,
      streamToUpload,
      shallow ? { mode: 'shallow' } : {},
      (err, done) => {
        if (err) {
          reject(err);
        } else {
          resolve(done);
        }
      }
    )
      .on('changes', batch => debug('Couchbackup changes batch: ', batch))
      .on('written', progress => debug('Fetched batch:', progress.batch, 'Total document revisions written:', progress.total, 'Time:', progress.time));
  })
    .then((done) => {
      debug(`couchbackup download from ${s(sourceUrl)} complete; backed up ${done.total}`);
    })
    .catch((err) => {
      debug(err);
      throw new VError(err, 'couchbackup process failed');
    });

  // Resolve only when both the backup read and the S3 upload have completed.
  return Promise.all([backupPromise, s3Promise]);
}

/**
 * Remove creds from a URL, e.g., before logging
 *
 * @param {string} url URL to safen
 */
function s(originalUrl) {
  const parts = new url.URL(originalUrl);
  return url.format(parts, { auth: false });
}

main();
--------------------------------------------------------------------------------
/examples/cos-s3/s3-backup-file.js:
--------------------------------------------------------------------------------
// Copyright © 2017, 2024 IBM Corp. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Small script which backs up a Cloudant or CouchDB database to an S3
// bucket, using an intermediary file on disk.
17 | // 18 | // The script generates the backup object name by combining together the path 19 | // part of the database URL and the current time. 20 | 21 | const { createReadStream, createWriteStream, mkdtempSync } = require('node:fs'); 22 | const { tmpdir } = require('node:os'); 23 | const { join } = require('node:path'); 24 | const url = require('node:url'); 25 | 26 | const { backup } = require('@cloudant/couchbackup'); 27 | const { fromIni } = require('@aws-sdk/credential-providers'); 28 | const { Upload } = require('@aws-sdk/lib-storage'); 29 | const { HeadBucketCommand, S3Client } = require('@aws-sdk/client-s3'); 30 | const debug = require('debug')('s3-backup'); 31 | const VError = require('verror').VError; 32 | 33 | /* 34 | Main function, run from base of file. 35 | */ 36 | function main() { 37 | const argv = require('yargs') 38 | .usage('Usage: $0 [options]') 39 | .example('$0 -s https://user:pass@host/sourcedb -b ', 'Backup db to bucket') 40 | .options({ 41 | source: { alias: 's', nargs: 1, demandOption: true, describe: 'Source database URL' }, 42 | bucket: { alias: 'b', nargs: 1, demandOption: true, describe: 'Destination bucket' }, 43 | prefix: { alias: 'p', nargs: 1, describe: 'Prefix for backup object key', default: 'couchbackup' }, 44 | s3url: { nargs: 1, describe: 'S3 endpoint URL' }, 45 | awsprofile: { nargs: 1, describe: 'The profile section to use in the ~/.aws/credentials file', default: 'default' } 46 | }) 47 | .help('h').alias('h', 'help') 48 | .epilog('Copyright (C) IBM 2017, 2024') 49 | .argv; 50 | 51 | const sourceUrl = argv.source; 52 | const backupBucket = argv.bucket; 53 | const backupName = new url.URL(sourceUrl).pathname.split('/').filter(function(x) { return x; }).join('-'); 54 | const backupKeyPrefix = `${argv.prefix}-${backupName}`; 55 | 56 | const backupDate = Date.now(); 57 | const isoDate = new Date(backupDate).toISOString(); 58 | const backupKey = `${backupKeyPrefix}-${isoDate}`; 59 | const backupTmpFile = 
join(mkdtempSync(join(tmpdir(), 'couchbackup-s3-backup-')), `${backupDate}`); 60 | 61 | const s3Endpoint = argv.s3url; 62 | const awsProfile = argv.awsprofile; 63 | 64 | // Creds are from ~/.aws/credentials, environment etc. (see S3 docs). 65 | const awsOpts = { 66 | signatureVersion: 'v4', 67 | credentials: fromIni({ profile: awsProfile }) 68 | }; 69 | if (typeof s3Endpoint !== 'undefined') { 70 | awsOpts.endpoint = s3Endpoint; 71 | } 72 | const s3 = new S3Client(awsOpts); 73 | 74 | debug(`Creating a new backup of ${s(sourceUrl)} at ${backupBucket}/${backupKey}...`); 75 | bucketAccessible(s3, backupBucket) 76 | .then(() => { 77 | return createBackupFile(sourceUrl, backupTmpFile); 78 | }) 79 | .then(() => { 80 | return uploadNewBackup(s3, backupTmpFile, backupBucket, backupKey); 81 | }) 82 | .then(() => { 83 | debug('Backup successful!'); 84 | }) 85 | .catch((reason) => { 86 | debug(`Error: ${reason}`); 87 | }); 88 | } 89 | 90 | /** 91 | * Return a promise that resolves if the bucket is available and 92 | * rejects if not. 93 | * 94 | * @param {any} s3 S3 client object 95 | * @param {any} bucketName Bucket name 96 | * @returns Promise 97 | */ 98 | function bucketAccessible(s3, bucketName) { 99 | return s3.send(new HeadBucketCommand({ 100 | Bucket: bucketName 101 | })).catch(e => { throw new VError(e, 'S3 bucket not accessible'); }); 102 | } 103 | 104 | /** 105 | * Use couchbackup to create a backup of the specified database to a file path. 
 *
 * @param {any} sourceUrl Database URL
 * @param {any} backupTmpFilePath Path to write file
 * @returns Promise
 */
function createBackupFile(sourceUrl, backupTmpFilePath) {
  return new Promise((resolve, reject) => {
    // NOTE(review): backup is invoked here with the 3-argument
    // (source, stream, callback) form, unlike the other examples which pass
    // an options object — confirm this arity is supported by the couchbackup
    // version in use.
    backup(
      sourceUrl,
      createWriteStream(backupTmpFilePath),
      (err, done) => {
        if (err) {
          reject(err);
        } else {
          resolve(done);
        }
      }
    )
      .on('changes', batch => debug('Couchbackup changes batch: ', batch))
      .on('written', progress => debug('Fetched batch:', progress.batch, 'Total document revisions written:', progress.total, 'Time:', progress.time));
  })
    .then((done) => {
      debug(`couchbackup to file done; backed up ${done.total}`);
      debug('Ready to upload to S3');
    })
    .catch((err) => {
      throw new VError(err, 'CouchBackup process failed');
    });
}

/**
 * Upload a backup file to an S3 bucket.
 *
 * @param {any} s3 Object store client
 * @param {any} backupTmpFilePath Path of backup file to write.
 * @param {any} bucket Object store bucket name
 * @param {any} key Object store key name
 * @returns Promise
 */
function uploadNewBackup(s3, backupTmpFilePath, bucket, key) {
  debug(`Uploading from ${backupTmpFilePath} to ${bucket}/${key}`);
  const inputStream = createReadStream(backupTmpFilePath);
  try {
    const upload = new Upload({
      client: s3,
      params: {
        Bucket: bucket,
        Key: key,
        Body: inputStream
      },
      queueSize: 5, // allow 5 parts at a time
      partSize: 1024 * 1024 * 64 // 64 MB part size
    });
    upload.on('httpUploadProgress', (progress) => {
      debug(`S3 upload progress: ${JSON.stringify(progress)}`);
    });
    // Return a promise for the completed or aborted upload
    return upload.done().finally(() => {
      debug('S3 upload done');
    })
      .then(() => {
        debug('Upload succeeded');
      })
      .catch(err => {
        debug(err);
        throw new VError(err, 'Upload failed');
      });
  } catch (err) {
    // Upload construction can throw synchronously; normalise to a rejection.
    debug(err);
    return Promise.reject(new VError(err, 'Upload could not start'));
  }
}

/**
 * Remove creds from a URL, e.g., before logging
 *
 * @param {string} url URL to safen
 */
function s(originalUrl) {
  const parts = new url.URL(originalUrl);
  return url.format(parts, { auth: false });
}

main();
--------------------------------------------------------------------------------
/examples/cos-s3/s3-restore-file.js:
--------------------------------------------------------------------------------
// Copyright © 2025 IBM Corp. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Small script which restores a Cloudant or CouchDB database from an S3 compatible
// bucket using an intermediary file on disk

const { S3Client, GetObjectCommand, HeadObjectCommand } = require('@aws-sdk/client-s3');
const { fromIni } = require('@aws-sdk/credential-providers');
const VError = require('verror').VError;
const { restore } = require('@cloudant/couchbackup');
const debug = require('debug')('couchbackup-s3');
const url = require('url');
const fs = require('fs');
const tmp = require('tmp');
const { pipeline } = require('stream/promises');

// Main function, run from base of file.
function main() {
  const argv = require('yargs')
    .usage('Usage: $0 [options]')
    .example('$0 -t https://~replaceWithYourUniqueHost~.cloudantnosqldb.appdomain.cloud/targetdb -b -o --s3_url ', 'Restore database from a bucket using intermediate file')
    .options({
      target: { alias: 't', nargs: 1, demandOption: true, describe: 'Target database URL' },
      bucket: { alias: 'b', nargs: 1, demandOption: true, describe: 'Source bucket containing backup' },
      object: { alias: 'o', nargs: 1, demandOption: true, describe: 'Backup Object name in S3 instance' },
      s3_url: { nargs: 1, describe: 'S3 endpoint URL' },
      awsprofile: { nargs: 1, describe: 'The profile section to use in the ~/.aws/credentials file', default: 'default' },
    })
    .help('h').alias('h', 'help')
    .epilog('Copyright (C) IBM 2025')
    .argv;

  const cloudantURL = argv.target;
  const restoreBucket = argv.bucket;
  const restoreObject = argv.object;
  const s3Endpoint = argv.s3_url;
  const awsProfile = argv.awsprofile;
  // IAM API key for the target Cloudant instance is read from the environment.
  const cloudantApiKey = process.env.CLOUDANT_IAM_API_KEY;
  // Intermediary file for the downloaded backup data.
  const restoreTmpFile = tmp.fileSync();

  const awsOpts = {
    signatureVersion: 'v4',
    credentials: fromIni({ profile: awsProfile })
  };
  if (typeof s3Endpoint !== 'undefined') {
    awsOpts.endpoint = s3Endpoint;
  }
  const s3 = new S3Client(awsOpts);

  debug(`Restoring from ${restoreBucket}/${restoreObject} to ${cloudantURL} via file`);

  // Start the restore process
  restoreProcess(s3, restoreBucket, restoreObject, cloudantURL, cloudantApiKey, restoreTmpFile.name)
    .then(() => {
      debug('Restore completed successfully');
      process.exit(0);
    })
    .catch((err) => {
      console.error('Restore failed:', err.message);
      process.exit(1);
    });
}

/**
 * Main restore process: check the object exists, download it to a
 * temporary file, then replay it into the target database.
 */
async function restoreProcess(s3, restoreBucket, restoreObject, targetUrl, cloudantApiKey, restoreTmpFilePath) {
  await objectAccessible(s3, restoreBucket, restoreObject);

  await createRestoreFile(s3, restoreBucket, restoreObject, restoreTmpFilePath);

  await restoreFromFile(restoreTmpFilePath, targetUrl, cloudantApiKey);
}

/**
 * Check if object is accessible in S3
 * @param {S3Client} s3
 * @param {string} bucketName
 * @param {string} objectKey
 */
async function objectAccessible(s3, bucketName, objectKey) {
  try {
    await s3.send(new HeadObjectCommand({
      Bucket: bucketName,
      Key: objectKey
    }));
    debug(`Object '${objectKey}' is accessible`);
  } catch (reason) {
    debug(reason);
    throw new VError(reason, 'Object is not accessible');
  }
}

/**
 * Download backup file from S3 to local temporary file with optimized buffer sizes
 * @param {S3Client} s3
 *
 * @param {string} restoreBucket
 * @param {string} objectKey
 * @param {string} restoreTmpFilePath
 */
async function createRestoreFile(s3, restoreBucket, objectKey, restoreTmpFilePath) {
  debug(`Downloading from ${restoreBucket}/${objectKey} to ${restoreTmpFilePath}`);

  const response = await s3.send(new GetObjectCommand({
    Bucket: restoreBucket,
    Key: objectKey
  }));

  const inputStream = response.Body;

  const outputStream = fs.createWriteStream(restoreTmpFilePath, {
    highWaterMark: 16 * 1024 * 1024 // 16MB buffer for efficient disk writes
  });

  try {
    await pipeline(inputStream, outputStream);
    debug('Download completed successfully');
  } catch (err) {
    debug(err);
    throw new VError(err, 'Failed to download backup file from S3');
  }
}

/**
 * Restore from a local backup file to Cloudant database with optimized buffer
 * @param {string} restoreFileName Path to backup file
 * @param {string} targetUrl URL of target database
 * @param {string} cloudantApiKey IAM API key for Cloudant
 */
async function restoreFromFile(restoreFileName, targetUrl, cloudantApiKey) {
  debug(`Starting restore from ${restoreFileName} to ${s(targetUrl)}`);

  const inputStream = fs.createReadStream(restoreFileName, {
    highWaterMark: 16 * 1024 * 1024 // 16MB buffer for efficient file reading
  });

  const restorePromise = new Promise((resolve, reject) => {
    const params = {
      iamApiKey: cloudantApiKey,
      // Optional override of the IAM token endpoint (e.g. non-production IAM).
      ...(process.env.CLOUDANT_IAM_TOKEN_URL && { iamTokenUrl: process.env.CLOUDANT_IAM_TOKEN_URL }),
    };

    const restoreStream = restore(
      inputStream,
      targetUrl,
      params,
      (err, data) => {
        if (err) {
          reject(err);
        } else {
          resolve(data);
        }
      }
    );

    restoreStream.on('restored', progress => {
      debug('Restored batch:', progress.batch, 'Total document revisions written:', progress.total, 'Time:', progress.time);
    });
  });

  try {
    const result = await restorePromise;
    debug(`Couchbackup restore to ${s(targetUrl)} complete; restored ${result.total} documents`);
    return result;
  } catch (err) {
    debug(err);
    throw new VError(err, 'Couchbackup restore failed');
  }
}

/**
 * Remove credentials from a URL for safe logging
 * @param {string} originalUrl URL to sanitize
 */
function s(originalUrl) {
  const parts = new url.URL(originalUrl);
  return url.format(parts, { auth: false });
}

main();
--------------------------------------------------------------------------------
/examples/cos-sdk/cos-backup-file.js:
--------------------------------------------------------------------------------
// Copyright © 2025 IBM Corp. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Small script which backs up a Cloudant database to an IBM Cloud Object Storage (COS)
// bucket, using an intermediary file on disk, using IAM authentication
//
// The script generates the backup object name by combining together the path
// part of the database URL and the current time.
const IBM_COS = require('ibm-cos-sdk');
const fs = require('fs');
// NOTE(review): other examples use require('verror').VError; confirm the
// verror module root export resolves to the same constructor here.
const VError = require('verror');
const tmp = require('tmp');
const couchbackup = require('@cloudant/couchbackup');
const debug = require('debug')('couchbackup-cos');
const url = require('url');

/*
  Main function, run from base of file.
*/
function main() {
  const argv = require('yargs')
    .usage('Usage: $0 [options]')
    .example('$0 -s https://~replaceWithYourUniqueHost~.cloudantnosqldb.appdomain.cloud/sourcedb -b --cos_url ', 'Backup db to bucket using IAM authentication')
    .options({
      source: { alias: 's', nargs: 1, demandOption: true, describe: 'Source database URL' },
      bucket: { alias: 'b', nargs: 1, demandOption: true, describe: 'Destination bucket' },
      prefix: { alias: 'p', nargs: 1, describe: 'Prefix for backup object key', default: 'couchbackup' },
      cos_url: { nargs: 1, demandOption: true, describe: 'IBM COS S3 endpoint URL' }, // An endpoint from 'endpoints' list in cos_credentials
      shallow: { describe: 'Backup the documents winning revisions only', type: 'boolean' }
    })
    .help('h').alias('h', 'help')
    .epilog('Copyright (C) IBM 2025')
    .argv;

  const sourceUrl = argv.source;
  const backupBucket = argv.bucket;
  // Object name is derived from the database URL path, e.g. /a/b -> "a-b".
  const backupName = new url.URL(sourceUrl).pathname.split('/').filter(function(x) { return x; }).join('-');
  const backupKeyPrefix = `${argv.prefix}-${backupName}`;
  // Timestamp suffix keeps each backup object unique.
  const backupKey = `${backupKeyPrefix}-${new Date().toISOString()}`;
  const cosEndpoint = argv.cos_url;
  // IAM API key for the source Cloudant instance is read from the environment.
  const cloudantApiKey = process.env.CLOUDANT_IAM_API_KEY;
  const mode = argv.shallow ? 'shallow' : 'full';
  // Intermediary file for the backup data before upload.
  const backupTmpFile = tmp.fileSync();

  /*
   * Creds are from ~/.bluemix/cos_credentials, generated by ibmcloud CLI tool
   * See: https://cloud.ibm.com/docs/cloud-object-storage?topic=cloud-object-storage-service-credentials
   *
   * corresponding 'endpoint' can be found on IBM Cloud UI at the COS instance,
   * or under the 'endpoints' link provided in the generated file (~/.bluemix/cos_credentials)
   * */
  const config = {
    endpoint: cosEndpoint,
    credentials: new IBM_COS.SharedJSONFileCredentials(),
  };
  const COS = new IBM_COS.S3(config);
  debug(`Creating a new backup of ${sourceUrl} at ${backupBucket}/${backupKey}...`);
  bucketAccessible(COS, backupBucket)
    .then(() => {
      return createBackupFile(sourceUrl, backupTmpFile.name, cloudantApiKey, mode);
    })
    .then(() => {
      return uploadNewBackup(COS, backupTmpFile.name, backupBucket, backupKey);
    })
    .then(() => {
      debug('Backup successful!');
    })
    .catch((reason) => {
      debug(`Error: ${reason}`);
      // Non-zero exit so callers/schedulers can detect the failure.
      process.exit(1);
    });
}

/**
 * Return a promise that resolves if the bucket is available and
 * rejects if not.
 *
 * @param {IBM_COS.S3} s3 IBM COS S3 client object
 * @param {any} bucketName Bucket name
 * @returns Promise
 */
function bucketAccessible(s3, bucketName) {
  const params = {
    Bucket: bucketName
  };
  return s3.headBucket(params).promise()
    .then(() => { debug('Bucket is accessible'); })
    .catch((reason) => {
      console.error(reason);
      throw new VError(reason, 'Bucket is not accessible');
    });
}

/**
 * Use couchbackup to create a backup of the specified database to a file path.
108 | * 109 | * @param {any} sourceUrl Database URL 110 | * @param {any} backupTmpFilePath Path to write file 111 | * @returns Promise 112 | */ 113 | function createBackupFile(sourceUrl, backupTmpFilePath, cloudantApiKey, mode) { 114 | return new Promise((resolve, reject) => { 115 | debug(`Using couchbackup mode: ${mode}`); 116 | const params = { 117 | iamApiKey: cloudantApiKey, 118 | mode, 119 | ...(process.env.CLOUDANT_IAM_TOKEN_URL && { iamTokenUrl: process.env.CLOUDANT_IAM_TOKEN_URL }), 120 | }; 121 | couchbackup.backup( 122 | sourceUrl, 123 | fs.createWriteStream(backupTmpFilePath), 124 | params, 125 | (err, done) => { 126 | if (err) { 127 | reject(new VError(err, 'CouchBackup process failed')); 128 | } else { 129 | resolve(done); 130 | } 131 | } 132 | ) 133 | .on('changes', batch => debug('Couchbackup changes batch: ', batch)) 134 | .on('written', progress => debug('Fetched batch:', progress.batch, 'Total document revisions written:', progress.total, 'Time:', progress.time)); 135 | }) 136 | .then((done) => { 137 | debug(`couchbackup to file done; backed up ${done.total}`); 138 | debug('Ready to upload to IBM COS'); 139 | }) 140 | .catch((err) => { 141 | throw new VError(err, 'CouchBackup process failed'); 142 | }); 143 | } 144 | 145 | /** 146 | * Upload a backup file to an IBM COS bucket. 147 | * 148 | * @param {IBM_COS.S3} cos Object store client 149 | * @param {any} backupTmpFilePath Path of backup file to write. 
150 | * @param {any} bucket Object store bucket name 151 | * @param {any} key Object store key name 152 | * @returns Promise 153 | */ 154 | function uploadNewBackup(cos, backupTmpFilePath, bucket, key) { 155 | debug(`Uploading from ${backupTmpFilePath} to ${bucket}/${key}`); 156 | 157 | const inputStream = fs.createReadStream(backupTmpFilePath, { highWaterMark: 5 * 1024 * 1024 }); 158 | const params = { 159 | Bucket: bucket, 160 | Key: key, 161 | Body: inputStream 162 | }; 163 | const options = { 164 | partSize: 5 * 1024 * 1024, // max 5 MB part size (minimum size) 165 | queueSize: 5 // allow 5 parts at a time 166 | }; 167 | 168 | const upload = cos.upload(params, options); 169 | upload.on('httpUploadProgress', (progress) => { 170 | debug(`IBM COS S3 upload progress: ${JSON.stringify(progress)}`); 171 | }); 172 | 173 | return upload.promise() 174 | .then(() => { 175 | debug('Upload succeeded!'); 176 | }) 177 | .catch(err => { 178 | debug(err); 179 | throw new VError(err); 180 | }); 181 | } 182 | 183 | main(); 184 | --------------------------------------------------------------------------------