├── .gitignore ├── .travis.yml ├── LICENSE ├── longest-common-prefix.js ├── iam.js ├── package.json ├── completer.js ├── README.md └── app.js /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | node_modules 3 | start.sh 4 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: node_js 2 | node_js: 3 | - "node" 4 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The Apache License, version 2.0 2 | 3 | licensed under the apache license, version 2.0 (the "license"); you may not use this file except in compliance with the license. you may obtain a copy of the license at 4 | 5 | http://www.apache.org/licenses/LICENSE-2.0 6 | 7 | unless required by applicable law or agreed to in writing, software distributed under the license is distributed on an "as is" basis, without warranties or conditions of any kind, either express or implied. see the license for the specific language governing permissions and limitations under the license. 8 | -------------------------------------------------------------------------------- /longest-common-prefix.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | // find the longest common prefix in a list of strings 4 | module.exports = function longestCommonPrefix (strings) { 5 | if (!strings.length) { 6 | return '' 7 | } 8 | 9 | let candidate = strings[0] 10 | for (let i = 1; i < strings.length; i++) { 11 | const str = strings[i] 12 | while (str.indexOf(candidate) !== 0 && candidate) { 13 | candidate = candidate.substring(0, candidate.length - 1) 14 | } 15 | if (!candidate) { 16 | break 17 | } 18 | } 19 | return candidate 20 | } 21 | -------------------------------------------------------------------------------- /iam.js: -------------------------------------------------------------------------------- 1 | const ccurllib = require('ccurllib') 2 | 3 | const getToken = async (IAM_API_KEY) => { 4 | if (IAM_API_KEY) { 5 | let obj 6 | obj = ccurllib.get(IAM_API_KEY) 7 | if (!obj) { 8 | obj = await ccurllib.getBearerToken(IAM_API_KEY) 9 | if (obj) { 10 | ccurllib.set(IAM_API_KEY, obj) 11 | } 12 | } 13 | if (!obj) { 14 | throw new Error('Could not perform IAM authentication') 15 | } 16 | return obj.access_token 17 | } else { 18 | return null 19 | } 20 | } 21 | 22 | module.exports = { 23 | getToken 24 | } 25 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "couchshell", 3 | "version": "1.4.0", 4 | "description": "A shell to interact with CouchDB as if it were a file system", 5 | "scripts": { 6 | "run": "node app.js" 7 | }, 8 | "dependencies": { 9 | "ascii-tree": "0.3.0", 10 | "ccurllib": "^1.4.0", 11 | "nano": "^10.1.3", 12 | "shell": "https://github.com/glynnbird/node-shell.git" 13 | }, 14 | "repository": "https://github.com/glynnbird/couchshell.git", 15 | "keywords": [ 16 | "CouchDB", 17 | "shell", 18 | "couch", 19 | "command-line" 20 | ], 21 | "author": "Glynn Bird", 22 | "license": "Apache-2.0", 23 | "bin": { 24 | "couchshell": "app.js" 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /completer.js: 
-------------------------------------------------------------------------------- 1 | 2 | /* 3 | ### 4 | Completer plugin 5 | ================ 6 | Provides tab completion. Options passed during creation are: 7 | - `shell` , (required) A reference to your shell application. 8 | ### 9 | module.exports = (settings) -> 10 | # Validation 11 | throw new Error 'No shell provided' if not settings.shell 12 | shell = settings.shell 13 | # Plug completer to interface 14 | return unless shell.isShell 15 | shell.interface().completer = (text, cb) -> 16 | suggestions = [] 17 | routes = shell.routes 18 | for route in routes 19 | command = route.command 20 | if command.substr(0, text.length) is text 21 | suggestions.push command 22 | cb(false, [suggestions, text]) 23 | null 24 | */ 25 | 26 | const longestCommonPrefix = require('./longest-common-prefix') 27 | 28 | function processSuggestions (suggestions, startkey, text, cb) { 29 | if (suggestions.length) { 30 | const prefix = longestCommonPrefix(suggestions).substring(startkey.length) 31 | 32 | if (prefix.length) { // one common prefix, so complete it 33 | suggestions = [text + prefix] 34 | } 35 | } 36 | cb(null, [suggestions, text]) 37 | } 38 | 39 | module.exports = function (settings) { 40 | if (!settings.shell) { 41 | throw new Error('No shell provided') 42 | } 43 | if (!settings.appsettings) { 44 | throw new Error('No appsettings provided') 45 | } 46 | const shell = settings.shell 47 | const appsettings = settings.appsettings 48 | if (!shell.isShell) { 49 | return 50 | } 51 | shell.interface().completer = async function (text, cb) { 52 | // first let's see if the command has spaces in 53 | const bits = text.split(' ') 54 | 55 | // if we have no space, then we haven't finished typing the command - so we want command auto-completion 56 | if (bits.length === 1) { 57 | const suggestions = [] 58 | const routes = shell.routes 59 | for (const i in routes) { 60 | const command = routes[i].command 61 | if (command.substr(0, text.length) === text) { 62 | suggestions.push(command) 63 | } 64 | } 65 | cb(null, [suggestions, text]) 66 | } else { 67 | // if we are in a sub-directory, we want documentid auto-completion 68 | let startkey 69 | if (appsettings.cloudantdb) { 70 | startkey = bits[bits.length - 1] || '' 71 | appsettings.cloudantdb.list({ 72 | limit: 10, 73 | startkey, 74 | endkey: startkey + '\uffff' 75 | }, function (err, data) { 76 | if (err) { 77 | return cb(null, [[], text]) // on error, offer no suggestions rather than reading rows from an undefined response 78 | } 79 | const suggestions = data.rows.map(function (row) { 80 | return row.id 81 | }) 82 | processSuggestions(suggestions, startkey, text, cb) 83 | }) 84 | } else { 85 | // database/documentid autocompletion 86 | startkey = bits[bits.length - 1] || '' 87 | try { 88 | const dbs = await settings.nano.db.list() 89 | const suggestions = dbs.filter(function (db) { 90 | return db.indexOf(startkey) === 0 91 | }) 92 | processSuggestions(suggestions, startkey, text, cb) 93 | } catch (e) { 94 | console.log(e) 95 | cb(null, [[], text]) 96 | // fall back to no suggestions 97 | } 98 | } 99 | } 100 | } 101 | } 102 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # couchshell 2 | 3 | ``` 4 | _____ _ _____ _ _ _ 5 | / ____| | | / ____| | | | | 6 | | | ___ _ _ ___| |__ | (___ | |__ ___| | | 7 | | | / _ \| | | |/ __| '_ \ \___ \| '_ \ / _ \ | | 8 | | |___| (_) | |_| | (__| | | |____) | | | | __/ | | 9 | \_____\___/ \__,_|\___|_| |_|_____/|_| |_|\___|_|_| 10 | 11 | ``` 12 | 13 | Couchshell is a
command-line shell utility that allows you to interact with a CouchDB/Cloudant interface as if it were a Unix file system. 14 | 15 | * mkdir - create database 16 | * rmdir - remove database 17 | * cd - change database 18 | * ls/ll - list contents of database or list of databases 19 | * cat - show contents of document or database stats 20 | * echo - create document 21 | * rm - remove document 22 | * cp - copy a document or database 23 | * head - show first few documents from a database 24 | * touch - load and save a document 25 | * pwd - show the current database 26 | * tree - show revision history and conflicts from a document 27 | * du - show disk space usage of a database 28 | * fsck - remove conflicts from a document 29 | 30 | ## Installation 31 | 32 | npm install -g couchshell 33 | 34 | ## Define the URL of your CouchDB/Cloudant server 35 | 36 | Set an environment variable called `COUCH_URL` which contains the URL of your CouchDB or Cloudant instance e.g. 37 | 38 | export COUCH_URL=http://127.0.0.1:5984 39 | 40 | or 41 | 42 | export COUCH_URL=https://myusername:mypassword@myhost.cloudant.com 43 | 44 | ### IAM Authentication 45 | 46 | Alternatively, if you are using IAM authentication with IBM Cloudant, then supply two environment variables: 47 | 48 | - `COUCH_URL` - the URL of your Cloudant host e.g. `https://myhost.cloudant.com` (note the absence of a username and password in the URL). 49 | - `IAM_API_KEY` - the IAM API key e.g. `ABC123515-151215`. 50 | 51 | ## Starting couchshell 52 | 53 | $ couchshell 54 | Type "help" or press enter for a list of commands 55 | >> 56 | 57 | You now have access to your CouchDB server as if it were a file system, with databases represented as directories at the root level and documents as files inside the directories. 58 | 59 | 60 | ## Create database (directory) 61 | 62 | >> mkdir mydb 63 | 64 | ## Change directory 65 | 66 | We can deal with a database by using the `cd` shell command: 67 | 68 | >> cd mydb 69 | mydb >> 70 | 71 | Or return to the home directory with: 72 | 73 | >> cd .. 74 | >> 75 | 76 | ## View the contents of a directory 77 | 78 | We can see the contents of a directory with `ls`: 79 | 80 | >> ls 81 | _replicator _users accidents anagrammer articles cache conference db10 feeds geoquiz geoquiz_stats houseprices 82 | >> 83 | 84 | or `ll`: 85 | 86 | >> ll 87 | _replicator 88 | _users 89 | accidents 90 | anagrammer 91 | articles 92 | cache 93 | conference 94 | db10 95 | feeds 96 | geoquiz 97 | geoquiz_stats 98 | houseprices 99 | hp 100 | mydb 101 | nottage 102 | pt_test 103 | remote 104 | test 105 | >> 106 | 107 | At the top level we see a list of databases.
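Under the hood, this top-level listing is simply CouchDB's `_all_dbs` endpoint. A minimal sketch of the same call with `nano`, assuming `COUCH_URL` is set as above:

```
// sketch: what `ls` does at the top level - list every database via _all_dbs
const nano = require('nano')(process.env.COUCH_URL)

nano.db.list()
  .then((dbs) => console.log(dbs.join(' ')))
  .catch((err) => console.error(err))
```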
When we have `cd`'d to a database, we see a list of documents: 108 | 109 | geoquiz >> ll 110 | _design/fetch 111 | afghanistan 112 | alabama 113 | alaska 114 | albania 115 | algeria 116 | angola 117 | antarctica 118 | argentina 119 | arizona 120 | geoquiz 121 | >> 122 | 123 | We can also add the starting letters of a document's id to narrow the list: 124 | 125 | geoquiz >> ll do 126 | dogger 127 | dominican republic 128 | dorset 129 | dover 130 | geoquiz >> 131 | 132 | ## Viewing the contents of a database 133 | 134 | When at the top level, using `cat` shows a database's stats: 135 | 136 | >> cat geoquiz 137 | {"db_name":"geoquiz","doc_count":292,"doc_del_count":2,"update_seq":296,"purge_seq":0,"compact_running":false,"disk_size":2682993,"data_size":2634563,"instance_start_time":"1427369661867062","disk_format_version":6,"committed_update_seq":296} 138 | 139 | When inside a database, `cat` shows the contents of a document: 140 | 141 | geoquiz >> cat dover 142 | 143 | {"_id":"dover","_rev":"1-7ea98285628d4bb3203a0ef3b1f34247","type":"Feature","properties":{"name":"Dover","group":"Shipping Forecast"},"geometry":{"type":"Polygon","coordinates":[[[0.439453125,50.83369767098071],[1.58203125,50.233151832472245],[1.494140625,50.84757295365389],[1.7578125,50.94458443495011],[2.48291015625,51.08282186160978],[2.9443359375,51.28940590271679],[1.3842773437499998,51.26191485308451],[1.29638671875,51.12421275782688],[1.03271484375,51.069016659603896],[0.9667968749999999,50.93073802371819],[0.791015625,50.958426723359935],[0.439453125,50.83369767098071]]]}} 144 | geoquiz >> 145 | 146 | ## Creating data 147 | 148 | This is where the analogy of directories & files --> databases & documents is stretched. We use the `echo` command to generate data: 149 | 150 | testdb >> echo '{"a":1,"b":2,"c":"three"}' 151 | {"ok":true,"id":"996dfcc55a3676485a6223b09d00b958","rev":"1-debc5c8de13e1f36787fe391da8191a6"} 152 | testdb >> 153 | 154 | or we can specify the id by piping to a 'file': 155 | 156 | testdb >> echo '{"a":1,"b":2,"c":"three"}' > mydoc 157 | {"ok":true,"id":"mydoc","rev":"1-debc5c8de13e1f36787fe391da8191a6"} 158 | testdb >> 159 | 160 | ## Touching data 161 | 162 | You can create a new empty document, or 'touch' an existing one (load it and save it) by using `touch`: 163 | 164 | test >> touch moo 165 | {"ok":true,"id":"moo","rev":"1-967a00dff5e02add41819138abb3284d"} 166 | test >> touch moo 167 | {"ok":true,"id":"moo","rev":"2-7051cbe5c8faecd085a3fa619e6e6337"} 168 | test >> 169 | 170 | ## Deleting data 171 | 172 | We can remove documents with `rm`: 173 | 174 | testdb >> rm mydoc 175 | {"ok":true,"id":"mydoc","rev":"2-c1b6d2ae1a60056eac56f1f440b7b593"} 176 | testdb >> 177 | 178 | or remove whole directories with `rmdir`: 179 | 180 | >> rmdir test 181 | {"ok":true} 182 | 183 | ## Copying a document 184 | 185 | When inside a directory (database), we can copy a document with: 186 | 187 | testdb >> cp doc1 doc2 188 | {"ok":true,"id":"doc2","rev":"1-fda016d0fc74921c9b324b7aff5cbbdb"} 189 | 190 | If the destination document is already there, we will get an error: 191 | 192 | testdb >> cp doc1 doc2 193 | 409: Document update conflict. 194 | 195 | ## Copying a database 196 | 197 | When at the top of the directory tree, we can replicate one database to another with the `cp` command: 198 | 199 | >> cp databasea databaseb 200 | Replication scheduled: 201 | {"ok":true,"id":"30990d73131ad3674d3d778dbb461d85","rev":"1-6bf28911ef8daa72ecc51762955e6f9a"} 202 | 203 | Replication happens asynchronously.
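Behind the scenes, the top-level `cp` simply writes a document into the `_replicator` database, which is what schedules the replication. A minimal sketch of that request with `nano` (the database names are only examples):

```
// sketch: schedule a replication by posting to _replicator, as top-level `cp` does
const nano = require('nano')(process.env.COUCH_URL)

const replication = {
  source: process.env.COUCH_URL + '/databasea', // example database names
  target: process.env.COUCH_URL + '/databaseb',
  create_target: true
}

nano.request({ db: '_replicator', method: 'post', body: replication })
  .then((data) => console.log('Replication scheduled:', data))
  .catch((err) => console.error(err))
```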
We can check on its progress by using `cat` with the name of the target database: 204 | 205 | >> cat databaseb 206 | {"db_name":"crimea","doc_count":18500,"doc_del_count":0,"update_seq":18500,"purge_seq":0,"compact_running":false,"disk_size":12021880,"data_size":11890503,"instance_start_time":"1427978103035439","disk_format_version":6,"committed_update_seq":18500} 207 | 208 | We can even replicate to and from a remote URL: 209 | 210 | >> cp databasea https://myusername:mypassword@myhost.cloudant.com/databaseb 211 | Replication scheduled: 212 | {"ok":true,"id":"30990d73131ad3674d3d778dbb461d85","rev":"1-6bf28911ef8daa72ecc51762955e6f9a"} 213 | 214 | ## Showing first few documents from a database 215 | 216 | When at the top of the directory tree, we can output the first ten of a database's documents with `head`: 217 | 218 | >> head geoquiz 219 | [{"id":"_design/fetch","key":"_design/fetch","value":{"rev":"1-a15cb9ce7b3a4466eb369f882fb0b717"}},{"id":"afghanistan","key":"afghanistan","value":{"rev":"1-9558a91d8b99d812baead834644dbb20"}},{"id":"alabama","key":"alabama","value":{"rev":"1-dda5ed5297b54d709d5946e1ca64f30a"}},{"id":"alaska","key":"alaska","value":{"rev":"1-aaac41905347745378f8b53d4cb4c407"}},{"id":"albania","key":"albania","value":{"rev":"1-594d450b3d155ca7a30e8fb097f4cba7"}},{"id":"algeria","key":"algeria","value":{"rev":"1-1a3a846e82373946eb4ef6066993441a"}},{"id":"angola","key":"angola","value":{"rev":"1-251dc285ef7c60330041350fae377047"}},{"id":"antarctica","key":"antarctica","value":{"rev":"1-eb7b0d1b313034977a266bda6bf3eb54"}},{"id":"argentina","key":"argentina","value":{"rev":"1-7c562dcca2e94e922ecf22e200adad0b"}},{"id":"arizona","key":"arizona","value":{"rev":"1-c02750010054f7ff3a0aa420747ef3c7"}}] 220 | 221 | 222 | ## Showing the revision history of a document 223 | 224 | When inside a database (directory), you can see a visualisation of the revision history with `tree`: 225 | 226 | A document with only one revision will simply show it's id and revision token: 227 | 228 | testdb >> tree 87c8882011c89970bbe077ac67003479 229 | id = 87c8882011c89970bbe077ac67003479 230 | └─ 1-e14063a7ba34a22b100284ce731ad6ac * 231 | 232 | A document with many conflicts on revision 1 will look like this: 233 | 234 | testdb >> tree 87c8882011c89970bbe077ac67003479 235 | id = 87c8882011c89970bbe077ac67003479 236 | └─ 1 237 | ├─ 1-100785dab5598961c8588790a810d37c 238 | ├─ 1-1234cfba23d341a6d3916372a782fd65 239 | ├─ 1-16d159e554c289d5848a3ca8854f9807 240 | ├─ 1-1f10158068c458605d02ed0199ccd23b 241 | ├─ 1-45b83b90c5112878ad1a961b3e7ccaee 242 | ├─ 1-5e52a4558f111a53afe6ef72ee831af8 243 | ├─ 1-6a74408eb56ad564c1d461c97e3c47dc 244 | ├─ 1-6f49e763289e848f1650a94043a1e792 245 | ├─ 1-93b0bd6be8d346e28f95584a972c4e24 246 | └─ 1-94ec1c1571a5b00a5f0bf4121af1ddef * 247 | 248 | And a more complicated revision history may look like this: 249 | 250 | testdb >> tree 87c8882011c89970bbe077ac67003479 251 | id = 87c8882011c89970bbe077ac67003479 252 | ├─ 1-46d69249075d3c7edebff00bb1eab65e 253 | ├─ 2-cc0b8b79af66fba84a8443484bff5160 254 | ├─ 3-52d91622d40f174894c567cc1fcee2e3 255 | ├─ 4 256 | │ ├─ 4-5c58ab6c4a15f5b8b880994fa52dfa68 257 | │ └─ 4-8171912e80397748fbde74cc09d42c6e 258 | ├─ 5-edec47733ea830362a5913b7f6312fe6 259 | └─ 6 260 | ├─ 6-5f24a47d9c6cbd78261830ef179aebfd 261 | └─ 6-f564b9850dca8e61019aeabdd5480f3f * 262 | 263 | The winning revision is marked with an asterisk. 
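The tree is built from a single document fetch with `conflicts` and `revs_info` enabled, which returns the winning revision, its history and any conflicting leaf revisions. A minimal sketch of that request (the database name and document id are only examples):

```
// sketch: fetch the revision data that `tree` renders
const nano = require('nano')(process.env.COUCH_URL)
const db = nano.use('testdb') // example database name

db.get('87c8882011c89970bbe077ac67003479', { conflicts: true, revs_info: true })
  .then((doc) => {
    console.log('winner:   ', doc._rev)
    console.log('history:  ', (doc._revs_info || []).map((r) => r.rev))
    console.log('conflicts:', doc._conflicts || [])
  })
  .catch((err) => console.error(err))
```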
264 | 265 | ## Removing conflicts from a document 266 | 267 | We can delete all conflicting revisions from a document (other than the winning revision) using `fsck <id>`: 268 | 269 | ``` 270 | mydb >> tree mydoc 271 | id = mydoc 272 | └─ 1 273 | ├─ 1-1a0f63dc5b1e38a31c3a42dbb4afe3db 274 | ├─ 1-5eb7c1177ad06bf192c3dacf776cf3d3 275 | ├─ 1-7c1dafbec62feefc2f0e875aea2f6093 276 | ├─ 1-94ec1c1571a5b00a5f0bf4121af1ddef 277 | ├─ 1-95630339bde96adda2be19207c4772c8 278 | ├─ 1-9d1d7a18212c0c74369208e4aef7fa13 279 | ├─ 1-d2701441808e0caee7c394d77fbe7550 280 | ├─ 1-da86decbdae0ef60ad41c33c12870860 281 | ├─ 1-e91608e02ae31b5c6085d5ca17d964e2 282 | └─ 1-f5401b77bb604d6f55c04a0e661f69d4 * 283 | mydb >> fsck mydoc [{"ok":true,"id":"mydoc","rev":"2-96d4749bff11e619f31c3918671ec072"},{"ok":true,"id":"mydoc","rev":"2-0720b0ca56a1918feb5fac5bc0f5f7f6"},{"ok":true,"id":"mydoc","rev":"2-41e157d902abb4a9a84e8fcd905b17da"},{"ok":true,"id":"mydoc","rev":"2-421cc57d13e5f922fcc91f52e0884d3b"},{"ok":true,"id":"mydoc","rev":"2-da7d32f6a0201bf1509632d0f4b58359"},{"ok":true,"id":"mydoc","rev":"2-ef061a6709fa93bbc92f07277b81990a"},{"ok":true,"id":"mydoc","rev":"2-558011a4162d26bdcec71166556627c6"},{"ok":true,"id":"mydoc","rev":"2-19ffd7f841ddf020b42151d17f0012cc"},{"ok":true,"id":"mydoc","rev":"2-4d8e91ebc73462334b69a76610264799"}] 284 | mydb >> tree mydoc 285 | id = mydoc 286 | └─ 1-f5401b77bb604d6f55c04a0e661f69d4 * 287 | ``` 288 | 289 | All of the conflicting revisions are deleted in a single bulk operation, and the response to that bulk operation is shown. 290 | 291 | If we want to keep a specific revision (one that is not the current winning revision), then we can do `fsck <id> <revision>`: 292 | 293 | ``` 294 | mydb >> tree mydoc 295 | id = mydoc 296 | └─ 1 297 | ├─ 1-16d5fd150971e97f6adec5e17f515594 298 | ├─ 1-2d9097166eeaeffc5ae70346fea0b988 299 | ├─ 1-32616223ff8de17ee8a1afbcccc05e8e 300 | ├─ 1-36602b53bf2918b393b1bef3c7648767 301 | ├─ 1-49dedfaefc6f706b609bc75958499a13 302 | ├─ 1-94ec1c1571a5b00a5f0bf4121af1ddef 303 | ├─ 1-9e9b49aff3c1b99dcfa01e6292053aa9 304 | ├─ 1-a3eefcfb591f999b87284663a287d3b9 305 | ├─ 1-a7f639949923d35e26974b3b81522116 306 | └─ 1-a8f68832e5dd0acc1d24d099dceea335 * 307 | mydb >> fsck mydoc 1-999 308 | The revision 1-999 does not exist in the document.
309 | mydb >> fsck mydoc 1-a7f639949923d35e26974b3b81522116 310 | [{"ok":true,"id":"mydoc","rev":"2-a10f20dc2f70275e85c36305086e0ce8"},{"ok":true,"id":"mydoc","rev":"2-5ab18477afda68b7320e490113d35e74"},{"ok":true,"id":"mydoc","rev":"2-ef061a6709fa93bbc92f07277b81990a"},{"ok":true,"id":"mydoc","rev":"2-8ad6479190c7144359198ac216215ef3"},{"ok":true,"id":"mydoc","rev":"2-c6ca34d99535e5c451752adae33a0336"},{"ok":true,"id":"mydoc","rev":"2-e5aa1fb11293c85c0f84d23e4a0c6ae0"},{"ok":true,"id":"mydoc","rev":"2-875565098342b16627e54b7cd6044818"},{"ok":true,"id":"mydoc","rev":"2-4b432cb9348e2e07c9165d10dbdb1139"},{"ok":true,"id":"mydoc","rev":"2-b9fbc1a19742fac04d1d98db95fb7b43"}] 311 | mydb >> tree mydoc 312 | id = mydoc 313 | └─ 1-a7f639949923d35e26974b3b81522116 * 314 | mydb >> 315 | ``` 316 | 317 | ## Showing the disk usage of a database 318 | 319 | When at the top level, we can show the statistics of a database using `du <database>`: 320 | 321 | >> du ebooks 322 | {"db_name":"ebooks","doc_count":41,"doc_del_count":0,"update_seq":41,"purge_seq":0,"compact_running":false,"disk_size":163944,"data_size":13856,"instance_start_time":"1445498614131939","disk_format_version":6,"committed_update_seq":41} 323 | 324 | Similarly, if we are inside a database, we can simply use `du`: 325 | 326 | >> cd ebooks 327 | ebooks >> du 328 | {"db_name":"ebooks","doc_count":41,"doc_del_count":0,"update_seq":41,"purge_seq":0,"compact_running":false,"disk_size":163944,"data_size":13856,"instance_start_time":"1445498614131939","disk_format_version":6,"committed_update_seq":41} 329 | 330 | ## Todo 331 | 332 | * force rmdir to require confirmation before deleting a database -------------------------------------------------------------------------------- /app.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | const Shell = require('shell') 3 | 4 | const url = require('url') 5 | const iam = require('./iam.js') 6 | 7 | const appsettings = { 8 | cloudantdb: null, 9 | cloudantdbname: null 10 | } 11 | 12 | const completer = require('./completer.js') 13 | let nano 14 | const asciitree = require('ascii-tree') 15 | 16 | if (!process.env.COUCH_URL) { 17 | console.log('Please specify the URL of your CouchDB instance by setting a COUCH_URL environment variable') 18 | process.exit() 19 | } 20 | 21 | const instantiateNano = async () => { 22 | if (process.env.IAM_API_KEY) { 23 | const t = await iam.getToken(process.env.IAM_API_KEY) 24 | const opts = { 25 | url: process.env.COUCH_URL 26 | } 27 | if (t) { 28 | opts.defaultHeaders = { Authorization: 'Bearer ' + t } 29 | } 30 | nano = require('nano')(opts) 31 | } else { 32 | nano = require('nano')(process.env.COUCH_URL) 33 | } 34 | } 35 | 36 | const main = async () => { 37 | await instantiateNano() 38 | 39 | const app = new Shell({ chdir: __dirname }) 40 | app.client = nano 41 | // Middleware registration 42 | app.configure(function () { 43 | app.use(async function (req, res, next) { 44 | // re-instantiate 45 | await instantiateNano() 46 | app.client = nano 47 | next() 48 | }) 49 | app.use(Shell.history({ 50 | shell: app 51 | })) 52 | app.use(completer({ 53 | shell: app, 54 | appsettings, 55 | nano 56 | })) 57 | app.use(Shell.router({ 58 | shell: app 59 | })) 60 | app.use(Shell.help({ 61 | shell: app, 62 | introduction: true 63 | })) 64 | }) 65 | 66 | // Command registration 67 | app.cmd('ls', 'List dbs/documents', function (req, res, next) { 68 | if (appsettings.cloudantdb) { 69 | appsettings.cloudantdb.list({ limit: 10 }).then((data) => {
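// `list({ limit: 10 })` reads the first ten rows of this database's _all_docs index; formatDocs below joins just their ids for display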
70 | res.cyan(formatDocs(data.rows, ' ') + '\n' || 'no documents') 71 | res.prompt() 72 | }).catch((err) => { 73 | res.cyan(formatErr(err)) 74 | res.prompt() 75 | }) 76 | } else { 77 | app.client.db.list().then((data) => { 78 | res.cyan(data.join(' ') + '\n' || 'no databases') 79 | res.prompt() 80 | }).catch((err) => { 81 | res.cyan(formatErr(err)) 82 | res.prompt() 83 | }) 84 | } 85 | }) 86 | 87 | // Command registration 88 | app.cmd('ll', 'List databases', function (req, res, next) { 89 | if (appsettings.cloudantdb) { 90 | appsettings.cloudantdb.list({ limit: 10 }).then((data) => { 91 | res.cyan(formatDocs(data.rows, '\n') || 'no documents') 92 | res.prompt() 93 | }).catch((err) => { 94 | res.red(formatErr(err)) 95 | res.prompt() 96 | }) 97 | } else { 98 | app.client.db.list().then((data) => { 99 | res.cyan(data.join('\n') + '\n' || 'no databases') 100 | res.prompt() 101 | }).catch((err) => { 102 | res.red(formatErr(err)) 103 | res.prompt() 104 | }) 105 | } 106 | }) 107 | 108 | // Command registration 109 | app.cmd('cat :id', 'Print database summary or document contents', function (req, res, next) { 110 | if (appsettings.cloudantdb) { 111 | appsettings.cloudantdb.get(req.params.id).then((data) => { 112 | res.cyan(JSON.stringify(data) + '\n') 113 | res.prompt() 114 | }).catch((err) => { 115 | res.red(formatErr(err)) 116 | res.prompt() 117 | }) 118 | } else { 119 | app.client.db.get(req.params.id).then((data) => { 120 | res.cyan(JSON.stringify(data) + '\n') 121 | res.prompt() 122 | }).catch((err) => { 123 | res.red(formatErr(err)) 124 | res.prompt() 125 | }) 126 | } 127 | }) 128 | 129 | app.cmd('ls :key', function (req, res, next) { 130 | if (appsettings.cloudantdb) { 131 | appsettings.cloudantdb.list({ limit: 10, startkey: req.params.key, endkey: req.params.key + 'z' }).then((data) => { 132 | res.cyan(formatDocs(data.rows, ' ') + '\n' || 'no documents') 133 | res.prompt() 134 | }).catch((err) => { 135 | res.red(formatErr(err)) 136 | res.prompt() 137 | }) 138 | } else { 139 | res.red("You cannot do 'ls ' from the top level\n") 140 | res.prompt() 141 | } 142 | }) 143 | 144 | app.cmd('ll :key', function (req, res, next) { 145 | if (appsettings.cloudantdb) { 146 | appsettings.cloudantdb.list({ limit: 10, startkey: req.params.key, endkey: req.params.key + 'z' }).then((data) => { 147 | res.cyan(formatDocs(data.rows, '\n') || 'no documents') 148 | res.prompt() 149 | }).catch((err) => { 150 | res.red(formatErr(err)) 151 | res.prompt() 152 | }) 153 | } else { 154 | res.red("You cannot do 'll ' from the top level\n") 155 | res.prompt() 156 | } 157 | }) 158 | 159 | app.cmd('rm :id', 'Remove a document', function (req, res, next) { 160 | if (appsettings.cloudantdb) { 161 | appsettings.cloudantdb.get(req.params.id).then((data) => { 162 | return appsettings.cloudantdb.destroy(data._id, data._rev) 163 | }).then((data) => { 164 | res.cyan(JSON.stringify(data) + '\n') 165 | res.prompt() 166 | }).catch((err) => { 167 | res.red(formatErr(err)) 168 | res.prompt() 169 | }) 170 | } else { 171 | res.red("You cannot do 'rm ' from the top level\n") 172 | res.prompt() 173 | } 174 | }) 175 | 176 | app.cmd('cp :sourceid :destinationid', 'Copy a document/database', function (req, res, next) { 177 | if (appsettings.cloudantdb) { 178 | appsettings.cloudantdb.get(req.params.sourceid).then((data) => { 179 | data._id = req.params.destinationid 180 | delete data._rev 181 | return appsettings.cloudantdb.insert(data) 182 | }).then((data) => { 183 | res.cyan(JSON.stringify(data) + '\n') 184 | res.prompt() 185 | }).catch((err) 
=> { 186 | res.red(formatErr(err)) 187 | res.prompt() 188 | }) 189 | } else { 190 | // when at the top level, trigger replication 191 | const repl = { 192 | source: convertToURL(req.params.sourceid), 193 | target: convertToURL(req.params.destinationid), 194 | create_target: true 195 | } 196 | const r = { 197 | db: '_replicator', 198 | body: repl, 199 | method: 'post' 200 | } 201 | app.client.request(r).then((data) => { 202 | res.cyan('Replication scheduled:\n') 203 | res.cyan(JSON.stringify(data) + '\n') 204 | res.prompt() 205 | }).catch((err) => { 206 | res.red(formatErr(err)) 207 | res.prompt() 208 | }) 209 | } 210 | }) 211 | 212 | app.cmd('mkdir :db', 'Create database', function (req, res, next) { 213 | if (appsettings.cloudantdb) { 214 | res.red('You cannot create a database inside a database!\n') 215 | res.prompt() 216 | } else { 217 | app.client.db.create(req.params.db).then((data) => { 218 | res.cyan(JSON.stringify(data) + '\n') 219 | res.prompt() 220 | }).catch((err) => { 221 | res.red(formatErr(err)) 222 | res.prompt() 223 | }) 224 | } 225 | }) 226 | 227 | app.cmd('rmdir :db', 'Remove a database', function (req, res, next) { 228 | if (appsettings.cloudantdb) { 229 | res.red('You cannot remove a database from here!\n') 230 | res.prompt() 231 | } else { 232 | app.client.db.destroy(req.params.db).then((data) => { 233 | res.cyan(JSON.stringify(data) + '\n') 234 | res.prompt() 235 | }).catch((err) => { 236 | res.red(formatErr(err)) 237 | res.prompt() 238 | }) 239 | } 240 | }) 241 | 242 | app.cmd('cd ..', 'Return to home', function (req, res, next) { 243 | appsettings.cloudantdb = null 244 | appsettings.cloudantdbname = null 245 | app.set('prompt', '>> ') 246 | res.prompt() 247 | }) 248 | 249 | app.cmd('cd :db', function (req, res, next) { 250 | app.client.db.get(req.params.db).then((data) => { 251 | appsettings.cloudantdb = app.client.use(req.params.db) 252 | appsettings.cloudantdbname = req.params.db 253 | app.set('prompt', req.params.db + ' >> ') 254 | res.prompt() 255 | }).catch((err) => { 256 | res.red('Database does not exist\n') 257 | res.red(formatErr(err)) 258 | res.prompt() 259 | }) 260 | }) 261 | 262 | app.cmd('echo :json > :id', 'Create a document', function (req, res, next) { 263 | if (appsettings.cloudantdb) { 264 | try { 265 | const str = req.params.json.replace(/^'/, '').replace(/'$/, '') 266 | const obj = JSON.parse(str) 267 | obj._id = req.params.id 268 | appsettings.cloudantdb.insert(obj).then((data) => { 269 | res.cyan(JSON.stringify(data) + '\n') 270 | res.prompt() 271 | }).catch((err) => { 272 | res.red(formatErr(err)) 273 | res.prompt() 274 | }) 275 | } catch (e) { 276 | res.red('Invalid JSON - ' + req.params.json + '\n') 277 | res.prompt() 278 | } 279 | } else { 280 | res.red("You cannot do 'echo :json' from the top level\n") 281 | res.prompt() 282 | } 283 | }) 284 | 285 | app.cmd('echo :json', 'Create a document with auto-generated id', function (req, res, next) { 286 | if (appsettings.cloudantdb) { 287 | try { 288 | const str = req.params.json.replace(/^'/, '').replace(/'$/, '') 289 | const obj = JSON.parse(str) 290 | appsettings.cloudantdb.insert(obj).then((data) => { 291 | res.cyan(JSON.stringify(data) + '\n') 292 | res.prompt() 293 | }).catch((err) => { 294 | res.red(formatErr(err)) 295 | res.prompt() 296 | }) 297 | } catch (e) { 298 | res.red('Invalid JSON - ' + req.params.json + '\n') 299 | res.prompt() 300 | } 301 | } else { 302 | res.red("You cannot do 'echo :json from the top level\n") 303 | res.prompt() 304 | } 305 | }) 306 | 307 | app.cmd('touch :id', 
'Create a new empty document, or change an existing one', function (req, res, next) { 308 | if (appsettings.cloudantdb) { 309 | appsettings.cloudantdb.get(req.params.id).then((data) => { 310 | return appsettings.cloudantdb.insert(data) 311 | }).catch((err) => { 312 | if (err.statusCode === 404) { 313 | const doc = { _id: req.params.id } 314 | return appsettings.cloudantdb.insert(doc) 315 | } 316 | }).then((data) => { 317 | res.cyan(JSON.stringify(data) + '\n') 318 | res.prompt() 319 | }).catch((err) => { 320 | res.red(formatErr(err)) 321 | res.prompt() 322 | }) 323 | } else { 324 | res.red("You cannot do 'touch :id from the top level\n") 325 | res.prompt() 326 | } 327 | }) 328 | 329 | app.cmd('tree :id', 'View the revision history of a document', function (req, res, next) { 330 | if (appsettings.cloudantdb) { 331 | appsettings.cloudantdb.get(req.params.id, { conflicts: true, revs_info: true }).then((data) => { 332 | let revs = [] 333 | let i 334 | for (i in data._revs_info) { 335 | revs.push(data._revs_info[i].rev) 336 | } 337 | for (i in data._conflicts) { 338 | revs.push(data._conflicts[i]) 339 | } 340 | revs = revs.sort() 341 | const revslist = { } 342 | for (i in revs) { 343 | const match = revs[i].match(/^[0-9]+/) 344 | if (match) { 345 | const rev = match[0] 346 | if (!revslist[rev]) { 347 | revslist[rev] = [] 348 | } 349 | revslist[rev].push(revs[i]) 350 | } 351 | } 352 | let output = '#id = ' + req.params.id + '\n' 353 | for (i in revslist) { 354 | let prefix = '##' 355 | if (revslist[i].length === 1) { 356 | output += prefix + revslist[i][0] 357 | if (revslist[i][0] === data._rev) { 358 | output += ' *' 359 | } 360 | output += '\n' 361 | } else { 362 | output += prefix + revslist[i][0].match(/^[0-9]+/)[0] 363 | output += '\n' 364 | prefix += '#' 365 | for (const j in revslist[i]) { 366 | output += prefix + revslist[i][j] 367 | if (revslist[i][j] === data._rev) { 368 | output += ' *' 369 | } 370 | output += '\n' 371 | } 372 | } 373 | } 374 | res.cyan(asciitree.generate(output) + '\n') 375 | res.prompt() 376 | }).catch((err) => { 377 | res.red(formatErr(err)) 378 | res.prompt() 379 | }) 380 | } else { 381 | res.red("You cannot do 'tree :id from the top level\n") 382 | res.prompt() 383 | } 384 | }) 385 | 386 | app.cmd('head :db', 'Show first ten documents from a database', function (req, res, next) { 387 | if (appsettings.cloudantdb) { 388 | res.red("You cannot do 'head :id from the db level\n") 389 | res.prompt() 390 | } else { 391 | const d = app.client.db.use(req.params.db) 392 | d.list({ limit: 10, include_docs: true }).then((data) => { 393 | res.cyan(JSON.stringify(data.rows) + '\n') 394 | res.prompt() 395 | }).catch((err) => { 396 | res.red(formatErr(err)) 397 | res.prompt() 398 | }) 399 | } 400 | }) 401 | 402 | app.cmd('pwd', 'Print working directory', function (req, res, next) { 403 | if (appsettings.cloudantdb) { 404 | res.cyan(appsettings.cloudantdbname + '\n') 405 | res.prompt() 406 | } else { 407 | res.cyan('/ \n') 408 | res.prompt() 409 | } 410 | }) 411 | 412 | app.cmd('du :db', 'Disk usage of a database', function (req, res, next) { 413 | app.client.db.get(req.params.db).then((data) => { 414 | res.cyan(JSON.stringify(data) + '\n') 415 | res.prompt() 416 | }).catch((err) => { 417 | res.red(formatErr(err)) 418 | res.prompt() 419 | }) 420 | }) 421 | 422 | app.cmd('du', 'Disk usage of a database', function (req, res, next) { 423 | if (appsettings.cloudantdb) { 424 | app.client.db.get(appsettings.cloudantdbname).then((data) => { 425 | res.cyan(JSON.stringify(data) + '\n') 
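// (the object printed above is the database's info document - doc_count, disk_size, data_size etc. - the same JSON that `du <dbname>` prints at the top level)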
426 | res.prompt() 427 | }).catch((err) => { 428 | res.red(formatErr(err)) 429 | res.prompt() 430 | }) 431 | } else { 432 | res.red("You cannot do 'du' from the top level. Try 'du '\n") 433 | res.prompt() 434 | } 435 | }) 436 | 437 | app.cmd('fsck :id :rev', 'Repair document (remove conflicts) by defining a winning revision', function (req, res, next) { 438 | if (appsettings.cloudantdb) { 439 | appsettings.cloudantdb.get(req.params.id, { conflicts: true, revs_info: true }).then((data) => { 440 | if (!data._conflicts) { 441 | res.red('No conflicts found.\n') 442 | res.prompt() 443 | return 444 | } 445 | 446 | // check that the proposed winner is one of the existing conflicts 447 | if (data._conflicts.indexOf(req.params.rev) === -1) { 448 | res.red('The revision ' + req.params.rev + ' does not exist in the document.\n') 449 | res.prompt() 450 | return 451 | } 452 | 453 | // delete all conflics, leaving the nominated revision as the uncontested winner 454 | const deletions = [] 455 | for (const i in data._conflicts) { 456 | if (data._conflicts[i] !== req.params.rev) { 457 | deletions.push({ _id: req.params.id, _rev: data._conflicts[i], _deleted: true }) 458 | } 459 | } 460 | 461 | // delete the current winner if required 462 | if (data._rev !== req.params.rev) { 463 | deletions.push({ _id: req.params.id, _rev: data._rev, _deleted: true }) 464 | } 465 | 466 | // perform bulk operation 467 | return appsettings.cloudantdb.bulk({ docs: deletions }) 468 | }).then((data) => { 469 | res.cyan(JSON.stringify(data) + '\n') 470 | res.prompt() 471 | }).catch((err) => { 472 | res.red(formatErr(err)) 473 | res.prompt() 474 | }) 475 | } else { 476 | res.red("You cannot do 'fsck :id' from the top level.\n") 477 | res.prompt() 478 | } 479 | }) 480 | 481 | app.cmd('fsck :id', 'Repair document (remove conflicts)', function (req, res, next) { 482 | if (appsettings.cloudantdb) { 483 | appsettings.cloudantdb.get(req.params.id, { conflicts: true, revs_info: true }).then((data) => { 484 | if (!data._conflicts) { 485 | res.red('No conflicts found.\n') 486 | res.prompt() 487 | return 488 | } 489 | // delete all conflics, leaving the winning revision as the uncontested winner 490 | const deletions = [] 491 | for (const i in data._conflicts) { 492 | deletions.push({ _id: req.params.id, _rev: data._conflicts[i], _deleted: true }) 493 | } 494 | return appsettings.cloudantdb.bulk({ docs: deletions }) 495 | }).then((data) => { 496 | res.cyan(JSON.stringify(data) + '\n') 497 | res.prompt() 498 | }).catch((err) => { 499 | res.red(formatErr(err)) 500 | res.prompt() 501 | }) 502 | } else { 503 | res.red("You cannot do 'fsck :id' from the top level.\n") 504 | res.prompt() 505 | } 506 | }) 507 | 508 | // Event notification 509 | app.on('quit', function () { 510 | process.exit() 511 | }) 512 | } 513 | 514 | const formatErr = function (err) { 515 | if (err.statusCode) { 516 | const retval = err.statusCode + ': ' + err.description + '\n' 517 | return retval 518 | } else { 519 | return '' 520 | } 521 | } 522 | 523 | const formatDocs = function (docs, separator) { 524 | const retval = [] 525 | for (const i in docs) { 526 | retval.push(docs[i].id) 527 | } 528 | return retval.join(separator) + '\n' 529 | } 530 | 531 | // convert a database name to a URL, if it isn't already 532 | const convertToURL = function (x) { 533 | try { 534 | const parsed = new url.URL(x) 535 | return parsed.href 536 | } catch (e) { 537 | return process.env.COUCH_URL + '/' + encodeURIComponent(x) 538 | } 539 | } 540 | 541 | main() 542 | 
--------------------------------------------------------------------------------