├── .clang-format ├── .editorconfig ├── .eslintrc.js ├── .gitignore ├── .gitmodules ├── .istanbul.yml ├── .npmignore ├── .prettierrc ├── BUILDING.md ├── CMakeLists.txt ├── CONTRIBUTING.md ├── LICENSE ├── Makefile ├── README.md ├── couchbase-sdk-nodejs-black-duck-manifest.yaml ├── lib ├── analyticsexecutor.ts ├── analyticsindexmanager.ts ├── analyticstypes.ts ├── authenticators.ts ├── binarycollection.ts ├── binding.ts ├── bindingutilities.ts ├── bucket.ts ├── bucketmanager.ts ├── cluster.ts ├── collection.ts ├── collectionmanager.ts ├── configProfile.ts ├── connspec.ts ├── couchbase.ts ├── crudoptypes.ts ├── datastructures.ts ├── diagnosticsexecutor.ts ├── diagnosticstypes.ts ├── errorcontexts.ts ├── errors.ts ├── eventingfunctionmanager.ts ├── generaltypes.ts ├── httpexecutor.ts ├── mutationstate.ts ├── queryexecutor.ts ├── queryindexmanager.ts ├── querytypes.ts ├── rangeScan.ts ├── scope.ts ├── scopeeventingfunctionmanager.ts ├── scopesearchindexmanager.ts ├── sdspecs.ts ├── sdutils.ts ├── searchexecutor.ts ├── searchfacet.ts ├── searchindexmanager.ts ├── searchquery.ts ├── searchsort.ts ├── searchtypes.ts ├── streamablepromises.ts ├── transactions.ts ├── transcoders.ts ├── usermanager.ts ├── utilities.ts ├── utilities_internal.ts ├── vectorsearch.ts ├── viewexecutor.ts ├── viewindexmanager.ts └── viewtypes.ts ├── package-lock.json ├── package.json ├── scripts ├── buildPrebuild.js ├── install.js ├── prebuilds.js └── prune.js ├── src ├── addondata.cpp ├── addondata.hpp ├── binding.cpp ├── cas.cpp ├── cas.hpp ├── connection.cpp ├── connection.hpp ├── connection_autogen.cpp ├── constants.cpp ├── constants.hpp ├── instance.cpp ├── instance.hpp ├── jstocbpp.hpp ├── jstocbpp_autogen.hpp ├── jstocbpp_basic.hpp ├── jstocbpp_cpptypes.hpp ├── jstocbpp_defs.hpp ├── jstocbpp_errors.hpp ├── jstocbpp_transactions.hpp ├── mutationtoken.cpp ├── mutationtoken.hpp ├── scan_iterator.cpp ├── scan_iterator.hpp ├── transaction.cpp ├── transaction.hpp ├── transactions.cpp ├── 
transactions.hpp ├── transcoder.hpp └── utils.hpp ├── test ├── .eslintrc.js ├── analytics.test.js ├── analyticslinks.test.js ├── bucketmanager.test.js ├── cluster.test.js ├── collectionmanager.test.js ├── connspec.test.js ├── consistencyutil.js ├── crud.test.js ├── data │ ├── search_index.json │ ├── test_vector.json │ ├── test_vector_search_docs.json │ └── vector_search_index.json ├── datastructures.test.js ├── diag.test.js ├── errors.test.js ├── eventing.test.js ├── harness.js ├── jcbmock.js ├── management.test.js ├── n1ql.test.js ├── rangescan.test.js ├── sdutils.test.js ├── search.test.js ├── streamablepromise.test.js ├── testdata.js ├── transactions.test.js ├── transcoder.test.js ├── users.test.js ├── utilities.test.js ├── views.test.js ├── workerthread.test.js └── workerthread.worker.js ├── tools ├── gen-bindings-js.js └── gen-bindings-json.py ├── tsconfig.json └── typedoc.json /.clang-format: -------------------------------------------------------------------------------- 1 | --- 2 | BasedOnStyle: LLVM 3 | Standard: Cpp11 4 | BreakBeforeBraces: Linux 5 | ColumnLimit: '80' 6 | IndentWidth: 4 7 | DerivePointerAlignment: false 8 | PointerAlignment: Right 9 | 10 | AccessModifierOffset: -4 11 | AllowShortFunctionsOnASingleLine: None 12 | AllowShortCaseLabelsOnASingleLine: false 13 | AllowShortBlocksOnASingleLine: false 14 | KeepEmptyLinesAtTheStartOfBlocks: true 15 | AlwaysBreakTemplateDeclarations: true 16 | BreakConstructorInitializers: BeforeComma 17 | CommentPragmas: '^[^ ]' 18 | ... 
19 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | # editorconfig.org 2 | root = true 3 | 4 | [*] 5 | indent_style = space 6 | indent_size = 2 7 | end_of_line = lf 8 | charset = utf-8 9 | trim_trailing_whitespace = true 10 | insert_final_newline = true 11 | 12 | [*.md] 13 | trim_trailing_whitespace = false 14 | -------------------------------------------------------------------------------- /.eslintrc.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Defines the AST nodes we expect to have documentation for. It includes 3 | * most publicly defined stuff with the following exceptions: 4 | * - No need for descriptions on Options interfaces. 5 | * - No need to document setters. 6 | * - No need to document protected or private. 7 | * - No need to document inline types in function parameters. 8 | */ 9 | const needsDocsContexts = [ 10 | 'TSInterfaceDeclaration[id.name!=/.*Options/]', 11 | 'TSTypeAliasDeclaration', 12 | 'TSEnumDeclaration', 13 | 'TSEnumMember', 14 | 'TSMethodSignature[accessibility!=/(private|protected)/]', 15 | 'ClassBody > TSPropertySignature[accessibility!=/(private|protected)/]', 16 | 'TSInterfaceBody > TSPropertySignature[accessibility!=/(private|protected)/]', 17 | 'FunctionDeclaration', 18 | 'ClassDeclaration', 19 | 'MethodDefinition[accessibility!=/(private|protected)/][kind!=/(set|constructor)/]', 20 | 'ClassBody > ClassProperty[accessibility!=/(private|protected)/]', 21 | ] 22 | 23 | module.exports = { 24 | root: true, 25 | parser: '@typescript-eslint/parser', 26 | extends: [ 27 | 'eslint:recommended', 28 | 'plugin:@typescript-eslint/recommended', 29 | 'plugin:node/recommended', 30 | 'plugin:mocha/recommended', 31 | 'plugin:jsdoc/recommended', 32 | 'prettier', 33 | ], 34 | settings: { 35 | jsdoc: { 36 | ignorePrivate: true, 37 | ignoreInternal: true, 38 | }, 39 | 
}, 40 | rules: { 41 | // We intentionally use `any` in a few places for user values. 42 | '@typescript-eslint/explicit-module-boundary-types': [ 43 | 'error', 44 | { 45 | allowArgumentsExplicitlyTypedAsAny: true, 46 | }, 47 | ], 48 | 49 | // We use the typescript compiler to transpile import statements into 50 | // require statements, so this isn't actually valid 51 | 'node/no-unsupported-features/es-syntax': [ 52 | 'error', 53 | { 54 | ignores: ['modules'], 55 | }, 56 | ], 57 | 58 | // Reconfigure the checker to include ts files. 59 | 'node/no-missing-import': [ 60 | 'error', 61 | { 62 | tryExtensions: ['.js', '.ts'], 63 | }, 64 | ], 65 | 'node/no-missing-require': [ 66 | 'error', 67 | { 68 | tryExtensions: ['.js', '.ts'], 69 | }, 70 | ], 71 | 72 | // Add the category and internal tags that we use. 73 | 'jsdoc/check-tag-names': [ 74 | 'warn', 75 | { 76 | definedTags: ['category', 'internal', 'experimental'], 77 | }, 78 | ], 79 | 80 | // Reconfigure jsdoc to require doc blocks for anything which we do 81 | // not have marked as private or protected. 82 | 'jsdoc/require-jsdoc': [ 83 | 'warn', 84 | { 85 | contexts: needsDocsContexts, 86 | }, 87 | ], 88 | 89 | // Reconfigure jsdoc to require descriptions for all doc blocks. This is 90 | // really an extension of the above requirement. 91 | 'jsdoc/require-description': [ 92 | 'warn', 93 | { 94 | contexts: needsDocsContexts, 95 | }, 96 | ], 97 | 'jsdoc/require-description-complete-sentence': 'warn', 98 | 99 | // We get type information from typescript. 
100 | 'jsdoc/require-returns': 'off', 101 | 'jsdoc/require-param-type': 'off', 102 | 103 | 'jsdoc/tag-lines': [ 104 | 'warn', 105 | 'any', 106 | { 107 | startLines: 1, 108 | }, 109 | ], 110 | 111 | 'jsdoc/no-undefined-types': [ 112 | 'warn', 113 | { 114 | definedTypes: [ 115 | 'durabilityLevel', 116 | 'effectiveRoles', 117 | 'GetOptions', 118 | 'IBucketSettings', 119 | 'MutationState', 120 | 'StorageBackend', 121 | ], 122 | }, 123 | ], 124 | 125 | // We intentionally use `any` in a few places for user values. 126 | '@typescript-eslint/no-explicit-any': 'off', 127 | 128 | // There are a number of places we need to do this for code clarity, 129 | // especially around handling backwards-compatibility. 130 | 'prefer-rest-params': 'off', 131 | }, 132 | } 133 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Local test configuration 2 | config.json 3 | 4 | # Compiled Object files 5 | *.slo 6 | *.lo 7 | *.o 8 | 9 | # Compiled Dynamic libraries 10 | *.so 11 | 12 | # Compiled Static libraries 13 | *.lai 14 | *.la 15 | *.a 16 | 17 | # The original file after running astyle: 18 | *.orig 19 | 20 | # Ignore some IDE-specific folder 21 | /.idea 22 | /.vscode 23 | 24 | # User folders 25 | *~ 26 | 27 | # Build stuff 28 | /.lock-wscript 29 | /build/ 30 | /node_modules/ 31 | /docs/ 32 | /test/CouchbaseMock.jar 33 | /builderror.log 34 | /configcache.txt 35 | /coverage/ 36 | /benchmarks/ 37 | /.nyc_output/ 38 | /prebuilds/ 39 | /deps/couchbase-cxx-cache 40 | *.tar.gz 41 | /tools/bindings.json 42 | 43 | # Extra stuff 44 | /dist/ 45 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "deps/couchbase-cxx-client"] 2 | path = deps/couchbase-cxx-client 3 | url = https://github.com/couchbaselabs/couchbase-cxx-client.git 
-------------------------------------------------------------------------------- /.istanbul.yml: -------------------------------------------------------------------------------- 1 | instrumentation: 2 | excludes: ['lib/binding.ts'] 3 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | # Local test configuration 2 | config.json 3 | 4 | # Compiled Object files 5 | *.slo 6 | *.lo 7 | *.o 8 | 9 | # Compiled Dynamic libraries 10 | *.so 11 | 12 | # Compiled Static libraries 13 | *.lai 14 | *.la 15 | *.a 16 | 17 | # The original file after running astyle: 18 | *.orig 19 | 20 | # Ignore some IDE-specific folder 21 | /.idea 22 | /.vscode 23 | 24 | # User folders 25 | *~ 26 | 27 | # Build stuff 28 | /.lock-wscript 29 | /build/ 30 | /node_modules/ 31 | /docs/ 32 | /test/CouchbaseMock.jar 33 | /builderror.log 34 | /configcache.txt 35 | /coverage/ 36 | /benchmarks/ 37 | /.nyc_output/ 38 | /prebuilds/ 39 | /prebuildsDebug/ 40 | *.tar.gz 41 | 42 | # Extra stuff 43 | /.git 44 | /lib/ 45 | /test/ 46 | .clang-format 47 | .editorconfig 48 | .eslintrc.js 49 | .gitignore 50 | .istanbul.yml 51 | .prettierrc 52 | Makefile 53 | README.md 54 | tsconfig.json 55 | typedoc.json 56 | -------------------------------------------------------------------------------- /.prettierrc: -------------------------------------------------------------------------------- 1 | { 2 | "semi": false, 3 | "arrowParens": "always", 4 | "singleQuote": true, 5 | "trailingComma": "es5" 6 | } 7 | -------------------------------------------------------------------------------- /BUILDING.md: -------------------------------------------------------------------------------- 1 | # Setup 2 | 3 | Make sure to have cloned the [SDK repository](https://github.com/couchbase/couchnode.git) and have the submodules appropriately synced (`git submodule update --init --recursive`). 
4 | 5 | # Building 6 | 7 | ## Set CPM Cache 8 | The C++ core utilizes the CMake Package Manager (CPM) to include depencies. These can be set to a cache directory and can be used for future builds. Periodically the dependencies should be updated. So, in general it is good practice to configure the build environment by setting the CPM cache. 9 | 10 | ### Via npm command 11 | ```console 12 | npm run prebuild -- --configure --set-cpm-cache --use-boringssl 13 | ``` 14 | 15 | ### Available Options 16 | >Note: Section under construction 17 | 18 | ### Via cmake-js 19 | 20 | Set the cache directory `CXXCBC_CACHE_DIR`: 21 | ```console 22 | export CXXCBC_CACHE_DIR=$PWD/deps/couchbase-cxx-cache 23 | ``` 24 | 25 | Remove the cache directory 26 | ```console 27 | rm -rf $CXXCBC_CACHE_DIR 28 | ``` 29 | 30 | Configure the build: 31 | ```console 32 | $ npx cmake-js configure \ 33 | --runtime node \ 34 | --runtime-version $(node --version) \ 35 | --CDUSE_STATIC_OPENSSL=OFF \ 36 | --CDCPM_DOWNLOAD_ALL=OFF \ 37 | --CDCPM_USE_NAMED_CACHE_DIRECTORIES=ON \ 38 | --CDCPM_USE_LOCAL_PACKAGES=OFF \ 39 | --CDCPM_SOURCE_CACHE=$CXXCBC_CACHE_DIR 40 | ``` 41 | 42 | ## Build the client binary 43 | 44 | ### Via npm command 45 | ```console 46 | npm run prebuild -- --use-boringssl 47 | ``` 48 | 49 | ### Available Options 50 | >Note: Section under construction 51 | 52 | ### Via cmake-js 53 | 54 | >NOTE: If using the `compile` command, the build will automatically clean and re-execute a build upon a failure. Use the `build` command to only attempt a single build. 
55 | 56 | Set the cache directory (if it has not already been set) `CXXCBC_CACHE_DIR`: 57 | ```console 58 | export CXXCBC_CACHE_DIR=$PWD/deps/couchbase-cxx-cache 59 | ``` 60 | 61 | ```console 62 | npx cmake-js compile \ 63 | --runtime node \ 64 | --runtime-version $(node --version) \ 65 | --CDUSE_STATIC_OPENSSL=OFF \ 66 | --CDCPM_DOWNLOAD_ALL=OFF \ 67 | --CDCPM_USE_NAMED_CACHE_DIRECTORIES=ON \ 68 | --CDCPM_USE_LOCAL_PACKAGES=OFF \ 69 | --CDCPM_SOURCE_CACHE=$CXXCBC_CACHE_DIR 70 | ``` 71 | 72 | # Autogen 73 | 74 | >**IMPORTANT**: Autogen is only needed for maintainers of the library. If not making updates to the core bindings, running the autogen tooling should *NOT* be required. 75 | 76 | Move into the `tools` directory prior to running any autogen commands. 77 | 78 | ## Python Environment 79 | 80 | >NOTE: Python >= 3.9 required 81 | 82 | Setup virtual env: 83 | ```console 84 | python3 -m venv 85 | ``` 86 | Example: `python3 -m venv $(pwd)/couchnode` 87 | 88 | Activate virtual env: 89 | ```console 90 | source /bin/activate 91 | ``` 92 | Example: `source $(pwd)/couchnode/bin/activate` 93 | 94 | Install `clang` from PyPI: 95 | ```console 96 | python3 -m pip install clang 97 | ``` 98 | 99 | Generate `bindings.json`. If no arguments are passed in the binding generator will attempt to determine the necessary version, lib directory, include directory and system headers directory. 
100 | ```console 101 | python3 gen-bindings-json.py 102 | ``` 103 | Alternatively, options can be provided (or ENV variables may be set): 104 | ```console 105 | python gen-bindings-json.py -v $(llvm-config --version) \ 106 | -i $(llvm-config --includedir) \ 107 | -l $(llvm-config --libdir) \ 108 | -s $(xcrun --show-sdk-path) 109 | ``` 110 | 111 | Available Environment Variables: 112 | - `CN_LLVM_VERSION`: LLVM version 113 | - `CN_LLVM_INCLUDE`: LLVM include directory path 114 | - `CN_LLVM_LIB`: LLVM lib directory path 115 | - `CN_SYS_HEADERS`: System headers path 116 | 117 | ## Node.js 118 | 119 | Populate SDK autogen code sections: 120 | ```console 121 | node gen-bindings.js.js 122 | ``` 123 | 124 | ## clean-up 125 | ### Format C++ source files. 126 | 127 | On MacOS, make sure LLVM clang-format is used (configure the PATH appropriately): 128 | ```console 129 | export PATH="/opt/homebrew/opt/llvm/bin:$PATH" 130 | ``` 131 | 132 | >NOTE: Be aware of the current working directory (commands below assume the CWD is `tools`). 133 | 134 | ```console 135 | clang-format -i ../src/connection.cpp 136 | clang-format -i ../src/connection.hpp 137 | clang-format -i ../src/connection_autogen.cpp 138 | clang-format -i ../src/constants.cpp 139 | clang-format -i ../src/jstocbpp_autogen.hpp 140 | ``` 141 | ### Format Node.js source files. 142 | 143 | >NOTE: Be aware of the current working directory (commands below assume the CWD is `tools`). 144 | 145 | ```console 146 | npx prettier --write ../lib/binding.ts 147 | ``` 148 | 149 | ### Remove bindings.json 150 | 151 | ```console 152 | rm bindings.json 153 | ``` 154 | 155 | ### Format autogen scripts. 156 | 157 | This should rarely be needed (e.g. updating the autogen logic). 158 | 159 | >NOTE: Be aware of the current working directory (commands below assume the CWD is `tools`). 
160 | 161 | #### Python 162 | 163 | Install `autopep8` from PyPI: 164 | ```console 165 | python3 -m pip install autopep8 166 | ``` 167 | 168 | ```console 169 | autopep8 -i -a -a --max-line-length 120 gen-bindings-json.py 170 | ``` 171 | 172 | #### Node.js 173 | ```console 174 | npx prettier --write gen-bindings-js.js 175 | ``` 176 | 177 | If a virtualenv was setup (hopefully it was ;)), deactivate and the environment 178 | ```console 179 | deactivate 180 | rm -rf 181 | ``` 182 | Example: `deactivate && rm -rf $(pwd)/couchnode` -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | In addition to filing bugs, you may contribute by submitting patches to fix bugs in the library. Contributions may be submitted to . We use Gerrit as our code review system - and thus submitting a change requires an account there. While Github pull requests are not ignored, Gerrit pull requests will be responded to more quickly (and most likely with more detail). 4 | 5 | For something to be accepted into the codebase, it must be formatted properly and have undergone proper testing. 6 | 7 | ## Branches and Tags 8 | 9 | * The `master` branch represents the mainline branch. The master branch typically consists of content going into the next release. 10 | * For older series of the Couchbase Node.js SDK see the corresponding branches: 2.x = `v2` and 3.x = `v3`. 11 | 12 | ## Contributing Patches 13 | 14 | If you wish to contribute a new feature or a bug fix to the library, try to follow the following guidelines to help ensure your change gets merged upstream. 15 | 16 | ### Before you begin 17 | 18 | For any code change, ensure the new code you write looks similar to the code surrounding it. 
We have no strict code style policies, but do request that your code stand out as little as possible from its surrounding neighborhood (unless of course your change is stylistic in nature). 19 | 20 | If your change is going to involve a substantial amount of time or effort, please attempt to discuss it with the project developers first who will provide assistance and direction where possible. 21 | 22 | #### For new features 23 | 24 | Ensure the feature you are adding does not already exist, and think about how this feature may be useful for other users. In general less intrusive changes are more likely to be accepted. 25 | 26 | #### For fixing bugs 27 | 28 | Ensure the bug you are fixing is actually a bug (and not a usage) error, and that it has not been fixed in a more recent version. Please read the [release notes](https://docs.couchbase.com/nodejs-sdk/current/project-docs/sdk-release-notes.html) as well as the [issue tracker](https://jira.issues.couchbase.com/projects/JSCBC/issues/) to see a list of open and resolved issues. 29 | 30 | ### Code Review 31 | 32 | #### Signing up on Gerrit 33 | 34 | Everything that is merged into the library goes through a code review process. The code review process is done via [Gerrit](http://review.couchbase.org). 35 | 36 | To sign up for a Gerrit account, go to http://review.couchbase.org and click on the _Register_ link at the top right. Once you've signed in you will need to agree to the CLA (Contributor License Agreement) by going you your Gerrit account page and selecting the _Agreements_ link on the left. When you've done that, everything should flow through just fine. Be sure that you have registered your email address at http://review.couchbase.org/#/settings/contact as many sign-up methods won't pass emails along. Note that your email address in your code commit and in the Gerrit settings must match. 37 | 38 | Add your public SSH key to Gerrit before submitting. 
39 | 40 | #### Setting up your fork with Gerrit 41 | 42 | Assuming you have a repository created like so: 43 | 44 | ``` 45 | $ git clone https://github.com/couchbase/couchnode.git 46 | ``` 47 | 48 | you can simply perform two simple steps to get started with Gerrit: 49 | 50 | ``` 51 | $ git remote add gerrit ssh://${USERNAME}@review.couchbase.org:29418/couchnode 52 | $ scp -P 29418 ${USERNAME}@review.couchbase.org:hooks/commit-msg .git/hooks 53 | $ chmod a+x .git/hooks/commit-msg 54 | ``` 55 | 56 | The last change is required for annotating each commit message with a special header known as `Change-Id`. This allows Gerrit to group together different revisions of the same patch. 57 | 58 | #### Pushing a changeset 59 | 60 | Now that you have your change and a Gerrit account to push to, you need to upload the change for review. To do so, invoke the following incantation: 61 | 62 | ``` 63 | $ git push gerrit HEAD:refs/for/master 64 | ``` 65 | 66 | Where `gerrit` is the name of the _remote_ added earlier. 67 | 68 | #### Pushing a new patchset 69 | 70 | After a change has been pushed to Gerrit, further revisions can be made and then uploaded. These revisions are called patchsets and are associated with the the `Change-Id` created from the initial commit (see above). To push a new revision, simply ammend the commit (can also add the `--no-edit` option if no edits to the commit message are needed): 71 | 72 | ``` 73 | $ git commit --amend 74 | ``` 75 | 76 | Then push the revision to Gerrit: 77 | 78 | ``` 79 | $ git push gerrit HEAD:refs/for/master 80 | ``` 81 | 82 | Where `gerrit` is the name of the _remote_ added earlier. 83 | 84 | #### Troubleshooting 85 | 86 | You may encounter some errors when pushing. The most common are: 87 | 88 | * "You are not authorized to push to this repository". You will get this if your account has not yet been approved. Please reach out in the [forums](https://www.couchbase.com/forums/c/node-js-sdk/12) if blocked. 89 | * "Missing Change-Id". 
You need to install the `commit-msg` hook as described above. Note that even once you do this, you will need to ensure that any prior commits already have this header - this may be done by doing an interactive rebase (e.g. `git rebase -i origin/master` and selecting `reword` for all the commits; which will automatically fill in the Change-Id). 90 | 91 | #### Reviewers 92 | 93 | Once you've pushed your changeset you can add people to review. Currently these are: 94 | 95 | * Jared Casey 96 | * Matt Wozakowski 97 | * Brett Lawson 98 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | all: binding src/ deps/ 2 | @node-gyp build 3 | 4 | binding: binding.gyp 5 | @node-gyp configure 6 | 7 | clean: 8 | @node-gyp clean 9 | 10 | install: 11 | @npm install 12 | 13 | node_modules: 14 | @npm install 15 | 16 | checkdeps: 17 | npm run check-deps 18 | 19 | checkaudit: 20 | npm audit 21 | 22 | test: node_modules 23 | npm run test 24 | fasttest: node_modules 25 | npm run test-fast 26 | 27 | lint: node_modules 28 | npm run lint 29 | 30 | cover: node_modules 31 | npm run cover 32 | fastcover: node_modules 33 | npm run cover-fast 34 | 35 | check: checkdeps checkaudit docs lint test cover 36 | 37 | docs: node_modules 38 | npm run build-docs 39 | 40 | prebuilds: 41 | npm run prebuild 42 | 43 | .PHONY: all test clean docs browser prebuilds 44 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Couchbase Node.js Client 2 | 3 | The Node.js SDK library allows you to connect to a Couchbase cluster from 4 | Node.js. It is a native Node.js module and uses the very fast libcouchbase 5 | library to handle communicating to the cluster over the Couchbase binary 6 | protocol. 
7 | 8 | ## Useful Links 9 | 10 | Source - [https://github.com/couchbase/couchnode](https://github.com/couchbase/couchnode) 11 | 12 | Bug Tracker - [https://jira.issues.couchbase.com/projects/JSCBC/issues/](https://jira.issues.couchbase.com/projects/JSCBC/issues/) 13 | 14 | Couchbase Developer Portal - [https://docs.couchbase.com/](https://docs.couchbase.com/nodejs-sdk/3.0/hello-world/start-using-sdk.html) 15 | 16 | Release Notes - [https://docs.couchbase.com/nodejs-sdk/3.0/project-docs/sdk-release-notes.html](https://docs.couchbase.com/nodejs-sdk/3.0/project-docs/sdk-release-notes.html) 17 | 18 | ## Installing 19 | 20 | To install the lastest release using npm, run: 21 | 22 | ```bash 23 | npm install couchbase 24 | ``` 25 | 26 | To install the development version directly from github, run: 27 | 28 | ```bash 29 | npm install "git+https://github.com/couchbase/couchnode.git#master" 30 | ``` 31 | 32 | ## Introduction 33 | 34 | Connecting to a Couchbase bucket is as simple as creating a new `Cluster` 35 | instance to represent the `Cluster` you are using, and then using the 36 | `bucket` and `collection` commands against this to open a connection to 37 | open your specific bucket and collection. You are able to execute most 38 | operations immediately, and they will be queued until the connection is 39 | successfully established. 
40 | 41 | Here is a simple example of instantiating a connection, adding a new document 42 | into the bucket and then retrieving its contents: 43 | 44 | **Javascript:** 45 | ```javascript 46 | const couchbase = require('couchbase') 47 | 48 | async function main() { 49 | const cluster = await couchbase.connect( 50 | 'couchbase://127.0.0.1', 51 | { 52 | username: 'username', 53 | password: 'password', 54 | }) 55 | 56 | const bucket = cluster.bucket('default') 57 | const coll = bucket.defaultCollection() 58 | await coll.upsert('testdoc', { foo: 'bar' }) 59 | 60 | const res = await coll.get('testdoc') 61 | console.log(res.content) 62 | } 63 | 64 | // Run the main function 65 | main() 66 | .then((_) => { 67 | console.log ('Success!') 68 | }) 69 | .catch((err) => { 70 | console.log('ERR:', err) 71 | }) 72 | ``` 73 | 74 | **Typescript:** 75 | ```javascript 76 | import { 77 | Bucket, 78 | Cluster, 79 | Collection, 80 | connect, 81 | GetResult, 82 | } from 'couchbase' 83 | 84 | async function main() { 85 | const cluster: Cluster = await connect( 86 | 'couchbase://127.0.0.1', 87 | { 88 | username: 'username', 89 | password: 'password', 90 | }) 91 | 92 | const bucket: Bucket = cluster.bucket('default') 93 | const coll: Collection = bucket.defaultCollection() 94 | await coll.upsert('testdoc', { foo: 'bar' }) 95 | 96 | const res: GetResult = await coll.get('testdoc') 97 | console.log(res.content) 98 | } 99 | 100 | // Run the main function 101 | main() 102 | .then((_) => { 103 | console.log ('Success!') 104 | }) 105 | .catch((err) => { 106 | console.log('ERR:', err) 107 | }) 108 | ``` 109 | 110 | ## AWS Lambda 111 | 112 | Version 4.2.5 of the SDK significantly reduces the size of the prebuilt binary provided with the SDK on supported platforms. 
The reduction 113 | enables the SDK to meet the [minimum size requirements](https://docs.aws.amazon.com/lambda/latest/dg/gettingstarted-limits.html) for an AWS lambda deployment package without extra steps for reducing the size of the package. However, if further size reduction is desired, the SDK provides a script to provide recommendations for size reduction. 114 | 115 | **Script:** 116 | ```bash 117 | npm explore couchbase -- npm run help-prune 118 | ``` 119 | 120 | **Example output:** 121 | ```bash 122 | Checking for platform packages in /tmp/couchnode-test/node_modules/@couchbase that do not match the expected platform package (couchbase-linux-x64-openssl1). 123 | Found mismatch: Path=/tmp/couchnode-test/node_modules/@couchbase/couchbase-linuxmusl-x64-openssl1 124 | 125 | Recommendations for pruning: 126 | 127 | Removing mismatched platform=couchbase-linuxmusl-x64-openssl1 (path=/tmp/couchnode-test/node_modules/@couchbase/couchbase-linuxmusl-x64-openssl1) saves ~13.31 MB on disk. 128 | Removing Couchbase deps/ (path=/tmp/couchnode-test/node_modules/couchbase/deps) saves ~45.51 MB on disk. 129 | Removing Couchbase src/ (path=/tmp/couchnode-test/node_modules/couchbase/src) saves ~0.61 MB on disk. 130 | ``` 131 | 132 | ## Documentation 133 | 134 | An extensive documentation is available on the Couchbase website - [https://docs.couchbase.com/nodejs-sdk/3.0/hello-world/start-using-sdk.html](https://docs.couchbase.com/nodejs-sdk/3.0/hello-world/start-using-sdk.html) - 135 | including numerous examples and code samples. 136 | 137 | Visit our [Couchbase Node.js SDK forum](https://forums.couchbase.com/c/node-js-sdk) for help. 138 | Or get involved in the [Couchbase Community](https://couchbase.com/community) on the [Couchbase](https://couchbase.com) website. 139 | 140 | ## Source Control 141 | 142 | The source code is available at 143 | [https://github.com/couchbase/couchnode](https://github.com/couchbase/couchnode). 
144 | Once you have cloned the repository, you may contribute changes through our 145 | gerrit server. For more details see 146 | [CONTRIBUTING.md](https://github.com/couchbase/couchnode/blob/master/CONTRIBUTING.md). 147 | 148 | To build the client, follow the steps outlined on the [BUILDING page](https://github.com/couchbase/couchnode/blob/main/BUILDING.md) 149 | 150 | To execute our test suite, run `make test` from the root directory. 151 | 152 | To execute our code coverage, run `make cover` from the root directory. 153 | 154 | In addition to the full test suite and full code coverage, you may additionally 155 | execute a subset of the tests which excludes slow-running tests for quick 156 | verifications. These can be run through `make fasttest` and `make fastcover` 157 | respectively. 158 | 159 | Finally, to build the API reference for the project, run `make docs` from the 160 | root directory, and a docs folder will be created with the api reference. 161 | 162 | # Support & Additional Resources 163 | 164 | If you found an issue, please file it in our [Github issues](https://github.com/couchbase/couchnode/issues). We will bring over the issue to our [JIRA](https://jira.issues.couchbase.com/projects/JSCBC/issues/) as needed. 165 | 166 | The Couchbase Discord server is a place where you can collaborate about all things Couchbase. Connect with others from the community, learn tips and tricks, and ask questions. [Join Discord and contribute](https://discord.com/invite/sQ5qbPZuTh). 167 | 168 | You can ask questions in our [forums](https://forums.couchbase.com/). 169 | 170 | ## License 171 | 172 | Copyright 2013 Couchbase Inc. 173 | 174 | Licensed under the Apache License, Version 2.0. 175 | 176 | See 177 | [LICENSE](https://github.com/couchbase/couchnode/blob/master/LICENSE) 178 | for further details. 
179 | -------------------------------------------------------------------------------- /couchbase-sdk-nodejs-black-duck-manifest.yaml: -------------------------------------------------------------------------------- 1 | include-projects: 2 | - couchbase-sdk-cxx 3 | -------------------------------------------------------------------------------- /lib/analyticsexecutor.ts: -------------------------------------------------------------------------------- 1 | /* eslint jsdoc/require-jsdoc: off */ 2 | import { 3 | AnalyticsQueryOptions, 4 | AnalyticsResult, 5 | AnalyticsMetaData, 6 | AnalyticsWarning, 7 | AnalyticsMetrics, 8 | } from './analyticstypes' 9 | import { 10 | analyticsScanConsistencyToCpp, 11 | analyticsStatusFromCpp, 12 | errorFromCpp, 13 | } from './bindingutilities' 14 | import { Cluster } from './cluster' 15 | import { StreamableRowPromise } from './streamablepromises' 16 | 17 | /** 18 | * @internal 19 | */ 20 | export class AnalyticsExecutor { 21 | private _cluster: Cluster 22 | 23 | /** 24 | * @internal 25 | */ 26 | constructor(cluster: Cluster) { 27 | this._cluster = cluster 28 | } 29 | 30 | /** 31 | * @internal 32 | */ 33 | query( 34 | query: string, 35 | options: AnalyticsQueryOptions 36 | ): StreamableRowPromise, TRow, AnalyticsMetaData> { 37 | const emitter = new StreamableRowPromise< 38 | AnalyticsResult, 39 | TRow, 40 | AnalyticsMetaData 41 | >((rows, meta) => { 42 | return new AnalyticsResult({ 43 | rows: rows, 44 | meta: meta, 45 | }) 46 | }) 47 | 48 | const timeout = options.timeout || this._cluster.analyticsTimeout 49 | 50 | this._cluster.conn.analytics( 51 | { 52 | statement: query, 53 | timeout, 54 | client_context_id: options.clientContextId, 55 | readonly: options.readOnly || false, 56 | priority: options.priority || false, 57 | scope_qualifier: options.queryContext, 58 | scan_consistency: analyticsScanConsistencyToCpp( 59 | options.scanConsistency 60 | ), 61 | raw: options.raw 62 | ? 
Object.fromEntries( 63 | Object.entries(options.raw) 64 | .filter(([, v]) => v !== undefined) 65 | .map(([k, v]) => [k, JSON.stringify(v)]) 66 | ) 67 | : {}, 68 | positional_parameters: 69 | options.parameters && Array.isArray(options.parameters) 70 | ? options.parameters.map((v) => JSON.stringify(v ?? null)) 71 | : [], 72 | named_parameters: 73 | options.parameters && !Array.isArray(options.parameters) 74 | ? Object.fromEntries( 75 | Object.entries(options.parameters as { [key: string]: any }) 76 | .filter(([, v]) => v !== undefined) 77 | .map(([k, v]) => [k, JSON.stringify(v)]) 78 | ) 79 | : {}, 80 | body_str: '', 81 | }, 82 | (cppErr, resp) => { 83 | const err = errorFromCpp(cppErr) 84 | if (err) { 85 | emitter.emit('error', err) 86 | emitter.emit('end') 87 | return 88 | } 89 | 90 | resp.rows.forEach((row) => { 91 | emitter.emit('row', JSON.parse(row)) 92 | }) 93 | 94 | { 95 | const metaData = resp.meta 96 | 97 | let warnings: AnalyticsWarning[] 98 | if (metaData.warnings) { 99 | warnings = metaData.warnings.map( 100 | (warningData: any) => 101 | new AnalyticsWarning({ 102 | code: warningData.code, 103 | message: warningData.message, 104 | }) 105 | ) 106 | } else { 107 | warnings = [] 108 | } 109 | 110 | const metricsData = metaData.metrics 111 | const metrics = new AnalyticsMetrics({ 112 | elapsedTime: metricsData.elapsed_time, 113 | executionTime: metricsData.execution_time, 114 | resultCount: metricsData.result_count, 115 | resultSize: metricsData.result_size, 116 | errorCount: metricsData.error_count, 117 | processedObjects: metricsData.processed_objects, 118 | warningCount: metricsData.warning_count, 119 | }) 120 | 121 | const meta = new AnalyticsMetaData({ 122 | requestId: metaData.request_id, 123 | clientContextId: metaData.client_context_id, 124 | status: analyticsStatusFromCpp(metaData.status), 125 | signature: metaData.signature 126 | ? 
JSON.parse(metaData.signature) 127 | : undefined, 128 | warnings: warnings, 129 | metrics: metrics, 130 | }) 131 | 132 | emitter.emit('meta', meta) 133 | } 134 | 135 | emitter.emit('end') 136 | return 137 | } 138 | ) 139 | 140 | return emitter 141 | } 142 | } 143 | -------------------------------------------------------------------------------- /lib/analyticstypes.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Represents the status of an analytics query. 3 | * 4 | * @category Analytics 5 | */ 6 | export enum AnalyticsStatus { 7 | /** 8 | * Indicates the query is still running. 9 | */ 10 | Running = 'running', 11 | 12 | /** 13 | * Indicates that the query completed successfully. 14 | */ 15 | Success = 'success', 16 | 17 | /** 18 | * Indicates that the query completed with errors. 19 | */ 20 | Errors = 'errors', 21 | 22 | /** 23 | * Indicates that the query completed but the outcome was unknown. 24 | */ 25 | Completed = 'completed', 26 | 27 | /** 28 | * Indicates that the query was stopped. 29 | */ 30 | Stopped = 'stopped', 31 | 32 | /** 33 | * Indicates that the query timed out during execution. 34 | */ 35 | Timeout = 'timeout', 36 | 37 | /** 38 | * Indicates that a connection was closed during execution of the query. 39 | */ 40 | Closed = 'closed', 41 | 42 | /** 43 | * Indicates that the query stopped with fatal errors. 44 | */ 45 | Fatal = 'fatal', 46 | 47 | /** 48 | * Indicates that the query was aborted while executing. 49 | */ 50 | Aborted = 'aborted', 51 | 52 | /** 53 | * Indicates that the status of the query is unknown. 54 | */ 55 | Unknown = 'unknown', 56 | } 57 | 58 | /** 59 | * Contains the results of an analytics query. 60 | * 61 | * @category Analytics 62 | */ 63 | export class AnalyticsResult { 64 | /** 65 | * The rows which have been returned by the query. 66 | */ 67 | rows: TRow[] 68 | 69 | /** 70 | * The meta-data which has been returned by the query. 
71 | */ 72 | meta: AnalyticsMetaData 73 | 74 | /** 75 | * @internal 76 | */ 77 | constructor(data: AnalyticsResult) { 78 | this.rows = data.rows 79 | this.meta = data.meta 80 | } 81 | } 82 | 83 | /** 84 | * Contains the meta-data that is returned from an analytics query. 85 | * 86 | * @category Analytics 87 | */ 88 | export class AnalyticsMetaData { 89 | /** 90 | * The request ID which is associated with the executed query. 91 | */ 92 | requestId: string 93 | 94 | /** 95 | * The client context id which is associated with the executed query. 96 | */ 97 | clientContextId: string 98 | 99 | /** 100 | * The status of the query at the time the query meta-data was generated. 101 | */ 102 | status: AnalyticsStatus 103 | 104 | /** 105 | * Provides the signature of the query. 106 | */ 107 | signature?: any 108 | 109 | /** 110 | * Any warnings that occurred during the execution of the query. 111 | */ 112 | warnings: AnalyticsWarning[] 113 | 114 | /** 115 | * Various metrics which are made available by the query engine. 116 | */ 117 | metrics: AnalyticsMetrics 118 | 119 | /** 120 | * @internal 121 | */ 122 | constructor(data: AnalyticsMetaData) { 123 | this.requestId = data.requestId 124 | this.clientContextId = data.clientContextId 125 | this.status = data.status 126 | this.signature = data.signature 127 | this.warnings = data.warnings 128 | this.metrics = data.metrics 129 | } 130 | } 131 | 132 | /** 133 | * Contains information about a warning which occurred during the 134 | * execution of an analytics query. 135 | * 136 | * @category Analytics 137 | */ 138 | export class AnalyticsWarning { 139 | /** 140 | * The numeric code associated with the warning which occurred. 141 | */ 142 | code: number 143 | 144 | /** 145 | * A human readable representation of the warning which occurred. 
146 | */ 147 | message: string 148 | 149 | /** 150 | * @internal 151 | */ 152 | constructor(data: AnalyticsWarning) { 153 | this.code = data.code 154 | this.message = data.message 155 | } 156 | } 157 | 158 | /** 159 | * Contains various metrics that are returned by the server following 160 | * the execution of an analytics query. 161 | * 162 | * @category Analytics 163 | */ 164 | export class AnalyticsMetrics { 165 | /** 166 | * The total amount of time spent running the query, in milliseconds. 167 | */ 168 | elapsedTime: number 169 | 170 | /** 171 | * The total amount of time spent executing the query, in milliseconds. 172 | */ 173 | executionTime: number 174 | 175 | /** 176 | * The total number of rows which were part of the result set. 177 | */ 178 | resultCount: number 179 | 180 | /** 181 | * The total number of bytes which were generated as part of the result set. 182 | */ 183 | resultSize: number 184 | 185 | /** 186 | * The total number of errors which were encountered during the execution of the query. 187 | */ 188 | errorCount: number 189 | 190 | /** 191 | * The total number of objects that were processed as part of execution of the query. 192 | */ 193 | processedObjects: number 194 | 195 | /** 196 | * The total number of warnings which were encountered during the execution of the query. 197 | */ 198 | warningCount: number 199 | 200 | /** 201 | * @internal 202 | */ 203 | constructor(data: AnalyticsMetrics) { 204 | this.elapsedTime = data.elapsedTime 205 | this.executionTime = data.executionTime 206 | this.resultCount = data.resultCount 207 | this.resultSize = data.resultSize 208 | this.errorCount = data.errorCount 209 | this.processedObjects = data.processedObjects 210 | this.warningCount = data.warningCount 211 | } 212 | } 213 | 214 | /** 215 | * Represents the various scan consistency options that are available when 216 | * querying against the analytics service. 
217 | * 218 | * @category Analytics 219 | */ 220 | export enum AnalyticsScanConsistency { 221 | /** 222 | * Indicates that no specific consistency is required, this is the fastest 223 | * option, but results may not include the most recent operations which have 224 | * been performed. 225 | */ 226 | NotBounded = 'not_bounded', 227 | 228 | /** 229 | * Indicates that the results of the query should include all operations that 230 | * have occurred up until the query was started. This incurs a performance 231 | * penalty of waiting for the index to catch up to the most recent operations, 232 | * but provides the highest level of consistency. 233 | */ 234 | RequestPlus = 'request_plus', 235 | } 236 | 237 | /** 238 | * @category Analytics 239 | */ 240 | export interface AnalyticsQueryOptions { 241 | /** 242 | * Values to be used for the placeholders within the query. 243 | */ 244 | parameters?: { [key: string]: any } | any[] 245 | 246 | /** 247 | * Specifies the consistency requirements when executing the query. 248 | * 249 | * @see AnalyticsScanConsistency 250 | */ 251 | scanConsistency?: AnalyticsScanConsistency 252 | 253 | /** 254 | * The returned client context id for this query. 255 | */ 256 | clientContextId?: string 257 | 258 | /** 259 | * Indicates whether this query should be executed with a specific priority level. 260 | */ 261 | priority?: boolean 262 | 263 | /** 264 | * Indicates whether this query should be executed in read-only mode. 265 | */ 266 | readOnly?: boolean 267 | 268 | /** 269 | * Specifies the context within which this query should be executed. This can be 270 | * scoped to a scope or a collection within the dataset. 271 | */ 272 | queryContext?: string 273 | 274 | /** 275 | * Specifies any additional parameters which should be passed to the query engine 276 | * when executing the query. 277 | */ 278 | raw?: { [key: string]: any } 279 | 280 | /** 281 | * The timeout for this operation, represented in milliseconds. 
282 | */ 283 | timeout?: number 284 | } 285 | -------------------------------------------------------------------------------- /lib/authenticators.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * IPasswordAuthenticator specifies an authenticator which uses an RBAC 3 | * username and password to authenticate with the cluster. 4 | * 5 | * @category Authentication 6 | */ 7 | export interface IPasswordAuthenticator { 8 | /** 9 | * The username to authenticate with. 10 | */ 11 | username: string 12 | 13 | /** 14 | * The password to authenticate with. 15 | */ 16 | password: string 17 | 18 | /** 19 | * The sasl mechanisms to authenticate with. 20 | */ 21 | allowed_sasl_mechanisms?: string[] 22 | } 23 | 24 | /** 25 | * IPasswordAuthenticator specifies an authenticator which uses an SSL 26 | * certificate and key to authenticate with the cluster. 27 | * 28 | * @category Authentication 29 | */ 30 | export interface ICertificateAuthenticator { 31 | /** 32 | * The path to the certificate which should be used for certificate authentication. 33 | */ 34 | certificatePath: string 35 | 36 | /** 37 | * The path to the key which should be used for certificate authentication. 38 | */ 39 | keyPath: string 40 | } 41 | 42 | /** 43 | * PasswordAuthenticator implements a simple IPasswordAuthenticator. 44 | * 45 | * @category Authentication 46 | */ 47 | export class PasswordAuthenticator implements IPasswordAuthenticator { 48 | /** 49 | * The username that will be used to authenticate with. 50 | */ 51 | username: string 52 | 53 | /** 54 | * The password that will be used to authenticate with. 55 | */ 56 | password: string 57 | 58 | /** 59 | * The sasl mechanisms to authenticate with. 60 | */ 61 | allowed_sasl_mechanisms?: string[] | undefined 62 | 63 | /** 64 | * Constructs this PasswordAuthenticator with the passed username and password. 65 | * 66 | * @param username The username to initialize this authenticator with. 
67 | * @param password The password to initialize this authenticator with. 68 | */ 69 | constructor(username: string, password: string) { 70 | this.username = username 71 | this.password = password 72 | } 73 | 74 | /** 75 | * Creates a LDAP compatible password authenticator which is INSECURE if not used with TLS. 76 | * 77 | * Please note that this is INSECURE and will leak user credentials on the wire to eavesdroppers. 78 | * This should only be enabled in trusted environments. 79 | * 80 | * @param username The username to initialize this authenticator with. 81 | * @param password The password to initialize this authenticator with. 82 | */ 83 | public static ldapCompatible( 84 | username: string, 85 | password: string 86 | ): PasswordAuthenticator { 87 | const auth = new PasswordAuthenticator(username, password) 88 | auth.allowed_sasl_mechanisms = ['PLAIN'] 89 | return auth 90 | } 91 | } 92 | 93 | /** 94 | * CertificateAuthenticator implements a simple ICertificateAuthenticator. 95 | * 96 | * @category Authentication 97 | */ 98 | export class CertificateAuthenticator implements ICertificateAuthenticator { 99 | /** 100 | * The path to the certificate which should be used for certificate authentication. 101 | */ 102 | certificatePath: string 103 | 104 | /** 105 | * The path to the key which should be used for certificate authentication. 106 | */ 107 | keyPath: string 108 | 109 | /** 110 | * Constructs this CertificateAuthenticator with the passed certificate and key paths. 111 | * 112 | * @param certificatePath The certificate path to initialize this authenticator with. 113 | * @param keyPath The key path to initialize this authenticator with. 114 | */ 115 | constructor(certificatePath: string, keyPath: string) { 116 | this.certificatePath = certificatePath 117 | this.keyPath = keyPath 118 | } 119 | } 120 | 121 | /** 122 | * Represents any of the valid authenticators that could be passed to the SDK. 
123 | * 124 | * @category Authentication 125 | */ 126 | export type Authenticator = IPasswordAuthenticator | ICertificateAuthenticator 127 | -------------------------------------------------------------------------------- /lib/binarycollection.ts: -------------------------------------------------------------------------------- 1 | import { Collection } from './collection' 2 | import { CounterResult, MutationResult } from './crudoptypes' 3 | import { DurabilityLevel } from './generaltypes' 4 | import { CasInput, NodeCallback } from './utilities' 5 | 6 | /** 7 | * @category Key-Value 8 | */ 9 | export interface IncrementOptions { 10 | /** 11 | * The initial value to use for the document if it does not already exist. 12 | * Not specifying this value indicates the operation should fail if the 13 | * document does not exist. 14 | */ 15 | initial?: number 16 | 17 | /** 18 | * The expiry time that should be set for the document, expressed in seconds. 19 | */ 20 | expiry?: number 21 | 22 | /** 23 | * Specifies the level of synchronous durability for this operation. 24 | */ 25 | durabilityLevel?: DurabilityLevel 26 | 27 | /** 28 | * Specifies the number of nodes this operation should be persisted to 29 | * before it is considered successful. Note that this option is mutually 30 | * exclusive of {@link durabilityLevel}. 31 | */ 32 | durabilityPersistTo?: number 33 | 34 | /** 35 | * Specifies the number of nodes this operation should be replicated to 36 | * before it is considered successful. Note that this option is mutually 37 | * exclusive of {@link durabilityLevel}. 38 | */ 39 | durabilityReplicateTo?: number 40 | 41 | /** 42 | * The timeout for this operation, represented in milliseconds. 43 | */ 44 | timeout?: number 45 | } 46 | 47 | /** 48 | * @category Key-Value 49 | */ 50 | export interface DecrementOptions { 51 | /** 52 | * The initial value to use for the document if it does not already exist. 
53 | * Not specifying this value indicates the operation should fail if the 54 | * document does not exist. 55 | */ 56 | initial?: number 57 | 58 | /** 59 | * The expiry time that should be set for the document, expressed in seconds. 60 | */ 61 | expiry?: number 62 | 63 | /** 64 | * Specifies the level of synchronous durability for this operation. 65 | */ 66 | durabilityLevel?: DurabilityLevel 67 | 68 | /** 69 | * Specifies the number of nodes this operation should be persisted to 70 | * before it is considered successful. Note that this option is mutually 71 | * exclusive of {@link durabilityLevel}. 72 | */ 73 | durabilityPersistTo?: number 74 | 75 | /** 76 | * Specifies the number of nodes this operation should be replicated to 77 | * before it is considered successful. Note that this option is mutually 78 | * exclusive of {@link durabilityLevel}. 79 | */ 80 | durabilityReplicateTo?: number 81 | 82 | /** 83 | * The timeout for this operation, represented in milliseconds. 84 | */ 85 | timeout?: number 86 | } 87 | 88 | /** 89 | * @category Key-Value 90 | */ 91 | export interface AppendOptions { 92 | /** 93 | * Specifies the level of synchronous durability for this operation. 94 | */ 95 | durabilityLevel?: DurabilityLevel 96 | 97 | /** 98 | * Specifies the number of nodes this operation should be persisted to 99 | * before it is considered successful. Note that this option is mutually 100 | * exclusive of {@link durabilityLevel}. 101 | */ 102 | durabilityPersistTo?: number 103 | 104 | /** 105 | * Specifies the number of nodes this operation should be replicated to 106 | * before it is considered successful. Note that this option is mutually 107 | * exclusive of {@link durabilityLevel}. 108 | */ 109 | durabilityReplicateTo?: number 110 | 111 | /** 112 | * If specified, indicates that operation should be failed if the CAS 113 | * has changed from this value, indicating that the document has changed. 
114 | */ 115 | cas?: CasInput 116 | 117 | /** 118 | * The timeout for this operation, represented in milliseconds. 119 | */ 120 | timeout?: number 121 | } 122 | 123 | /** 124 | * @category Key-Value 125 | */ 126 | export interface PrependOptions { 127 | /** 128 | * Specifies the level of synchronous durability for this operation. 129 | */ 130 | durabilityLevel?: DurabilityLevel 131 | 132 | /** 133 | * Specifies the number of nodes this operation should be persisted to 134 | * before it is considered successful. Note that this option is mutually 135 | * exclusive of {@link durabilityLevel}. 136 | */ 137 | durabilityPersistTo?: number 138 | 139 | /** 140 | * Specifies the number of nodes this operation should be replicated to 141 | * before it is considered successful. Note that this option is mutually 142 | * exclusive of {@link durabilityLevel}. 143 | */ 144 | durabilityReplicateTo?: number 145 | 146 | /** 147 | * If specified, indicates that operation should be failed if the CAS 148 | * has changed from this value, indicating that the document has changed. 149 | */ 150 | cas?: CasInput 151 | 152 | /** 153 | * The timeout for this operation, represented in milliseconds. 154 | */ 155 | timeout?: number 156 | } 157 | 158 | /** 159 | * Exposes a number of binary-level operations against a collection. 160 | * These operations do not adhere to the standard JSON-centric 161 | * behaviour of the SDK. 162 | * 163 | * @category Core 164 | */ 165 | export class BinaryCollection { 166 | private _coll: Collection 167 | 168 | /** 169 | * @internal 170 | */ 171 | constructor(parent: Collection) { 172 | this._coll = parent 173 | } 174 | 175 | /** 176 | * Increments the ASCII value of the specified key by the amount 177 | * indicated in the delta parameter. 178 | * 179 | * @param key The key to increment. 180 | * @param delta The amount to increment the key. 181 | * @param options Optional parameters for this operation. 
182 | * @param callback A node-style callback to be invoked after execution. 183 | */ 184 | increment( 185 | key: string, 186 | delta: number, 187 | options?: IncrementOptions, 188 | callback?: NodeCallback 189 | ): Promise { 190 | return this._coll._binaryIncrement(key, delta, options, callback) 191 | } 192 | 193 | /** 194 | * Decrements the ASCII value of the specified key by the amount 195 | * indicated in the delta parameter. 196 | * 197 | * @param key The key to increment. 198 | * @param delta The amount to increment the key. 199 | * @param options Optional parameters for this operation. 200 | * @param callback A node-style callback to be invoked after execution. 201 | */ 202 | decrement( 203 | key: string, 204 | delta: number, 205 | options?: DecrementOptions, 206 | callback?: NodeCallback 207 | ): Promise { 208 | return this._coll._binaryDecrement(key, delta, options, callback) 209 | } 210 | 211 | /** 212 | * Appends the specified value to the end of the specified key. 213 | * 214 | * @param key The key to append to. 215 | * @param value The value to adjoin to the end of the document. 216 | * @param options Optional parameters for this operation. 217 | * @param callback A node-style callback to be invoked after execution. 218 | */ 219 | append( 220 | key: string, 221 | value: string | Buffer, 222 | options?: AppendOptions, 223 | callback?: NodeCallback 224 | ): Promise { 225 | return this._coll._binaryAppend(key, value, options, callback) 226 | } 227 | 228 | /** 229 | * Prepends the specified value to the beginning of the specified key. 230 | * 231 | * @param key The key to prepend to. 232 | * @param value The value to adjoin to the beginning of the document. 233 | * @param options Optional parameters for this operation. 234 | * @param callback A node-style callback to be invoked after execution. 
235 | */ 236 | prepend( 237 | key: string, 238 | value: string | Buffer, 239 | options?: PrependOptions, 240 | callback?: NodeCallback 241 | ): Promise { 242 | return this._coll._binaryPrepend(key, value, options, callback) 243 | } 244 | } 245 | -------------------------------------------------------------------------------- /lib/bucket.ts: -------------------------------------------------------------------------------- 1 | import { CppConnection } from './binding' 2 | import { Cluster } from './cluster' 3 | import { Collection } from './collection' 4 | import { CollectionManager } from './collectionmanager' 5 | import { PingExecutor } from './diagnosticsexecutor' 6 | import { PingOptions, PingResult } from './diagnosticstypes' 7 | import { Scope } from './scope' 8 | import { StreamableRowPromise } from './streamablepromises' 9 | import { Transcoder } from './transcoders' 10 | import { NodeCallback, PromiseHelper } from './utilities' 11 | import { ViewExecutor } from './viewexecutor' 12 | import { ViewIndexManager } from './viewindexmanager' 13 | import { 14 | ViewMetaData, 15 | ViewQueryOptions, 16 | ViewResult, 17 | ViewRow, 18 | } from './viewtypes' 19 | 20 | /** 21 | * Exposes the operations which are available to be performed against a bucket. 22 | * Namely the ability to access to Collections as well as performing management 23 | * operations against the bucket. 
24 | * 25 | * @category Core 26 | */ 27 | export class Bucket { 28 | private _cluster: Cluster 29 | private _name: string 30 | private _conn: CppConnection 31 | 32 | /** 33 | @internal 34 | */ 35 | constructor(cluster: Cluster, bucketName: string) { 36 | this._cluster = cluster 37 | this._name = bucketName 38 | this._conn = cluster.conn 39 | } 40 | 41 | /** 42 | @internal 43 | */ 44 | get conn(): CppConnection { 45 | return this._conn 46 | } 47 | 48 | /** 49 | @internal 50 | */ 51 | get cluster(): Cluster { 52 | return this._cluster 53 | } 54 | 55 | /** 56 | @internal 57 | */ 58 | get transcoder(): Transcoder { 59 | return this._cluster.transcoder 60 | } 61 | 62 | /** 63 | * The name of the bucket this Bucket object references. 64 | */ 65 | get name(): string { 66 | return this._name 67 | } 68 | 69 | /** 70 | * Creates a Scope object reference to a specific scope. 71 | * 72 | * @param scopeName The name of the scope to reference. 73 | */ 74 | scope(scopeName: string): Scope { 75 | return new Scope(this, scopeName) 76 | } 77 | 78 | /** 79 | * Creates a Scope object reference to the default scope. 80 | */ 81 | defaultScope(): Scope { 82 | return this.scope(Scope.DEFAULT_NAME) 83 | } 84 | 85 | /** 86 | * Creates a Collection object reference to a specific collection. 87 | * 88 | * @param collectionName The name of the collection to reference. 89 | */ 90 | collection(collectionName: string): Collection { 91 | const scope = this.defaultScope() 92 | return scope.collection(collectionName) 93 | } 94 | 95 | /** 96 | * Creates a Collection object reference to the default collection. 97 | */ 98 | defaultCollection(): Collection { 99 | return this.collection(Collection.DEFAULT_NAME) 100 | } 101 | 102 | /** 103 | * Returns a ViewIndexManager which can be used to manage the view indexes 104 | * of this bucket. 
105 | */ 106 | viewIndexes(): ViewIndexManager { 107 | return new ViewIndexManager(this) 108 | } 109 | 110 | /** 111 | * Returns a CollectionManager which can be used to manage the collections 112 | * of this bucket. 113 | */ 114 | collections(): CollectionManager { 115 | return new CollectionManager(this) 116 | } 117 | 118 | /** 119 | * Executes a view query. 120 | * 121 | * @param designDoc The name of the design document containing the view to execute. 122 | * @param viewName The name of the view to execute. 123 | * @param options Optional parameters for this operation. 124 | * @param callback A node-style callback to be invoked after execution. 125 | */ 126 | viewQuery( 127 | designDoc: string, 128 | viewName: string, 129 | options?: ViewQueryOptions, 130 | callback?: NodeCallback> 131 | ): StreamableRowPromise< 132 | ViewResult, 133 | ViewRow, 134 | ViewMetaData 135 | > { 136 | if (options instanceof Function) { 137 | callback = arguments[2] 138 | options = undefined 139 | } 140 | if (!options) { 141 | options = {} 142 | } 143 | 144 | const exec = new ViewExecutor(this) 145 | 146 | const options_ = options 147 | return PromiseHelper.wrapAsync( 148 | () => exec.query(designDoc, viewName, options_), 149 | callback 150 | ) 151 | } 152 | 153 | /** 154 | * Performs a ping operation against the cluster. Pinging the bucket services 155 | * which are specified (or all services if none are specified). Returns a report 156 | * which describes the outcome of the ping operations which were performed. 157 | * 158 | * @param options Optional parameters for this operation. 159 | * @param callback A node-style callback to be invoked after execution. 
160 | */ 161 | ping( 162 | options?: PingOptions, 163 | callback?: NodeCallback 164 | ): Promise { 165 | if (options instanceof Function) { 166 | callback = arguments[0] 167 | options = undefined 168 | } 169 | if (!options) { 170 | options = {} 171 | } 172 | 173 | const exec = new PingExecutor(this._cluster) 174 | 175 | const options_ = options 176 | return PromiseHelper.wrapAsync( 177 | () => 178 | exec.ping({ 179 | ...options_, 180 | bucket: this.name, 181 | }), 182 | callback 183 | ) 184 | } 185 | } 186 | -------------------------------------------------------------------------------- /lib/configProfile.ts: -------------------------------------------------------------------------------- 1 | import { ConnectOptions } from './cluster' 2 | 3 | /** 4 | * IConfigProfile specifies a ConfigProfile which applies 5 | * specified option values to ConnectionOptions. 6 | * 7 | * Volatile: This API is subject to change at any time. 8 | */ 9 | export interface IConfigProfile { 10 | /** 11 | * Applies the ConfigProfile options to the provided ConnectOptions. 12 | * 13 | * Volatile: This API is subject to change at any time. 14 | * 15 | * @param options The Connect options the ConfigProfile should be applied toward. 16 | */ 17 | apply(options: ConnectOptions): void 18 | } 19 | 20 | /** 21 | * The WAN Development profile sets various timeout options that are useful 22 | * when working in a WAN environment. 23 | * 24 | * Volatile: This API is subject to change at any time. 25 | */ 26 | export class WanDevelopmentProfile implements IConfigProfile { 27 | /** 28 | * Applies the ConfigProfile options to the provided ConnectOptions. 29 | * 30 | * Volatile: This API is subject to change at any time. 31 | * 32 | * @param options The Connect options the ConfigProfile should be applied toward. 
33 | */ 34 | apply(options: ConnectOptions): void { 35 | // the profile should override previously set values 36 | options.timeouts = { 37 | ...options.timeouts, 38 | ...{ 39 | kvTimeout: 20000, 40 | kvDurableTimeout: 20000, 41 | analyticsTimeout: 120000, 42 | managementTimeout: 120000, 43 | queryTimeout: 120000, 44 | searchTimeout: 120000, 45 | viewTimeout: 120000, 46 | bootstrapTimeout: 120000, 47 | connectTimeout: 20000, 48 | resolveTimeout: 20000, 49 | }, 50 | } 51 | options.dnsConfig = { ...options.dnsConfig, ...{ dnsSrvTimeout: 20000 } } 52 | } 53 | } 54 | 55 | /** 56 | * The ConfigProfiles class keeps track of registered/known Configuration Profiles. 57 | * 58 | * Volatile: This API is subject to change at any time. 59 | */ 60 | export class ConfigProfiles { 61 | private _profiles: { [profileName: string]: IConfigProfile } 62 | 63 | constructor() { 64 | this._profiles = {} 65 | this.registerProfile('wanDevelopment', new WanDevelopmentProfile()) 66 | } 67 | 68 | /** 69 | * Applies the specified registered ConfigProfile to the provided ConnectOptions. 70 | * 71 | * Volatile: This API is subject to change at any time. 72 | * 73 | * @param profileName The name of the ConfigProfile to apply. 74 | * @param options The Connect options the ConfigProfile should be applied toward. 75 | */ 76 | applyProfile(profileName: string, options: ConnectOptions): void { 77 | if (!(profileName in this._profiles)) { 78 | throw new Error(`${profileName} is not a registered profile.`) 79 | } 80 | this._profiles[profileName].apply(options) 81 | } 82 | 83 | /** 84 | * Registers a ConfigProfile under the specified name. 85 | * 86 | * Volatile: This API is subject to change at any time. 87 | * 88 | * @param profileName The name the ConfigProfile should be registered under. 89 | * @param profile The ConfigProfile to register. 
90 | */ 91 | registerProfile(profileName: string, profile: IConfigProfile): void { 92 | this._profiles[profileName] = profile 93 | } 94 | 95 | /** 96 | * Unregisters the specified ConfigProfile. 97 | * 98 | * Volatile: This API is subject to change at any time. 99 | * 100 | * @param profileName The name of the ConfigProfile to unregister. 101 | */ 102 | unregisterProfile(profileName: string): void { 103 | delete this._profiles[profileName] 104 | } 105 | } 106 | 107 | export const knownProfiles = new ConfigProfiles() 108 | -------------------------------------------------------------------------------- /lib/connspec.ts: -------------------------------------------------------------------------------- 1 | /* eslint jsdoc/require-jsdoc: off */ 2 | import * as qs from 'querystring' 3 | 4 | const partsMatcher = 5 | /((.*):\/\/)?(([^/?:]*)(:([^/?:@]*))?@)?([^/?]*)(\/([^?]*))?(\?(.*))?/ 6 | const hostMatcher = /((\[[^\]]+\]+)|([^;,:]+))(:([0-9]*))?(;,)?/g 7 | const kvMatcher = /([^=]*)=([^&?]*)[&?]?/g 8 | 9 | export class ConnSpec { 10 | scheme: string 11 | hosts: [string, number][] 12 | bucket: string 13 | options: { [key: string]: string | string[] } 14 | 15 | constructor(data?: Partial) { 16 | this.scheme = 'couchbase' 17 | this.hosts = [['localhost', 0]] 18 | this.bucket = '' 19 | this.options = {} 20 | 21 | if (data) { 22 | Object.assign(this, data) 23 | } 24 | } 25 | 26 | static parse(connStr: string): ConnSpec { 27 | const spec = new ConnSpec() 28 | 29 | if (!connStr) { 30 | return spec 31 | } 32 | 33 | const parts = partsMatcher.exec(connStr) 34 | if (!parts) { 35 | return spec 36 | } 37 | 38 | if (parts[2]) { 39 | spec.scheme = parts[2] 40 | } else { 41 | spec.scheme = 'couchbase' 42 | } 43 | 44 | if (parts[7]) { 45 | spec.hosts = [] 46 | 47 | while (hostMatcher) { 48 | const hostMatch = hostMatcher.exec(parts[7]) 49 | if (!hostMatch) { 50 | break 51 | } 52 | spec.hosts.push([ 53 | hostMatch[1], 54 | hostMatch[5] ? 
parseInt(hostMatch[5], 10) : 0, 55 | ]) 56 | } 57 | } else { 58 | throw new Error('a connection string with no hosts is illegal') 59 | } 60 | 61 | if (parts[9]) { 62 | spec.bucket = parts[9] 63 | } else { 64 | spec.bucket = '' 65 | } 66 | 67 | if (parts[11]) { 68 | spec.options = {} 69 | 70 | for (;;) { 71 | const kvMatch = kvMatcher.exec(parts[11]) 72 | if (!kvMatch) { 73 | break 74 | } 75 | 76 | const optKey = qs.unescape(kvMatch[1]) 77 | const optVal = qs.unescape(kvMatch[2]) 78 | if (optKey in spec.options) { 79 | const specOptVal = spec.options[optKey] 80 | if (typeof specOptVal === 'string') { 81 | spec.options[optKey] = [specOptVal, optVal] 82 | } else { 83 | specOptVal.push(optVal) 84 | } 85 | } else { 86 | spec.options[optKey] = optVal 87 | } 88 | } 89 | } else { 90 | spec.options = {} 91 | } 92 | 93 | return spec 94 | } 95 | 96 | toString(): string { 97 | let connStr = '' 98 | 99 | if (this.scheme) { 100 | connStr += this.scheme + '://' 101 | } 102 | 103 | if (this.hosts.length === 0) { 104 | throw new Error('a connection string with no hosts is illegal') 105 | } 106 | for (let i = 0; i < this.hosts.length; ++i) { 107 | const host = this.hosts[i] 108 | if (i !== 0) { 109 | connStr += ',' 110 | } 111 | connStr += host[0] 112 | if (host[1]) { 113 | connStr += ':' + host[1] 114 | } 115 | } 116 | 117 | if (this.bucket) { 118 | connStr += '/' + this.bucket 119 | } 120 | 121 | if (this.options) { 122 | const optParts = [] 123 | 124 | for (const optKey in this.options) { 125 | const optVal = this.options[optKey] 126 | if (typeof optVal === 'string') { 127 | optParts.push(qs.escape(optKey) + '=' + qs.escape(optVal)) 128 | } else { 129 | for (let optIdx = 0; optIdx < optVal.length; ++optIdx) { 130 | optParts.push(qs.escape(optKey) + '=' + qs.escape(optVal[optIdx])) 131 | } 132 | } 133 | } 134 | 135 | if (optParts.length > 0) { 136 | connStr += '?' 
+ optParts.join('&') 137 | } 138 | } 139 | 140 | return connStr 141 | } 142 | } 143 | -------------------------------------------------------------------------------- /lib/couchbase.ts: -------------------------------------------------------------------------------- 1 | import binding from './binding' 2 | import { Cluster, ConnectOptions } from './cluster' 3 | import { NodeCallback } from './utilities' 4 | 5 | /** 6 | * Acts as the entrypoint into the rest of the library. Connecting to the cluster 7 | * and exposing the various services and features. 8 | * 9 | * @param connStr The connection string to use to connect to the cluster. 10 | * @param options Optional parameters for this operation. 11 | * @param callback A node-style callback to be invoked after execution. 12 | * @category Core 13 | */ 14 | export async function connect( 15 | connStr: string, 16 | options?: ConnectOptions, 17 | callback?: NodeCallback 18 | ): Promise { 19 | return Cluster.connect(connStr, options, callback) 20 | } 21 | 22 | /** 23 | * Exposes the underlying couchbase++ library version that is being used by the 24 | * SDK to perform I/O with the cluster. 25 | * 26 | * @deprecated Use {@link cbppVersion} instead. 27 | */ 28 | export const lcbVersion: string = binding.cbppVersion 29 | 30 | /** 31 | * Exposes the underlying couchbase++ library version that is being used by the 32 | * SDK to perform I/O with the cluster. 33 | */ 34 | export const cbppVersion: string = binding.cbppVersion 35 | export const cbppMetadata: string = binding.cbppMetadata 36 | 37 | /** 38 | * Volatile: This API is subject to change at any time. 39 | * 40 | * Exposes the underlying couchbase++ library protocol logger. This method is for 41 | * logging/debugging purposes and must be used with caution as network details will 42 | * be logged to the provided file. 43 | * 44 | * @param filename Name of file protocol logger will save logging details. 
 */
export function enableProtocolLoggerToSaveNetworkTrafficToFile(
  filename: string
): void {
  // Delegates directly to the native couchbase++ binding; the actual
  // logging happens in C++, not in JS.
  binding.enableProtocolLogger(filename)
}

/**
 * Volatile: This API is subject to change at any time.
 *
 * Shutdowns the underlying couchbase++ logger.
 *
 */
export function shutdownLogger(): void {
  binding.shutdownLogger()
}

// Public API surface: re-export every user-facing module so consumers can
// import everything from the package root rather than reaching into lib/.
export * from './analyticsindexmanager'
export * from './analyticstypes'
export * from './authenticators'
export * from './binarycollection'
export * from './bucket'
export * from './bucketmanager'
export * from './cluster'
export * from './collection'
export * from './collectionmanager'
export * from './crudoptypes'
export * from './datastructures'
export * from './diagnosticstypes'
export * from './errorcontexts'
export * from './errors'
export * from './eventingfunctionmanager'
export * from './generaltypes'
export * from './mutationstate'
export * from './queryindexmanager'
export * from './querytypes'
export * from './rangeScan'
export * from './scope'
export * from './scopeeventingfunctionmanager'
export * from './scopesearchindexmanager'
export * from './sdspecs'
export * from './searchfacet'
export * from './searchindexmanager'
export * from './searchquery'
export * from './searchsort'
export * from './searchtypes'
export * from './streamablepromises'
export * from './transactions'
export * from './transcoders'
export * from './usermanager'
export * from './vectorsearch'
export * from './viewexecutor'
export * from './viewindexmanager'
export * from './viewtypes'

export { Cas, CasInput, NodeCallback } from './utilities'
-------------------------------------------------------------------------------- /lib/diagnosticsexecutor.ts:
--------------------------------------------------------------------------------
/* eslint jsdoc/require-jsdoc: off */
import {
  endpointStateFromCpp,
  errorFromCpp,
  pingStateFromCpp,
  serviceTypeFromCpp,
  serviceTypeToCpp,
} from './bindingutilities'
import { Cluster } from './cluster'
import {
  DiagnosticsEndpoint,
  DiagnosticsOptions,
  DiagnosticsResult,
  PingEndpoint,
  PingOptions,
  PingResult,
} from './diagnosticstypes'

/**
 * Executes diagnostics requests against the cluster's C++ connection and
 * translates the C++-shaped response into SDK types.
 *
 * NOTE(review): the class name is misspelled ('Diagnotics' rather than
 * 'Diagnostics').  It is exported, so renaming it would be an interface
 * change; kept as-is for backward compatibility.
 *
 * @internal
 */
export class DiagnoticsExecutor {
  private _cluster: Cluster

  /**
   * @internal
   */
  constructor(cluster: Cluster) {
    this._cluster = cluster
  }

  /**
   * Requests a diagnostics report from the native connection.
   *
   * @internal
   */
  async diagnostics(options: DiagnosticsOptions): Promise<DiagnosticsResult> {
    return new Promise((resolve, reject) => {
      this._cluster.conn.diagnostics(
        {
          report_id: options.reportId,
        },
        (cppErr, resp) => {
          const err = errorFromCpp(cppErr)
          if (err || !resp) {
            reject(err)
            return
          }

          resolve(
            new DiagnosticsResult({
              version: resp.version,
              id: resp.id,
              sdk: resp.sdk,
              // The C++ layer keys `services` by a numeric service-type code
              // (serialized as a string key) — hence the parseInt before
              // mapping back to the SDK's ServiceType enum.
              services: Object.fromEntries(
                Object.entries(resp.services).map(([serviceType, services]) => {
                  return [
                    serviceTypeFromCpp(parseInt(serviceType)),
                    services.map((svc) => {
                      return new DiagnosticsEndpoint({
                        type: serviceTypeFromCpp(svc.type),
                        id: svc.id,
                        local: svc.local,
                        remote: svc.remote,
                        lastActivity: svc.last_activity,
                        state: endpointStateFromCpp(svc.state),
                      })
                    }),
                  ]
                })
              ),
            })
          )
        }
      )
    })
  }
}

/**
 * Executes ping requests against the cluster's C++ connection and
 * translates the C++-shaped response into SDK types.
 *
 * @internal
 */
export class PingExecutor {
  private _cluster: Cluster

  /**
   * @internal
   */
  constructor(cluster: Cluster) {
    this._cluster = cluster
  }

  /**
   * Pings the requested services (or all, when no filter is given) and
   * resolves with the per-endpoint latency/state report.
   *
   * @internal
   */
  async ping(options: PingOptions): Promise<PingResult> {
    return new Promise((resolve, reject) => {
      // BUG(JSCBC-993): timeout is not currently sent to the C++ client
      options.timeout

      this._cluster.conn.ping(
        {
          report_id: options.reportId,
          // No serviceTypes filter -> services left undefined; presumably the
          // C++ layer then pings every service — TODO confirm against binding.
          services: options.serviceTypes
            ? options.serviceTypes.map((svc) => serviceTypeToCpp(svc))
            : undefined,
        },
        (cppErr, resp) => {
          const err = errorFromCpp(cppErr)
          if (err || !resp) {
            reject(err)
            return
          }

          resolve(
            new PingResult({
              version: resp.version,
              id: resp.id,
              sdk: resp.sdk,
              // Same numeric-string service keys as in diagnostics() above.
              services: Object.fromEntries(
                Object.entries(resp.services).map(([serviceType, services]) => {
                  return [
                    serviceTypeFromCpp(parseInt(serviceType)),
                    services.map((svc) => {
                      return new PingEndpoint({
                        type: serviceTypeFromCpp(svc.type),
                        id: svc.id,
                        latency: svc.latency,
                        remote: svc.remote,
                        local: svc.local,
                        state: pingStateFromCpp(svc.state),
                        bucket: svc.bucket,
                        error: svc.error,
                      })
                    }),
                  ]
                })
              ),
            })
          )
        }
      )
    })
  }
}
-------------------------------------------------------------------------------- /lib/generaltypes.ts: --------------------------------------------------------------------------------
/**
 * Represents the various service types available.
 */
export enum ServiceType {
  /**
   * The key-value service, responsible for data storage.
   */
  KeyValue = 'kv',

  /**
   * The management service, responsible for managing the cluster.
   */
  Management = 'mgmt',

  /**
   * The views service, responsible for views querying.
   */
  Views = 'views',

  /**
   * The query service, responsible for N1QL querying.
   */
  Query = 'query',

  /**
   * The search service, responsible for full-text search querying.
   */
  Search = 'search',

  /**
   * The analytics service, responsible for analytics querying.
   */
  Analytics = 'analytics',

  /**
   * The eventing service, responsible for event-driven actions.
   */
  Eventing = 'eventing',
}

/**
 * Represents the durability level required for an operation.
 */
export enum DurabilityLevel {
  /**
   * Indicates that no durability is needed.
   */
  None = 0,

  /**
   * Indicates that mutations should be replicated to a majority of the
   * nodes in the cluster before the operation is marked as successful.
   */
  Majority = 1,

  /**
   * Indicates that mutations should be replicated to a majority of the
   * nodes in the cluster and persisted to the master node before the
   * operation is marked as successful.
   */
  MajorityAndPersistOnMaster = 2,

  /**
   * Indicates that mutations should be persisted to the majority of the
   * nodes in the cluster before the operation is marked as successful.
   */
  PersistToMajority = 3,
}

/**
 * Represents the storage semantics to use for some types of operations.
 */
export enum StoreSemantics {
  /**
   * Indicates that replace semantics should be used. This will replace
   * the document if it exists, and the operation will fail if the
   * document does not exist.
   */
  Replace = 0,

  /**
   * Indicates that upsert semantics should be used. This will replace
   * the document if it exists, and create it if it does not.
   */
  Upsert = 1,

  /**
   * Indicates that insert semantics should be used. This will insert
   * the document if it does not exist, and fail the operation if the
   * document already exists.
   */
  Insert = 2,
}

/**
 * Represents the read preference used to select which server nodes
 * (replica set / server group) an operation may be routed to.
 * (The previous comment here described scan consistency — a copy-paste
 * from another enum — and did not match this type.)
 */
export enum ReadPreference {
  /**
   * Indicates that filtering for replica set should not be enforced.
   */
  NoPreference = 'no_preference',

  /**
   * Indicates that any nodes that do not belong to local group selected during
   * cluster instantiation using the `ConnectOptions.preferredServerGroup` option
   * should be excluded.
   */
  SelectedServerGroup = 'selected_server_group',
}
-------------------------------------------------------------------------------- /lib/httpexecutor.ts: --------------------------------------------------------------------------------
/* eslint jsdoc/require-jsdoc: off */
import binding from './binding'
import { CppConnection } from './binding'
import { errorFromCpp } from './bindingutilities'
import * as events from 'events'

/**
 * @internal
 */
export enum HttpServiceType {
  Management = 'MGMT',
  Views = 'VIEW',
  Query = 'QUERY',
  Search = 'SEARCH',
  Analytics = 'ANALYTICS',
  Eventing = 'EVENTING',
}

/**
 * @internal
 */
export enum HttpMethod {
  Get = 'GET',
  Post = 'POST',
  Put = 'PUT',
  Delete = 'DELETE',
}

/**
 * @internal
 */
export interface HttpRequestOptions {
  type: HttpServiceType
  method: HttpMethod
  path: string
  contentType?: string
  body?: string | Buffer
  timeout: number
}

/**
 * @internal
 */
export interface HttpResponse {
  requestOptions: HttpRequestOptions
  statusCode: number
  headers: { [key: string]: string }
  body: Buffer
}

/**
 * Dispatches freeform HTTP requests over the native C++ connection.
 *
 * @internal
 */
export class HttpExecutor {
  private _conn: CppConnection

  /**
   * @internal
   */
  constructor(conn: CppConnection) {
    this._conn = conn
  }

  /**
   * Issues an HTTP request and returns an EventEmitter which fires
   * 'meta' (status/headers), 'data' (body bytes) and 'end' on success,
   * or 'error' on failure.
   *
   * @internal
   */
  streamRequest(options: HttpRequestOptions): events.EventEmitter {
    const emitter = new events.EventEmitter()

    // Map the internal HttpServiceType enum onto the native binding's
    // service-type constants; unknown values are a programming error.
    let cppHttpType
    if (options.type === HttpServiceType.Management) {
      cppHttpType = binding.service_type.management
    } else if (options.type === HttpServiceType.Views) {
      cppHttpType = binding.service_type.view
    } else if (options.type === HttpServiceType.Query) {
      cppHttpType = binding.service_type.query
    } else if (options.type === HttpServiceType.Search) {
      cppHttpType = binding.service_type.search
    } else if (options.type === HttpServiceType.Analytics) {
      cppHttpType = binding.service_type.analytics
    } else if (options.type === HttpServiceType.Eventing) {
      cppHttpType = binding.service_type.eventing
    } else {
      throw new Error('unexpected http request type')
    }

    // The binding takes the method as a plain string.
    let cppHttpMethod
    if (options.method === HttpMethod.Get) {
      cppHttpMethod = 'GET'
    } else if (options.method === HttpMethod.Post) {
      cppHttpMethod = 'POST'
    } else if (options.method === HttpMethod.Put) {
      cppHttpMethod = 'PUT'
    } else if (options.method === HttpMethod.Delete) {
      cppHttpMethod = 'DELETE'
    } else {
      throw new Error('unexpected http request method')
    }

    const headers: { [key: string]: string } = {}
    if (options.contentType) {
      headers['Content-Type'] = options.contentType
    }

    // The binding expects a string body; Buffers are decoded (default
    // encoding) and a missing body is sent as ''.
    let body = ''
    if (!options.body) {
      // empty body is acceptable
    } else if (options.body instanceof Buffer) {
      body = options.body.toString()
    } else if (typeof options.body === 'string') {
      body = options.body
    } else {
      throw new Error('unexpected http body type')
    }

    this._conn.managementFreeform(
      {
        type: cppHttpType,
        method: cppHttpMethod,
        path: options.path,
        headers: headers,
        body: body,
        timeout: options.timeout,
      },
      (cppErr, res) => {
        const err = errorFromCpp(cppErr)
if (err) { 128 | emitter.emit('error', err) 129 | return 130 | } 131 | 132 | emitter.emit('meta', { 133 | statusCode: res.status, 134 | headers: res.headers, 135 | }) 136 | 137 | emitter.emit('data', Buffer.from(res.body)) 138 | emitter.emit('end') 139 | } 140 | ) 141 | 142 | return emitter 143 | } 144 | 145 | async request(options: HttpRequestOptions): Promise { 146 | return new Promise((resolve, reject) => { 147 | const emitter = this.streamRequest(options) 148 | 149 | emitter.on('error', (err) => { 150 | reject(err) 151 | }) 152 | 153 | let dataCache = Buffer.allocUnsafe(0) 154 | emitter.on('data', (data) => { 155 | dataCache = Buffer.concat([dataCache, data]) 156 | }) 157 | 158 | let metaCache: any = null 159 | emitter.on('meta', (meta) => { 160 | metaCache = meta 161 | }) 162 | 163 | emitter.on('end', () => { 164 | resolve({ 165 | requestOptions: options, 166 | statusCode: metaCache.statusCode, 167 | headers: metaCache.headers, 168 | body: dataCache, 169 | }) 170 | }) 171 | }) 172 | } 173 | } 174 | -------------------------------------------------------------------------------- /lib/mutationstate.ts: -------------------------------------------------------------------------------- 1 | import { CppMutationToken } from './binding' 2 | 3 | /** 4 | * Represents the mutation token returned by the server. 5 | * 6 | * @see {@link MutationState} 7 | */ 8 | export interface MutationToken { 9 | /** 10 | * Generates a string representation of this mutation token. 11 | */ 12 | toString(): string 13 | 14 | /** 15 | * Generates a JSON representation of this mutation token. 16 | */ 17 | toJSON(): any 18 | } 19 | 20 | /** 21 | * Aggregates a number of {@link MutationToken}'s which have been returned by mutation 22 | * operations, which can then be used when performing queries. This will guarenteed 23 | * that the query includes the specified set of mutations without incurring the wait 24 | * associated with request_plus level consistency. 
25 | */ 26 | export class MutationState { 27 | /** 28 | * @internal 29 | */ 30 | public _data: { 31 | [bucketName: string]: { [vbId: number]: CppMutationToken } 32 | } 33 | 34 | constructor(...tokens: MutationToken[]) { 35 | this._data = {} 36 | 37 | tokens.forEach((token) => this._addOne(token)) 38 | } 39 | 40 | /** 41 | * Adds a set of tokens to this state. 42 | * 43 | * @param tokens The tokens to add. 44 | */ 45 | add(...tokens: MutationToken[]): void { 46 | tokens.forEach((token) => this._addOne(token)) 47 | } 48 | 49 | private _addOne(token: MutationToken) { 50 | if (!token) { 51 | return 52 | } 53 | 54 | const cppToken = token as CppMutationToken 55 | const tokenData = cppToken.toJSON() 56 | const vbId = parseInt(tokenData.partition_id, 10) 57 | const vbSeqNo = parseInt(tokenData.sequence_number, 10) 58 | const bucketName = tokenData.bucket_name 59 | 60 | if (!this._data[bucketName]) { 61 | this._data[bucketName] = {} 62 | } 63 | if (!this._data[bucketName][vbId]) { 64 | this._data[bucketName][vbId] = cppToken 65 | } else { 66 | const otherToken = this._data[bucketName][vbId] 67 | const otherTokenSeqNo = parseInt(otherToken.toJSON().sequence, 10) 68 | if (otherTokenSeqNo < vbSeqNo) { 69 | this._data[bucketName][vbId] = cppToken 70 | } 71 | } 72 | } 73 | 74 | /** 75 | * @internal 76 | */ 77 | toJSON(): any { 78 | return this._data 79 | } 80 | 81 | /** 82 | * @internal 83 | */ 84 | inspect(): string { 85 | const tokens: string[] = [] 86 | 87 | for (const bucketName in this._data) { 88 | for (const vbId in this._data[bucketName]) { 89 | const info = this._data[bucketName][vbId] 90 | tokens.push(bucketName + ':' + vbId + ':' + info.toString()) 91 | } 92 | } 93 | 94 | return 'MutationState<' + tokens.join('; ') + '>' 95 | } 96 | } 97 | -------------------------------------------------------------------------------- /lib/queryexecutor.ts: -------------------------------------------------------------------------------- 1 | import { CppError, CppQueryResponse } from 
'./binding'
import {
  errorFromCpp,
  mutationStateToCpp,
  queryProfileToCpp,
  queryScanConsistencyToCpp,
} from './bindingutilities'
import { Cluster } from './cluster'
import {
  QueryMetaData,
  QueryMetrics,
  QueryOptions,
  QueryResult,
  QueryStatus,
  QueryWarning,
} from './querytypes'
import { StreamableRowPromise } from './streamablepromises'

/**
 * Builds and dispatches N1QL query requests over the cluster's C++
 * connection, streaming rows back to the caller.
 *
 * @internal
 */
export class QueryExecutor {
  private _cluster: Cluster

  /**
   * @internal
   */
  constructor(cluster: Cluster) {
    this._cluster = cluster
  }

  /**
   * Bridges a single C++ query invocation into a StreamableRowPromise.
   * Event order on success: one 'row' per result row, then 'meta', then
   * 'end'.  On failure: 'error' followed by 'end'.
   *
   * @internal
   */
  static execute<TRow = any>(
    exec: (
      callback: (err: CppError | null, resp: CppQueryResponse) => void
    ) => void
  ): StreamableRowPromise<QueryResult<TRow>, TRow, QueryMetaData> {
    const emitter = new StreamableRowPromise<
      QueryResult<TRow>,
      TRow,
      QueryMetaData
    >((rows, meta) => {
      return new QueryResult({
        rows: rows,
        meta: meta,
      })
    })

    exec((cppErr, resp) => {
      const err = errorFromCpp(cppErr)
      if (err) {
        emitter.emit('error', err)
        emitter.emit('end')
        return
      }

      // Rows arrive from C++ as JSON strings; decode each before emitting.
      resp.rows.forEach((row) => {
        emitter.emit('row', JSON.parse(row))
      })

      {
        const metaData = resp.meta

        let warnings: QueryWarning[]
        if (metaData.warnings) {
          warnings = metaData.warnings.map(
            (warningData: any) =>
              new QueryWarning({
                code: warningData.code,
                message: warningData.message,
              })
          )
        } else {
          warnings = []
        }

        // Metrics are only present when requested; absent counters
        // default to 0 via `|| 0`.
        let metrics: QueryMetrics | undefined
        if (metaData.metrics) {
          const metricsData = metaData.metrics

          metrics = new QueryMetrics({
            elapsedTime: metricsData.elapsed_time,
            executionTime: metricsData.execution_time,
            sortCount: metricsData.sort_count || 0,
            resultCount: metricsData.result_count || 0,
            resultSize: metricsData.result_size || 0,
            mutationCount: metricsData.mutation_count || 0,
            errorCount: metricsData.error_count || 0,
            warningCount: metricsData.warning_count || 0,
          })
        } else {
          metrics = undefined
        }

        // signature and profile also arrive JSON-encoded (or absent).
        const meta = new QueryMetaData({
          requestId: metaData.request_id,
          clientContextId: metaData.client_context_id,
          status: metaData.status as QueryStatus,
          signature: metaData.signature
            ? JSON.parse(metaData.signature)
            : undefined,
          warnings: warnings,
          metrics: metrics,
          profile: metaData.profile ? JSON.parse(metaData.profile) : undefined,
        })

        emitter.emit('meta', meta)
      }

      emitter.emit('end')
      return
    })

    return emitter
  }

  /**
   * Translates the SDK-level QueryOptions into the C++ request shape and
   * executes the statement.
   *
   * @internal
   */
  query<TRow = any>(
    query: string,
    options: QueryOptions
  ): StreamableRowPromise<QueryResult<TRow>, TRow, QueryMetaData> {
    // NOTE(review): `||` means an explicit timeout of 0 falls back to the
    // cluster default — presumably intentional; confirm before changing.
    const timeout = options.timeout || this._cluster.queryTimeout

    return QueryExecutor.execute((callback) => {
      this._cluster.conn.query(
        {
          statement: query,
          client_context_id: options.clientContextId,
          // adhoc defaults to true unless explicitly disabled
          adhoc: options.adhoc === false ? false : true,
          metrics: options.metrics || false,
          readonly: options.readOnly || false,
          flex_index: options.flexIndex || false,
          preserve_expiry: options.preserveExpiry || false,
          use_replica: options.useReplica,
          max_parallelism: options.maxParallelism,
          scan_cap: options.scanCap,
          scan_wait: options.scanWait,
          pipeline_batch: options.pipelineBatch,
          pipeline_cap: options.pipelineCap,
          scan_consistency: queryScanConsistencyToCpp(options.scanConsistency),
          mutation_state: mutationStateToCpp(options.consistentWith).tokens,
          timeout: timeout,
          query_context: options.queryContext,
          profile: queryProfileToCpp(options.profile),
          // raw / named / positional parameters are JSON-encoded per value;
          // undefined values are dropped rather than serialized.
          raw: options.raw
            ? Object.fromEntries(
                Object.entries(options.raw)
                  .filter(([, v]) => v !== undefined)
                  .map(([k, v]) => [k, JSON.stringify(v)])
              )
            : {},
          positional_parameters:
            options.parameters && Array.isArray(options.parameters)
              ? options.parameters.map((v) => JSON.stringify(v ?? null))
              : [],
          named_parameters:
            options.parameters && !Array.isArray(options.parameters)
              ? Object.fromEntries(
                  Object.entries(options.parameters as { [key: string]: any })
                    .filter(([, v]) => v !== undefined)
                    .map(([k, v]) => [k, JSON.stringify(v)])
                )
              : {},
          body_str: '',
        },
        callback
      )
    })
  }
}
-------------------------------------------------------------------------------- /lib/rangeScan.ts: --------------------------------------------------------------------------------
/**
 * Represents a search term for a RangeScan.
 *
 * @see {@link RangeScan}
 * @category Key-Value
 */
export class ScanTerm {
  /**
   * The scan term.
   *
   * @see {@link MutationState}
   */
  term: string

  /**
   * Set to true for the scan term to be exclusive. Defaults to false (inclusive).
   */
  exclusive?: boolean

  /**
   * @internal
   */
  constructor(term: string, exclusive?: boolean) {
    this.term = term
    this.exclusive = exclusive
  }
}

/**
 *
 * @internal
 */
export interface ScanType {
  /**
   * Returns string representation of scan type.
   */
  getScanType(): string
}

/**
 * A RangeScan performs a scan on a range of keys with the range specified through
 * a start and end ScanTerm.
 *
 * @category Key-Value
 */
export class RangeScan implements ScanType {
  /**
   * RangeScan start term.
   */
  start?: ScanTerm

  /**
   * RangeScan end term.
   */
  end?: ScanTerm

  /**
   * @internal
   */
  constructor(start?: ScanTerm, end?: ScanTerm) {
    this.start = start
    this.end = end
  }

  /**
   * Returns string representation of scan type.
   */
  getScanType(): string {
    return 'range_scan'
  }
}

/**
 * A SamplingScan performs a scan on a random sampling of keys with the sampling bounded by
 * a limit.
 *
 * @category Key-Value
 */
export class SamplingScan implements ScanType {
  /**
   * SamplingScan limit.
   */
  limit: number

  /**
   * SamplingScan seed.
   */
  seed?: number

  /**
   * @internal
   */
  constructor(limit: number, seed?: number) {
    this.limit = limit
    this.seed = seed
  }

  /**
   * Returns string representation of scan type.
   */
  getScanType(): string {
    return 'sampling_scan'
  }
}

/**
 * A PrefixScan scan type selects every document whose ID starts with a certain prefix.
 *
 * @category Key-Value
 */
export class PrefixScan implements ScanType {
  /**
   * PrefixScan prefix.
   */
  prefix: string

  /**
   * @internal
   */
  constructor(prefix: string) {
    this.prefix = prefix
  }

  /**
   * Returns string representation of scan type.
   */
  getScanType(): string {
    return 'prefix_scan'
  }
}
-------------------------------------------------------------------------------- /lib/scope.ts: --------------------------------------------------------------------------------
import { AnalyticsExecutor } from './analyticsexecutor'
import {
  AnalyticsMetaData,
  AnalyticsQueryOptions,
  AnalyticsResult,
} from './analyticstypes'
import { CppConnection } from './binding'
import { Bucket } from './bucket'
import { Cluster } from './cluster'
import { Collection } from './collection'
import { QueryExecutor } from './queryexecutor'
import { QueryMetaData, QueryOptions, QueryResult } from './querytypes'
import { SearchExecutor } from './searchexecutor'
import { ScopeSearchIndexManager } from './scopesearchindexmanager'
import {
  SearchMetaData,
  SearchQueryOptions,
  SearchRequest,
  SearchResult,
  SearchRow,
} from './searchtypes'
import { StreamableRowPromise } from './streamablepromises'
import { Transcoder } from './transcoders'
import { NodeCallback, PromiseHelper } from './utilities'
import { ScopeEventingFunctionManager } from './scopeeventingfunctionmanager'

/**
 * Exposes the operations which are available to be performed against a scope.
 * Namely the ability to access to Collections for performing operations.
 *
 * @category Core
 */
export class Scope {
  /**
   * @internal
   */
  static get DEFAULT_NAME(): string {
    return '_default'
  }

  private _bucket: Bucket
  private _name: string
  private _conn: CppConnection

  /**
  @internal
  */
  constructor(bucket: Bucket, scopeName: string) {
    this._bucket = bucket
    this._name = scopeName
    this._conn = bucket.conn
  }

  /**
  @internal
  */
  get conn(): CppConnection {
    return this._conn
  }

  /**
  @internal
  */
  get bucket(): Bucket {
    return this._bucket
  }

  /**
  @internal
  */
  get cluster(): Cluster {
    return this._bucket.cluster
  }

  /**
  @internal
  */
  get transcoder(): Transcoder {
    return this._bucket.transcoder
  }

  /**
   * The name of the scope this Scope object references.
   */
  get name(): string {
    return this._name
  }

  /**
   * Creates a Collection object reference to a specific collection.
   *
   * @param collectionName The name of the collection to reference.
   */
  collection(collectionName: string): Collection {
    return new Collection(this, collectionName)
  }

  /**
   * Returns a SearchIndexManager which can be used to manage the search
   * indexes of this scope.
   */
  searchIndexes(): ScopeSearchIndexManager {
    return new ScopeSearchIndexManager(
      this.cluster,
      this.bucket.name,
      this._name
    )
  }

  /**
   * Returns a ScopeEventingFunctionManager which can be used to manage the eventing
   * functions of this scope.
   * Uncommitted: This API is subject to change in the future.
   */
  eventingFunctions(): ScopeEventingFunctionManager {
    return new ScopeEventingFunctionManager(
      this.cluster,
      this._bucket.name,
      this._name
    )
  }

  /**
   * Executes a N1QL query against the cluster scoped to this scope.
   *
   * @param statement The N1QL statement to execute.
   * @param options Optional parameters for this operation.
   * @param callback A node-style callback to be invoked after execution.
   */
  query<TRow = any>(
    statement: string,
    options?: QueryOptions,
    callback?: NodeCallback<QueryResult<TRow>>
  ): StreamableRowPromise<QueryResult<TRow>, TRow, QueryMetaData> {
    // (statement, callback) overload: the options slot actually holds
    // the callback, so shift it over.
    if (options instanceof Function) {
      callback = arguments[1]
      options = undefined
    }
    if (!options) {
      options = {}
    }

    const bucket = this.bucket
    const exec = new QueryExecutor(this.cluster)

    const options_ = options
    return PromiseHelper.wrapAsync(
      () =>
        // queryContext pins the statement to this bucket/scope.
        exec.query(statement, {
          ...options_,
          queryContext: `${bucket.name}.${this.name}`,
        }),
      callback
    )
  }

  /**
   * Executes an analytics query against the cluster scoped this scope.
   *
   * @param statement The analytics statement to execute.
   * @param options Optional parameters for this operation.
   * @param callback A node-style callback to be invoked after execution.
   */
  analyticsQuery<TRow = any>(
    statement: string,
    options?: AnalyticsQueryOptions,
    callback?: NodeCallback<AnalyticsResult<TRow>>
  ): StreamableRowPromise<AnalyticsResult<TRow>, TRow, AnalyticsMetaData> {
    // Same (statement, callback) overload handling as query() above.
    if (options instanceof Function) {
      callback = arguments[1]
      options = undefined
    }
    if (!options) {
      options = {}
    }

    const bucket = this.bucket
    const exec = new AnalyticsExecutor(this.cluster)

    const options_ = options
    return PromiseHelper.wrapAsync(
      () =>
        exec.query(statement, {
          ...options_,
          queryContext: `${bucket.name}.${this.name}`,
        }),
      callback
    )
  }

  /**
   * Executes a search query against the scope.
   *
   * @param indexName The name of the index to query.
   * @param request The SearchRequest describing the search to execute.
   * @param options Optional parameters for this operation.
   * @param callback A node-style callback to be invoked after execution.
   */
  search(
    indexName: string,
    request: SearchRequest,
    options?: SearchQueryOptions,
    callback?: NodeCallback<SearchResult>
  ): StreamableRowPromise<SearchResult, SearchRow, SearchMetaData> {
    // (indexName, request, callback) overload: argument 2 is the callback.
    if (options instanceof Function) {
      callback = arguments[2]
      options = undefined
    }
    if (!options) {
      options = {}
    }

    const exec = new SearchExecutor(this.cluster, this._bucket.name, this._name)

    const options_ = options
    return PromiseHelper.wrapAsync(
      () => exec.query(indexName, request, options_),
      callback
    )
  }
}
-------------------------------------------------------------------------------- /lib/sdutils.ts: --------------------------------------------------------------------------------
/* eslint jsdoc/require-jsdoc: off */
'use strict'

interface SdPathPartProp {
  type: 'property'
  path: string
}

interface SdPathPartIndex {
  type: 'index'
  index: number
}

type SdPathPart = SdPathPartProp | SdPathPartIndex

export class SdUtils {
  // Tokenizes a sub-document path such as "foo.bar[2].baz" into an ordered
  // list of property/index parts.
  private static _parsePath(path: string): SdPathPart[] {
    if (!path) {
      return []
    }

    let identifier = ''
    const parts: SdPathPart[] = []

    for (let i = 0; i < path.length; ++i) {
      if (path[i] === '[') {
        // Starting an array, use the previous bit as a property
        if (identifier) {
          parts.push({ type: 'property', path: identifier })
          identifier = ''
        }
      } else if (path[i] === ']') {
        // array path of identifier;
        parts.push({ type: 'index', index: parseInt(identifier) })
        identifier = ''
        // skip the `.` that follows, if there is one
        ++i
      } else if (path[i] === '.') {
        parts.push({ type: 'property', path: identifier })
        identifier = ''
      } else {
        identifier += path[i]
      }
    }

    if (identifier) {
      parts.push({ type: 'property', path: identifier })
    }

    return parts
} 52 | 53 | private static _insertByPath( 54 | root: any, 55 | parts: SdPathPart[], 56 | value: any 57 | ): any { 58 | if (parts.length === 0) { 59 | return value 60 | } 61 | 62 | const firstPart = parts.shift() as SdPathPart 63 | if (firstPart.type === 'property') { 64 | if (!root) { 65 | root = {} 66 | } 67 | if (Array.isArray(root)) { 68 | throw new Error('expected object, found array') 69 | } 70 | 71 | root[firstPart.path] = this._insertByPath( 72 | root[firstPart.path], 73 | parts, 74 | value 75 | ) 76 | } else if (firstPart.type === 'index') { 77 | if (!root) { 78 | root = [] 79 | } 80 | if (!Array.isArray(root)) { 81 | throw new Error('expected array, found object') 82 | } 83 | 84 | root[firstPart.index] = this._insertByPath( 85 | root[firstPart.index], 86 | parts, 87 | value 88 | ) 89 | } else { 90 | throw new Error('encountered unexpected path type') 91 | } 92 | 93 | return root 94 | } 95 | 96 | static insertByPath(root: any, path: string, value: any): any { 97 | const parts = this._parsePath(path) 98 | return this._insertByPath(root, parts, value) 99 | } 100 | 101 | private static _getByPath(value: any, parts: SdPathPart[]): any { 102 | if (parts.length === 0) { 103 | return value 104 | } 105 | 106 | const firstPart = parts.shift() as SdPathPart 107 | if (firstPart.type === 'property') { 108 | if (!value) { 109 | return undefined 110 | } 111 | if (Array.isArray(value)) { 112 | throw new Error('expected object, found array') 113 | } 114 | 115 | return this._getByPath(value[firstPart.path], parts) 116 | } else if (firstPart.type === 'index') { 117 | if (!value) { 118 | return undefined 119 | } 120 | if (!Array.isArray(value)) { 121 | throw new Error('expected array, found object') 122 | } 123 | 124 | return this._getByPath(value[firstPart.index], parts) 125 | } else { 126 | throw new Error('encountered unexpected path type') 127 | } 128 | } 129 | 130 | static getByPath(value: any, path: string): any { 131 | const parts = this._parsePath(path) 132 | return 
this._getByPath(value, parts) 133 | } 134 | 135 | static convertMacroCasToCas(cas: string): string { 136 | const buf = Buffer.from(cas.startsWith('0x') ? cas.slice(2) : cas, 'hex') 137 | return `0x${buf.reverse().toString('hex')}` 138 | } 139 | } 140 | -------------------------------------------------------------------------------- /lib/searchexecutor.ts: -------------------------------------------------------------------------------- 1 | /* eslint jsdoc/require-jsdoc: off */ 2 | import { 3 | errorFromCpp, 4 | mutationStateToCpp, 5 | searchHighlightStyleToCpp, 6 | searchScanConsistencyToCpp, 7 | vectorQueryCombinationToCpp, 8 | } from './bindingutilities' 9 | import { Cluster } from './cluster' 10 | import { MatchNoneSearchQuery, SearchQuery } from './searchquery' 11 | import { SearchSort } from './searchsort' 12 | import { 13 | SearchMetaData, 14 | SearchQueryOptions, 15 | SearchRequest, 16 | SearchResult, 17 | SearchRow, 18 | } from './searchtypes' 19 | import { StreamableRowPromise } from './streamablepromises' 20 | import { CppSearchRequest } from './binding' 21 | 22 | /** 23 | * @internal 24 | */ 25 | export class SearchExecutor { 26 | private _cluster: Cluster 27 | private _bucketName: string | undefined 28 | private _scopeName: string | undefined 29 | 30 | /** 31 | * @internal 32 | */ 33 | constructor(cluster: Cluster, bucketName?: string, scopeName?: string) { 34 | this._cluster = cluster 35 | this._bucketName = bucketName 36 | this._scopeName = scopeName 37 | } 38 | 39 | /** 40 | * @internal 41 | */ 42 | query( 43 | indexName: string, 44 | query: SearchQuery | SearchRequest, 45 | options: SearchQueryOptions 46 | ): StreamableRowPromise { 47 | const emitter = new StreamableRowPromise< 48 | SearchResult, 49 | SearchRow, 50 | SearchMetaData 51 | >((rows, meta) => { 52 | return new SearchResult({ 53 | rows: rows, 54 | meta: meta, 55 | }) 56 | }) 57 | 58 | const searchQuery = 59 | query instanceof SearchQuery 60 | ? 
JSON.stringify(query) 61 | : query.searchQuery 62 | ? JSON.stringify(query.searchQuery) 63 | : JSON.stringify(new MatchNoneSearchQuery()) 64 | const timeout = options.timeout || this._cluster.searchTimeout 65 | const request: CppSearchRequest = { 66 | timeout, 67 | index_name: indexName, 68 | query: searchQuery, 69 | limit: options.limit, 70 | skip: options.skip, 71 | explain: options.explain || false, 72 | disable_scoring: options.disableScoring || false, 73 | include_locations: options.includeLocations || false, 74 | highlight_style: options.highlight 75 | ? searchHighlightStyleToCpp(options.highlight.style) 76 | : undefined, 77 | highlight_fields: 78 | options.highlight && options.highlight.fields 79 | ? options.highlight.fields 80 | : [], 81 | fields: options.fields || [], 82 | collections: options.collections || [], 83 | scan_consistency: searchScanConsistencyToCpp(options.consistency), 84 | mutation_state: mutationStateToCpp(options.consistentWith).tokens, 85 | sort_specs: options.sort 86 | ? options.sort.map((sort: string | SearchSort) => JSON.stringify(sort)) 87 | : [], 88 | facets: options.facets 89 | ? Object.fromEntries( 90 | Object.entries(options.facets) 91 | .filter(([, v]) => v !== undefined) 92 | .map(([k, v]) => [k, JSON.stringify(v)]) 93 | ) 94 | : {}, 95 | raw: options.raw 96 | ? 
Object.fromEntries( 97 | Object.entries(options.raw) 98 | .filter(([, v]) => v !== undefined) 99 | .map(([k, v]) => [k, JSON.stringify(v)]) 100 | ) 101 | : {}, 102 | body_str: '', 103 | show_request: options.showRequest || false, 104 | log_request: options.logRequest || false, 105 | log_response: options.logResponse || false, 106 | } 107 | 108 | if (query instanceof SearchRequest) { 109 | if (query.vectorSearch) { 110 | request.vector_search = JSON.stringify(query.vectorSearch.queries) 111 | if ( 112 | query.vectorSearch.options && 113 | query.vectorSearch.options.vectorQueryCombination 114 | ) { 115 | request.vector_query_combination = vectorQueryCombinationToCpp( 116 | query.vectorSearch.options.vectorQueryCombination 117 | ) 118 | } 119 | } 120 | } 121 | 122 | if (this._bucketName && this._scopeName) { 123 | request.bucket_name = this._bucketName 124 | request.scope_name = this._scopeName 125 | } 126 | 127 | this._cluster.conn.search(request, (cppErr, resp) => { 128 | const err = errorFromCpp(cppErr) 129 | if (err) { 130 | emitter.emit('error', err) 131 | emitter.emit('end') 132 | return 133 | } 134 | 135 | resp.rows.forEach((row) => { 136 | row.fields = row.fields ? JSON.parse(row.fields) : undefined 137 | row.explanation = row.explanation 138 | ? JSON.parse(row.explanation) 139 | : undefined 140 | emitter.emit('row', row) 141 | }) 142 | 143 | { 144 | const metaData = resp.meta 145 | emitter.emit('meta', { 146 | facets: Object.fromEntries( 147 | Object.values(resp.facets).map((v) => [v.name, v]) 148 | ), 149 | ...metaData, 150 | } as SearchMetaData) 151 | } 152 | 153 | emitter.emit('end') 154 | return 155 | }) 156 | 157 | return emitter 158 | } 159 | } 160 | -------------------------------------------------------------------------------- /lib/searchfacet.ts: -------------------------------------------------------------------------------- 1 | /* eslint jsdoc/require-jsdoc: off */ 2 | 3 | /** 4 | * Provides the ability to specify facets for a search query. 
5 | * 6 | * @category Full Text Search 7 | */ 8 | export class SearchFacet { 9 | protected _data: any 10 | 11 | constructor(data: any) { 12 | if (!data) { 13 | data = {} 14 | } 15 | 16 | this._data = data 17 | } 18 | 19 | toJSON(): any { 20 | return this._data 21 | } 22 | 23 | static term(field: string, size: number): TermSearchFacet { 24 | return new TermSearchFacet(field, size) 25 | } 26 | 27 | static numeric(field: string, size: number): NumericSearchFacet { 28 | return new NumericSearchFacet(field, size) 29 | } 30 | 31 | static date(field: string, size: number): DateSearchFacet { 32 | return new DateSearchFacet(field, size) 33 | } 34 | } 35 | 36 | /** 37 | * Provides ability to request a term facet. 38 | * 39 | * @category Full Text Search 40 | */ 41 | export class TermSearchFacet extends SearchFacet { 42 | /** 43 | * @internal 44 | */ 45 | constructor(field: string, size: number) { 46 | super({ 47 | field: field, 48 | size: size, 49 | }) 50 | } 51 | } 52 | 53 | /** 54 | * Provides ability to request a numeric facet. 55 | * 56 | * @category Full Text Search 57 | */ 58 | export class NumericSearchFacet extends SearchFacet { 59 | /** 60 | * @internal 61 | */ 62 | constructor(field: string, size: number) { 63 | super({ 64 | field: field, 65 | size: size, 66 | numeric_ranges: [], 67 | }) 68 | } 69 | 70 | addRange(name: string, min?: number, max?: number): NumericSearchFacet { 71 | this._data.numeric_ranges.push({ 72 | name: name, 73 | min: min, 74 | max: max, 75 | }) 76 | return this 77 | } 78 | } 79 | 80 | /** 81 | * Provides ability to request a date facet. 
82 | * 83 | * @category Full Text Search 84 | */ 85 | export class DateSearchFacet extends SearchFacet { 86 | /** 87 | * @internal 88 | */ 89 | constructor(field: string, size: number) { 90 | super({ 91 | field: field, 92 | size: size, 93 | date_ranges: [], 94 | }) 95 | } 96 | 97 | addRange(name: string, start?: Date, end?: Date): DateSearchFacet { 98 | this._data.date_ranges.push({ 99 | name: name, 100 | start: start, 101 | end: end, 102 | }) 103 | return this 104 | } 105 | } 106 | -------------------------------------------------------------------------------- /lib/searchsort.ts: -------------------------------------------------------------------------------- 1 | /* eslint jsdoc/require-jsdoc: off */ 2 | 3 | /** 4 | * Provides the ability to specify sorting for a search query. 5 | * 6 | * @category Full Text Search 7 | */ 8 | export class SearchSort { 9 | protected _data: any 10 | 11 | constructor(data: any) { 12 | if (!data) { 13 | data = {} 14 | } 15 | 16 | this._data = data 17 | } 18 | 19 | toJSON(): any { 20 | return this._data 21 | } 22 | 23 | static score(): ScoreSearchSort { 24 | return new ScoreSearchSort() 25 | } 26 | 27 | static id(): IdSearchSort { 28 | return new IdSearchSort() 29 | } 30 | 31 | static field(field: string): FieldSearchSort { 32 | return new FieldSearchSort(field) 33 | } 34 | 35 | static geoDistance( 36 | field: string, 37 | lat: number, 38 | lon: number 39 | ): GeoDistanceSearchSort { 40 | return new GeoDistanceSearchSort(field, lat, lon) 41 | } 42 | } 43 | 44 | /** 45 | * Provides sorting for a search query by score. 46 | * 47 | * @category Full Text Search 48 | */ 49 | export class ScoreSearchSort extends SearchSort { 50 | /** 51 | * @internal 52 | */ 53 | constructor() { 54 | super({ 55 | by: 'score', 56 | }) 57 | } 58 | 59 | descending(descending: boolean): ScoreSearchSort { 60 | this._data.desc = descending 61 | return this 62 | } 63 | } 64 | 65 | /** 66 | * Provides sorting for a search query by document id. 
67 | * 68 | * @category Full Text Search 69 | */ 70 | export class IdSearchSort extends SearchSort { 71 | /** 72 | * @internal 73 | */ 74 | constructor() { 75 | super({ 76 | by: 'id', 77 | }) 78 | } 79 | 80 | descending(descending: boolean): IdSearchSort { 81 | this._data.desc = descending 82 | return this 83 | } 84 | } 85 | 86 | /** 87 | * Provides sorting for a search query by a specified field. 88 | * 89 | * @category Full Text Search 90 | */ 91 | export class FieldSearchSort extends SearchSort { 92 | /** 93 | * @internal 94 | */ 95 | constructor(field: string) { 96 | super({ 97 | by: 'field', 98 | field: field, 99 | }) 100 | } 101 | 102 | type(type: string): FieldSearchSort { 103 | this._data.type = type 104 | return this 105 | } 106 | 107 | mode(mode: string): FieldSearchSort { 108 | this._data.mode = mode 109 | return this 110 | } 111 | 112 | missing(missing: boolean): FieldSearchSort { 113 | this._data.missing = missing 114 | return this 115 | } 116 | 117 | descending(descending: boolean): FieldSearchSort { 118 | this._data.desc = descending 119 | return this 120 | } 121 | } 122 | 123 | /** 124 | * Provides sorting for a search query by geographic distance from a point. 
125 | * 126 | * @category Full Text Search 127 | */ 128 | export class GeoDistanceSearchSort extends SearchSort { 129 | /** 130 | * @internal 131 | */ 132 | constructor(field: string, lat: number, lon: number) { 133 | super({ 134 | by: 'geo_distance', 135 | field: field, 136 | location: [lon, lat], 137 | }) 138 | } 139 | 140 | unit(unit: string): GeoDistanceSearchSort { 141 | this._data.unit = unit 142 | return this 143 | } 144 | 145 | descending(descending: boolean): GeoDistanceSearchSort { 146 | this._data.desc = descending 147 | return this 148 | } 149 | } 150 | -------------------------------------------------------------------------------- /lib/searchtypes.ts: -------------------------------------------------------------------------------- 1 | import { InvalidArgumentError } from './errors' 2 | import { MutationState } from './mutationstate' 3 | import { SearchFacet } from './searchfacet' 4 | import { SearchQuery } from './searchquery' 5 | import { SearchSort } from './searchsort' 6 | import { VectorSearch } from './vectorsearch' 7 | 8 | /** 9 | * SearchMetaData represents the meta-data available from a search query. 10 | * This class is currently incomplete and must be casted to `any` in 11 | * TypeScript to be used. 12 | * 13 | * @category Full Text Search 14 | */ 15 | export class SearchMetaData {} 16 | 17 | /** 18 | * SearchRow represents the data available from a row of a search query. 19 | * This class is currently incomplete and must be casted to `any` in 20 | * TypeScript to be used. 21 | * 22 | * @category Full Text Search 23 | */ 24 | export class SearchRow {} 25 | 26 | /** 27 | * Contains the results of a search query. 28 | * 29 | * @category Full Text Search 30 | */ 31 | export class SearchResult { 32 | /** 33 | * The rows which have been returned by the query. 34 | */ 35 | rows: any[] 36 | 37 | /** 38 | * The meta-data which has been returned by the query. 
   */
  meta: SearchMetaData

  /**
   * @internal
   */
  constructor(data: SearchResult) {
    this.rows = data.rows
    this.meta = data.meta
  }
}

/**
 * Specifies the highlight style that should be used for matches in the results.
 *
 * @category Full Text Search
 */
export enum HighlightStyle {
  /**
   * Indicates that matches should be highlighted using HTML tags in the result text.
   */
  HTML = 'html',

  /**
   * Indicates that matches should be highlighted using ANSI escape sequences in the result text.
   */
  ANSI = 'ansi',
}

/**
 * Represents the various scan consistency options that are available when
 * querying against the search service.
 *
 * @category Full Text Search
 */
export enum SearchScanConsistency {
  /**
   * Indicates that no specific consistency is required. This is the fastest
   * option, but results may not include the most recent operations which have
   * been performed.
   */
  NotBounded = 'not_bounded',
}

/**
 * Options which can be passed when executing a full text search query.
 *
 * @category Full Text Search
 */
export interface SearchQueryOptions {
  /**
   * Specifies the number of results to skip from the index before returning
   * results.
   */
  skip?: number

  /**
   * Specifies the limit to the number of results that should be returned.
   */
  limit?: number

  /**
   * Configures whether the result should contain the execution plan for the query.
   */
  explain?: boolean

  /**
   * Specifies how the highlighting should behave. Specifically which mode should be
   * used for highlighting as well as which fields should be highlighted.
   */
  highlight?: {
    style?: HighlightStyle
    fields?: string[]
  }

  /**
   * Specifies the collections which should be searched as part of the query.
114 | */ 115 | collections?: string[] 116 | 117 | /** 118 | * Specifies the list of fields which should be searched. 119 | */ 120 | fields?: string[] 121 | 122 | /** 123 | * Specifies any facets that should be included in the query. 124 | */ 125 | facets?: { [name: string]: SearchFacet } 126 | 127 | /** 128 | * Specifies a list of fields or SearchSort's to use when sorting the result sets. 129 | */ 130 | sort?: string[] | SearchSort[] 131 | 132 | /** 133 | * Specifies that scoring should be disabled. This improves performance but makes it 134 | * impossible to sort based on how well a particular result scored. 135 | */ 136 | disableScoring?: boolean 137 | 138 | /** 139 | * If set to true, will include the locations in the search result. 140 | * 141 | * @experimental This API is subject to change without notice. 142 | */ 143 | includeLocations?: boolean 144 | 145 | /** 146 | * Specifies the consistency requirements when executing the query. 147 | * 148 | * @see SearchScanConsistency 149 | */ 150 | consistency?: SearchScanConsistency 151 | 152 | /** 153 | * Specifies a MutationState which the query should be consistent with. 154 | * 155 | * @see {@link MutationState} 156 | */ 157 | consistentWith?: MutationState 158 | 159 | /** 160 | * Specifies any additional parameters which should be passed to the query engine 161 | * when executing the query. 162 | */ 163 | raw?: { [key: string]: any } 164 | 165 | /** 166 | * The timeout for this operation, represented in milliseconds. 167 | */ 168 | timeout?: number 169 | 170 | /** 171 | * Specifies that the search response should include the request JSON. 172 | */ 173 | showRequest?: boolean 174 | 175 | /** 176 | * Uncommitted: This API is subject to change in the future. 177 | * Specifies that the search request should appear in the log. 178 | */ 179 | logRequest?: boolean 180 | 181 | /** 182 | * Uncommitted: This API is subject to change in the future. 183 | * Specifies that the search response should appear in the log. 
   */
  logResponse?: boolean
}

/**
 * Represents a search query and/or vector search to execute via the Couchbase
 * Full Text Search (FTS) service. A request always holds at least one of the
 * two; the second can be attached later via the `with*` methods.
 *
 * @category Full Text Search
 */
export class SearchRequest {
  // At most one of each; enforced by the constructor and the with* methods.
  private _searchQuery: SearchQuery | undefined
  private _vectorSearch: VectorSearch | undefined

  /**
   * Creates a request from either a SearchQuery or a VectorSearch.
   *
   * @throws InvalidArgumentError if `query` is neither a SearchQuery nor a
   * VectorSearch instance.
   */
  constructor(query: SearchQuery | VectorSearch) {
    if (query instanceof SearchQuery) {
      this._searchQuery = query
    } else if (query instanceof VectorSearch) {
      this._vectorSearch = query
    } else {
      throw new InvalidArgumentError(
        new Error(
          'Must provide either a SearchQuery or VectorSearch when creating SearchRequest.'
        )
      )
    }
  }

  /**
   * @internal
   */
  get searchQuery(): SearchQuery | undefined {
    return this._searchQuery
  }

  /**
   * @internal
   */
  get vectorSearch(): VectorSearch | undefined {
    return this._vectorSearch
  }

  /**
   * Adds a search query to the request if the request does not already have a search query.
   *
   * @param query A SearchQuery to add to the request.
   * @throws InvalidArgumentError if `query` is not a SearchQuery, or if this
   * request already carries one.
   */
  withSearchQuery(query: SearchQuery): SearchRequest {
    if (!(query instanceof SearchQuery)) {
      throw new InvalidArgumentError(new Error('Must provide a SearchQuery.'))
    }
    if (this._searchQuery) {
      throw new InvalidArgumentError(
        new Error('Request already has a SearchQuery.')
      )
    }
    this._searchQuery = query
    return this
  }

  /**
   * Adds a vector search to the request if the request does not already have a vector search.
   *
   * @param search A VectorSearch to add to the request.
247 | */ 248 | withVectorSearch(search: VectorSearch): SearchRequest { 249 | if (!(search instanceof VectorSearch)) { 250 | throw new InvalidArgumentError(new Error('Must provide a VectorSearch.')) 251 | } 252 | if (this._vectorSearch) { 253 | throw new InvalidArgumentError( 254 | new Error('Request already has a VectorSearch.') 255 | ) 256 | } 257 | this._vectorSearch = search 258 | return this 259 | } 260 | 261 | /** 262 | * Creates a search request. 263 | * 264 | * @param query Either a SearchQuery or VectorSearch to add to the search request. 265 | */ 266 | static create(query: SearchQuery | VectorSearch): SearchRequest { 267 | return new SearchRequest(query) 268 | } 269 | } 270 | -------------------------------------------------------------------------------- /lib/streamablepromises.ts: -------------------------------------------------------------------------------- 1 | /* eslint jsdoc/require-jsdoc: off */ 2 | import EventEmitter from 'events' 3 | 4 | /** 5 | * @internal 6 | */ 7 | type ListenerFunc = (...args: any[]) => void 8 | 9 | /** 10 | * @internal 11 | */ 12 | interface PromisifyEmitter { 13 | on(eventName: string | symbol, listener: ListenerFunc): void 14 | } 15 | 16 | /** 17 | * @internal 18 | */ 19 | type PromisifyFunc = ( 20 | emitter: PromisifyEmitter, 21 | resolve: (result: T) => void, 22 | reject: (err: Error) => void 23 | ) => void 24 | 25 | /** 26 | * @internal 27 | */ 28 | export class StreamablePromise extends EventEmitter implements Promise { 29 | private _promise: Promise | null = null 30 | private _promiseOns: [string | symbol, ListenerFunc][] 31 | 32 | /** 33 | * @internal 34 | */ 35 | constructor(promisefyFn: PromisifyFunc) { 36 | super() 37 | 38 | this._promiseOns = [] 39 | this._promise = new Promise((resolve, reject) => { 40 | promisefyFn( 41 | { 42 | on: (eventName: string | symbol, listener: ListenerFunc) => { 43 | this._promiseOns.push([eventName, listener]) 44 | super.on(eventName, listener) 45 | }, 46 | }, 47 | resolve, 48 | 
        reject
      )
    })
  }

  // NOTE(review): generic parameter lists appear to have been stripped by the
  // text extraction throughout this class (e.g. `Promise` below should read
  // `Promise<T>`, and `then`/`catch` should declare TResult1/TResult2/TResult).
  // Restore them from the upstream source before compiling.

  /**
   * Returns the underlying promise, or throws if the promise has been
   * invalidated because a consumer subscribed to events instead (see
   * `_depromisify`).
   */
  private get promise(): Promise {
    if (!this._promise) {
      throw new Error(
        'Cannot await a promise that is already registered for events'
      )
    }
    return this._promise
  }

  /**
   * Switches this object into pure event-emitter mode: detaches the listeners
   * that were feeding the promise and drops the promise so a later await
   * fails loudly rather than hanging.
   */
  private _depromisify() {
    this._promiseOns.forEach((e) => this.off(...e))
    this._promise = null
  }

  // Standard Promise surface, all delegating to the (possibly invalidated)
  // internal promise via the throwing `promise` getter.
  then(
    onfulfilled?:
      | ((value: T) => TResult1 | PromiseLike)
      | undefined
      | null,
    onrejected?:
      | ((reason: any) => TResult2 | PromiseLike)
      | undefined
      | null
  ): Promise {
    return this.promise.then(onfulfilled, onrejected)
  }

  catch(
    onrejected?:
      | ((reason: any) => TResult | PromiseLike)
      | undefined
      | null
  ): Promise {
    return this.promise.catch(onrejected)
  }

  finally(onfinally?: (() => void) | undefined | null): Promise {
    return this.promise.finally(onfinally)
  }

  // Subscribing through the public emitter API flips this object into
  // event mode first; afterwards awaiting it throws.
  addListener(eventName: string | symbol, listener: ListenerFunc): this {
    this._depromisify()
    return super.on(eventName, listener)
  }

  on(eventName: string | symbol, listener: ListenerFunc): this {
    this._depromisify()
    return super.on(eventName, listener)
  }

  /**
   * @internal
   */
  get [Symbol.toStringTag](): string {
    return (Promise as any)[Symbol.toStringTag]
  }
}

/**
 * Provides the ability to be used as either a promise or an event emitter. Enabling
 * an application to easily retrieve all results using async/await or enabling
 * streaming of results by listening for the row and meta events.
 */
export class StreamableRowPromise extends StreamablePromise {
  // NOTE(review): the generic parameter lists (`<T, TRow, TMeta>` on the class
  // and on StreamablePromise) appear to have been stripped by text extraction;
  // restore them from the upstream source.

  /**
   * @param fn Aggregator invoked once the 'end' event fires, combining the
   * buffered rows and the last-seen meta into the promise's resolution value.
   */
  constructor(fn: (rows: TRow[], meta: TMeta) => T) {
    super((emitter, resolve, reject) => {
      let err: Error | undefined
      const rows: TRow[] = []
      let meta: TMeta | undefined

      // Buffer everything; the first error wins and suppresses resolution.
      emitter.on('row', (r) => rows.push(r))
      emitter.on('meta', (m) => (meta = m))
      emitter.on('error', (e) => (err = e))
      emitter.on('end', () => {
        if (err) {
          return reject(err)
        }

        resolve(fn(rows, meta as TMeta))
      })
    })
  }
}

/**
 * Provides the ability to be used as either a promise or an event emitter. Enabling
 * an application to easily retrieve all replicas using async/await or enabling
 * streaming of replicas by listening for the replica event.
 */
export class StreamableReplicasPromise extends StreamablePromise {
  /**
   * @param fn Aggregator invoked once the 'end' event fires, combining the
   * buffered replica results into the promise's resolution value.
   */
  constructor(fn: (replicas: TRep[]) => T) {
    super((emitter, resolve, reject) => {
      let err: Error | undefined
      const replicas: TRep[] = []

      emitter.on('replica', (r) => replicas.push(r))
      emitter.on('error', (e) => (err = e))
      emitter.on('end', () => {
        if (err) {
          return reject(err)
        }

        resolve(fn(replicas))
      })
    })
  }
}

/**
 * Provides the ability to be used as either a promise or an event emitter. Enabling
 * an application to easily retrieve all scan results using async/await or enabling
 * streaming of scan results by listening for the result event.
 */
export class StreamableScanPromise extends StreamablePromise {
  // NOTE(review): generic parameter lists (`<T, TRes>`) appear stripped by
  // text extraction; restore from the upstream source.

  // Set by cancelStreaming(); polled by the scan executor to stop emitting.
  private _cancelRequested: boolean

  /**
   * @param fn Aggregator invoked once the 'end' event fires, combining the
   * buffered scan results into the promise's resolution value.
   */
  constructor(fn: (results: TRes[]) => T) {
    super((emitter, resolve, reject) => {
      let err: Error | undefined
      const results: TRes[] = []

      emitter.on('result', (r) => results.push(r))
      emitter.on('error', (e) => (err = e))
      emitter.on('end', () => {
        if (err) {
          return reject(err)
        }

        resolve(fn(results))
      })
    })
    this._cancelRequested = false
  }

  /**
   * Whether cancellation of the underlying stream has been requested.
   */
  get cancelRequested(): boolean {
    return this._cancelRequested
  }

  /**
   * Requests that streaming be cancelled; the producer is expected to observe
   * `cancelRequested` and stop emitting results.
   */
  cancelStreaming(): void {
    this._cancelRequested = true
  }
}
--------------------------------------------------------------------------------
/lib/utilities.ts:
--------------------------------------------------------------------------------
import { DurabilityLevel } from './generaltypes'
import { InvalidArgumentError } from './errors'
import * as qs from 'querystring'

/**
 * CAS represents an opaque value which can be used to compare documents to
 * determine if a change has occurred.
 *
 * @category Key-Value
 */
export interface Cas {
  /**
   * Generates a string representation of this CAS.
   */
  toString(): string

  /**
   * Generates a JSON representation of this CAS.
   */
  toJSON(): any
}

/**
 * CasInput represents the supported types that can be provided to an
 * operation that receives a CAS.
 *
 * @category Key-Value
 */
export type CasInput = Cas | string | Buffer

/**
 * Represents a node-style callback which receives an optional error or result.
 *
 * @category Utilities
 */
export interface NodeCallback {
  // NOTE(review): the `<T>` generic parameter on NodeCallback appears to have
  // been stripped by text extraction; restore from the upstream source.
  (err: Error | null, result: T | null): void
}

/**
 * Helpers for exposing operations as both promises and node-style callbacks.
 *
 * @internal
 */
export class PromiseHelper {
  /**
   * Runs `fn` (which returns a promise). If a callback is supplied, the
   * promise's outcome is additionally forwarded to it; the promise itself is
   * returned in both cases so callers may also await it.
   *
   * NOTE(review): the stray `>` after `wrapAsync` is residue of a stripped
   * generic list (`wrapAsync<T, U extends Promise<T>>`); restore upstream.
   *
   * @internal
   */
  static wrapAsync>(
    fn: () => U,
    callback?: (err: Error | null, result: T | null) => void
  ): U {
    if (callback) {
      const prom = fn()
      prom.then(
        (res) => callback(null, res),
        (err) => callback(err, null)
      )
      return prom
    }

    return fn()
  }

  /**
   * Adapts a callback-taking function `fn` into a promise, additionally
   * forwarding the outcome to `callback` when one is provided.
   *
   * @internal
   */
  static wrap(
    fn: (callback: NodeCallback) => void,
    callback?: NodeCallback | null
  ): Promise {
    const prom: Promise = new Promise((resolve, reject) => {
      fn((err, res) => {
        if (err) {
          reject(err as Error)
        } else {
          resolve(res as T)
        }
      })
    })

    if (callback) {
      prom.then(
        (res) => callback(null, res),
        (err) => callback(err, null)
      )
    }

    return prom
  }
}

/**
 * Tracks the time remaining of an overall timeout across multiple
 * sub-operations, based on a high-resolution start timestamp.
 *
 * @internal
 */
export class CompoundTimeout {
  private _start: [number, number]
  private _timeout: number | undefined

  /**
   * @internal
   */
  constructor(timeout: number | undefined) {
    this._start = process.hrtime()
    this._timeout = timeout
  }

  /**
   * Returns the remaining time in milliseconds (clamped at 0), or undefined
   * when no timeout was configured.
   *
   * @internal
   */
  left(): number | undefined {
    if (this._timeout === undefined) {
      return undefined
    }

    const period = process.hrtime(this._start)

    // hrtime gives [seconds, nanoseconds]; convert both to milliseconds.
    const periodMs = period[0] * 1e3 + period[1] / 1e6
    if (periodMs > this._timeout) {
      return 0
    }

    return this._timeout - periodMs
  }

  /**
   * @internal
   */
130 | expired(): boolean { 131 | const timeLeft = this.left() 132 | if (timeLeft === undefined) { 133 | return false 134 | } 135 | 136 | return timeLeft <= 0 137 | } 138 | } 139 | 140 | /** 141 | * @internal 142 | */ 143 | export function duraLevelToNsServerStr( 144 | level: DurabilityLevel | string | undefined 145 | ): string | undefined { 146 | if (level === undefined) { 147 | return undefined 148 | } 149 | 150 | if (typeof level === 'string') { 151 | return level as string 152 | } 153 | 154 | if (level === DurabilityLevel.None) { 155 | return 'none' 156 | } else if (level === DurabilityLevel.Majority) { 157 | return 'majority' 158 | } else if (level === DurabilityLevel.MajorityAndPersistOnMaster) { 159 | return 'majorityAndPersistActive' 160 | } else if (level === DurabilityLevel.PersistToMajority) { 161 | return 'persistToMajority' 162 | } else { 163 | throw new Error('invalid durability level specified') 164 | } 165 | } 166 | 167 | /** 168 | * @internal 169 | */ 170 | export function nsServerStrToDuraLevel( 171 | level: string | undefined 172 | ): DurabilityLevel { 173 | if (level === undefined) { 174 | return DurabilityLevel.None 175 | } 176 | 177 | if (level === 'none') { 178 | return DurabilityLevel.None 179 | } else if (level === 'majority') { 180 | return DurabilityLevel.Majority 181 | } else if (level === 'majorityAndPersistActive') { 182 | return DurabilityLevel.MajorityAndPersistOnMaster 183 | } else if (level === 'persistToMajority') { 184 | return DurabilityLevel.PersistToMajority 185 | } else { 186 | throw new Error('invalid durability level string') 187 | } 188 | } 189 | 190 | /** 191 | * @internal 192 | */ 193 | export function cbQsStringify( 194 | values: { [key: string]: any }, 195 | options?: { boolAsString?: boolean } 196 | ): string { 197 | const cbValues: { [key: string]: any } = {} 198 | for (const i in values) { 199 | if (values[i] === undefined) { 200 | // skipped 201 | } else if (typeof values[i] === 'boolean') { 202 | if (options && 
options.boolAsString) { 203 | cbValues[i] = values[i] ? 'true' : 'false' 204 | } else { 205 | cbValues[i] = values[i] ? 1 : 0 206 | } 207 | } else { 208 | cbValues[i] = values[i] 209 | } 210 | } 211 | return qs.stringify(cbValues) 212 | } 213 | 214 | const thirtyDaysInSeconds = 30 * 24 * 60 * 60 215 | /** 216 | * @internal 217 | */ 218 | export function expiryToTimestamp(expiry: number): number { 219 | if (typeof expiry !== 'number') { 220 | throw new InvalidArgumentError(new Error('Expected expiry to be a number.')) 221 | } 222 | 223 | if (expiry < 0) { 224 | throw new InvalidArgumentError( 225 | new Error( 226 | `Expected expiry to be either zero (for no expiry) or greater but got ${expiry}.` 227 | ) 228 | ) 229 | } 230 | 231 | if (expiry < thirtyDaysInSeconds) { 232 | return expiry 233 | } 234 | return expiry + Math.floor(Date.now() / 1000) 235 | } 236 | -------------------------------------------------------------------------------- /lib/utilities_internal.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * @internal 3 | */ 4 | export function generateClientString(): string { 5 | // Grab the various versions. Note that we need to trim them 6 | // off as some Node.js versions insert strange characters into 7 | // the version identifiers (mainly newlines and such). 8 | /* eslint-disable-next-line @typescript-eslint/no-var-requires */ 9 | const nodeVer = process.versions.node.trim() 10 | const v8Ver = process.versions.v8.trim() 11 | const sslVer = process.versions.openssl.trim() 12 | 13 | return `node/${nodeVer}; v8/${v8Ver}; ssl/${sslVer}` 14 | } 15 | -------------------------------------------------------------------------------- /lib/vectorsearch.ts: -------------------------------------------------------------------------------- 1 | import { InvalidArgumentError } from './errors' 2 | 3 | /** 4 | * Specifies how multiple vector searches are combined. 
5 | * 6 | * @category Full Text Search 7 | */ 8 | export enum VectorQueryCombination { 9 | /** 10 | * Indicates that multiple vector queries should be combined with logical AND. 11 | */ 12 | AND = 'and', 13 | 14 | /** 15 | * Indicates that multiple vector queries should be combined with logical OR. 16 | */ 17 | OR = 'or', 18 | } 19 | 20 | /** 21 | * @category Full Text Search 22 | */ 23 | export interface VectorSearchOptions { 24 | /** 25 | * Specifies how multiple vector queries should be combined. 26 | */ 27 | vectorQueryCombination?: VectorQueryCombination 28 | } 29 | 30 | /** 31 | * Represents a vector query. 32 | * 33 | * @category Full Text Search 34 | */ 35 | export class VectorQuery { 36 | private _fieldName: string 37 | private _vector: number[] | undefined 38 | private _vectorBase64: string | undefined 39 | private _numCandidates: number | undefined 40 | private _boost: number | undefined 41 | 42 | constructor(fieldName: string, vector: number[] | string) { 43 | if (!fieldName) { 44 | throw new InvalidArgumentError(new Error('Must provide a field name.')) 45 | } 46 | this._fieldName = fieldName 47 | if (!vector) { 48 | throw new InvalidArgumentError( 49 | new Error('Provided vector cannot be empty.') 50 | ) 51 | } 52 | if (Array.isArray(vector)) { 53 | if (vector.length == 0) { 54 | throw new InvalidArgumentError( 55 | new Error('Provided vector cannot be empty.') 56 | ) 57 | } 58 | this._vector = vector 59 | } else if (typeof vector === 'string') { 60 | this._vectorBase64 = vector 61 | } else { 62 | throw new InvalidArgumentError( 63 | new Error( 64 | 'Provided vector must be either a number[] or base64 encoded string.' 65 | ) 66 | ) 67 | } 68 | } 69 | 70 | /** 71 | * @internal 72 | */ 73 | toJSON(): any { 74 | const output: { [key: string]: any } = { 75 | field: this._fieldName, 76 | k: this._numCandidates ?? 
3, 77 | } 78 | if (this._vector) { 79 | output['vector'] = this._vector 80 | } else { 81 | output['vector_base64'] = this._vectorBase64 82 | } 83 | if (this._boost) { 84 | output['boost'] = this._boost 85 | } 86 | return output 87 | } 88 | 89 | /** 90 | * Adds boost option to vector query. 91 | * 92 | * @param boost A floating point value. 93 | */ 94 | boost(boost: number): VectorQuery { 95 | this._boost = boost 96 | return this 97 | } 98 | 99 | /** 100 | * Adds numCandidates option to vector query. Value must be >= 1. 101 | * 102 | * @param numCandidates An integer value. 103 | */ 104 | numCandidates(numCandidates: number): VectorQuery { 105 | if (numCandidates < 1) { 106 | throw new InvalidArgumentError( 107 | new Error('Provided value for numCandidates must be >= 1.') 108 | ) 109 | } 110 | this._numCandidates = numCandidates 111 | return this 112 | } 113 | 114 | /** 115 | * Creates a vector query. 116 | * 117 | * @param fieldName The name of the field in the JSON document that holds the vector. 118 | * @param vector List of floating point values that represent the vector. 119 | */ 120 | static create(fieldName: string, vector: number[] | string): VectorQuery { 121 | return new VectorQuery(fieldName, vector) 122 | } 123 | } 124 | 125 | /** 126 | * Represents a vector search. 
127 | * 128 | * @category Full Text Search 129 | */ 130 | export class VectorSearch { 131 | private _queries: VectorQuery[] 132 | private _options: VectorSearchOptions | undefined 133 | 134 | constructor(queries: VectorQuery[], options?: VectorSearchOptions) { 135 | if (!Array.isArray(queries) || queries.length == 0) { 136 | throw new InvalidArgumentError( 137 | new Error('Provided queries must be an array and cannot be empty.') 138 | ) 139 | } 140 | if (!queries.every((q) => q instanceof VectorQuery)) { 141 | throw new InvalidArgumentError( 142 | new Error('All provided queries must be a VectorQuery.') 143 | ) 144 | } 145 | this._queries = queries 146 | this._options = options 147 | } 148 | 149 | /** 150 | * @internal 151 | */ 152 | get queries(): VectorQuery[] { 153 | return this._queries 154 | } 155 | 156 | /** 157 | * @internal 158 | */ 159 | get options(): VectorSearchOptions | undefined { 160 | return this._options 161 | } 162 | 163 | /** 164 | * Creates a vector search from a single VectorQuery. 165 | * 166 | * @param query A vectory query that should be a part of the vector search. 
167 | */ 168 | static fromVectorQuery(query: VectorQuery): VectorSearch { 169 | return new VectorSearch([query]) 170 | } 171 | } 172 | -------------------------------------------------------------------------------- /lib/viewexecutor.ts: -------------------------------------------------------------------------------- 1 | /* eslint jsdoc/require-jsdoc: off */ 2 | import { 3 | designDocumentNamespaceToCpp, 4 | errorFromCpp, 5 | viewOrderingToCpp, 6 | viewScanConsistencyToCpp, 7 | } from './bindingutilities' 8 | import { Bucket } from './bucket' 9 | import { Cluster } from './cluster' 10 | import { StreamableRowPromise } from './streamablepromises' 11 | import { 12 | DesignDocumentNamespace, 13 | ViewMetaData, 14 | ViewQueryOptions, 15 | ViewResult, 16 | ViewRow, 17 | } from './viewtypes' 18 | 19 | /** 20 | * @internal 21 | */ 22 | export class ViewExecutor { 23 | private _bucket: Bucket 24 | 25 | /** 26 | * @internal 27 | */ 28 | constructor(bucket: Bucket) { 29 | this._bucket = bucket 30 | } 31 | 32 | /** 33 | @internal 34 | */ 35 | get _cluster(): Cluster { 36 | return this._bucket.cluster 37 | } 38 | 39 | /** 40 | * @internal 41 | */ 42 | query( 43 | designDoc: string, 44 | viewName: string, 45 | options: ViewQueryOptions 46 | ): StreamableRowPromise< 47 | ViewResult, 48 | ViewRow, 49 | ViewMetaData 50 | > { 51 | const emitter = new StreamableRowPromise< 52 | ViewResult, 53 | ViewRow, 54 | ViewMetaData 55 | >((rows, meta) => { 56 | return new ViewResult({ 57 | rows: rows, 58 | meta: meta, 59 | }) 60 | }) 61 | 62 | const timeout = options.timeout || this._cluster.viewTimeout 63 | const raw = options.raw || {} 64 | const ns = options.namespace ?? 
DesignDocumentNamespace.Production 65 | let fullSet = options.full_set 66 | if (typeof options.fullSet !== 'undefined') { 67 | fullSet = options.fullSet 68 | } 69 | 70 | this._cluster.conn.documentView( 71 | { 72 | timeout: timeout, 73 | bucket_name: this._bucket.name, 74 | document_name: designDoc, 75 | view_name: viewName, 76 | ns: designDocumentNamespaceToCpp(ns), 77 | limit: options.limit, 78 | skip: options.skip, 79 | consistency: viewScanConsistencyToCpp(options.scanConsistency), 80 | keys: options.keys ? options.keys.map((k) => JSON.stringify(k)) : [], 81 | key: JSON.stringify(options.key), 82 | start_key: 83 | options.range && options.range.start 84 | ? JSON.stringify(options.range.start) 85 | : undefined, 86 | end_key: 87 | options.range && options.range.end 88 | ? JSON.stringify(options.range.end) 89 | : undefined, 90 | inclusive_end: options.range ? options.range.inclusiveEnd : undefined, 91 | start_key_doc_id: 92 | options.idRange && options.idRange.start 93 | ? options.idRange.start 94 | : undefined, 95 | end_key_doc_id: 96 | options.idRange && options.idRange.end 97 | ? 
options.idRange.end 98 | : undefined, 99 | reduce: options.reduce, 100 | group: options.group, 101 | group_level: options.groupLevel, 102 | order: viewOrderingToCpp(options.order), 103 | debug: false, 104 | query_string: [], 105 | raw: raw, 106 | full_set: fullSet, 107 | }, 108 | (cppErr, resp) => { 109 | const err = errorFromCpp(cppErr) 110 | if (err) { 111 | emitter.emit('error', err) 112 | emitter.emit('end') 113 | return 114 | } 115 | 116 | resp.rows.forEach((row) => { 117 | emitter.emit( 118 | 'row', 119 | new ViewRow({ 120 | value: JSON.parse(row.value), 121 | id: row.id, 122 | key: JSON.parse(row.key), 123 | }) 124 | ) 125 | }) 126 | 127 | { 128 | const metaData = resp.meta 129 | 130 | const meta = new ViewMetaData({ 131 | totalRows: metaData.total_rows, 132 | debug: metaData.debug_info, 133 | }) 134 | 135 | emitter.emit('meta', meta) 136 | } 137 | 138 | emitter.emit('end') 139 | return 140 | } 141 | ) 142 | 143 | return emitter 144 | } 145 | } 146 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "bugs": { 3 | "url": "http://www.couchbase.com/issues/browse/JSCBC" 4 | }, 5 | "description": "The official Couchbase Node.js Client Library.", 6 | "engines": { 7 | "node": ">=16" 8 | }, 9 | "homepage": "http://www.couchbase.com/communities/nodejs", 10 | "keywords": [ 11 | "couchbase", 12 | "libcouchbase", 13 | "memcached", 14 | "nosql", 15 | "json", 16 | "document" 17 | ], 18 | "main": "dist/couchbase.js", 19 | "types": "dist/couchbase.d.ts", 20 | "license": "Apache-2.0", 21 | "name": "couchbase", 22 | "dependencies": { 23 | "cmake-js": "^7.3.1", 24 | "node-addon-api": "^8.3.1" 25 | }, 26 | "devDependencies": { 27 | "@trivago/prettier-plugin-sort-imports": "^5.2.2", 28 | "@tsconfig/node12": "^12.1.4", 29 | "@types/bindings": "^1.5.5", 30 | "@types/debug": "^4.1.12", 31 | "@types/node": "^22.14.1", 32 | 
"@typescript-eslint/eslint-plugin": "^7.13.1", 33 | "@typescript-eslint/parser": "^7.13.1", 34 | "chai": "^4.5.0", 35 | "eslint": "^8.57.0", 36 | "eslint-config-prettier": "^9.1.0", 37 | "eslint-plugin-jsdoc": "^48.2.12", 38 | "eslint-plugin-mocha": "^10.5.0", 39 | "eslint-plugin-node": "^11.1.0", 40 | "expose-gc": "^1.0.0", 41 | "mocha": "^11.1.0", 42 | "npm-check-updates": "^18.0.0", 43 | "nyc": "^17.1.0", 44 | "prettier": "^3.5.3", 45 | "segfault-handler": "^1.3.0", 46 | "semver": "^7.7.1", 47 | "ts-mocha": "^11.1.0", 48 | "ts-node": "^10.9.2", 49 | "typedoc": "^0.28.3", 50 | "typescript": "~5.4.5", 51 | "uuid": "^11.1.0" 52 | }, 53 | "repository": { 54 | "type": "git", 55 | "url": "http://github.com/couchbase/couchnode.git" 56 | }, 57 | "version": "4.5.0-dev", 58 | "config": { 59 | "native": false 60 | }, 61 | "scripts": { 62 | "install": "node ./scripts/install.js", 63 | "build": "cmake-js build && tsc", 64 | "rebuild": "cmake-js rebuild && tsc", 65 | "prebuild": "node ./scripts/buildPrebuild.js", 66 | "prepare": "tsc", 67 | "help-prune": "node ./scripts/prune.js", 68 | "build-docs": "typedoc", 69 | "test": "ts-mocha test/*.test.*", 70 | "test-fast": "ts-mocha test/*.test.* -ig '(slow)'", 71 | "cover": "nyc ts-mocha test/*.test.*", 72 | "cover-fast": "nyc ts-mocha test/*.test.* -ig '(slow)'", 73 | "lint": "eslint ./lib/ ./test/", 74 | "check-deps": "ncu" 75 | }, 76 | "binary": { 77 | "napi_versions": [ 78 | 6 79 | ] 80 | }, 81 | "optionalDependencies": {}, 82 | "files": [ 83 | "LICENSE", 84 | "CMakeLists.txt", 85 | "CONTRIBUTING.md", 86 | "package.json", 87 | "README.md", 88 | "BUILDING.md", 89 | "couchbase-sdk-nodejs-black-duck-manifest.yaml", 90 | "scripts/*.js", 91 | "src/*.{c,h}pp", 92 | "dist/*.{t,j}s", 93 | "tools/*.{py,js}", 94 | "deps/couchbase-cxx-client/CMakeLists.txt", 95 | "deps/couchbase-cxx-client/LICENSE.txt", 96 | "deps/couchbase-cxx-client/couchbase-sdk-cxx-black-duck-manifest.yaml", 97 | "deps/couchbase-cxx-client/cmake/**", 98 | 
"deps/couchbase-cxx-client/core/**/*", 99 | "deps/couchbase-cxx-client/couchbase/**/*", 100 | "deps/couchbase-cxx-client/third_party/expected/COPYING", 101 | "deps/couchbase-cxx-client/third_party/expected/include/**/*", 102 | "deps/couchbase-cxx-client/third_party/jsonsl/*", 103 | "deps/couchbase-cxx-cache/json/*/json/CMakeLists.txt", 104 | "deps/couchbase-cxx-cache/json/*/json/LICENSE*", 105 | "deps/couchbase-cxx-cache/json/*/json/external/PEGTL/.cmake/**", 106 | "deps/couchbase-cxx-cache/json/*/json/external/PEGTL/CMakeLists.txt", 107 | "deps/couchbase-cxx-cache/json/*/json/external/PEGTL/LICENSE*", 108 | "deps/couchbase-cxx-cache/json/*/json/external/PEGTL/include/**/*", 109 | "deps/couchbase-cxx-cache/json/*/json/include/**/*", 110 | "deps/couchbase-cxx-cache/cpm/*.cmake", 111 | "deps/couchbase-cxx-cache/asio/*/LICENSE*", 112 | "deps/couchbase-cxx-cache/asio/*/asio/COPYING", 113 | "deps/couchbase-cxx-cache/asio/*/asio/asio/include/*.hpp", 114 | "deps/couchbase-cxx-cache/asio/*/asio/asio/include/asio/**/*.[hi]pp", 115 | "deps/couchbase-cxx-cache/asio/*/asio/asio/src/asio.cpp", 116 | "deps/couchbase-cxx-cache/asio/*/asio/asio/src/asio_ssl.cpp", 117 | "deps/couchbase-cxx-cache/boringssl/*/boringssl/**/*.{cc,h,c,asm,S}", 118 | "deps/couchbase-cxx-cache/boringssl/*/boringssl/**/CMakeLists.txt", 119 | "deps/couchbase-cxx-cache/boringssl/*/boringssl/LICENSE", 120 | "deps/couchbase-cxx-cache/gsl/*/gsl/CMakeLists.txt", 121 | "deps/couchbase-cxx-cache/gsl/*/gsl/GSL.natvis", 122 | "deps/couchbase-cxx-cache/gsl/*/gsl/LICENSE*", 123 | "deps/couchbase-cxx-cache/gsl/*/gsl/ThirdPartyNotices.txt", 124 | "deps/couchbase-cxx-cache/gsl/*/gsl/cmake/**", 125 | "deps/couchbase-cxx-cache/gsl/*/gsl/include/**/*", 126 | "deps/couchbase-cxx-cache/hdr_histogram/*/hdr_histogram/*.pc.in", 127 | "deps/couchbase-cxx-cache/hdr_histogram/*/hdr_histogram/CMakeLists.txt", 128 | "deps/couchbase-cxx-cache/hdr_histogram/*/hdr_histogram/COPYING.txt", 129 | 
"deps/couchbase-cxx-cache/hdr_histogram/*/hdr_histogram/LICENSE.txt", 130 | "deps/couchbase-cxx-cache/hdr_histogram/*/hdr_histogram/cmake/**", 131 | "deps/couchbase-cxx-cache/hdr_histogram/*/hdr_histogram/config.cmake.in", 132 | "deps/couchbase-cxx-cache/hdr_histogram/*/hdr_histogram/include/**/*", 133 | "deps/couchbase-cxx-cache/hdr_histogram/*/hdr_histogram/src/**/*", 134 | "deps/couchbase-cxx-cache/llhttp/*/llhttp/*.pc.in", 135 | "deps/couchbase-cxx-cache/llhttp/*/llhttp/CMakeLists.txt", 136 | "deps/couchbase-cxx-cache/llhttp/*/llhttp/LICENSE*", 137 | "deps/couchbase-cxx-cache/llhttp/*/llhttp/include/*.h", 138 | "deps/couchbase-cxx-cache/llhttp/*/llhttp/src/*.c", 139 | "deps/couchbase-cxx-cache/snappy/*/snappy/CMakeLists.txt", 140 | "deps/couchbase-cxx-cache/snappy/*/snappy/COPYING", 141 | "deps/couchbase-cxx-cache/snappy/*/snappy/cmake/**", 142 | "deps/couchbase-cxx-cache/snappy/*/snappy/snappy-c.{h,cc}", 143 | "deps/couchbase-cxx-cache/snappy/*/snappy/snappy-internal.h", 144 | "deps/couchbase-cxx-cache/snappy/*/snappy/snappy-sinksource.{h,cc}", 145 | "deps/couchbase-cxx-cache/snappy/*/snappy/snappy-stubs-internal.{h,cc}", 146 | "deps/couchbase-cxx-cache/snappy/*/snappy/snappy-stubs-public.h.in", 147 | "deps/couchbase-cxx-cache/snappy/*/snappy/snappy.{h,cc}", 148 | "deps/couchbase-cxx-cache/spdlog/*/spdlog/CMakeLists.txt", 149 | "deps/couchbase-cxx-cache/spdlog/*/spdlog/LICENSE", 150 | "deps/couchbase-cxx-cache/spdlog/*/spdlog/cmake/**", 151 | "deps/couchbase-cxx-cache/spdlog/*/spdlog/include/**/*", 152 | "deps/couchbase-cxx-cache/spdlog/*/spdlog/src/**/*", 153 | "deps/couchbase-cxx-cache/mozilla-ca-bundle.*" 154 | ] 155 | } 156 | -------------------------------------------------------------------------------- /scripts/buildPrebuild.js: -------------------------------------------------------------------------------- 1 | const prebuilds = require('./prebuilds') 2 | 3 | function buildPrebuild( 4 | runtime, 5 | runtimeVersion, 6 | useOpenSSL, 7 | configure, 8 | 
setCpmCache, 9 | useCmakeJsCompile, 10 | cmakeParallel 11 | ) { 12 | runtime = runtime || process.env.CN_PREBUILD_RUNTIME || 'node' 13 | runtimeVersion = 14 | runtimeVersion || 15 | process.env.CN_PREBUILD_RUNTIME_VERSION || 16 | process.version.replace('v', '') 17 | 18 | if (typeof useOpenSSL === 'undefined') { 19 | useOpenSSL = prebuilds.ENV_TRUE.includes( 20 | (process.env.CN_USE_OPENSSL || 'true').toLowerCase() 21 | ) 22 | } 23 | 24 | // we only want to configure if setting the CPM cache 25 | if (configure || setCpmCache) { 26 | prebuilds.configureBinary( 27 | runtime, 28 | runtimeVersion, 29 | useOpenSSL, 30 | setCpmCache, 31 | cmakeParallel 32 | ) 33 | } else { 34 | prebuilds.buildBinary( 35 | runtime, 36 | runtimeVersion, 37 | useOpenSSL, 38 | useCmakeJsCompile, 39 | cmakeParallel 40 | ) 41 | } 42 | } 43 | 44 | let configurePrebuild = false 45 | let setCpmCache = false 46 | let useCmakeJsCompile = true 47 | let runtime, runtimeVersion, useOpenSSL 48 | let cmakeParallel = 4 49 | const args = process.argv.slice(2) 50 | if (args.length > 0) { 51 | // --configure 52 | if (args.includes('--configure')) { 53 | configurePrebuild = true 54 | } 55 | 56 | // --set-cpm-cache 57 | if (args.includes('--set-cpm-cache')) { 58 | setCpmCache = true 59 | } 60 | 61 | // --runtime=[node|electron] OR --runtime [node|electron] 62 | const runtimeIdx = args.findIndex((a) => { 63 | return a.includes('runtime') && !a.includes('version') 64 | }) 65 | if (runtimeIdx >= 0) { 66 | let rt = undefined 67 | if (args[runtimeIdx].includes('=')) { 68 | rt = args[runtimeIdx].split('=')[1] 69 | } else if (args.length - 1 >= runtimeIdx + 1) { 70 | rt = args[runtimeIdx + 1] 71 | } 72 | 73 | if (rt && ['node', 'electron'].includes(rt)) { 74 | runtime = rt 75 | } 76 | } 77 | 78 | // --runtime-version=<> OR --runtime-version <> 79 | const runtimeVersionIdx = args.findIndex((a) => a.includes('runtime-version')) 80 | if (runtimeVersionIdx >= 0) { 81 | let rtv = undefined 82 | if 
(args[runtimeVersionIdx].includes('=')) { 83 | rtv = args[runtimeVersionIdx].split('=')[1] 84 | } else if (args.length - 1 >= runtimeVersionIdx + 1) { 85 | rtv = args[runtimeVersionIdx + 1] 86 | } 87 | 88 | if (rtv) { 89 | const tokens = rtv.split('.') 90 | if (tokens.length == 3 && tokens.every((t) => !isNaN(parseInt(t)))) { 91 | runtimeVersion = rtv 92 | } 93 | } 94 | } 95 | 96 | // --use-boringssl or --use-openssl 97 | if (args.includes('--use-boringssl') && args.includes('--use-openssl')) { 98 | throw new Error('Cannot set both BoringSSL and OpenSSL to be used.') 99 | } 100 | 101 | if (args.includes('--use-boringssl')) { 102 | useOpenSSL = false 103 | } 104 | 105 | if (args.includes('--use-openssl')) { 106 | useOpenSSL = true 107 | } 108 | 109 | // --parallel=<> OR --parallel <> 110 | const parallelIdx = args.findIndex((a) => a.includes('parallel')) 111 | if (parallelIdx >= 0) { 112 | let pv = undefined 113 | if (args[parallelIdx].includes('=')) { 114 | pv = args[parallelIdx].split('=')[1] 115 | } else if (args.length - 1 >= parallelIdx + 1) { 116 | pv = args[parallelIdx + 1] 117 | } 118 | 119 | if (pv && !isNaN(parseInt(pv))) { 120 | const os = require('os') 121 | const pvi = parseInt(pv) 122 | if (pvi <= os.cpus().length) { 123 | cmakeParallel = pvi 124 | } 125 | } 126 | } 127 | 128 | if (args.includes('--use-cmakejs-build')) { 129 | useCmakeJsCompile = false 130 | } 131 | } 132 | 133 | buildPrebuild( 134 | runtime, 135 | runtimeVersion, 136 | useOpenSSL, 137 | configurePrebuild, 138 | setCpmCache, 139 | useCmakeJsCompile, 140 | cmakeParallel 141 | ) 142 | -------------------------------------------------------------------------------- /scripts/install.js: -------------------------------------------------------------------------------- 1 | const path = require('path') 2 | const prebuilds = require('./prebuilds') 3 | 4 | if (hasLocalPrebuild()) { 5 | const destination = path.join( 6 | path.resolve(__dirname, '..'), 7 | 'build', 8 | 'Release' 9 | ) 10 | const 
source = getLocalPrebuild() 11 | // on either success or failure of resolving local prebuild we still confirm we have a prebuild 12 | prebuilds.resolveLocalPrebuild(source, destination).then(installPrebuild()) 13 | } else { 14 | installPrebuild() 15 | } 16 | 17 | function getLocalPrebuild() { 18 | const localPrebuildsName = `npm_config_couchbase_local_prebuilds` 19 | return process.env[localPrebuildsName] 20 | } 21 | 22 | function hasLocalPrebuild() { 23 | return typeof getLocalPrebuild() === 'string' 24 | } 25 | 26 | function installPrebuild() { 27 | try { 28 | prebuilds.resolvePrebuild(path.resolve(__dirname, '..'), { 29 | runtimeResolve: false, 30 | }) 31 | process.exit(0) 32 | } catch (err) { 33 | prebuilds.buildBinary() 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /scripts/prune.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs') 2 | const path = require('path') 3 | const prebuilds = require('./prebuilds') 4 | 5 | function getMismatchedPlatformPackagesInfo( 6 | platformPackagesDir, 7 | expectedPlatformPackage 8 | ) { 9 | let message = `Checking for platform packages in ${platformPackagesDir} ` 10 | message += `that do not match the expected platform package (${expectedPlatformPackage}).` 11 | console.log(message) 12 | let mismatches = [] 13 | try { 14 | const files = fs.readdirSync(platformPackagesDir) 15 | files.forEach((file) => { 16 | if (file === expectedPlatformPackage) { 17 | return 18 | } 19 | const stats = fs.statSync(path.join(platformPackagesDir, file)) 20 | if (!stats.isDirectory()) { 21 | return 22 | } 23 | const filePath = path.join(platformPackagesDir, file) 24 | const size = getDirectorySize(filePath) 25 | console.log(`Found mismatch: Path=${filePath}`) 26 | const platformPackage = path.basename(filePath) 27 | mismatches.push({ 28 | name: platformPackage, 29 | dir: filePath, 30 | size: size, 31 | }) 32 | }) 33 | } catch (err) { 34 
| console.error(`Error trying to delete mismatched platform packages.`, err) 35 | } 36 | return mismatches 37 | } 38 | 39 | function getDirectorySize(dir) { 40 | let size = 0 41 | const dirContents = fs.readdirSync(dir) 42 | dirContents.forEach((content) => { 43 | const contentPath = path.join(dir, content) 44 | const stats = fs.statSync(contentPath) 45 | if (stats.isFile()) { 46 | size += stats.size 47 | } else if (stats.isDirectory()) { 48 | size += getDirectorySize(contentPath) 49 | } 50 | }) 51 | 52 | return size 53 | } 54 | 55 | function getPrebuildsInfo() { 56 | try { 57 | const prebuildsInfo = prebuilds.getPrebuildsInfo() 58 | return prebuildsInfo 59 | } catch (err) { 60 | console.error('Error trying to obtain couchbase prebuilds info.', err) 61 | return undefined 62 | } 63 | } 64 | 65 | function pruneCouchbaseHelp() { 66 | const prebuildsInfo = getPrebuildsInfo() 67 | const platformPackagesDir = path.dirname(prebuildsInfo.platformPackageDir) 68 | const expectedPlatformPackage = path.basename( 69 | prebuildsInfo.platformPackageDir 70 | ) 71 | 72 | const mismatchedPlatPkgs = getMismatchedPlatformPackagesInfo( 73 | platformPackagesDir, 74 | expectedPlatformPackage 75 | ) 76 | const cbDeps = { 77 | dir: path.join(prebuildsInfo.packageDir, 'deps'), 78 | size: undefined, 79 | } 80 | const cbSrc = { 81 | dir: path.join(prebuildsInfo.packageDir, 'src'), 82 | size: undefined, 83 | } 84 | try { 85 | cbDeps.size = getDirectorySize(cbDeps.dir) 86 | } catch (_) { 87 | console.log('Couchbase deps/ not found.') 88 | } 89 | try { 90 | cbSrc.size = getDirectorySize(cbSrc.dir) 91 | } catch (_) { 92 | console.log('Couchbase src/ not found.') 93 | } 94 | 95 | console.log('\nRecommendations for pruning:\n') 96 | if (mismatchedPlatPkgs.length > 0) { 97 | for (const pkg of mismatchedPlatPkgs) { 98 | const sizeMb = pkg.size / 1024 / 1024 99 | console.log( 100 | `Removing mismatched platform=${pkg.name} (path=${ 101 | pkg.dir 102 | }) saves ~${sizeMb.toFixed(2)} MB on disk.` 103 | 
) 104 | } 105 | } 106 | if (cbDeps.size) { 107 | const sizeMb = cbDeps.size / 1024 / 1024 108 | console.log( 109 | `Removing Couchbase deps/ (path=${cbDeps.dir}) saves ~${sizeMb.toFixed( 110 | 2 111 | )} MB on disk.` 112 | ) 113 | } 114 | if (cbSrc.size) { 115 | const sizeMb = cbSrc.size / 1024 / 1024 116 | console.log( 117 | `Removing Couchbase src/ (path=${cbSrc.dir}) saves ~${sizeMb.toFixed( 118 | 2 119 | )} MB on disk.` 120 | ) 121 | } 122 | } 123 | 124 | pruneCouchbaseHelp() 125 | -------------------------------------------------------------------------------- /src/addondata.cpp: -------------------------------------------------------------------------------- 1 | #include "addondata.hpp" 2 | -------------------------------------------------------------------------------- /src/addondata.hpp: -------------------------------------------------------------------------------- 1 | #pragma once 2 | #include 3 | 4 | namespace couchnode 5 | { 6 | 7 | class AddonData 8 | { 9 | public: 10 | static inline void Init(Napi::Env env, Napi::Object exports) 11 | { 12 | env.SetInstanceData(new AddonData()); 13 | } 14 | 15 | static inline AddonData *fromEnv(Napi::Env &env) 16 | { 17 | return env.GetInstanceData(); 18 | } 19 | 20 | Napi::FunctionReference _connectionCtor; 21 | Napi::FunctionReference _casCtor; 22 | Napi::FunctionReference _mutationTokenCtor; 23 | Napi::FunctionReference _transactionsCtor; 24 | Napi::FunctionReference _transactionCtor; 25 | Napi::FunctionReference _scanIteratorCtor; 26 | }; 27 | 28 | } // namespace couchnode 29 | -------------------------------------------------------------------------------- /src/binding.cpp: -------------------------------------------------------------------------------- 1 | #include "addondata.hpp" 2 | #include "cas.hpp" 3 | #include "connection.hpp" 4 | #include "constants.hpp" 5 | #include "mutationtoken.hpp" 6 | #include "scan_iterator.hpp" 7 | #include "transaction.hpp" 8 | #include "transactions.hpp" 9 | #include 10 | 
#include 11 | #include 12 | 13 | namespace couchnode 14 | { 15 | 16 | Napi::Value enable_protocol_logger(const Napi::CallbackInfo &info) 17 | { 18 | try { 19 | auto filename = info[0].ToString().Utf8Value(); 20 | couchbase::core::logger::configuration configuration{}; 21 | configuration.filename = filename; 22 | couchbase::core::logger::create_protocol_logger(configuration); 23 | } catch (...) { 24 | return Napi::Error::New(info.Env(), "Unexpected C++ error").Value(); 25 | } 26 | return info.Env().Null(); 27 | } 28 | 29 | Napi::Value shutdown_logger(const Napi::CallbackInfo &info) 30 | { 31 | try { 32 | couchbase::core::logger::shutdown(); 33 | } catch (...) { 34 | return Napi::Error::New(info.Env(), "Unexpected C++ error").Value(); 35 | } 36 | return info.Env().Null(); 37 | } 38 | 39 | Napi::Object Init(Napi::Env env, Napi::Object exports) 40 | { 41 | auto cbppLogLevel = couchbase::core::logger::level::off; 42 | { 43 | const char *logLevelCstr = getenv("CBPPLOGLEVEL"); 44 | if (logLevelCstr) { 45 | std::string logLevelStr = logLevelCstr; 46 | if (logLevelStr == "trace") { 47 | cbppLogLevel = couchbase::core::logger::level::trace; 48 | } else if (logLevelStr == "debug") { 49 | cbppLogLevel = couchbase::core::logger::level::debug; 50 | } else if (logLevelStr == "info") { 51 | cbppLogLevel = couchbase::core::logger::level::info; 52 | } else if (logLevelStr == "warn") { 53 | cbppLogLevel = couchbase::core::logger::level::warn; 54 | } else if (logLevelStr == "err") { 55 | cbppLogLevel = couchbase::core::logger::level::err; 56 | } else if (logLevelStr == "critical") { 57 | cbppLogLevel = couchbase::core::logger::level::critical; 58 | } 59 | } 60 | } 61 | 62 | if (cbppLogLevel != couchbase::core::logger::level::off) { 63 | const char *logFileCstr = getenv("CBPPLOGFILE"); 64 | if (logFileCstr) { 65 | std::string logFileStr = logFileCstr; 66 | couchbase::core::logger::configuration configuration{}; 67 | configuration.filename = logFileStr; 68 | configuration.log_level = 
cbppLogLevel; 69 | const char *enableConsoleLoggingCstr = getenv("CBPPENABLECONSOLE"); 70 | configuration.console = enableConsoleLoggingCstr != nullptr; 71 | couchbase::core::logger::create_file_logger(configuration); 72 | 73 | } else { 74 | couchbase::core::logger::create_console_logger(); 75 | couchbase::core::logger::set_log_levels(cbppLogLevel); 76 | } 77 | } 78 | 79 | AddonData::Init(env, exports); 80 | Constants::Init(env, exports); 81 | Cas::Init(env, exports); 82 | MutationToken::Init(env, exports); 83 | Connection::Init(env, exports); 84 | Transactions::Init(env, exports); 85 | Transaction::Init(env, exports); 86 | ScanIterator::Init(env, exports); 87 | 88 | exports.Set(Napi::String::New(env, "cbppVersion"), 89 | Napi::String::New(env, "1.0.0-beta")); 90 | exports.Set( 91 | Napi::String::New(env, "cbppMetadata"), 92 | Napi::String::New(env, couchbase::core::meta::sdk_build_info_json())); 93 | exports.Set(Napi::String::New(env, "enableProtocolLogger"), 94 | Napi::Function::New(env)); 95 | exports.Set(Napi::String::New(env, "shutdownLogger"), 96 | Napi::Function::New(env)); 97 | return exports; 98 | } 99 | 100 | } // namespace couchnode 101 | 102 | Napi::Object Init(Napi::Env env, Napi::Object exports) 103 | { 104 | return couchnode::Init(env, exports); 105 | } 106 | NODE_API_MODULE(couchbase_impl, Init) 107 | -------------------------------------------------------------------------------- /src/cas.cpp: -------------------------------------------------------------------------------- 1 | #include "cas.hpp" 2 | #include "utils.hpp" 3 | #include 4 | 5 | namespace couchnode 6 | { 7 | 8 | void Cas::Init(Napi::Env env, Napi::Object exports) 9 | { 10 | Napi::Function func = 11 | DefineClass(env, "Cas", 12 | { 13 | InstanceMethod<&Cas::jsToString>("toString"), 14 | InstanceMethod<&Cas::jsToString>("toJSON"), 15 | InstanceMethod<&Cas::jsInspect>(utils::napiGetSymbol( 16 | env, "nodejs.util.inspect.custom")), 17 | }); 18 | 19 | constructor(env) = 
Napi::Persistent(func); 20 | exports.Set("Cas", func); 21 | } 22 | 23 | Cas::Cas(const Napi::CallbackInfo &info) 24 | : Napi::ObjectWrap(info) 25 | { 26 | if (info[0].IsBuffer()) { 27 | info.This().As().Set("raw", info[0]); 28 | return; 29 | } 30 | 31 | couchbase::cas cas = Cas::parse(info[0]); 32 | auto rawBytesVal = Cas::toBuffer(info.Env(), cas); 33 | info.This().As().Set("raw", rawBytesVal); 34 | } 35 | 36 | Cas::~Cas() 37 | { 38 | } 39 | 40 | Napi::Value Cas::toBuffer(Napi::Env env, couchbase::cas cas) 41 | { 42 | return utils::napiDataToBuffer(env, cas); 43 | } 44 | 45 | couchbase::cas Cas::fromBuffer(Napi::Value val) 46 | { 47 | return utils::napiBufferToData(val); 48 | } 49 | 50 | Napi::Value Cas::create(Napi::Env env, couchbase::cas cas) 51 | { 52 | auto rawBytesVal = Cas::toBuffer(env, cas); 53 | return Cas::constructor(env).New({rawBytesVal}); 54 | } 55 | 56 | couchbase::cas Cas::parse(Napi::Value val) 57 | { 58 | if (val.IsNull()) { 59 | return couchbase::cas{0}; 60 | } else if (val.IsUndefined()) { 61 | return couchbase::cas{0}; 62 | } else if (val.IsString()) { 63 | auto textVal = val.ToString().Utf8Value(); 64 | auto intVal = std::stoull(textVal); 65 | return couchbase::cas{intVal}; 66 | } else if (val.IsBuffer()) { 67 | return Cas::fromBuffer(val); 68 | } else if (val.IsObject()) { 69 | auto objVal = val.As(); 70 | auto maybeRawVal = objVal.Get("raw"); 71 | if (!maybeRawVal.IsEmpty()) { 72 | return Cas::fromBuffer(maybeRawVal); 73 | } 74 | } 75 | 76 | return couchbase::cas{0}; 77 | } 78 | 79 | Napi::Value Cas::jsToString(const Napi::CallbackInfo &info) 80 | { 81 | auto cas = Cas::parse(info.This()); 82 | 83 | std::stringstream stream; 84 | stream << cas.value(); 85 | return Napi::String::New(info.Env(), stream.str()); 86 | } 87 | 88 | Napi::Value Cas::jsInspect(const Napi::CallbackInfo &info) 89 | { 90 | auto cas = Cas::parse(info.This()); 91 | 92 | std::stringstream stream; 93 | stream << "Cas<" << cas.value() << ">"; 94 | return 
Napi::String::New(info.Env(), stream.str()); 95 | } 96 | 97 | } // namespace couchnode 98 | -------------------------------------------------------------------------------- /src/cas.hpp: -------------------------------------------------------------------------------- 1 | #pragma once 2 | #include "addondata.hpp" 3 | #include "napi.h" 4 | #include 5 | 6 | namespace couchbase 7 | { 8 | typedef couchbase::cas cas; 9 | } 10 | 11 | namespace couchnode 12 | { 13 | 14 | class Cas : public Napi::ObjectWrap 15 | { 16 | public: 17 | static Napi::FunctionReference &constructor(Napi::Env env) 18 | { 19 | return AddonData::fromEnv(env)->_casCtor; 20 | } 21 | 22 | static void Init(Napi::Env env, Napi::Object exports); 23 | 24 | static Napi::Value toBuffer(Napi::Env env, couchbase::cas cas); 25 | static couchbase::cas fromBuffer(Napi::Value val); 26 | static Napi::Value create(Napi::Env env, couchbase::cas cas); 27 | static couchbase::cas parse(Napi::Value val); 28 | 29 | Cas(const Napi::CallbackInfo &info); 30 | ~Cas(); 31 | 32 | Napi::Value jsToString(const Napi::CallbackInfo &info); 33 | Napi::Value jsInspect(const Napi::CallbackInfo &info); 34 | 35 | private: 36 | }; 37 | 38 | } // namespace couchnode 39 | -------------------------------------------------------------------------------- /src/constants.hpp: -------------------------------------------------------------------------------- 1 | #pragma once 2 | #include 3 | 4 | namespace couchnode 5 | { 6 | 7 | class Constants 8 | { 9 | public: 10 | static void Init(Napi::Env env, Napi::Object exports); 11 | static void InitAutogen(Napi::Env env, Napi::Object exports); 12 | }; 13 | 14 | } // namespace couchnode 15 | -------------------------------------------------------------------------------- /src/instance.cpp: -------------------------------------------------------------------------------- 1 | #include "instance.hpp" 2 | 3 | namespace couchnode 4 | { 5 | 6 | Instance::Instance() 7 | : _cluster(couchbase::core::cluster(_io)) 8 | 
{ 9 | _ioThread = std::thread([this]() { 10 | try { 11 | _io.run(); 12 | } catch (const std::exception &e) { 13 | CB_LOG_ERROR(e.what()); 14 | throw; 15 | } catch (...) { 16 | CB_LOG_ERROR("Unknown exception"); 17 | throw; 18 | } 19 | }); 20 | } 21 | 22 | Instance::~Instance() 23 | { 24 | } 25 | 26 | void Instance::asyncDestroy() 27 | { 28 | _cluster.close([this]() mutable { 29 | // We have to run this on a separate thread since the callback itself is 30 | // actually running from within the io context. 31 | std::thread([this]() { 32 | _ioThread.join(); 33 | delete this; 34 | }).detach(); 35 | }); 36 | } 37 | 38 | } // namespace couchnode 39 | -------------------------------------------------------------------------------- /src/instance.hpp: -------------------------------------------------------------------------------- 1 | #pragma once 2 | #include 3 | #include 4 | #include 5 | #include 6 | #include 7 | 8 | namespace couchnode 9 | { 10 | 11 | class Instance 12 | { 13 | private: 14 | ~Instance(); 15 | 16 | public: 17 | Instance(); 18 | 19 | void asyncDestroy(); 20 | 21 | asio::io_context _io; 22 | std::thread _ioThread; 23 | couchbase::core::cluster _cluster; 24 | }; 25 | 26 | } // namespace couchnode 27 | -------------------------------------------------------------------------------- /src/jstocbpp.hpp: -------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | #include "jstocbpp_defs.hpp" 4 | 5 | #include "jstocbpp_autogen.hpp" 6 | #include "jstocbpp_basic.hpp" 7 | #include "jstocbpp_cpptypes.hpp" 8 | #include "jstocbpp_errors.hpp" 9 | #include "jstocbpp_transactions.hpp" 10 | -------------------------------------------------------------------------------- /src/jstocbpp_basic.hpp: -------------------------------------------------------------------------------- 1 | #pragma once 2 | #include "jstocbpp_defs.hpp" 3 | 4 | #include "cas.hpp" 5 | #include "jstocbpp_cpptypes.hpp" 6 | #include "mutationtoken.hpp" 7 | 8 | 
#include 9 | #include 10 | #include 11 | #include 12 | #include 13 | #include 14 | 15 | namespace couchnode 16 | { 17 | 18 | template <> 19 | struct js_to_cbpp_t { 20 | static inline Napi::Value to_js(Napi::Env env, 21 | const couchbase::core::json_string &cppObj) 22 | { 23 | return js_to_cbpp_t::to_js(env, cppObj.str()); 24 | } 25 | 26 | static inline couchbase::core::json_string from_js(Napi::Value jsVal) 27 | { 28 | auto str = js_to_cbpp_t::from_js(jsVal); 29 | return couchbase::core::json_string(std::move(str)); 30 | } 31 | }; 32 | 33 | template <> 34 | struct js_to_cbpp_t { 35 | static inline couchbase::core::cluster_credentials 36 | from_js(Napi::Value jsVal) 37 | { 38 | auto jsObj = jsVal.ToObject(); 39 | couchbase::core::cluster_credentials cppObj; 40 | js_to_cbpp(cppObj.username, jsObj.Get("username")); 41 | js_to_cbpp(cppObj.password, jsObj.Get("password")); 42 | js_to_cbpp(cppObj.certificate_path, jsObj.Get("certificate_path")); 43 | js_to_cbpp(cppObj.key_path, jsObj.Get("key_path")); 44 | js_to_cbpp(cppObj.allowed_sasl_mechanisms, 45 | jsObj.Get("allowed_sasl_mechanisms")); 46 | return cppObj; 47 | } 48 | }; 49 | 50 | template <> 51 | struct js_to_cbpp_t { 52 | static inline couchbase::core::io::dns::dns_config 53 | from_js(Napi::Value jsVal) 54 | { 55 | auto jsObj = jsVal.ToObject(); 56 | auto cppObj = couchbase::core::io::dns::dns_config{ 57 | js_to_cbpp(jsObj.Get("nameserver")), 58 | js_to_cbpp(jsObj.Get("port")), 59 | js_to_cbpp(jsObj.Get("dnsSrvTimeout"))}; 60 | return cppObj; 61 | } 62 | }; 63 | 64 | template <> 65 | struct js_to_cbpp_t { 66 | static inline Napi::Value to_js(Napi::Env env, 67 | const couchbase::core::document_id &cppObj) 68 | { 69 | auto resObj = Napi::Object::New(env); 70 | resObj.Set("bucket", cbpp_to_js(env, cppObj.bucket())); 71 | resObj.Set("scope", cbpp_to_js(env, cppObj.scope())); 72 | resObj.Set("collection", cbpp_to_js(env, cppObj.collection())); 73 | resObj.Set("key", cbpp_to_js(env, cppObj.key())); 74 | return resObj; 
75 | } 76 | 77 | static inline couchbase::core::document_id from_js(Napi::Value jsVal) 78 | { 79 | auto jsObj = jsVal.ToObject(); 80 | return couchbase::core::document_id( 81 | js_to_cbpp(jsObj.Get("bucket")), 82 | js_to_cbpp(jsObj.Get("scope")), 83 | js_to_cbpp(jsObj.Get("collection")), 84 | js_to_cbpp(jsObj.Get("key"))); 85 | } 86 | }; 87 | 88 | template <> 89 | struct js_to_cbpp_t { 90 | static inline Napi::Value to_js(Napi::Env env, couchbase::cas cppObj) 91 | { 92 | return Cas::create(env, cppObj); 93 | } 94 | 95 | static inline couchbase::cas from_js(Napi::Value jsVal) 96 | { 97 | return Cas::parse(jsVal); 98 | } 99 | }; 100 | 101 | template <> 102 | struct js_to_cbpp_t { 103 | static inline Napi::Value to_js(Napi::Env env, 104 | const couchbase::mutation_token &cppObj) 105 | { 106 | return MutationToken::create(env, cppObj); 107 | } 108 | 109 | static inline couchbase::mutation_token from_js(Napi::Value jsVal) 110 | { 111 | return MutationToken::parse(jsVal); 112 | } 113 | }; 114 | 115 | template <> 116 | struct js_to_cbpp_t { 117 | static inline Napi::Value 118 | to_js(Napi::Env env, const couchbase::core::query_context &cppObj) 119 | { 120 | auto resObj = Napi::Object::New(env); 121 | resObj.Set("bucket_name", cbpp_to_js(env, cppObj.bucket_name())); 122 | resObj.Set("scope_name", cbpp_to_js(env, cppObj.scope_name())); 123 | return resObj; 124 | } 125 | 126 | static inline couchbase::core::query_context from_js(Napi::Value jsVal) 127 | { 128 | auto jsObj = jsVal.ToObject(); 129 | auto bucket_name = js_to_cbpp(jsObj.Get("bucket_name")); 130 | auto scope_name = js_to_cbpp(jsObj.Get("scope_name")); 131 | if (!bucket_name.empty() || !scope_name.empty()) { 132 | return couchbase::core::query_context(bucket_name, scope_name); 133 | } 134 | return couchbase::core::query_context(); 135 | } 136 | }; 137 | 138 | template <> 139 | struct js_to_cbpp_t { 140 | static inline Napi::Value to_js(Napi::Env env, 141 | couchbase::codec::encoded_value cppObj) 142 | { 143 | auto 
resObj = Napi::Object::New(env); 144 | resObj.Set("data", 145 | cbpp_to_js>(env, cppObj.data)); 146 | resObj.Set("flags", cbpp_to_js(env, cppObj.flags)); 147 | return resObj; 148 | } 149 | 150 | static inline couchbase::codec::encoded_value from_js(Napi::Value jsVal) 151 | { 152 | auto jsObj = jsVal.ToObject(); 153 | auto cppObj = couchbase::codec::encoded_value{ 154 | js_to_cbpp>(jsObj.Get("data")), 155 | js_to_cbpp(jsObj.Get("flags"))}; 156 | return cppObj; 157 | } 158 | }; 159 | 160 | } // namespace couchnode 161 | -------------------------------------------------------------------------------- /src/jstocbpp_defs.hpp: -------------------------------------------------------------------------------- 1 | #pragma once 2 | #include 3 | 4 | #include "transcoder.hpp" 5 | 6 | namespace couchnode 7 | { 8 | 9 | template 10 | struct js_to_cbpp_t; 11 | 12 | template 13 | static inline T js_to_cbpp(Napi::Value jsVal) 14 | { 15 | return js_to_cbpp_t::from_js(jsVal); 16 | } 17 | 18 | template 19 | static inline void js_to_cbpp(T &cppObj, Napi::Value jsVal) 20 | { 21 | cppObj = js_to_cbpp_t::from_js(jsVal); 22 | } 23 | 24 | template 25 | static inline Napi::Value cbpp_to_js(Napi::Env env, const T &cppObj) 26 | { 27 | return js_to_cbpp_t::to_js(env, cppObj); 28 | } 29 | 30 | template 31 | static inline Napi::Value cbpp_to_js(Napi::Env env, const T &cppObj, 32 | const Transcoder &transcoder) 33 | { 34 | return js_to_cbpp_t::to_js(env, cppObj, transcoder); 35 | } 36 | 37 | template 38 | static inline T jsToCbpp(Napi::Value jsVal) 39 | { 40 | return js_to_cbpp_t::from_js(jsVal); 41 | } 42 | 43 | template 44 | static inline T jsToCbpp(Napi::Value jsVal, const Transcoder &transcoder) 45 | { 46 | return js_to_cbpp_t::from_js(jsVal, transcoder); 47 | } 48 | 49 | template 50 | Napi::Value cbppToJs(Napi::Env env, const T &cppObj) 51 | { 52 | return js_to_cbpp_t::to_js(env, cppObj); 53 | } 54 | 55 | template 56 | Napi::Value cbppToJs(Napi::Env env, const T &cppObj, 57 | const Transcoder 
&transcoder) 58 | { 59 | return js_to_cbpp_t::to_js(env, cppObj, transcoder); 60 | } 61 | 62 | } // namespace couchnode 63 | -------------------------------------------------------------------------------- /src/mutationtoken.cpp: -------------------------------------------------------------------------------- 1 | #include "mutationtoken.hpp" 2 | #include "jstocbpp.hpp" 3 | #include "utils.hpp" 4 | #include 5 | 6 | namespace couchnode 7 | { 8 | 9 | struct MutationTokenData { 10 | uint64_t partitionUuid; 11 | uint64_t sequenceNumber; 12 | uint16_t partitionId; 13 | char bucketName[256]; 14 | }; 15 | 16 | void MutationToken::Init(Napi::Env env, Napi::Object exports) 17 | { 18 | Napi::Function func = DefineClass( 19 | env, "MutationToken", 20 | { 21 | InstanceMethod<&MutationToken::jsToString>("toString"), 22 | InstanceMethod<&MutationToken::jsToJSON>("toJSON"), 23 | InstanceMethod<&MutationToken::jsInspect>( 24 | utils::napiGetSymbol(env, "nodejs.util.inspect.custom")), 25 | }); 26 | 27 | constructor(env) = Napi::Persistent(func); 28 | exports.Set("MutationToken", func); 29 | } 30 | 31 | MutationToken::MutationToken(const Napi::CallbackInfo &info) 32 | : Napi::ObjectWrap(info) 33 | { 34 | if (info[0].IsBuffer()) { 35 | info.This().As().Set("raw", info[0]); 36 | return; 37 | } 38 | 39 | auto token = MutationToken::parse(info[0]); 40 | auto rawBytesVal = MutationToken::toBuffer(info.Env(), token); 41 | info.This().As().Set("raw", rawBytesVal); 42 | } 43 | 44 | MutationToken::~MutationToken() 45 | { 46 | } 47 | 48 | Napi::Value MutationToken::toBuffer(Napi::Env env, 49 | const couchbase::mutation_token &token) 50 | { 51 | MutationTokenData tokenData; 52 | tokenData.partitionId = token.partition_id(); 53 | tokenData.partitionUuid = token.partition_uuid(); 54 | tokenData.sequenceNumber = token.sequence_number(); 55 | memcpy(tokenData.bucketName, token.bucket_name().c_str(), 56 | token.bucket_name().size() + 1); 57 | return utils::napiDataToBuffer(env, tokenData); 58 | } 59 
| 60 | couchbase::mutation_token MutationToken::fromBuffer(Napi::Value val) 61 | { 62 | MutationTokenData tokenData = 63 | utils::napiBufferToData(val); 64 | 65 | return couchbase::mutation_token{ 66 | tokenData.partitionUuid, tokenData.sequenceNumber, 67 | tokenData.partitionId, tokenData.bucketName}; 68 | } 69 | 70 | Napi::Value MutationToken::create(Napi::Env env, 71 | const couchbase::mutation_token &token) 72 | { 73 | auto rawBytesVal = MutationToken::toBuffer(env, token); 74 | return MutationToken::constructor(env).New({rawBytesVal}); 75 | } 76 | 77 | couchbase::mutation_token MutationToken::parse(Napi::Value val) 78 | { 79 | if (val.IsNull()) { 80 | return couchbase::mutation_token{}; 81 | } else if (val.IsUndefined()) { 82 | return couchbase::mutation_token{}; 83 | } else if (val.IsObject()) { 84 | auto objVal = val.As(); 85 | if (objVal.HasOwnProperty("partition_uuid")) { 86 | auto partitionUuid = 87 | jsToCbpp(objVal.Get("partition_uuid")); 88 | auto sequenceNumber = 89 | jsToCbpp(objVal.Get("sequence_number")); 90 | auto partitionId = 91 | jsToCbpp(objVal.Get("partition_id")); 92 | auto bucketName = jsToCbpp(objVal.Get("bucket_name")); 93 | return couchbase::mutation_token{partitionUuid, sequenceNumber, 94 | partitionId, bucketName}; 95 | } 96 | 97 | auto maybeRawVal = objVal.Get("raw"); 98 | if (!maybeRawVal.IsEmpty()) { 99 | return MutationToken::fromBuffer(maybeRawVal); 100 | } 101 | } else if (val.IsString()) { 102 | // not currently supported 103 | } else if (val.IsBuffer()) { 104 | return MutationToken::fromBuffer(val); 105 | } 106 | 107 | return couchbase::mutation_token{}; 108 | } 109 | 110 | Napi::Value MutationToken::jsToString(const Napi::CallbackInfo &info) 111 | { 112 | auto token = MutationToken::parse(info.This()); 113 | 114 | std::stringstream stream; 115 | stream << token.bucket_name() << ":" << token.partition_id() << ":" 116 | << token.partition_uuid() << ":" << token.sequence_number(); 117 | return Napi::String::New(info.Env(), 
stream.str()); 118 | } 119 | 120 | Napi::Value MutationToken::jsToJSON(const Napi::CallbackInfo &info) 121 | { 122 | auto env = info.Env(); 123 | auto token = MutationToken::parse(info.This()); 124 | 125 | auto resObj = Napi::Object::New(env); 126 | resObj.Set("bucket_name", Napi::String::New(env, token.bucket_name())); 127 | resObj.Set("partition_id", Napi::Number::New(env, token.partition_id())); 128 | resObj.Set("partition_uuid", 129 | Napi::String::New(env, std::to_string(token.partition_uuid()))); 130 | resObj.Set("sequence_number", 131 | Napi::String::New(env, std::to_string(token.sequence_number()))); 132 | return resObj; 133 | } 134 | 135 | Napi::Value MutationToken::jsInspect(const Napi::CallbackInfo &info) 136 | { 137 | auto token = MutationToken::parse(info.This()); 138 | 139 | std::stringstream stream; 140 | stream << "MutationToken<" << token.bucket_name() << ":" 141 | << token.partition_id() << ":" << token.partition_uuid() << ":" 142 | << token.sequence_number() << ">"; 143 | return Napi::String::New(info.Env(), stream.str()); 144 | } 145 | 146 | } // namespace couchnode 147 | -------------------------------------------------------------------------------- /src/mutationtoken.hpp: -------------------------------------------------------------------------------- 1 | #pragma once 2 | #include "addondata.hpp" 3 | #include "napi.h" 4 | #include 5 | #include 6 | 7 | namespace couchnode 8 | { 9 | 10 | class MutationToken : public Napi::ObjectWrap 11 | { 12 | public: 13 | static Napi::FunctionReference &constructor(Napi::Env env) 14 | { 15 | return AddonData::fromEnv(env)->_mutationTokenCtor; 16 | } 17 | 18 | static void Init(Napi::Env env, Napi::Object exports); 19 | 20 | static Napi::Value toBuffer(Napi::Env env, 21 | const couchbase::mutation_token &token); 22 | static couchbase::mutation_token fromBuffer(Napi::Value val); 23 | static Napi::Value create(Napi::Env env, 24 | const couchbase::mutation_token &token); 25 | static couchbase::mutation_token 
parse(Napi::Value val); 26 | 27 | MutationToken(const Napi::CallbackInfo &info); 28 | ~MutationToken(); 29 | 30 | Napi::Value jsToString(const Napi::CallbackInfo &info); 31 | Napi::Value jsToJSON(const Napi::CallbackInfo &info); 32 | Napi::Value jsInspect(const Napi::CallbackInfo &info); 33 | 34 | private: 35 | }; 36 | 37 | } // namespace couchnode 38 | -------------------------------------------------------------------------------- /src/scan_iterator.cpp: -------------------------------------------------------------------------------- 1 | #include "scan_iterator.hpp" 2 | #include "connection.hpp" 3 | #include "jstocbpp.hpp" 4 | 5 | namespace couchnode 6 | { 7 | 8 | void ScanIterator::Init(Napi::Env env, Napi::Object exports) 9 | { 10 | Napi::Function func = DefineClass( 11 | env, "ScanIterator", 12 | { 13 | InstanceMethod<&ScanIterator::jsNext>("next"), 14 | InstanceMethod<&ScanIterator::jsCancel>("cancel"), 15 | InstanceAccessor<&ScanIterator::jsCancelled>("cancelled"), 16 | }); 17 | 18 | constructor(env) = Napi::Persistent(func); 19 | exports.Set("ScanIterator", func); 20 | } 21 | 22 | ScanIterator::ScanIterator(const Napi::CallbackInfo &info) 23 | : Napi::ObjectWrap(info) 24 | { 25 | if (info.Length() > 0) { 26 | auto wrapped_result = 27 | *info[0] 28 | .As>() 29 | .Data(); 30 | this->result_ = 31 | std::make_shared(wrapped_result); 32 | } 33 | } 34 | 35 | ScanIterator::~ScanIterator() 36 | { 37 | } 38 | 39 | Napi::Value ScanIterator::jsNext(const Napi::CallbackInfo &info) 40 | { 41 | auto env = info.Env(); 42 | auto callbackJsFn = info[0].As(); 43 | auto cookie = CallCookie(env, callbackJsFn, "cbRangeScanNext"); 44 | 45 | auto handler = [](Napi::Env env, Napi::Function callback, 46 | couchbase::core::range_scan_item resp, 47 | std::error_code ec) mutable { 48 | Napi::Value jsErr, jsRes; 49 | if (ec && ec == couchbase::errc::key_value::range_scan_completed) { 50 | jsErr = env.Null(); 51 | jsRes = env.Undefined(); 52 | } else { 53 | try { 54 | jsErr = 
cbpp_to_js(env, ec); 55 | jsRes = cbpp_to_js(env, resp); 56 | } catch (const Napi::Error &e) { 57 | jsErr = e.Value(); 58 | jsRes = env.Null(); 59 | } 60 | } 61 | callback.Call({jsErr, jsRes}); 62 | }; 63 | 64 | this->result_->next( 65 | [cookie = std::move(cookie), handler = std::move(handler)]( 66 | couchbase::core::range_scan_item resp, std::error_code ec) mutable { 67 | cookie.invoke([handler = std::move(handler), resp = std::move(resp), 68 | ec = std::move(ec)]( 69 | Napi::Env env, Napi::Function callback) mutable { 70 | handler(env, callback, std::move(resp), std::move(ec)); 71 | }); 72 | }); 73 | 74 | return env.Null(); 75 | } 76 | 77 | Napi::Value ScanIterator::jsCancel(const Napi::CallbackInfo &info) 78 | { 79 | auto env = info.Env(); 80 | this->result_->cancel(); 81 | return Napi::Boolean::New(env, this->result_->is_cancelled()); 82 | } 83 | 84 | Napi::Value ScanIterator::jsCancelled(const Napi::CallbackInfo &info) 85 | { 86 | auto env = info.Env(); 87 | return Napi::Boolean::New(env, this->result_->is_cancelled()); 88 | } 89 | 90 | } // namespace couchnode 91 | -------------------------------------------------------------------------------- /src/scan_iterator.hpp: -------------------------------------------------------------------------------- 1 | #pragma once 2 | #include "addondata.hpp" 3 | #include "napi.h" 4 | #include 5 | 6 | namespace couchnode 7 | { 8 | 9 | class ScanIterator : public Napi::ObjectWrap 10 | { 11 | public: 12 | static Napi::FunctionReference &constructor(Napi::Env env) 13 | { 14 | return AddonData::fromEnv(env)->_scanIteratorCtor; 15 | } 16 | 17 | static void Init(Napi::Env env, Napi::Object exports); 18 | 19 | ScanIterator(const Napi::CallbackInfo &info); 20 | ~ScanIterator(); 21 | 22 | Napi::Value jsNext(const Napi::CallbackInfo &info); 23 | Napi::Value jsCancel(const Napi::CallbackInfo &info); 24 | Napi::Value jsCancelled(const Napi::CallbackInfo &info); 25 | 26 | private: 27 | std::shared_ptr result_; 28 | }; 29 | 30 | } // 
namespace couchnode 31 | -------------------------------------------------------------------------------- /src/transaction.hpp: -------------------------------------------------------------------------------- 1 | #pragma once 2 | #include "addondata.hpp" 3 | #include 4 | #include 5 | #include 6 | 7 | namespace cbtxns = couchbase::transactions; 8 | namespace cbcoretxns = couchbase::core::transactions; 9 | 10 | namespace couchnode 11 | { 12 | 13 | class Transaction : public Napi::ObjectWrap 14 | { 15 | public: 16 | static Napi::FunctionReference &constructor(Napi::Env env) 17 | { 18 | return AddonData::fromEnv(env)->_transactionCtor; 19 | } 20 | 21 | static void Init(Napi::Env env, Napi::Object exports); 22 | 23 | Transaction(const Napi::CallbackInfo &info); 24 | ~Transaction(); 25 | 26 | Napi::Value jsNewAttempt(const Napi::CallbackInfo &info); 27 | Napi::Value jsFinalizeAttempt(const Napi::CallbackInfo &info); 28 | Napi::Value jsGet(const Napi::CallbackInfo &info); 29 | Napi::Value 30 | jsGetReplicaFromPreferredServerGroup(const Napi::CallbackInfo &info); 31 | Napi::Value jsGetMulti(const Napi::CallbackInfo &info); 32 | Napi::Value 33 | jsGetMultiReplicasFromPreferredServerGroup(const Napi::CallbackInfo &info); 34 | Napi::Value jsInsert(const Napi::CallbackInfo &info); 35 | Napi::Value jsReplace(const Napi::CallbackInfo &info); 36 | Napi::Value jsRemove(const Napi::CallbackInfo &info); 37 | Napi::Value jsQuery(const Napi::CallbackInfo &info); 38 | Napi::Value jsCommit(const Napi::CallbackInfo &info); 39 | Napi::Value jsRollback(const Napi::CallbackInfo &info); 40 | 41 | private: 42 | std::shared_ptr _impl; 43 | }; 44 | 45 | } // namespace couchnode 46 | -------------------------------------------------------------------------------- /src/transactions.cpp: -------------------------------------------------------------------------------- 1 | #include "transactions.hpp" 2 | #include "connection.hpp" 3 | #include 4 | #include 5 | #include 6 | 7 | namespace couchnode 8 | 
{ 9 | 10 | void Transactions::Init(Napi::Env env, Napi::Object exports) 11 | { 12 | Napi::Function func = 13 | DefineClass(env, "Transactions", 14 | { 15 | InstanceMethod<&Transactions::jsClose>("close"), 16 | }); 17 | 18 | constructor(env) = Napi::Persistent(func); 19 | exports.Set("Transactions", func); 20 | } 21 | 22 | Transactions::Transactions(const Napi::CallbackInfo &info) 23 | : Napi::ObjectWrap(info) 24 | { 25 | auto clusterJsObj = info[0].As(); 26 | auto configJsObj = info[1].As(); 27 | 28 | if (!clusterJsObj.InstanceOf(Connection::constructor(info.Env()).Value())) { 29 | throw Napi::Error::New(info.Env(), 30 | "first parameter must be a Connection object"); 31 | } 32 | auto cluster = Connection::Unwrap(clusterJsObj)->cluster(); 33 | 34 | auto txnsConfig = jsToCbpp(configJsObj); 35 | std::future< 36 | std::pair>> 37 | future = cbcoretxns::transactions::create(cluster, txnsConfig); 38 | std::pair> 39 | result = future.get(); 40 | if (result.first.value()) { 41 | Napi::Error err = Napi::Error::New(info.Env(), result.first.message()); 42 | err.Set("code", Napi::Number::New(info.Env(), result.first.value())); 43 | throw err; 44 | } 45 | _impl.reset(); 46 | _impl = std::move(result.second); 47 | } 48 | 49 | Transactions::~Transactions() 50 | { 51 | } 52 | 53 | Napi::Value Transactions::jsClose(const Napi::CallbackInfo &info) 54 | { 55 | auto callbackJsFn = info[0].As(); 56 | 57 | _impl->close(); 58 | callbackJsFn.Call({info.Env().Null()}); 59 | 60 | return info.Env().Null(); 61 | } 62 | 63 | } // namespace couchnode 64 | -------------------------------------------------------------------------------- /src/transactions.hpp: -------------------------------------------------------------------------------- 1 | #pragma once 2 | #include "addondata.hpp" 3 | #include 4 | #include 5 | #include 6 | 7 | namespace cbtxns = couchbase::transactions; 8 | namespace cbcoretxns = couchbase::core::transactions; 9 | 10 | namespace couchnode 11 | { 12 | 13 | class Transactions : 
public Napi::ObjectWrap 14 | { 15 | public: 16 | static Napi::FunctionReference &constructor(Napi::Env env) 17 | { 18 | return AddonData::fromEnv(env)->_transactionsCtor; 19 | } 20 | 21 | cbcoretxns::transactions &transactions() const 22 | { 23 | return *_impl; 24 | } 25 | 26 | static void Init(Napi::Env env, Napi::Object exports); 27 | 28 | Transactions(const Napi::CallbackInfo &info); 29 | ~Transactions(); 30 | 31 | Napi::Value jsClose(const Napi::CallbackInfo &info); 32 | 33 | private: 34 | std::shared_ptr _impl; 35 | }; 36 | 37 | } // namespace couchnode 38 | -------------------------------------------------------------------------------- /src/transcoder.hpp: -------------------------------------------------------------------------------- 1 | #pragma once 2 | #include 3 | 4 | namespace couchnode 5 | { 6 | 7 | class Transcoder 8 | { 9 | public: 10 | Transcoder() 11 | { 12 | } 13 | 14 | static inline Transcoder parse(Napi::Value transcoder) 15 | { 16 | return Transcoder(transcoder); 17 | } 18 | 19 | std::tuple encode(Napi::Value content) const 20 | { 21 | auto jsTranscoderObj = _jsObj.Value(); 22 | auto jsEncodeVal = jsTranscoderObj.Get("encode"); 23 | auto jsEncodeFn = jsEncodeVal.As(); 24 | if (!jsEncodeFn.IsFunction()) { 25 | throw Napi::Error::New(content.Env(), 26 | "invalid transcoder encode function"); 27 | } 28 | 29 | auto jsEncodedVal = jsEncodeFn.Call(jsTranscoderObj, {content}); 30 | 31 | auto jsEncodedArr = jsEncodedVal.As(); 32 | if (!jsEncodedArr.IsArray()) { 33 | throw Napi::Error::New( 34 | content.Env(), 35 | "transcoder encode function did not return a tuple"); 36 | } 37 | 38 | auto jsContentVal = jsEncodedArr.Get(uint32_t(0)); 39 | auto jsContentBuf = jsContentVal.As>(); 40 | if (!jsContentBuf.IsBuffer()) { 41 | throw Napi::Error::New(content.Env(), 42 | "transcoder encode function did not return " 43 | "content as a buffer"); 44 | } 45 | 46 | auto jsFlagsVal = jsEncodedArr.Get(uint32_t(1)); 47 | auto jsFlagsNum = jsFlagsVal.As(); 48 | if 
(!jsFlagsNum.IsNumber()) { 49 | throw Napi::Error::New( 50 | content.Env(), 51 | "transcoder encode function did not return flags as a number"); 52 | } 53 | 54 | std::string data(reinterpret_cast(jsContentBuf.Data()), 55 | jsContentBuf.Length()); 56 | uint32_t flags = 57 | static_cast(jsFlagsNum.Int64Value() & 0xFFFFFFFF); 58 | return std::make_tuple(std::move(data), flags); 59 | } 60 | 61 | Napi::Value decode(Napi::Env env, const std::string &data, 62 | uint32_t flags) const 63 | { 64 | auto jsTranscoderObj = _jsObj.Value(); 65 | auto jsDecodeVal = jsTranscoderObj.Get("decode"); 66 | auto jsDecodeFn = jsDecodeVal.As(); 67 | if (!jsDecodeFn.IsFunction()) { 68 | throw Napi::Error::New(env, "invalid transcoder decode function"); 69 | } 70 | 71 | auto jsDataBuf = Napi::Buffer::Copy( 72 | env, reinterpret_cast(data.c_str()), data.size()); 73 | auto jsFlagsNum = Napi::Number::New(env, flags); 74 | 75 | auto jsDecodedVal = 76 | jsDecodeFn.Call(jsTranscoderObj, {jsDataBuf, jsFlagsNum}); 77 | 78 | return jsDecodedVal; 79 | } 80 | 81 | private: 82 | Transcoder(Napi::Value transcoder) 83 | { 84 | auto jsTranscoderObj = transcoder.As(); 85 | if (!jsTranscoderObj.IsObject()) { 86 | throw Napi::Error::New(transcoder.Env(), 87 | "invalid transcoder type specified"); 88 | } 89 | 90 | _jsObj = Napi::Persistent(jsTranscoderObj); 91 | } 92 | 93 | Napi::ObjectReference _jsObj; 94 | }; 95 | 96 | } // namespace couchnode 97 | -------------------------------------------------------------------------------- /src/utils.hpp: -------------------------------------------------------------------------------- 1 | #pragma once 2 | #include 3 | 4 | namespace couchnode 5 | { 6 | 7 | namespace utils 8 | { 9 | 10 | static inline Napi::Symbol napiGetSymbol(const Napi::Env &env, std::string name) 11 | { 12 | auto Symbol = env.Global().Get("Symbol").As(); 13 | auto forSymbol = Symbol.Get("for").As().Call( 14 | Symbol, {Napi::String::New(env, name)}); 15 | return forSymbol.As(); 16 | } 17 | 18 | 
template 19 | static inline Napi::Value napiDataToBuffer(Napi::Env env, const T &data) 20 | { 21 | return Napi::Buffer::Copy(env, &data, 1); 22 | } 23 | 24 | template 25 | static inline T napiBufferToData(Napi::Value buf) 26 | { 27 | return *(T *)buf.template As>().Data(); 28 | } 29 | 30 | } // namespace utils 31 | 32 | } // namespace couchnode 33 | -------------------------------------------------------------------------------- /test/.eslintrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | extends: ['../.eslintrc.js'], 3 | rules: { 4 | '@typescript-eslint/no-var-requires': 'off', 5 | 'jsdoc/require-jsdoc': 'off', 6 | }, 7 | } 8 | -------------------------------------------------------------------------------- /test/cluster.test.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const assert = require('assert') 4 | const gc = require('expose-gc/function') 5 | const { Cluster } = require('../lib/cluster') 6 | var { knownProfiles } = require('../lib/configProfile') 7 | const harness = require('./harness') 8 | 9 | const H = harness 10 | 11 | class TestProfile { 12 | apply(options) { 13 | const timeouts = { 14 | kvTimeout: 5000, 15 | kvDurableTimeout: 10000, 16 | analyticsTimeout: 60000, 17 | managementTimeout: 60000, 18 | queryTimeout: 60000, 19 | searchTimeout: 60000, 20 | viewTimeout: 60000, 21 | } 22 | // the profile should override previously set values 23 | options.timeouts = { ...options.timeouts, ...timeouts } 24 | } 25 | } 26 | 27 | describe('#Cluster', function () { 28 | let testKeys = [] 29 | 30 | after(async function () { 31 | for (const key of testKeys) { 32 | try { 33 | await H.dco.remove(key) 34 | } catch (e) { 35 | // ignore 36 | } 37 | } 38 | }) 39 | 40 | it('should queue operations until connected', async function () { 41 | var cluster = await H.lib.Cluster.connect(H.connStr, H.connOpts) 42 | var bucket = 
cluster.bucket(H.bucketName) 43 | var coll = bucket.defaultCollection() 44 | 45 | const key = H.genTestKey() 46 | testKeys.push(key) 47 | await coll.insert(key, 'bar') 48 | 49 | await cluster.close() 50 | }) 51 | 52 | it('should successfully gc connections', async function () { 53 | var cluster = await H.lib.Cluster.connect(H.connStr, H.connOpts) 54 | var bucket = cluster.bucket(H.bucketName) 55 | var coll = bucket.defaultCollection() 56 | const key = H.genTestKey() 57 | testKeys.push(key) 58 | await coll.insert(key, 'bar') 59 | await cluster.close() 60 | 61 | gc() 62 | }) 63 | 64 | it('should successfully close an unconnected cluster and error ops', async function () { 65 | var cluster = await H.lib.Cluster.connect(H.connStr, H.connOpts) 66 | var bucket = cluster.bucket(H.bucketName) 67 | var coll = bucket.defaultCollection() 68 | 69 | await cluster.close() 70 | 71 | await H.throwsHelper(async () => { 72 | await coll.insert(H.genTestKey(), 'bar') 73 | }, Error) 74 | }) 75 | 76 | it('should error ops after close and ignore superfluous closes', async function () { 77 | var cluster = await H.lib.Cluster.connect(H.connStr, H.connOpts) 78 | var bucket = cluster.bucket(H.bucketName) 79 | var coll = bucket.defaultCollection() 80 | const key = H.genTestKey() 81 | testKeys.push(key) 82 | await coll.insert(key, 'bar') 83 | 84 | await cluster.close() 85 | await cluster.close() 86 | await cluster.close() 87 | await cluster.close() 88 | 89 | await H.throwsHelper(async () => { 90 | await coll.insert(H.genTestKey(), 'bar') 91 | }, Error) 92 | 93 | await cluster.close() 94 | await cluster.close() 95 | }) 96 | 97 | it('lcbVersion property should work', function () { 98 | assert(typeof H.lib.lcbVersion === 'string') 99 | }) 100 | 101 | it('should error with AuthenticationFailureError', async function () { 102 | if (!H._usingMock) { 103 | let connOpts = { ...H.connOpts } 104 | connOpts.username = 'wrongUsername' 105 | await H.throwsHelper(async () => { 106 | await 
H.lib.Cluster.connect(H.connStr, connOpts) 107 | }, H.lib.AuthenticationFailureError) 108 | } else { 109 | this.skip() 110 | } 111 | }) 112 | 113 | it('should use wanDevelopment config profile', async function () { 114 | const cluster = await H.lib.Cluster.connect(H.connStr, { 115 | ...H.connOpts, 116 | configProfile: 'wanDevelopment', 117 | }) 118 | assert.strictEqual(cluster.kvTimeout, 20000) 119 | assert.strictEqual(cluster.kvDurableTimeout, 20000) 120 | assert.strictEqual(cluster.analyticsTimeout, 120000) 121 | assert.strictEqual(cluster.managementTimeout, 120000) 122 | assert.strictEqual(cluster.queryTimeout, 120000) 123 | assert.strictEqual(cluster.searchTimeout, 120000) 124 | assert.strictEqual(cluster.viewTimeout, 120000) 125 | assert.strictEqual(cluster.bootstrapTimeout, 120000) 126 | assert.strictEqual(cluster.connectTimeout, 20000) 127 | assert.strictEqual(cluster.resolveTimeout, 20000) 128 | }) 129 | 130 | it('should error when config profile is not registered', function () { 131 | assert.throws( 132 | () => { 133 | new Cluster(H.connStr, { ...H.connOpts, configProfile: 'testProfile' }) 134 | }, 135 | { name: 'Error', message: 'testProfile is not a registered profile.' 
} 136 | ) 137 | }) 138 | 139 | it('should use custom config profile', async function () { 140 | knownProfiles.registerProfile('testProfile', new TestProfile()) 141 | const cluster = await H.lib.Cluster.connect(H.connStr, { 142 | ...H.connOpts, 143 | configProfile: 'testProfile', 144 | }) 145 | assert.strictEqual(cluster.kvTimeout, 5000) 146 | assert.strictEqual(cluster.kvDurableTimeout, 10000) 147 | assert.strictEqual(cluster.analyticsTimeout, 60000) 148 | assert.strictEqual(cluster.managementTimeout, 60000) 149 | assert.strictEqual(cluster.queryTimeout, 60000) 150 | assert.strictEqual(cluster.searchTimeout, 60000) 151 | assert.strictEqual(cluster.viewTimeout, 60000) 152 | }) 153 | 154 | it('should not expose auth info in cluster', async function () { 155 | let clusterStr = JSON.stringify(H.c) 156 | let cluster = JSON.parse(clusterStr) 157 | assert.strictEqual(cluster._auth, '***hidden***') 158 | }) 159 | 160 | it('should not expose cluster auth info in bucket', async function () { 161 | let bucketStr = JSON.stringify(H.b) 162 | let bucket = JSON.parse(bucketStr) 163 | assert.strictEqual(bucket._cluster._auth, '***hidden***') 164 | }) 165 | }) 166 | -------------------------------------------------------------------------------- /test/connspec.test.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const assert = require('chai').assert 4 | var { ConnSpec } = require('../lib/connspec') 5 | 6 | const harness = require('./harness') 7 | 8 | const H = harness 9 | 10 | describe('#ConnSpec', function () { 11 | describe('stringify', function () { 12 | it('should stringify a connstr spec', function () { 13 | var x = new ConnSpec({ 14 | scheme: 'https', 15 | hosts: [ 16 | ['1.1.1.1', 8094], 17 | ['2.2.2.2', 8099], 18 | ], 19 | bucket: 'frank', 20 | options: { 21 | joe: 'bob', 22 | jane: 'drew', 23 | }, 24 | }).toString() 25 | 26 | assert.equal( 27 | x, 28 | 'https://1.1.1.1:8094,2.2.2.2:8099/frank?joe=bob&jane=drew' 
29 | ) 30 | }) 31 | 32 | it('should stringify a connstr spec without a scheme', function () { 33 | var x = new ConnSpec({ 34 | hosts: [['1.1.1.1', 8094]], 35 | bucket: 'frank', 36 | options: { 37 | x: 'y', 38 | }, 39 | }).toString() 40 | assert.equal(x, 'couchbase://1.1.1.1:8094/frank?x=y') 41 | }) 42 | 43 | it('should stringify a connstr spec without a bucket', function () { 44 | var x = new ConnSpec({ 45 | scheme: 'http', 46 | hosts: [['1.1.1.1', 8094]], 47 | options: { 48 | x: 'y', 49 | }, 50 | }).toString() 51 | assert.equal(x, 'http://1.1.1.1:8094?x=y') 52 | }) 53 | 54 | it('should stringify a connstr spec without options', function () { 55 | var x = new ConnSpec({ 56 | scheme: 'http', 57 | hosts: [['1.1.1.1', 8094]], 58 | bucket: 'joe', 59 | }).toString() 60 | assert.equal(x, 'http://1.1.1.1:8094/joe') 61 | }) 62 | 63 | it('should stringify a connstr spec with ipv6 addresses', function () { 64 | var x = new ConnSpec({ 65 | scheme: 'couchbase', 66 | hosts: [['[2001:4860:4860::8888]', 8094]], 67 | bucket: 'joe', 68 | }).toString() 69 | assert.equal(x, 'couchbase://[2001:4860:4860::8888]:8094/joe') 70 | }) 71 | 72 | it('should correctly stringify a connstr spec with sasl_mech_force', function () { 73 | var x = new ConnSpec({ 74 | scheme: 'couchbase', 75 | hosts: [['localhost', 0]], 76 | bucket: '', 77 | options: { 78 | sasl_mech_force: 'PLAIN', 79 | }, 80 | }).toString() 81 | assert.equal(x, 'couchbase://localhost?sasl_mech_force=PLAIN') 82 | }) 83 | 84 | it('should correctly stringify a connstr spec with allowed_sasl_mechanisms', function () { 85 | var x = new ConnSpec({ 86 | scheme: 'couchbase', 87 | hosts: [['localhost', 0]], 88 | bucket: '', 89 | options: { 90 | allowed_sasl_mechanisms: 'PLAIN', 91 | }, 92 | }).toString() 93 | assert.equal(x, 'couchbase://localhost?allowed_sasl_mechanisms=PLAIN') 94 | }) 95 | }) 96 | 97 | describe('parse', function () { 98 | it('should generate a blank spec for a blank string', function () { 99 | var x = ConnSpec.parse('') 
100 | assert.deepEqual(x, { 101 | scheme: 'couchbase', 102 | hosts: [['localhost', 0]], 103 | bucket: '', 104 | options: {}, 105 | }) 106 | }) 107 | 108 | it('should not parse a string with no host', function () { 109 | assert.throws(() => { 110 | ConnSpec.parse('https:///shirley') 111 | }) 112 | }) 113 | 114 | it('should parse a string with options', function () { 115 | var x = ConnSpec.parse('http://a/b?c=d&e=f') 116 | assert.deepEqual(x, { 117 | scheme: 'http', 118 | hosts: [['a', 0]], 119 | bucket: 'b', 120 | options: { 121 | c: 'd', 122 | e: 'f', 123 | }, 124 | }) 125 | }) 126 | 127 | it('should parse a string with ipv6', function () { 128 | var x = ConnSpec.parse('couchbase://[2001:4860:4860::8888]:9011/b') 129 | assert.deepEqual(x, { 130 | scheme: 'couchbase', 131 | hosts: [['[2001:4860:4860::8888]', 9011]], 132 | bucket: 'b', 133 | options: {}, 134 | }) 135 | }) 136 | 137 | it('should parse a string sasl_mech_force in options', function () { 138 | var x = ConnSpec.parse('couchbase://localhost?sasl_mech_force=PLAIN') 139 | assert.deepEqual(x, { 140 | scheme: 'couchbase', 141 | hosts: [['localhost', 0]], 142 | bucket: '', 143 | options: { 144 | sasl_mech_force: 'PLAIN', 145 | }, 146 | }) 147 | }) 148 | 149 | it('should parse a multiple strings in sasl_mech_force in options', function () { 150 | var x = ConnSpec.parse( 151 | 'couchbase://localhost?sasl_mech_force=SCRAM-SHA512&sasl_mech_force=SCRAM-SHA256' 152 | ) 153 | assert.deepEqual(x, { 154 | scheme: 'couchbase', 155 | hosts: [['localhost', 0]], 156 | bucket: '', 157 | options: { 158 | sasl_mech_force: ['SCRAM-SHA512', 'SCRAM-SHA256'], 159 | }, 160 | }) 161 | }) 162 | 163 | it('should parse a string allowed_sasl_mechanisms in options', function () { 164 | var x = ConnSpec.parse( 165 | 'couchbase://localhost?allowed_sasl_mechanisms=PLAIN' 166 | ) 167 | assert.deepEqual(x, { 168 | scheme: 'couchbase', 169 | hosts: [['localhost', 0]], 170 | bucket: '', 171 | options: { 172 | allowed_sasl_mechanisms: 'PLAIN', 
173 | }, 174 | }) 175 | }) 176 | 177 | it('should parse a multiple strings in allowed_sasl_mechanisms in options', function () { 178 | var x = ConnSpec.parse( 179 | 'couchbase://localhost?allowed_sasl_mechanisms=SCRAM-SHA512&allowed_sasl_mechanisms=SCRAM-SHA256' 180 | ) 181 | assert.deepEqual(x, { 182 | scheme: 'couchbase', 183 | hosts: [['localhost', 0]], 184 | bucket: '', 185 | options: { 186 | allowed_sasl_mechanisms: ['SCRAM-SHA512', 'SCRAM-SHA256'], 187 | }, 188 | }) 189 | }) 190 | }) 191 | 192 | describe('#passwordauthenticator', function () { 193 | it('Should have empty allowed_sasl_mechanisms by default', async function () { 194 | const authenticator = new H.lib.PasswordAuthenticator('user', 'password') 195 | assert.isUndefined(authenticator.allowed_sasl_mechanisms) 196 | }) 197 | 198 | it('should only enable PLAIN when ldap compatible', async function () { 199 | const authenticator = H.lib.PasswordAuthenticator.ldapCompatible( 200 | 'user', 201 | 'password' 202 | ) 203 | 204 | assert.strictEqual(1, authenticator.allowed_sasl_mechanisms.length) 205 | assert.strictEqual('PLAIN', authenticator.allowed_sasl_mechanisms[0]) 206 | }) 207 | }) 208 | }) 209 | -------------------------------------------------------------------------------- /test/data/search_index.json: -------------------------------------------------------------------------------- 1 | { 2 | "doc_config": { 3 | "docid_prefix_delim": "_", 4 | "docid_regexp": "", 5 | "mode": "docid_prefix", 6 | "type_field": "type" 7 | }, 8 | "mapping": { 9 | "default_analyzer": "standard", 10 | "default_datetime_parser": "dateTimeOptional", 11 | "default_field": "_all", 12 | "default_mapping": { 13 | "dynamic": true, 14 | "enabled": false 15 | }, 16 | "default_type": "_default", 17 | "docvalues_dynamic": true, 18 | "index_dynamic": false, 19 | "store_dynamic": false, 20 | "type_field": "_type", 21 | "types": { 22 | "testIndexUUID": { 23 | "dynamic": false, 24 | "enabled": true, 25 | "properties": { 26 | "name": { 27 
| "enabled": true, 28 | "dynamic": false, 29 | "fields": [ 30 | { 31 | "docvalues": true, 32 | "include_in_all": true, 33 | "include_term_vectors": true, 34 | "index": true, 35 | "name": "name", 36 | "store": true, 37 | "type": "text" 38 | } 39 | ] 40 | }, 41 | "testUid": { 42 | "enabled": true, 43 | "dynamic": false, 44 | "fields": [ 45 | { 46 | "docvalues": true, 47 | "include_in_all": true, 48 | "include_term_vectors": true, 49 | "index": true, 50 | "name": "testUid", 51 | "store": true, 52 | "type": "text" 53 | } 54 | ] 55 | } 56 | } 57 | } 58 | } 59 | }, 60 | "store": { 61 | "indexType": "scorch" 62 | } 63 | } -------------------------------------------------------------------------------- /test/data/vector_search_index.json: -------------------------------------------------------------------------------- 1 | { 2 | "doc_config": { 3 | "docid_prefix_delim": "_", 4 | "docid_regexp": "", 5 | "mode": "docid_prefix", 6 | "type_field": "type" 7 | }, 8 | "mapping": { 9 | "default_analyzer": "standard", 10 | "default_datetime_parser": "dateTimeOptional", 11 | "default_field": "_all", 12 | "default_mapping": { 13 | "dynamic": true, 14 | "enabled": false 15 | }, 16 | "default_type": "_default", 17 | "docvalues_dynamic": false, 18 | "index_dynamic": true, 19 | "store_dynamic": false, 20 | "type_field": "_type", 21 | "types": { 22 | "testIndexUUID": { 23 | "dynamic": false, 24 | "enabled": true, 25 | "properties": { 26 | "text": { 27 | "enabled": true, 28 | "dynamic": false, 29 | "fields": [ 30 | { 31 | "index": true, 32 | "name": "text", 33 | "store": true, 34 | "type": "text" 35 | } 36 | ] 37 | }, 38 | "vector_field": { 39 | "enabled": true, 40 | "dynamic": false, 41 | "fields": [ 42 | { 43 | "dims": 1536, 44 | "index": true, 45 | "name": "vector_field", 46 | "similarity": "l2_norm", 47 | "store": true, 48 | "type": "vector" 49 | } 50 | ] 51 | }, 52 | "testUid": { 53 | "enabled": true, 54 | "dynamic": false, 55 | "fields": [ 56 | { 57 | "docvalues": true, 58 | 
"include_in_all": true, 59 | "include_term_vectors": true, 60 | "index": true, 61 | "name": "testUid", 62 | "store": true, 63 | "type": "text" 64 | } 65 | ] 66 | } 67 | } 68 | } 69 | } 70 | }, 71 | "store": { 72 | "indexType": "scorch", 73 | "segmentVersion": 16 74 | } 75 | } 76 | -------------------------------------------------------------------------------- /test/diag.test.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const assert = require('chai').assert 4 | const H = require('./harness') 5 | 6 | describe('#diagnostics', function () { 7 | describe('#info', function () { 8 | it('should fetch diagnostics data on a cluster successfully', async function () { 9 | var testCluster = await H.newCluster() 10 | 11 | // We put this in a try-catch block to enable us to capture the case 12 | // where the cluster does not support G3CP, and thus cluster-level 13 | // diagnostics are not possible. 14 | var res 15 | try { 16 | res = await testCluster.diagnostics() 17 | } catch (e) { 18 | res = { 19 | id: '', 20 | version: 2, 21 | sdk: '', 22 | services: {}, 23 | } 24 | } 25 | assert.isObject(res) 26 | assert.isString(res.id) 27 | assert.equal(res.version, 2) 28 | assert.isString(res.sdk) 29 | assert.isObject(res.services) 30 | 31 | await testCluster.close() 32 | }) 33 | 34 | it('should fetch diagnostics data with a bucket open successfully', async function () { 35 | var res = await H.c.diagnostics() 36 | assert.isObject(res) 37 | assert.isString(res.id) 38 | assert.equal(res.version, 2) 39 | assert.isString(res.sdk) 40 | assert.isObject(res.services) 41 | }) 42 | 43 | it('should ping a cluster successfully', async function () { 44 | var res = await H.c.ping({ 45 | serviceTypes: [H.lib.ServiceType.KeyValue], 46 | }) 47 | assert.isObject(res) 48 | assert.isString(res.id) 49 | assert.equal(res.version, 2) 50 | assert.isString(res.sdk) 51 | assert.isObject(res.services) 52 | }) 53 | 54 | it('should ping a bucket 
successfully', async function () { 55 | var res = await H.b.ping({ 56 | serviceTypes: [H.lib.ServiceType.KeyValue], 57 | }) 58 | assert.isObject(res) 59 | assert.isString(res.id) 60 | assert.equal(res.version, 2) 61 | assert.isString(res.sdk) 62 | assert.isObject(res.services) 63 | }) 64 | 65 | it('should ping a new cluster successfully', async function () { 66 | var cluster = await H.lib.Cluster.connect(H.connStr, H.connOpts) 67 | 68 | var res = await cluster.ping({ 69 | serviceTypes: [H.lib.ServiceType.KeyValue], 70 | }) 71 | assert.isObject(res) 72 | assert.isString(res.id) 73 | assert.equal(res.version, 2) 74 | assert.isString(res.sdk) 75 | assert.isObject(res.services) 76 | 77 | await cluster.close() 78 | }) 79 | 80 | it('should ping a new bucket successfully', async function () { 81 | var cluster = await H.lib.Cluster.connect(H.connStr, H.connOpts) 82 | var bucket = cluster.bucket(H.bucketName) 83 | 84 | var res = await bucket.ping({ 85 | serviceTypes: [H.lib.ServiceType.KeyValue], 86 | }) 87 | assert.isObject(res) 88 | assert.isString(res.id) 89 | assert.equal(res.version, 2) 90 | assert.isString(res.sdk) 91 | assert.isObject(res.services) 92 | 93 | await cluster.close() 94 | }) 95 | }) 96 | }) 97 | -------------------------------------------------------------------------------- /test/errors.test.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const assert = require('chai').assert 4 | const H = require('./harness') 5 | 6 | function genericTests(collFn) { 7 | it('should should return a context with crud operations', async function () { 8 | try { 9 | await collFn().get('some-missing-key') 10 | } catch (err) { 11 | assert.instanceOf(err, H.lib.DocumentNotFoundError) 12 | assert.instanceOf(err.context, H.lib.KeyValueErrorContext) 13 | return 14 | } 15 | assert(false, 'should never reach here') 16 | }) 17 | } 18 | 19 | describe('#errors', function () { 20 | /* eslint-disable-next-line 
mocha/no-setup-in-describe */ 21 | genericTests(() => H.dco) 22 | }) 23 | 24 | describe('#collections-errors', function () { 25 | /* eslint-disable-next-line mocha/no-hooks-for-single-case */ 26 | before(function () { 27 | H.skipIfMissingFeature(this, H.Features.Collections) 28 | }) 29 | 30 | /* eslint-disable-next-line mocha/no-setup-in-describe */ 31 | genericTests(() => H.co) 32 | }) 33 | -------------------------------------------------------------------------------- /test/management.test.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const H = require('./harness') 4 | 5 | describe('#management-apis', function () { 6 | it('should successfully timeout operations (slow)', async function () { 7 | var bmgr = H.c.buckets() 8 | await H.throwsHelper(async () => { 9 | await bmgr.flushBucket('default', { timeout: 1 }) 10 | }, H.lib.TimeoutError) 11 | await H.sleep(1000) 12 | }).timeout(2000) 13 | }) 14 | -------------------------------------------------------------------------------- /test/sdutils.test.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const assert = require('assert') 4 | const { SdUtils } = require('../lib/sdutils') 5 | 6 | describe('#sdutils', function () { 7 | it('should handle base properties', function () { 8 | var res = SdUtils.insertByPath(null, 'foo', 'test') 9 | assert.deepEqual(res, { foo: 'test' }) 10 | }) 11 | 12 | it('should handle nested properties', function () { 13 | var res = SdUtils.insertByPath(null, 'foo.bar', 'test') 14 | assert.deepEqual(res, { foo: { bar: 'test' } }) 15 | }) 16 | 17 | it('should handle arrays', function () { 18 | var res = SdUtils.insertByPath(null, 'foo[0]', 'test') 19 | assert.deepEqual(res, { foo: ['test'] }) 20 | }) 21 | }) 22 | -------------------------------------------------------------------------------- /test/testdata.js: 
-------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | var TEST_DOCS = [ 4 | { x: 0, y: 0, name: 'x0,y0' }, 5 | { x: 1, y: 0, name: 'x1,y0' }, 6 | { x: 2, y: 0, name: 'x2,y0' }, 7 | { x: 0, y: 1, name: 'x0,y1' }, 8 | { x: 1, y: 1, name: 'x1,y1' }, 9 | { x: 2, y: 1, name: 'x2,y1' }, 10 | { x: 0, y: 2, name: 'x0,y2' }, 11 | { x: 1, y: 2, name: 'x1,y2' }, 12 | { x: 2, y: 2, name: 'x2,y2' }, 13 | ] 14 | 15 | async function sleep(ms) { 16 | return new Promise((resolve) => setTimeout(resolve, ms)) 17 | } 18 | 19 | async function upsertTestData(target, testUid, retries = 3) { 20 | var promises = [] 21 | 22 | for (var i = 0; i < TEST_DOCS.length; ++i) { 23 | promises.push( 24 | (async () => { 25 | var testDocKey = testUid + '::' + i 26 | var testDoc = TEST_DOCS[i] 27 | testDoc.testUid = testUid 28 | 29 | for (let i = 0; i < retries; ++i) { 30 | try { 31 | await target.upsert(testDocKey, testDoc) 32 | } catch (e) { 33 | if (i === retries - 1) { 34 | throw e 35 | } 36 | await sleep(500) 37 | } 38 | } 39 | return testDocKey 40 | })() 41 | ) 42 | } 43 | 44 | return await Promise.allSettled(promises) 45 | } 46 | 47 | async function upsertTestDataFromList(target, testUid, docList) { 48 | var promises = [] 49 | 50 | for (var i = 0; i < docList.length; ++i) { 51 | promises.push( 52 | (async () => { 53 | var testDocKey = testUid + '::' + i 54 | var testDoc = docList[i] 55 | testDoc.testUid = testUid 56 | 57 | await target.upsert(testDocKey, testDoc) 58 | return testDocKey 59 | })() 60 | ) 61 | } 62 | 63 | return await Promise.allSettled(promises) 64 | } 65 | 66 | module.exports.upsertData = upsertTestData 67 | module.exports.upserDataFromList = upsertTestDataFromList 68 | 69 | async function removeTestData(target, testDocs) { 70 | if (!testDocs) { 71 | return 72 | } 73 | 74 | await Promise.allSettled(testDocs.map((docId) => target.remove(docId))) 75 | } 76 | 77 | module.exports.removeTestData = removeTestData 78 | 79 | function 
testDocCount() { 80 | return TEST_DOCS.length 81 | } 82 | 83 | module.exports.docCount = testDocCount 84 | -------------------------------------------------------------------------------- /test/workerthread.test.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const semver = require('semver') 4 | const harness = require('./harness') 5 | 6 | const H = harness 7 | 8 | async function startWorker(workerData) { 9 | /* eslint-disable-next-line node/no-unsupported-features/node-builtins */ 10 | const worker = require('worker_threads') 11 | 12 | return await new Promise((resolve, reject) => { 13 | let has_resolved = false 14 | const work = new worker.Worker('./test/workerthread.worker.js', { 15 | workerData, 16 | }) 17 | work.on('message', (data) => { 18 | // we need to force terminate the worker so it doesn't stay running, but 19 | // we mark the handler as resolved so we don't send an error for the terminate 20 | has_resolved = true 21 | work.terminate().then(() => { 22 | resolve(data) 23 | }) 24 | }) 25 | work.on('error', (err) => { 26 | if (!has_resolved) { 27 | has_resolved = true 28 | reject(err) 29 | } 30 | }) 31 | work.on('exit', (code) => { 32 | if (!has_resolved) { 33 | if (code !== 0) { 34 | has_resolved = true 35 | reject(new Error(`Worker stopped with exit code ${code}`)) 36 | } 37 | } 38 | }) 39 | }) 40 | } 41 | 42 | describe('#worker-threads', function () { 43 | let testKey 44 | 45 | before(function () { 46 | testKey = H.genTestKey() 47 | }) 48 | 49 | after(async function () { 50 | try { 51 | await H.dco.remove(testKey) 52 | } catch (e) { 53 | // ignore 54 | } 55 | }) 56 | it('should start a worker and complete an operation', async function () { 57 | if (semver.lt(process.version, '12.11.0')) { 58 | return this.skip() 59 | } 60 | 61 | const res = await startWorker({ 62 | connStr: H.connStr, 63 | connOpts: H.connOpts, 64 | bucketName: H.bucketName, 65 | testKey: testKey, 66 | }) 67 | if 
(!res.success) { 68 | throw res.error 69 | } 70 | }).timeout(45000) 71 | }) 72 | -------------------------------------------------------------------------------- /test/workerthread.worker.js: -------------------------------------------------------------------------------- 1 | require('ts-node').register() 2 | 3 | /* eslint-disable-next-line node/no-unsupported-features/node-builtins */ 4 | const worker = require('worker_threads') 5 | const assert = require('chai').assert 6 | const couchbase = require('../lib/couchbase') 7 | 8 | const workerData = worker.workerData 9 | 10 | async function doWork() { 11 | try { 12 | const cluster = await couchbase.connect( 13 | workerData.connStr, 14 | workerData.connOpts 15 | ) 16 | const bucket = cluster.bucket(workerData.bucketName) 17 | const coll = bucket.defaultCollection() 18 | 19 | // increased timeout for the first operation. sometimes the connection 20 | // isn't available immediately and leads to this failing. 21 | await coll.insert(workerData.testKey, 'bar', { timeout: 25000 }) 22 | 23 | const getRes = await coll.get(workerData.testKey) 24 | assert.isObject(getRes) 25 | assert.isOk(getRes.cas) 26 | assert.deepStrictEqual(getRes.value, 'bar') 27 | 28 | // We intentionally omit the call to close here to test that the 29 | // connection is correctly cleaned up automatically when the context 30 | // is destroyed. Without proper handling, this causes libuv to panic 31 | // due to handles that are left open. 
32 | //await cluster.close() 33 | 34 | worker.parentPort.postMessage({ 35 | success: true, 36 | }) 37 | } catch (e) { 38 | worker.parentPort.postMessage({ 39 | error: e, 40 | }) 41 | } 42 | } 43 | doWork() 44 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "@tsconfig/node12/tsconfig.json", 3 | 4 | "compilerOptions": { 5 | "outDir": "dist", 6 | "declaration": true 7 | }, 8 | 9 | "include": ["lib"] 10 | } 11 | -------------------------------------------------------------------------------- /typedoc.json: -------------------------------------------------------------------------------- 1 | { 2 | "entryPoints": ["./lib/couchbase.ts"], 3 | "out": "docs", 4 | "excludePrivate": true, 5 | "excludeInternal": true, 6 | "categorizeByGroup": false, 7 | "validation": { 8 | "invalidLink": true 9 | }, 10 | "categoryOrder": [ 11 | "Core", 12 | "Key-Value", 13 | "Transactions", 14 | "Query", 15 | "Views", 16 | "Full Text Search", 17 | "Analytics", 18 | "Datastructures", 19 | "Management", 20 | "Diagnostics", 21 | "Authentication", 22 | "Error Handling", 23 | "Logging" 24 | ], 25 | "intentionallyNotExported": ["lib/rangeScan.ts:ScanType"] 26 | } 27 | --------------------------------------------------------------------------------