├── .gitignore ├── .travis.yml ├── LICENSE ├── README-EN.md ├── README.md ├── bower.json ├── browser.js ├── browser.md ├── img ├── QRCode-EN.png └── QRCode.png ├── index.js ├── lib ├── client.js ├── config.js ├── core.js ├── event_listeners.js ├── filter.js ├── http.js ├── http │ ├── node.js │ └── xhr.js ├── long.js ├── metadata.js ├── protocol │ ├── build_proto.sh │ ├── decoder.js │ ├── encoder.js │ ├── plain_buffer_coded_stream.js │ ├── plain_buffer_consts.js │ ├── plain_buffer_crc8.js │ ├── plain_buffer_stream.js │ ├── plian_buffer_builder.js │ ├── sql.fbs │ ├── sql_generated.js │ ├── tablestore.proto │ ├── tablestore_compiled_proto.js │ ├── tablestore_filter.proto │ └── tablestore_search.proto ├── request.js ├── retry │ ├── default_retry_policy.js │ └── retry_util.js ├── search.js ├── sequential_executor.js ├── signer.js ├── sql.js ├── util-browser.js └── util.js ├── package-lock.json ├── package.json ├── samples-async(node6) ├── client.js ├── createTable.js ├── searchPaginateByToken.js ├── token.js └── transaction.js ├── samples ├── AutoIncrement.js ├── batchGetRow.js ├── batchWriteRow.js ├── browser │ ├── index.html │ └── tablestore-js-sdk-4.0.9.min.js ├── client.js ├── computeSplits.js ├── conditionUpdateRow.js ├── createGlobalIndex.js ├── createSearchIndex.js ├── createTable.js ├── createTableWithGlobalIndex.js ├── deleteGlobalIndex.js ├── deleteRow.js ├── deleteSearchIndex.js ├── deleteTable.js ├── describeSearchIndex.js ├── describeTable.js ├── domainError.js ├── getRange.js ├── getRow.js ├── getRowByGlobalIndex.js ├── increment.js ├── listSearchIndex.js ├── listTable.js ├── multiVersion.js ├── parallelScan.js ├── primarykey.js ├── putRow.js ├── search.js ├── searchPaginateByToken.js ├── sql.js ├── stsTokenClient.js ├── updateRow.js ├── updateSearchIndex.js └── updateTable.js └── test ├── data_opration.test.js ├── globalIndex.test.js ├── long.test.js ├── ots_test_utils.js ├── performance.js ├── protocol └── plain_buffer.test.js ├── search.test.js ├── search_old.test.js ├── sql.test.js └── table.test.js /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | *.log 3 | node_modules 4 | bower_components 5 | .idea 6 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: node_js 2 | node_js: 3 | - '6' 4 | - '7' 5 | env: 6 | global: 7 | - secure: RtRr64HmPex79+75v6tKK0UANu9XOgePRJdV/MEg+cdmlkJc4PYozZetNCtFtS74rYfis85eYobNrmCcVXUzx9gLiA/Wha5KWe9fJfQXOfj/HiS2iVGqnjXICSPYC0UoXONM+zR4xxz0pRrqVudaPiRQdy3761LmzfHzHbFvOrMAAAfcj3CV5sopIyLhDs2VIeunIAXk1GUEkUtdxhaDFCecH7zU5x+0obGo93xOvyOa9AfCZqBSiFYVOdcjPmhWEN6Ft15PxGpl2MSQvjFwMrUCGrmjWgmhiVfxGun/1DzROBOB1PFl/xSdkKveqjBSi2vKS112xDqx2dWju98eLdFYp2Nt+pAWqzyTRyRgf+cY8Mhtcfw4xNp1cJBzbq1Y9zdh//CrRXCjY5/ezKGG23byDNrrnRyXlYFGj5sEx7naafT6xlX4JPprTsgYgce3epCwJhzaDtI4DhRNorS69LlTIN7f34Klx7wGqgwvPrO5N+dvADwxZuQeooMa3HKjaZxE79cshfq+A4IYBz+8eP/ENXXxxlX+lAS/uVugGrLZ01HWoLLVeYsHlpWSzN7jadVsOZLjAwSefFiyqosI9vAeGcowRA2Ejj27kwUZc2lDSQfmD3w4fP1OAXh24t3+ms21bAURuCqXXMe+eN2PjIHFGfnco91J8FxCJu/IQnA= 8 | - secure: 
ik8HmrGVFaw5+OJ/2mrapzmVw8tk2nIrAE7Uk05v3jk7g54kn/pqiTGP9auxt3GS8byQpdtom4wrDiSGDdsIBrf9P9stIVVvlYj5OGW/BIDu8ThBe6LUUGQAXOre+8mOKpsZlGTm3/kaO/ZPismtSBTDb0JUUMfYgXFPeLjOFGFyHSs8jacQAjeBspwez4KuquBs/M/Vngyh2QMolZKGbcOOYH3o+z2NJeZ7KFlxt1uh/vNWV1TE25HUVuEtyWMq2dLaGeDlEVLLxXGBDlBo6DPGytN2K6vAIKVP5jsUATsrbBTCJCpeA2WQNHrkB8Bc3OC4NLpduJCyDWmDE3g2FDlmuNY8UK7Bu9eMUC5iWJFhnWrS0U255H0JDblOHQ93AKEt5kBsJP2zbkEtsQIt8FxpLNn47MzowS8j1YLGOJ2/TLI85x9nQyINAUg9A9uqxZDjffXjal7YPej6HZ+did780u1OJ+KZGQIrx70P/HMBdrqbQgFZNuBfYckQvKKKD3FNVo3OapbOuDbS5SMFiXh8D1vg2mkG46i4TBmI2Nlc+SA0QoP2oRwba/ruINRqJnpjE0kmN1azGdcsjiSg/npkhzH1Ruzn9vQttnGq3MWoXu5VlQ4rrT5bQSuS/TvXmWKFvKbCDBwTJfdIcAbR4p/+pciudXAuWPMbI0JnN9M= 9 | - secure: dTR2vorcn0ujW8deFsX2wi0HX8xOkam4PvWD3vKGgMHWz6c8cej3kGrbYg343bhYwEa2VUvfNpsR3sthTV/+6LR6D5FY/eWl+hYhs9/UYbEtidHiiDrx45P2gN4gDyf/QDoVzNoL7xQXK9R3vSyuwzsYIMHQO/pHSwuze5kPFqj5B3AfWaqSBuO7OQnwAkVc92dSnvlONJYEGeurmbNgcxr5COqDXsijIlJvOxWjpts3+SSRWUYqYD5O4LbImmouYubq3t5bBGZTo59p+n7n6xrCxQVn4Ljo9st0vUSbMoPPioJCbPSyfN0oDxBMgjFe6Mwffbga5PBb5LOk/CWPWTBndOBqcOWHojO2oyr2cfWTnKj+sLpxKuDwDasvSBZ08rwJUj6wKXADNjZD9Zj1owTLkRUVXfM7qB3ZCGYo2TNRyHAZK12LoyvrFn6URy09lS+6Mo31rC47dZClPjMl5+WLciUBOEujKuDp8gHE4rYEeeXX9XxeGntQub6JS2KDaqFcbt+LbUhIJw0wwZi3RvFhup03xsrZDeXgujOLZi93l2ieAcOL2AL/uA+mz63k3yXBDbg8xQfk1x8kxQzNYZIsgRYlo66NzaOE7NxG20l5p8Vs/YoOqG+ujW2KeRjenHpbvhPxugHeQdQqqngqXou9Om76UoE3tdWtFtw0BDE= 10 | - secure: S7hjerRyKwysSLD75VURpBKg4jGtNmiC25PDgo3PogVW4rBvrIkHx2s9+xH7iO76pIsOWgtu2IAtEbLnpuhzebZZadqnJjD9LWbh8RPo3cr8p0fkbz8bRl22DmHYgO+CKtwtzNyp66Nz5ALeijqvHo978a7XuIpR/VADFkixQRTXZmP16FqzQVQTdEFarTthBChb00GOPcvd/97r+V6NA1qQvnOifrp0DoaXhl5f/Dve/bcGw2lEPtmPmFLNSlOA2LrqNMQFGXdK/mxIJWjSIX8s5HncuGd1qEkRO3htd125sJl6u5ups0Q2SU0P7ZKOotFGjoPdCfMm37xUgA+NPyrhRZDWasAM80/69CrxPA+4duPyrhgfkA0DiLBugete+CtuD4zO2iJdOsk06vT/n5BxELXt9jq04COm89Ft/XNNoaPvY1b/I/gPFk2fjZ33CIPSBcSqWwOusVVuymxXLsfewnP7C167a5f+o9QrVM4DYcT1IqJ7kwXTCwOs9H6r/BLfP7PxEdSGUMBhg4fJjrwFzuO7ncYwgMnxDSnClamnF2i6JYnjdzh4PkuCX/Canm7q9N7wK3O447esE1gN3UZ1AXOmfUWNItybWCQ1hG5OUmjYtP0ylFR0473gDz3TA+kQf4szKqYAxqjYojQe3+/llUR+oGs7dnDnQDbawa8= 11 | branches: 12 | only: 13 | - master 14 | install: 15 | - npm install 16 | script: 17 | - npm test 18 | after_success: 19 | - npm run coveralls 20 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 
25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. 
If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. 
Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright {yyyy} {name of copyright owner} 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | -------------------------------------------------------------------------------- /README-EN.md: -------------------------------------------------------------------------------- 1 | # Aliyun Tablestore SDK for Node.js 2 | 3 | [![License Status](https://img.shields.io/badge/license-apache2-brightgreen.svg)](https://travis-ci.org/aliyun/aliyun-tablestore-nodejs-sdk) 4 | [![GitHub version](https://badge.fury.io/gh/aliyun%2Faliyun-tablestore-nodejs-sdk.svg)](https://badge.fury.io/gh/aliyun%2Faliyun-tablestore-nodejs-sdk) 5 | [![Build Status](https://travis-ci.org/aliyun/aliyun-tablestore-nodejs-sdk.svg?branch=master)](https://travis-ci.org/aliyun/aliyun-tablestore-nodejs-sdk) 6 | [![Coverage Status](https://coveralls.io/repos/github/aliyun/aliyun-tablestore-nodejs-sdk/badge.svg?branch=master)](https://coveralls.io/github/aliyun/aliyun-tablestore-nodejs-sdk?branch=master) 7 | 8 | ## [中文版 (Link to the Chinese README)](README.md) 9 | 10 | ## About 11 | - This NodeJs SDK is a client library for the [Alibaba Cloud Tablestore Service](http://www.aliyun.com/product/ots/) API. 12 | - Alibaba Cloud Tablestore is a NoSQL database service built on Alibaba Cloud’s Apsara distributed operating system that can store and access large volumes of structured data in real time. 13 | 14 | 15 | ## Version Feature Updates 16 | - Version: 5.6.0 17 | - Remove domain dependency. 18 | - Version: 5.5.1 19 | - Bug fix in _parseSearchHit function. 20 | - Version: 5.5.0 21 | - Search: GroupByDateHistogram 22 | - GroupByHistogram Support Offset Parameter 23 | - DescribeTable API Support IndexSyncPhase 24 | - DescribeSearchIndex Support IndexStatus 25 | - Search: GroupByGeoGrid 26 | - Search: Function Score 27 | - Search: KnnVectorQuery 28 | - Search: Highlight 29 | - Nested Query Highlight 30 | - Search: GroupByComposite 31 | - Field Sort Support Missing Field Parameter 32 | - Version: 5.4.1 33 | - fix wrong time unit in default_retry_policy.js 34 | - Version: 5.3.1 35 | - Modified the writing style of some js, compatible with Deno 36 | - Version: 5.3.0 37 | - SQLPayloadVersion no longer supports SQL_PLAIN_BUFFER 38 | - Version: 5.2.2 39 | - Search Support Collapse 40 | - Version: 5.2.1 41 | - SQL use default SQLPayloadVersion SQL_FLAT_BUFFERS 42 | - Version: 5.2.0 43 | - Added Support for SQL 44 | - Added Support for Search Agg and GroupBy 45 | - Added Support for Search ColumnReturnType RETURN_ALL_FROM_INDEX 46 | - Added Support for Search Query Weight Parameter 47 | - Added Support for ComputeSplits 48 | - Added Support for ParallelScan 49 | - Added Support for UpdateSearchIndex 50 | - UpdateSearchIndex supports the return of measurement, TTL, createTime and other information 51 | - CreateSearchIndex supports dynamic schema modification and TTL 52 | - Added Support for Table allowUpdate Parameter 53 | - Modified Retry Policy 54 | - Carry RequestId in Error 55 | - Version: 4.3.2 56 | - Bug fixed: Queries are now supported using the "Long" datatype 57 | - Added Support for SearchIndex 58 | - Added Support for GlobalIndex 59 | - Added Support for Atomic Addition 60 | - Added Support for Transactions 61 | - Added Support for ExistsQuery 62 | 63 | ## Installation 64 | 65 | ```sh 66 | npm install tablestore 67 | ``` 68 | 69 | ## Getting Started 70 | To get started, we recommend taking a look at the samples found in the "samples/" directory. 
71 | You can configure the samples for use by setting the correct relevant parameters for your Tablestore instance by modifying the "samples/client.js" file (modify the values for 'accessKeyId', 'secretAccessKey', 'endpoint' and 'instancename' accordingly). 72 | 73 | 74 | 75 | ## Contributing 76 | - We welcome everyone to contribute code to the Tablestore NodeJs SDK and other Tablestore SDKs. 77 | 78 | ## Contact US 79 | - [Alibaba Cloud Tablestore Official Product Page](https://www.alibabacloud.com/product/table-store) 80 | - [Alibaba Cloud Official Support Forum](https://www.alibabacloud.com/forum?) 81 | - [Alibaba Cloud Tablestore Official Documentation](https://www.alibabacloud.com/help/product/27278.htm) 82 | - [Alibaba Cloud Official Blog](https://www.alibabacloud.com/blog) 83 | - [Create Alibaba Cloud Support Ticket (Must be logged in)](https://workorder.console.aliyun.com/#/ticket/createIndex) 84 | 85 | ### Join our Tablestore Chat Group(GroupNumber:23307953) on [DingDing Talk](https://www.dingtalk.com/en) 86 | ![Image text](img/QRCode-EN.png) 87 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Aliyun TableStore SDK for Node.js 2 | 3 | [![License Status](https://img.shields.io/badge/license-apache2-brightgreen.svg)](https://travis-ci.org/aliyun/aliyun-tablestore-nodejs-sdk) 4 | [![GitHub version](https://badge.fury.io/gh/aliyun%2Faliyun-tablestore-nodejs-sdk.svg)](https://badge.fury.io/gh/aliyun%2Faliyun-tablestore-nodejs-sdk) 5 | [![Build Status](https://travis-ci.org/aliyun/aliyun-tablestore-nodejs-sdk.svg?branch=master)](https://travis-ci.org/aliyun/aliyun-tablestore-nodejs-sdk) 6 | [![Coverage Status](https://coveralls.io/repos/github/aliyun/aliyun-tablestore-nodejs-sdk/badge.svg?branch=master)](https://coveralls.io/github/aliyun/aliyun-tablestore-nodejs-sdk?branch=master) 7 | 8 | ## [Click here for the English README](README-EN.md) 9 | 10 | ## 关于 11 | - 此NodeJs SDK基于[阿里云表格存储服务](http://www.aliyun.com/product/ots/) API构建。 12 | - 阿里云表格存储是阿里云自主研发的NoSQL数据存储服务,提供海量结构化数据的存储和实时访问。 13 | 14 | 15 | ## 版本特性 16 | - 版本: 5.6.0 17 | - Remove domain dependency. 18 | - 版本: 5.5.1 19 | - Bug fix in _parseSearchHit function. 
20 | - 版本: 5.5.0 21 | - Search 新增:GroupByDateHistogram 功能 22 | - GroupByHistogram 支持 offset 参数 23 | - DescribeTable API 新增 IndexSyncPhase 返回 24 | - DescribeSearchIndex 新增 IndexStatus 返回 25 | - Search 新增:GroupByGeoGrid 功能 26 | - Search 新增:完整 Function Score 功能 27 | - Search 新增:KnnVectoryQuery 功能 28 | - Search 新增:Highlight 功能 29 | - Nested Query 支持 Highlight 参数 30 | - Search 新增:GroupByComposite 功能 31 | - Field Sort 支持 Missing Field 参数 32 | - 版本:5.4.1 33 | - 修复默认重试策略中的默认值 34 | - 版本:5.3.1 35 | - proto 不再使用废弃的web标准,以兼容 Deno 36 | - 版本:5.3.0 37 | - SQL 序列化协议去除对 SQL_PLAIN_BUFFER 的支持,使用性能更好的 SQL_FLAT_BUFFERS 38 | - 版本:5.2.2 39 | - Search 支持折叠功能 40 | - 版本:5.2.1 41 | - SQL使用默认序列化协议SQL_FLAT_BUFFERS 42 | - 版本:5.2.0 43 | - 新增 SQL 接口 44 | - Search 新增:统计聚合功能 45 | - Search ColumnReturnType 支持 RETURN_ALL_FROM_INDEX 46 | - Search 部分 Query 支持 weight 参数 47 | - 新增 ComputeSplits 接口 48 | - 新增 ParallelScan 接口 49 | - 新增 UpdateSearchIndex 接口 50 | - DescribeSearchIndex 可以获取: 计量、创建时间、TTL 等信息 51 | - 创建 SearchIndex 支持: 动态修改schema、TTL 52 | - 主表支持 allowUpdate 的修改 53 | - 默认重试策略支持 Search 和 SQLQuery 54 | - 出错时携带 RequestId 55 | - 版本:5.1.2 56 | - 修复 创建含增量二级索引问题修复 57 | - 版本:5.1.1 58 | - 修复 10_000引入的低版本node不识别问题 59 | - 版本:5.1.0 60 | - 多元索引支持日期类型 61 | - 多元索引查询SearchAPI支持设置单独的timeout 62 | - 多元索引排序支持missing 63 | - 创建多元索引支持设置分词、分词参数、虚拟列 64 | - 版本:5.0.6 65 | - 升级protobufjs依赖(4.1.2 -> 6.8.8) 66 | - 请求参数向前兼容,返回参数字段改变:下划线变为驼峰式(与request保持一致) 67 | - 版本:4.3.2 68 | - 修复Query不支持Long类型查询问题 69 | - 支持SearchIndex 70 | - 支持GlobalIndex 71 | - 支持原子加 72 | - 支持事务 73 | - 支持ExistsQuery 74 | 75 | ## 安装 76 | 77 | ```sh 78 | npm install tablestore 79 | ``` 80 | 81 | ## 使用方法 82 | 参考在samples目录下的代码示例,将samples/client.js文件中的相关参数修改为自己实例的参数即可。 83 | 84 | ## 贡献代码 85 | - 我们非常欢迎大家为TableStore NodeJs SDK以及其他TableStore SDK贡献代码 86 | 87 | ## 联系我们 88 | - [阿里云TableStore官方网站](http://www.aliyun.com/product/ots) 89 | - [阿里云TableStore官方论坛](http://bbs.aliyun.com) 90 | - [阿里云TableStore官方文档中心](https://help.aliyun.com/product/8315004_ots.html) 91 | - [阿里云云栖社区](http://yq.aliyun.com) 92 | - [阿里云工单系统](https://workorder.console.aliyun.com/#/ticket/createIndex) 93 | 94 | ### 扫码加入TableStore钉钉讨论群(群号:23307953),和我们直接交流讨论 95 | ![Image text](img/QRCode.png) 96 | -------------------------------------------------------------------------------- /bower.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "bower", 3 | "description": "TableStore SDK for JavaScript", 4 | "main": "index.js", 5 | "authors": [ 6 | "boxiao.wpl" 7 | ], 8 | "license": "Apache-2.0", 9 | "keywords": [ 10 | "tablestore", 11 | "ots" 12 | ], 13 | "homepage": "https://github.com/aliyun/aliyun-tablestore-nodejs-sdk", 14 | "ignore": [ 15 | "**/.*", 16 | "node_modules", 17 | "bower_components", 18 | "test*", 19 | "samples", 20 | "samples-async(node6)", 21 | "browser.js", 22 | "index.js" 23 | ], 24 | "devDependencies": { 25 | "spark-md5": "~1.0.0", 26 | "jsSHA": "~2.0.1" 27 | }, 28 | "moduleType": [ 29 | "globals" 30 | ] 31 | } 32 | -------------------------------------------------------------------------------- /browser.js: -------------------------------------------------------------------------------- 1 | window.TableStore = module.exports = require('./lib/core'); 2 | require('./lib/http/xhr'); 3 | -------------------------------------------------------------------------------- /browser.md: -------------------------------------------------------------------------------- 1 | ## 在浏览器端使用 2 | 3 | 现在 tablestore-js-sdk 只支持特定的实例在浏览器端调用, 如果需要在浏览器中使用,请通过钉钉联系我们,将您的实例在后端做一下处理。 4 | 使用方式请参考 
sample/browser/index.html。 5 | 6 | ### 如何 build 7 | 8 | 目前在浏览器端运行的 sdk 还在测试阶段, 如果有问题请随时提出, 如果需要自己转换js sdk请按照以下步骤操作: 9 | 10 | - git clone https://github.com/aliyun/aliyun-tablestore-nodejs-sdk.git 11 | - cd aliyun-tablestore-nodejs-sdk 12 | - npm install 13 | - bower install 14 | - npm install -g browserify 15 | 16 | ```sh 17 | browserify browser.js > tablestore-js-sdk.js 18 | ``` 19 | 20 | 使用uglifyjs压缩: 21 | ```sh 22 | uglifyjs tablestore-js-sdk.js -o tablestore-js-sdk.min.js 23 | ``` 24 | 25 | ## 初始化 26 | 27 | 考虑安全问题,请使用 STS token 初始化 TableStore Client 28 | 29 | ```javascript 30 | var stsTokenClient = new TableStore.Client({ 31 | accessKeyId: "sts token 中的 accessKeyId", 32 | secretAccessKey: "sts token 中的 secretAccessKey", 33 | stsToken: "sts token 中的 securityToken", 34 | endpoint: ' ', 35 | instancename: '' 36 | }); 37 | ``` 38 | 39 | -------------------------------------------------------------------------------- /img/QRCode-EN.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aliyun/aliyun-tablestore-nodejs-sdk/17fbd4469f59aba86d2eba51cfd099d03a39e84b/img/QRCode-EN.png -------------------------------------------------------------------------------- /img/QRCode.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aliyun/aliyun-tablestore-nodejs-sdk/17fbd4469f59aba86d2eba51cfd099d03a39e84b/img/QRCode.png -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | var TableStore = require('./lib/core'); 2 | module.exports = TableStore; 3 | -------------------------------------------------------------------------------- /lib/client.js: -------------------------------------------------------------------------------- 1 | var TableStore = require('./core'); 2 | var inherit = TableStore.util.inherit; 3 | 4 | var capitalizeFirstLetter = function (string) { 5 | return string.charAt(0).toUpperCase() + string.slice(1); 6 | }; 7 | 8 | TableStore.Client = inherit({ 9 | constructor: function Client(config) { 10 | this.config = new TableStore.Config(config); 11 | }, 12 | 13 | setupRequestListeners: function setupRequestListeners(request) { 14 | request.addListener('build', this.populateHeader); 15 | request.addListener('build', this.populateURI); 16 | request.addListener('build', this.buildContent); 17 | request.addListener('build', this.computeContentMd5); 18 | request.addListener('extractError', this.extractError); 19 | request.addListener('extractData', this.extractData); 20 | }, 21 | 22 | populateURI: function populateURI(req) { 23 | var httpRequest = req.httpRequest; 24 | httpRequest.endpoint.host = httpRequest.endpoint.hostname; 25 | httpRequest.path = '/' + TableStore.util.string.upperFirst(req.operation); 26 | }, 27 | 28 | populateHeader: function populateHeader(req) { 29 | var httpRequest = req.httpRequest; 30 | httpRequest.headers['x-ots-apiversion'] = '2015-12-31'; 31 | httpRequest.headers['x-ots-instancename'] = req.config.instancename; 32 | }, 33 | 34 | buildContent: function buildContent(req) { 35 | var request = TableStore.encoder.encode(req.operation, req.params); 36 | var buffer = Object.getPrototypeOf(request).constructor.encode(request); 37 | req.httpRequest.body = buffer.finish(); 38 | }, 39 | 40 | computeContentMd5: function computeContentMd5(req) { 41 | var md5 = TableStore.util.crypto.md5(req.httpRequest.body, 'base64'); 42 | 
req.httpRequest.headers['x-ots-contentmd5'] = md5; 43 | }, 44 | 45 | /** 46 | * Provides a specialized parser for getBucketLocation -- all other 47 | * operations are parsed by the super class. 48 | * 49 | * @api private 50 | */ 51 | extractData: function extractData(resp) { 52 | resp.data = TableStore.decoder.decode(resp.request.operation, resp.httpResponse.body); 53 | 54 | // extract request id 55 | resp.data.RequestId = resp.httpResponse.headers['x-ots-request-id'] || 56 | resp.httpResponse.headers['x-ots-requestid']; 57 | }, 58 | 59 | /** 60 | * Extracts an error object from the http response. 61 | * 62 | * @api private 63 | */ 64 | extractError: function extractError(resp) { 65 | var codes = { 66 | 304: 'NotModified', 67 | 403: 'Forbidden', 68 | 400: 'BadRequest', 69 | 404: 'NotFound' 70 | }; 71 | 72 | var code = resp.httpResponse.statusCode; 73 | var body = resp.httpResponse.body; 74 | let requestId = resp.httpResponse.headers['x-ots-request-id'] || resp.httpResponse.headers['x-ots-requestid'] || ""; 75 | if (codes[code] && body.length === 0) { 76 | resp.error = TableStore.util.error(new Error(), { 77 | code: codes[resp.httpResponse.statusCode], 78 | message: requestId, 79 | headers: resp.httpResponse.headers 80 | }); 81 | } else { 82 | var data; 83 | try { 84 | data = new TableStore.XML.Parser({}).parse(body.toString()); 85 | resp.error = TableStore.util.error(new Error(), { 86 | code: data.Code || code, 87 | message: data.Message || requestId, 88 | headers: resp.httpResponse.headers 89 | }); 90 | } 91 | catch (e) { 92 | data = body.toString() + " requestId:" + requestId; 93 | resp.error = TableStore.util.error(new Error(), { 94 | code: code, 95 | message: data, 96 | headers: resp.httpResponse.headers 97 | }); 98 | } 99 | } 100 | }, 101 | 102 | /** 103 | * Calls an operation on a service with the given input parameters. 104 | * 105 | * @param operation [String] the name of the operation to call on the service. 106 | * @param params [map] a map of input options for the operation 107 | * @callback callback function(err, data) 108 | * If a callback is supplied, it is called when a response is returned 109 | * from the service. 110 | * @param err [Error] the error object returned from the request. 111 | * Set to `null` if the request is successful. 112 | * @param data [Object] the de-serialized data returned from 113 | * the request. Set to `null` if a request error occurs. 
114 | */ 115 | makeRequest: function makeRequest(operation, params, callback) { 116 | if (typeof params === 'function') { 117 | callback = params; 118 | params = null; 119 | } 120 | 121 | var request = new TableStore.Request(this.config, operation, params); 122 | this.addAllRequestListeners(request); 123 | if (callback) { 124 | request.send(callback); 125 | return request; 126 | } else { 127 | return new Promise(function (resolve, reject) { 128 | request.send(function (err, data) { 129 | if (err) { 130 | return reject(err); 131 | } 132 | resolve(data); 133 | }); 134 | }); 135 | } 136 | }, 137 | 138 | /** 139 | * @api private 140 | */ 141 | addAllRequestListeners: function addAllRequestListeners(request) { 142 | var list = [TableStore.events, TableStore.EventListeners.Core]; 143 | for (var i = 0; i < list.length; i++) { 144 | if (list[i]) request.addListeners(list[i]); 145 | } 146 | 147 | if (this.config.logger) { // add logging events 148 | request.addListeners(TableStore.EventListeners.Logger); 149 | } 150 | 151 | this.setupRequestListeners(request); 152 | }, 153 | 154 | /********************************** 表操作 开始 ******************************************/ 155 | /** 156 | * 根据给定的表结构信息创建相应的表。 157 | */ 158 | createTable: function createTable(params, callback) { 159 | return this.makeRequest('createTable', params, callback); 160 | }, 161 | 162 | /** 163 | * 获取当前实例下已创建的所有表的表名。 164 | */ 165 | listTable: function listTable(params, callback) { 166 | return this.makeRequest('listTable', params, callback); 167 | }, 168 | 169 | /** 170 | * 删除本实例下指定的表。 171 | */ 172 | deleteTable: function deleteTable(params, callback) { 173 | return this.makeRequest('deleteTable', params, callback); 174 | }, 175 | 176 | /** 177 | * 更新指定表的预留读吞吐量或预留写吞吐量设置。 178 | */ 179 | updateTable: function describeTable(params, callback) { 180 | return this.makeRequest('updateTable', params, callback); 181 | }, 182 | 183 | /** 184 | * 查询指定表的结构信息和预留读/写吞吐量设置信息。 185 | */ 186 | describeTable: function describeTable(params, callback) { 187 | return this.makeRequest('describeTable', params, callback); 188 | }, 189 | 190 | /********************************** 表操作 结束 ******************************************/ 191 | 192 | 193 | /********************************** 数据操作 开始 ******************************************/ 194 | 195 | /** 196 | * 根据给定的主键读取单行数据。 197 | */ 198 | getRow: function getRow(params, callback) { 199 | return this.makeRequest('getRow', params, callback); 200 | }, 201 | 202 | /** 203 | * 插入数据到指定的行,如果该行不存在,则新增一行;若该行存在,则覆盖原有行。 204 | */ 205 | putRow: function putRow(params, callback) { 206 | return this.makeRequest('putRow', params, callback); 207 | }, 208 | 209 | /** 210 | * 更新指定行的数据。如果该行不存在,则新增一行;若该行存在,则根据请求的内容在这一行中新增、修改或者删除指定列的值。 211 | */ 212 | updateRow: function updateRow(params, callback) { 213 | return this.makeRequest('updateRow', params, callback); 214 | }, 215 | 216 | /** 217 | * 删除一行数据。 218 | */ 219 | deleteRow: function deleteRow(params, callback) { 220 | return this.makeRequest('deleteRow', params, callback); 221 | }, 222 | 223 | /** 224 | * 读取指定主键范围内的数据。 225 | */ 226 | getRange: function getRange(params, callback) { 227 | return this.makeRequest('getRange', params, callback); 228 | }, 229 | 230 | /** 231 | * 批量读取一个或多个表中的若干行数据。 232 | */ 233 | batchGetRow: function batchGetRow(params, callback) { 234 | return this.makeRequest('batchGetRow', params, callback); 235 | }, 236 | 237 | /** 238 | * 批量修改行 239 | */ 240 | batchWriteRow: function batchWriteRow(params, callback) { 241 | return this.makeRequest('batchWriteRow', 
params, callback); 242 | }, 243 | 244 | /** 245 | * 获取表下所有SearchIndex索引名。 246 | */ 247 | listSearchIndex: function listSearchIndex(params, callback) { 248 | return this.makeRequest('listSearchIndex', params, callback); 249 | }, 250 | 251 | /** 252 | * 获取SearchIndex索引描述信息。 253 | */ 254 | describeSearchIndex: function describeSearchIndex(params, callback) { 255 | return this.makeRequest('describeSearchIndex', params, callback); 256 | }, 257 | 258 | /** 259 | * 更新SearchIndex索引信息。 260 | */ 261 | updateSearchIndex: function updateSearchIndex(params, callback) { 262 | return this.makeRequest('updateSearchIndex', params, callback); 263 | }, 264 | 265 | /** 266 | * SearchIndex创建新索引。 267 | */ 268 | createSearchIndex: function createSearchIndex(params, callback) { 269 | return this.makeRequest('createSearchIndex', params, callback); 270 | }, 271 | 272 | /** 273 | * SearchIndex删除索引。 274 | */ 275 | deleteSearchIndex: function deleteSearchIndex(params, callback) { 276 | return this.makeRequest('deleteSearchIndex', params, callback); 277 | }, 278 | 279 | 280 | /** 281 | * SearchIndex搜索。 282 | */ 283 | search: function search(params, callback) { 284 | return this.makeRequest('search', params, callback); 285 | }, 286 | 287 | /** 288 | * 获取索引分裂信息。 289 | */ 290 | computeSplits: function computeSplits(params, callback) { 291 | return this.makeRequest('computeSplits', params, callback); 292 | }, 293 | 294 | /** 295 | * 批量导出数据。 296 | */ 297 | parallelScan: function parallelScan(params, callback) { 298 | return this.makeRequest('parallelScan', params, callback); 299 | }, 300 | 301 | /** 302 | * 创建GlobalIndex索引名。 303 | */ 304 | createIndex: function createIndex(params, callback) { 305 | return this.makeRequest('createIndex', params, callback); 306 | }, 307 | 308 | /** 309 | * 删除GlobalIndex索引名。 310 | */ 311 | dropIndex: function dropIndex(params, callback) { 312 | return this.makeRequest('dropIndex', params, callback); 313 | }, 314 | 315 | /** 316 | * 创建局部事务 317 | */ 318 | startLocalTransaction: function startLocalTransaction(params, callback) { 319 | return this.makeRequest('startLocalTransaction', params, callback) 320 | }, 321 | 322 | /** 323 | * 提交事务 324 | */ 325 | commitTransaction: function commitTransaction(params, callback) { 326 | return this.makeRequest('commitTransaction', params, callback) 327 | }, 328 | 329 | /** 330 | * 丢弃事务 331 | */ 332 | abortTransaction: function abortTransaction(params, callback) { 333 | return this.makeRequest('abortTransaction', params, callback) 334 | }, 335 | 336 | /** 337 | * sqlQuery 338 | */ 339 | sqlQuery: function sqlQuery(params, callback) { 340 | return this.makeRequest('SQLQuery', params, callback); 341 | }, 342 | 343 | /********************************** 数据操作 结束 ******************************************/ 344 | }); 345 | -------------------------------------------------------------------------------- /lib/config.js: -------------------------------------------------------------------------------- 1 | var TableStore = require('./core'); 2 | 3 | TableStore.Config = TableStore.util.inherit({ 4 | 5 | constructor: function Config(options) { 6 | if (options === undefined) options = {}; 7 | 8 | TableStore.util.each.call(this, this.keys, function (key, value) { 9 | var optionVal = options[key]; 10 | if (typeof (options[key]) === 'string') { 11 | optionVal = options[key].replace(/^\s+/g, ''); 12 | } 13 | this.set(key, optionVal, value); 14 | }); 15 | }, 16 | 17 | clear: function clear() { 18 | /*jshint forin:false */ 19 | TableStore.util.each.call(this, this.keys, function 
(key) { 20 | delete this[key]; 21 | }); 22 | 23 | // reset credential provider 24 | this.set('credentials', undefined); 25 | this.set('credentialProvider', undefined); 26 | }, 27 | 28 | getCredentials: function getCredentials() { 29 | return { 30 | accessKeyId: this.accessKeyId, 31 | secretAccessKey: this.secretAccessKey || this.accessKeySecret, 32 | securityToken: this.stsToken || this.securityToken 33 | }; 34 | }, 35 | 36 | /** 37 | * Sets a property on the configuration object, allowing for a 38 | * default value 39 | * @api private 40 | */ 41 | set: function set(property, value, defaultValue) { 42 | if (value === undefined) { 43 | if (defaultValue === undefined) { 44 | defaultValue = this.keys[property]; 45 | } 46 | if (typeof defaultValue === 'function') { 47 | this[property] = defaultValue.call(this); 48 | } else { 49 | this[property] = defaultValue; 50 | } 51 | } else { 52 | this[property] = value; 53 | } 54 | }, 55 | 56 | keys: { 57 | accessKeyId: null, 58 | secretAccessKey: null, 59 | accessKeySecret: null, //the same with secretAccessKey 60 | stsToken: null, 61 | securityToken: null, // the same with stsToken 62 | logger: null, 63 | endpoint: undefined, 64 | httpOptions: {},//timeout,maxSockets。default maxSockets = 300 65 | maxRetries: undefined, 66 | instancename: undefined, 67 | computeChecksums: true, 68 | } 69 | }); 70 | -------------------------------------------------------------------------------- /lib/core.js: -------------------------------------------------------------------------------- 1 | /** 2 | * The main TableStore namespace 3 | */ 4 | var TableStore = {}; 5 | module.exports = TableStore; 6 | require('./util'); 7 | require('./metadata'); 8 | require('./long'); 9 | require('./protocol/plain_buffer_consts'); 10 | require('./protocol/plain_buffer_crc8'); 11 | require('./protocol/plain_buffer_stream'); 12 | require('./protocol/plain_buffer_coded_stream'); 13 | require('./protocol/plian_buffer_builder'); 14 | require('./filter'); 15 | require('./protocol/encoder'); 16 | require('./protocol/decoder'); 17 | require('./metadata'); 18 | require('./config'); 19 | require('./http'); 20 | require('./sequential_executor'); 21 | require('./event_listeners'); 22 | require('./request'); 23 | require('./signer'); 24 | TableStore.events = new TableStore.SequentialExecutor(); 25 | require('./http/node'); 26 | require('./retry/retry_util'); 27 | require('./retry/default_retry_policy'); 28 | require('./client'); 29 | require('./search'); 30 | require('./sql'); 31 | -------------------------------------------------------------------------------- /lib/event_listeners.js: -------------------------------------------------------------------------------- 1 | var TableStore = require('./core'); 2 | require('./sequential_executor'); 3 | 4 | TableStore.EventListeners = { 5 | Core: {} 6 | }; 7 | 8 | TableStore.EventListeners = { 9 | Core: new TableStore.SequentialExecutor().addNamedListeners(function (add, addAsync) { 10 | 11 | add('SET_CONTENT_LENGTH', 'afterBuild', function SET_CONTENT_LENGTH(req) { 12 | if (req.httpRequest.headers['Content-Length'] === undefined) { 13 | var length = TableStore.util.string.byteLength(req.httpRequest.body); 14 | req.httpRequest.headers['Content-Length'] = length; 15 | } 16 | }); 17 | 18 | add('SET_HTTP_HOST', 'afterBuild', function SET_HTTP_HOST(req) { 19 | req.httpRequest.headers['Host'] = req.httpRequest.endpoint.host; 20 | }); 21 | 22 | addAsync('SIGN', 'sign', function SIGN(req, done) { 23 | var credentials = req.config.getCredentials(); 24 | 25 | try { 26 
| var date = TableStore.util.date.getDate(); 27 | var signer = new TableStore.Signer(req.httpRequest); 28 | 29 | // add new authorization 30 | signer.addAuthorization(credentials, date); 31 | } catch (e) { 32 | req.response.error = e; 33 | } 34 | done(); 35 | }); 36 | 37 | add('VALIDATE_RESPONSE', 'validateResponse', function VALIDATE_RESPONSE(resp) { 38 | if (resp.httpResponse.statusCode < 300) { 39 | resp.data = {}; 40 | resp.error = null; 41 | } else { 42 | resp.data = null; 43 | resp.error = TableStore.util.error(new Error(), 44 | { code: 'UnknownError', message: 'An unknown error occurred.' }); 45 | } 46 | }); 47 | 48 | addAsync('SEND', 'send', function SEND(resp, done) { 49 | function callback(httpResp) { 50 | resp.httpResponse.stream = httpResp; 51 | resp.httpResponse._abortCallback = done; 52 | 53 | httpResp.on('headers', function onHeaders(statusCode, headers) { 54 | resp.request.emit('httpHeaders', [statusCode, headers, resp]); 55 | 56 | if (!resp.request.httpRequest._streaming) { 57 | if (TableStore.HttpClient.streamsApiVersion === 2) { // streams2 API check 58 | httpResp.on('readable', function onReadable() { 59 | var data = httpResp.read(); 60 | if (data !== null) { 61 | resp.request.emit('httpData', [data, resp]); 62 | } 63 | }); 64 | } else { // legacy streams API 65 | httpResp.on('data', function onData(data) { 66 | resp.request.emit('httpData', [data, resp]); 67 | }); 68 | } 69 | } 70 | }); 71 | 72 | httpResp.on('end', function onEnd() { 73 | resp.request.emit('httpDone'); 74 | done(); 75 | }); 76 | } 77 | 78 | function progress(httpResp) { 79 | httpResp.on('sendProgress', function onSendProgress(progress) { 80 | resp.request.emit('httpUploadProgress', [progress, resp]); 81 | }); 82 | 83 | httpResp.on('receiveProgress', function onReceiveProgress(progress) { 84 | resp.request.emit('httpDownloadProgress', [progress, resp]); 85 | }); 86 | } 87 | 88 | function error(err) { 89 | resp.error = TableStore.util.error(err, { 90 | code: 'NetworkingError', 91 | region: resp.request.httpRequest.region, 92 | hostname: resp.request.httpRequest.endpoint.hostname, 93 | retryable: true 94 | }); 95 | resp.request.emit('httpError', [resp.error, resp], function () { 96 | done(); 97 | }); 98 | } 99 | 100 | resp.error = null; 101 | resp.data = null; 102 | 103 | var http = TableStore.HttpClient.getInstance(); 104 | var httpOptions = resp.request.config.httpOptions || {}; 105 | //this.httpRequest.debug(); 106 | var s = http.handleRequest(this.httpRequest, httpOptions, callback, error); 107 | progress(s); 108 | }); 109 | 110 | add('HTTP_HEADERS', 'httpHeaders', function HTTP_HEADERS(statusCode, headers, resp) { 111 | resp.httpResponse.statusCode = statusCode; 112 | resp.httpResponse.headers = headers; 113 | resp.httpResponse.body = new TableStore.util.Buffer(''); 114 | resp.httpResponse.buffers = []; 115 | resp.httpResponse.numBytes = 0; 116 | }); 117 | 118 | add('HTTP_DATA', 'httpData', function HTTP_DATA(chunk, resp) { 119 | if (chunk) { 120 | resp.httpResponse.numBytes += chunk.length; 121 | 122 | var total = resp.httpResponse.headers['content-length']; 123 | var progress = { loaded: resp.httpResponse.numBytes, total: total }; 124 | resp.request.emit('httpDownloadProgress', [progress, resp]); 125 | 126 | resp.httpResponse.buffers.push(new TableStore.util.Buffer(chunk)); 127 | } 128 | }); 129 | 130 | add('HTTP_DONE', 'httpDone', function HTTP_DONE(resp) { 131 | // convert buffers array into single buffer 132 | if (resp.httpResponse.buffers && resp.httpResponse.buffers.length > 0) { 133 | var 
body = TableStore.util.buffer.concat(resp.httpResponse.buffers); 134 | resp.httpResponse.body = body; 135 | } 136 | delete resp.httpResponse.numBytes; 137 | delete resp.httpResponse.buffers; 138 | }); 139 | 140 | add('RETRY_CHECK', 'retry', function FINALIZE_ERROR(resp) { 141 | if (resp.error) { 142 | resp.error.retryable = TableStore.DefaultRetryPolicy.shouldRetry(resp.retryCount, resp.error, resp.request.operation); 143 | } 144 | }); 145 | 146 | addAsync('RESET_RETRY_STATE', 'afterRetry', function RESET_RETRY_STATE(resp, done) { 147 | var delay, willRetry = false; 148 | 149 | if (resp.error) { 150 | delay = TableStore.DefaultRetryPolicy.getRetryDelay(resp.retryCount, resp.error); 151 | var maxRetryTimes = TableStore.DefaultRetryPolicy.maxRetryTimes; 152 | if (resp.error.retryable && resp.retryCount < maxRetryTimes) { 153 | resp.retryCount++; 154 | willRetry = true; 155 | } 156 | } 157 | 158 | if (willRetry) { 159 | resp.error = null; 160 | setTimeout(done, delay); 161 | } else { 162 | done(); 163 | } 164 | }); 165 | 166 | }), 167 | 168 | Logger: new TableStore.SequentialExecutor().addNamedListeners(function (add) { 169 | add('LOG_REQUEST', 'complete', function LOG_REQUEST(resp) { 170 | var req = resp.request; 171 | var logger = req.config.logger; 172 | if (!logger) return; 173 | 174 | function buildMessage() { 175 | var time = TableStore.util.date.getDate().getTime(); 176 | var delta = (time - req.startTime.getTime()) / 1000; 177 | var ansi = logger.isTTY ? true : false; 178 | var status = resp.httpResponse.statusCode; 179 | var params = require('util').inspect(req.params, true, true); 180 | 181 | var message = ''; 182 | if (ansi) message += '\x1B[33m'; 183 | message += '[TableStore ' + req.service.serviceIdentifier + ' ' + status; 184 | message += ' ' + delta.toString() + 's ' + resp.retryCount + ' retries]'; 185 | if (ansi) message += '\x1B[0;1m'; 186 | message += ' ' + req.operation + '(' + params + ')'; 187 | if (ansi) message += '\x1B[0m'; 188 | return message; 189 | } 190 | 191 | var message = buildMessage(); 192 | if (typeof logger.log === 'function') { 193 | logger.log(message); 194 | } else if (typeof logger.write === 'function') { 195 | logger.write(message + '\n'); 196 | } 197 | }); 198 | }), 199 | }; 200 | -------------------------------------------------------------------------------- /lib/filter.js: -------------------------------------------------------------------------------- 1 | var TableStore = require('./core'); 2 | var tsFilterProtos = require('./protocol/tablestore_compiled_proto.js').filter.proto; 3 | var inherit = TableStore.util.inherit; 4 | 5 | 6 | TableStore.LogicalOperator = { 7 | NOT: tsFilterProtos.LogicalOperator.LO_NOT, 8 | AND: tsFilterProtos.LogicalOperator.LO_AND, 9 | OR: tsFilterProtos.LogicalOperator.LO_OR, 10 | }; 11 | 12 | TableStore.ColumnConditionType = { 13 | COMPOSITE_COLUMN_CONDITION: 0, 14 | SINGLE_COLUMN_CONDITION: 1 15 | }; 16 | 17 | TableStore.ComparatorType = { 18 | EQUAL: tsFilterProtos.ComparatorType.CT_EQUAL, 19 | NOT_EQUAL: tsFilterProtos.ComparatorType.CT_NOT_EQUAL, 20 | GREATER_THAN: tsFilterProtos.ComparatorType.CT_GREATER_THAN, 21 | GREATER_EQUAL: tsFilterProtos.ComparatorType.CT_GREATER_EQUAL, 22 | LESS_THAN: tsFilterProtos.ComparatorType.CT_LESS_THAN, 23 | LESS_EQUAL: tsFilterProtos.ComparatorType.CT_LESS_EQUAL 24 | }; 25 | 26 | TableStore.RowExistenceExpectation = { 27 | IGNORE: 0, 28 | EXPECT_EXIST: 1, 29 | EXPECT_NOT_EXIST: 2 30 | }; 31 | 32 | TableStore.ColumnCondition = inherit({}); 33 | 34 | TableStore.CompositeCondition = 
inherit(TableStore.ColumnCondition, {
35 |     constructor: function (combinator) {
36 |         this.sub_conditions = [];
37 |         this.setCombinator(combinator);
38 |     },
39 |     getType: function () {
40 |         return tsFilterProtos.FilterType.FT_COMPOSITE_COLUMN_VALUE;//TableStore.ColumnConditionType.COMPOSITE_COLUMN_CONDITION
41 |     },
42 |     setCombinator: function (combinator) {
43 |         var isValidateValue = false;
44 |         for (var pro in TableStore.LogicalOperator) {
45 |             if (TableStore.LogicalOperator[pro] === combinator) {
46 |                 isValidateValue = true;
47 |                 break;
48 |             }
49 |         }
50 |
51 |         if (!isValidateValue) {
52 |             throw new Error("Expect input combinator should be one of TableStore.LogicalOperator");
53 |         }
54 |         this.combinator = combinator;
55 |     },
56 |     getCombinator: function () {
57 |         return this.combinator;
58 |     },
59 |     addSubCondition: function (condition) {
60 |         if (!(condition instanceof TableStore.ColumnCondition)) {
61 |             throw new Error("The input condition should be an instance of TableStore.ColumnCondition");
62 |         }
63 |
64 |         this.sub_conditions.push(condition);
65 |     },
66 |     clearSubCondition: function () {
67 |         this.sub_conditions = [];
68 |     }
69 | });
70 |
71 | TableStore.SingleColumnCondition = inherit(TableStore.ColumnCondition, {
72 |     constructor: function (columnName, columnValue, comparator, passIfMissing, latestVersionOnly) {
73 |         if (passIfMissing === undefined) {
74 |             passIfMissing = true;
75 |         }
76 |
77 |         if (latestVersionOnly === undefined) {
78 |             latestVersionOnly = true;
79 |         }
80 |
81 |         this.columnName = columnName;
82 |         this.columnValue = columnValue;
83 |
84 |         this.comparator = null;
85 |         this.passIfMissing = null;
86 |         this.latestVersionOnly = null;
87 |
88 |         this.setComparator(comparator);
89 |         this.setPassIfMissing(passIfMissing);
90 |         this.setLatestVersionOnly(latestVersionOnly);
91 |     },
92 |     getType: function () {
93 |         return tsFilterProtos.FilterType.FT_SINGLE_COLUMN_VALUE; //TableStore.ColumnConditionType.SINGLE_COLUMN_CONDITION;
94 |     },
95 |     setPassIfMissing: function (passIfMissing) {
96 |         /*
97 |         Sets ```passIfMissing```.
98 |
99 |         Because the attribute columns of an OTS row are not fixed, a column referenced by a condition may not exist in the row. In that case this
100 |         parameter controls the check result for the row.
101 |         If set to True, the condition check passes when the column does not exist in the row.
102 |         If set to False, the condition check fails when the column does not exist in the row.
103 |         The default value is True.
104 |         */
105 |         if (typeof passIfMissing !== 'boolean') {
106 |             throw new Error("The input passIfMissing should be an instance of Bool");
107 |         }
108 |         this.passIfMissing = passIfMissing;
109 |     },
110 |     getPassIfMissing: function () {
111 |         return this.passIfMissing;
112 |     },
113 |     setLatestVersionOnly: function (latestVersionOnly) {
114 |         if (typeof latestVersionOnly !== 'boolean') {
115 |             throw new Error("The input latestVersionOnly should be an instance of Bool");
116 |         }
117 |
118 |         this.latestVersionOnly = latestVersionOnly;
119 |     },
120 |     getLatestVersionOnly: function () {
121 |         return this.latestVersionOnly;
122 |     },
123 |     setColumnName: function (columnName) {
124 |         this.columnName = columnName;
125 |     },
126 |     getColumnName: function () {
127 |         return this.columnName;
128 |     },
129 |     setColumnValue: function (columnValue) {
130 |         this.columnValue = columnValue;
131 |     },
132 |     getColumnValue: function () {
133 |         return this.columnValue;
134 |     },
135 |     setComparator: function (comparator) {
136 |         var isValidateValue = false;
137 |         for (var pro in TableStore.ComparatorType) {
138 |             if (TableStore.ComparatorType[pro] === comparator) {
139 |                 isValidateValue = true;
140 |                 break;
141 |             }
142 |         }
143 |
144 |         if (!isValidateValue) {
145 |             throw new Error("Expect input comparator should be one of TableStore.ComparatorType");
146 |         }
147 |         this.comparator = comparator;
148 |     },
149 |     getComparator: function () {
150 |         return this.comparator;
151 |     }
152 | });
153 |
154 | TableStore.Condition = inherit({
155 |     constructor: function (rowExistenceExpectation, columnCondition) {
156 |         this.rowExistenceExpectation = null;
157 |         this.columnCondition = null;
158 |         if (undefined === columnCondition) {
159 |             columnCondition = null;
160 |         }
161 |
162 |         this.setRowExistenceExpectation(rowExistenceExpectation);
163 |         if (columnCondition != null) {
164 |             this.setColumnCondition(columnCondition);
165 |         }
166 |     },
167 |     setRowExistenceExpectation: function (rowExistenceExpectation) {
168 |         var isValidateValue = false;
169 |         for (var pro in TableStore.RowExistenceExpectation) {
170 |             if (TableStore.RowExistenceExpectation[pro] === rowExistenceExpectation) {
171 |                 isValidateValue = true;
172 |                 break;
173 |             }
174 |         }
175 |         if (!isValidateValue) {
176 |             throw new Error("Expect input rowExistenceExpectation should be one of TableStore.RowExistenceExpectation");
177 |         }
178 |
179 |         this.rowExistenceExpectation = rowExistenceExpectation;
180 |     },
181 |     getRowExistenceExpectation: function () {
182 |         return this.rowExistenceExpectation;
183 |     },
184 |
185 |     setColumnCondition: function (columnCondition) {
186 |         if (!(columnCondition instanceof TableStore.ColumnCondition)) {
187 |             throw new Error("The input columnCondition should be an instance of TableStore.ColumnCondition");
188 |         }
189 |         this.columnCondition = columnCondition;
190 |     },
191 |     getColumnCondition: function () {
192 |         return this.columnCondition;
193 |     }
194 | });
195 |
196 | TableStore.ColumnPaginationFilter = inherit({
197 |     constructor: function (limit, offset) {
198 |         this.limit = limit === undefined ? 1 : limit;
199 |         this.offset = offset === undefined ? 0 : offset;
200 |     },
201 |     getType: function () {
202 |         return tsFilterProtos.FilterType.FT_COLUMN_PAGINATION;
203 |     }
204 | });
205 |
-------------------------------------------------------------------------------- /lib/http.js: --------------------------------------------------------------------------------
1 | var TableStore = require('./core');
2 | var inherit = TableStore.util.inherit;
3 |
4 | TableStore.Endpoint = inherit({
5 |
6 |     constructor: function Endpoint(endpoint) {
7 |         TableStore.util.hideProperties(this, ['slashes', 'auth', 'hash', 'search', 'query']);
8 |
9 |         if (typeof endpoint === 'undefined' || endpoint === null) {
10 |             throw new Error('Invalid endpoint: ' + endpoint);
11 |         }
12 |
13 |         if (!endpoint.match(/^http/)) {
14 |             throw new Error('Invalid endpoint format: the endpoint must start with http or https');
15 |         }
16 |
17 |         TableStore.util.update(this, TableStore.util.urlParse(endpoint));
18 |
19 |         // Ensure the port property is set as an integer
20 |         if (this.port) {
21 |             this.port = parseInt(this.port, 10);
22 |         } else {
23 |             this.port = this.protocol === 'https:' ?
443 : 80; 24 | } 25 | } 26 | 27 | }); 28 | 29 | TableStore.HttpRequest = inherit({ 30 | 31 | constructor: function HttpRequest(endpoint, region) { 32 | this.method = 'POST'; 33 | this.path = endpoint.path || '/'; 34 | this.headers = {}; 35 | this.body = ''; 36 | this.endpoint = endpoint; 37 | this.region = region; 38 | }, 39 | 40 | pathname: function pathname() { 41 | return this.path.split('?', 1)[0]; 42 | }, 43 | 44 | search: function search() { 45 | return this.path.split('?', 2)[1] || ''; 46 | }, 47 | 48 | debug: function () { 49 | if(process.env.DEBUG == 'aliyun') { 50 | console.log('-------- HttpRequest Start: --------'); 51 | console.log('method:', this.method); 52 | console.log('path:', this.path); 53 | console.log('headers:'); 54 | for(var i in this.headers) { 55 | if (i == 'constructor') 56 | continue; 57 | console.log(i, ':', this.headers[i]); 58 | }; 59 | } 60 | } 61 | }); 62 | 63 | TableStore.HttpResponse = inherit({ 64 | 65 | constructor: function HttpResponse() { 66 | this.statusCode = undefined; 67 | this.headers = {}; 68 | this.body = undefined; 69 | } 70 | }); 71 | 72 | 73 | TableStore.HttpClient = inherit({}); 74 | 75 | TableStore.HttpClient.getInstance = function getInstance() { 76 | /*jshint newcap:false */ 77 | if (this.singleton === undefined) { 78 | this.singleton = new this(); 79 | } 80 | return this.singleton; 81 | }; 82 | -------------------------------------------------------------------------------- /lib/http/node.js: -------------------------------------------------------------------------------- 1 | var TableStore = require('../core'); 2 | var Stream = require('stream').Stream; 3 | var WritableStream = require('stream').Writable; 4 | var ReadableStream = require('stream').Readable; 5 | require('../http'); 6 | 7 | /** 8 | * @api private 9 | */ 10 | TableStore.NodeHttpClient = TableStore.util.inherit({ 11 | handleRequest: function handleRequest(httpRequest, httpOptions, callback, errCallback) { 12 | var endpoint = httpRequest.endpoint; 13 | var pathPrefix = ''; 14 | if (!httpOptions) httpOptions = {}; 15 | 16 | var useSSL = endpoint.protocol === 'https:'; 17 | var http = useSSL ? require('https') : require('http'); 18 | if (httpOptions.maxSockets) { 19 | http.globalAgent.maxSockets = httpOptions.maxSockets; 20 | } else { 21 | http.globalAgent.maxSockets = 300; 22 | } 23 | var options = { 24 | host: endpoint.hostname, 25 | port: endpoint.port, 26 | method: httpRequest.method, 27 | headers: httpRequest.headers, 28 | path: pathPrefix + httpRequest.path 29 | }; 30 | 31 | if (useSSL && !httpOptions.agent) { 32 | options.agent = this.sslAgent(); 33 | } 34 | 35 | TableStore.util.update(options, httpOptions); 36 | delete options.proxy; // proxy isn't an HTTP option 37 | delete options.timeout; // timeout isn't an HTTP option 38 | delete options.maxSockets; // maxSockets isn't an HTTP option 39 | 40 | var stream = http.request(options, function (httpResp) { 41 | callback(httpResp); 42 | httpResp.emit('headers', httpResp.statusCode, httpResp.headers); 43 | }); 44 | httpRequest.stream = stream; // attach stream to httpRequest 45 | 46 | // timeout support 47 | stream.setTimeout(httpOptions.timeout || 0); 48 | stream.once('timeout', function () { 49 | var msg = 'Connection timed out after ' + httpOptions.timeout + 'ms'; 50 | errCallback(TableStore.util.error(new Error(msg), { code: 'TimeoutError' })); 51 | 52 | // HACK - abort the connection without tripping our error handler 53 | // since we already raised our TimeoutError. 
Otherwise the connection 54 | // comes back with ECONNRESET, which is not a helpful error message 55 | stream.removeListener('error', errCallback); 56 | stream.on('error', function () { }); 57 | stream.abort(); 58 | }); 59 | 60 | stream.on('error', errCallback); 61 | this.writeBody(stream, httpRequest); 62 | return stream; 63 | }, 64 | 65 | writeBody: function writeBody(stream, httpRequest) { 66 | var body = httpRequest.body; 67 | 68 | if (body && WritableStream && ReadableStream) { // progress support 69 | if (!(body instanceof Stream)) body = this.bufferToStream(body); 70 | body.pipe(this.progressStream(stream, httpRequest)); 71 | } 72 | 73 | if (body instanceof Stream) { 74 | body.pipe(stream); 75 | } else if (body) { 76 | stream.end(body); 77 | } else { 78 | stream.end(); 79 | } 80 | }, 81 | 82 | sslAgent: function sslAgent() { 83 | var https = require('https'); 84 | 85 | if (!TableStore.NodeHttpClient.sslAgent) { 86 | TableStore.NodeHttpClient.sslAgent = new https.Agent({ rejectUnauthorized: true }); 87 | TableStore.NodeHttpClient.sslAgent.setMaxListeners(0); 88 | 89 | // delegate maxSockets to globalAgent 90 | Object.defineProperty(TableStore.NodeHttpClient.sslAgent, 'maxSockets', { 91 | enumerable: true, 92 | get: function () { return https.globalAgent.maxSockets; } 93 | }); 94 | } 95 | return TableStore.NodeHttpClient.sslAgent; 96 | }, 97 | 98 | progressStream: function progressStream(stream, httpRequest) { 99 | var numBytes = 0; 100 | var totalBytes = httpRequest.headers['Content-Length']; 101 | var writer = new WritableStream(); 102 | writer._write = function (chunk, encoding, callback) { 103 | if (chunk) { 104 | numBytes += chunk.length; 105 | stream.emit('sendProgress', { 106 | loaded: numBytes, total: totalBytes 107 | }); 108 | } 109 | callback(); 110 | }; 111 | return writer; 112 | }, 113 | 114 | bufferToStream: function bufferToStream(buffer) { 115 | if (!TableStore.util.Buffer.isBuffer(buffer)) buffer = new TableStore.util.Buffer(buffer); 116 | 117 | var readable = new ReadableStream(); 118 | var pos = 0; 119 | readable._read = function (size) { 120 | if (pos >= buffer.length) return readable.push(null); 121 | 122 | var end = pos + size; 123 | if (end > buffer.length) end = buffer.length; 124 | readable.push(buffer.slice(pos, end)); 125 | pos = end; 126 | }; 127 | 128 | return readable; 129 | }, 130 | 131 | emitter: null 132 | }); 133 | 134 | /** 135 | * @!ignore 136 | */ 137 | 138 | /** 139 | * @api private 140 | */ 141 | TableStore.HttpClient.prototype = TableStore.NodeHttpClient.prototype; 142 | 143 | /** 144 | * @api private 145 | */ 146 | TableStore.HttpClient.streamsApiVersion = ReadableStream ? 
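// use the streams2+ API when stream.Readable is available, otherwise fall back to the legacy streams1 API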
2 : 1; 147 | -------------------------------------------------------------------------------- /lib/http/xhr.js: -------------------------------------------------------------------------------- 1 | var TableStore = require('../core'); 2 | var EventEmitter = require('events').EventEmitter; 3 | require('../http'); 4 | 5 | /** 6 | * @api private 7 | */ 8 | TableStore.XHRClient = TableStore.util.inherit({ 9 | handleRequest: function handleRequest(httpRequest, httpOptions, callback, errCallback) { 10 | var self = this; 11 | var endpoint = httpRequest.endpoint; 12 | var emitter = new EventEmitter(); 13 | var href = endpoint.protocol + '//' + endpoint.hostname; 14 | if (endpoint.port != 80 && endpoint.port != 443) { 15 | href += ':' + endpoint.port; 16 | } 17 | href += httpRequest.path; 18 | 19 | var xhr = new XMLHttpRequest(); 20 | httpRequest.stream = xhr; 21 | 22 | if (httpOptions.timeout) { 23 | xhr.timeout = httpOptions.timeout; 24 | } 25 | 26 | xhr.addEventListener('readystatechange', function() { 27 | try { 28 | if (xhr.status === 0) return; // 0 code is invalid 29 | } 30 | catch (e) { return; } 31 | 32 | if (this.readyState === this.HEADERS_RECEIVED) { 33 | try { xhr.responseType = 'arraybuffer'; } catch (e) {} 34 | emitter.statusCode = xhr.status; 35 | emitter.headers = self.parseHeaders(xhr.getAllResponseHeaders()); 36 | emitter.emit('headers', emitter.statusCode, emitter.headers); 37 | } else if (this.readyState === this.DONE) { 38 | self.finishRequest(xhr, emitter); 39 | } 40 | }, false); 41 | xhr.upload.addEventListener('progress', function (evt) { 42 | emitter.emit('sendProgress', evt); 43 | }); 44 | xhr.addEventListener('progress', function (evt) { 45 | emitter.emit('receiveProgress', evt); 46 | }, false); 47 | xhr.addEventListener('timeout', function () { 48 | errCallback(TableStore.util.error(new Error('Timeout'), {code: 'TimeoutError'})); 49 | }, false); 50 | xhr.addEventListener('error', function () { 51 | errCallback(TableStore.util.error(new Error('Network Failure'), { 52 | code: 'NetworkingError' 53 | })); 54 | }, false); 55 | 56 | callback(emitter); 57 | xhr.open(httpRequest.method, href, true); 58 | TableStore.util.each(httpRequest.headers, function (key, value) { 59 | if (key !== 'Content-Length' && key !== 'User-Agent' && key !== 'Host' && key !== 'Date') { 60 | xhr.setRequestHeader(key, value); 61 | } 62 | }); 63 | 64 | if (httpRequest.body && typeof httpRequest.body.buffer === 'object') { 65 | xhr.send(httpRequest.body.buffer); // typed arrays sent as ArrayBuffer 66 | } else { 67 | xhr.send(httpRequest.body); 68 | } 69 | 70 | return emitter; 71 | }, 72 | 73 | parseHeaders: function parseHeaders(rawHeaders) { 74 | var headers = {}; 75 | TableStore.util.arrayEach(rawHeaders.split(/\r?\n/), function (line) { 76 | var key = line.split(':', 1)[0]; 77 | var value = line.substring(key.length + 2); 78 | if (key.length > 0) headers[key] = value; 79 | }); 80 | return headers; 81 | }, 82 | 83 | finishRequest: function finishRequest(xhr, emitter) { 84 | var buffer; 85 | if (xhr.responseType === 'arraybuffer' && xhr.response) { 86 | var ab = xhr.response; 87 | buffer = new TableStore.util.Buffer(ab.byteLength); 88 | var view = new Uint8Array(ab); 89 | for (var i = 0; i < buffer.length; ++i) { 90 | buffer[i] = view[i]; 91 | } 92 | } 93 | 94 | try { 95 | if (!buffer && typeof xhr.responseText === 'string') { 96 | buffer = new TableStore.util.Buffer(xhr.responseText); 97 | } 98 | } catch (e) {} 99 | 100 | if (buffer) emitter.emit('data', buffer); 101 | emitter.emit('end'); 102 | } 103 
| });
104 |
105 | /**
106 | * @api private
107 | */
108 | TableStore.HttpClient.prototype = TableStore.XHRClient.prototype;
109 |
110 | /**
111 | * @api private
112 | */
113 | TableStore.HttpClient.streamsApiVersion = 1;
114 |
-------------------------------------------------------------------------------- /lib/long.js: --------------------------------------------------------------------------------
1 | var TableStore = require('./core');
2 | var Int64buf = require("int64-buffer");
3 |
4 | TableStore.Long = {
5 | fromNumber: function (num) {
6 | this.int64 = new Int64buf.Int64LE(num);
7 | return this.int64;
8 | },
9 | fromString: function (str) {
10 | this.int64 = new Int64buf.Int64LE(str, 10);
11 | return this.int64;
12 | },
13 | toBuffer: function () {
14 | return this.int64.toBuffer();
15 | },
16 | toNumber: function () {
17 | return this.int64.toNumber();
18 | }
19 | };
20 |
21 |
22 |
-------------------------------------------------------------------------------- /lib/metadata.js: --------------------------------------------------------------------------------
1 | var TableStore = require('./core');
2 | var inherit = TableStore.util.inherit;
3 |
4 | TableStore.RowExistenceExpectation = {
5 | IGNORE: "IGNORE",
6 | EXPECT_EXIST: "EXPECT_EXIST",
7 | EXPECT_NOT_EXIST: "EXPECT_NOT_EXIST"
8 | };
9 |
10 | TableStore.Direction = {
11 | FORWARD: "FORWARD",
12 | BACKWARD: "BACKWARD"
13 | };
14 |
15 | TableStore.UpdateType = {
16 | PUT: "PUT",
17 | DELETE: "DELETE",
18 | DELETE_ALL: "DELETE_ALL",
19 | INCREMENT: "INCREMENT"
20 | };
21 |
22 | TableStore.BatchWriteType = {
23 | PUT: 1,
24 | UPDATE: 2,
25 | DELETE: 3
26 | };
27 |
28 | TableStore.ReturnType = {
29 | NONE: 0,
30 | Primarykey: 1,
31 | AfterModify: 2
32 | };
33 |
34 | TableStore.DefinedColumnType = {
35 | DCT_INTEGER: 1,
36 | DCT_DOUBLE: 2,
37 | DCT_BOOLEAN: 3,
38 | DCT_STRING: 4
39 | };
40 |
41 | TableStore.PrimaryKeyType = {
42 | INTEGER: 1,
43 | STRING: 2,
44 | BINARY: 3
45 | };
46 |
47 | TableStore.PrimaryKeyOption = {
48 | AUTO_INCREMENT: 1
49 | };
50 |
51 | TableStore.IndexUpdateMode = {
52 | IUM_ASYNC_INDEX: 0,
53 | IUM_SYNC_INDEX: 1
54 | };
55 |
56 | TableStore.IndexType = {
57 | IT_GLOBAL_INDEX: 0,
58 | IT_LOCAL_INDEX: 1
59 | };
60 |
61 | TableStore.INF_MIN = {};
62 |
63 | TableStore.INF_MAX = {};
64 |
65 | TableStore.PK_AUTO_INCR = {};
66 |
-------------------------------------------------------------------------------- /lib/protocol/build_proto.sh: --------------------------------------------------------------------------------
1 | # v6.9.0
2 | pbjs -t static-module tablestore.proto tablestore_filter.proto tablestore_search.proto -w commonjs -o tablestore_compiled_proto.js
3 | # flatc --version : 1.11.0
4 | flatc --js sql.fbs
-------------------------------------------------------------------------------- /lib/protocol/plain_buffer_consts.js: --------------------------------------------------------------------------------
1 | var TableStore=require('../core');
2 |
3 | TableStore.plainBufferConsts = {
4 | HEADER: 0x75,
5 |
6 | // tag type
7 | TAG_ROW_PK: 0x1,
8 | TAG_ROW_DATA: 0x2,
9 | TAG_CELL: 0x3,
10 | TAG_CELL_NAME: 0x4,
11 | TAG_CELL_VALUE: 0x5,
12 | TAG_CELL_TYPE: 0x6,
13 | TAG_CELL_TIMESTAMP: 0x7,
14 | TAG_DELETE_ROW_MARKER: 0x8,
15 | TAG_ROW_CHECKSUM: 0x9,
16 | TAG_CELL_CHECKSUM: 0x0A,
17 | TAG_EXTENSION: 0x0B,
18 | TAG_SEQ_INFO: 0x0C,
19 | TAG_SEQ_INFO_EPOCH: 0x0D,
20 | TAG_SEQ_INFO_TS: 0x0E,
21 | TAG_SEQ_INFO_ROW_INDEX: 0x0F,
22 |
23 | // cell op type
24 | DELETE_ALL_VERSION: 0x1,
25 | DELETE_ONE_VERSION: 0x3,
26 |
INCREMENT: 0x4, 27 | 28 | // variant type 29 | VT_INTEGER: 0x0, 30 | VT_DOUBLE: 0x1, 31 | VT_BOOLEAN: 0x2, 32 | VT_STRING: 0x3, 33 | VT_NULL: 0x6, 34 | VT_BLOB: 0x7, 35 | VT_INF_MIN: 0x9, 36 | VT_INF_MAX: 0xa, 37 | VT_AUTO_INCREMENT: 0xb, 38 | 39 | // othber 40 | LITTLE_ENDIAN_32_SIZE: 4, 41 | LITTLE_ENDIAN_64_SIZE: 8, 42 | MAX_BUFFER_SIZE: 64 * 1024 * 1024 43 | 44 | }; 45 | -------------------------------------------------------------------------------- /lib/protocol/plain_buffer_crc8.js: -------------------------------------------------------------------------------- 1 | var TableStore = require('../core'); 2 | var Buffer = TableStore.util.Buffer; 3 | 4 | var CRC8_TABLE = 5 | [0x00, 0x07, 0x0e, 0x09, 0x1c, 0x1b, 0x12, 0x15, 6 | 0x38, 0x3f, 0x36, 0x31, 0x24, 0x23, 0x2a, 0x2d, 7 | 0x70, 0x77, 0x7e, 0x79, 0x6c, 0x6b, 0x62, 0x65, 8 | 0x48, 0x4f, 0x46, 0x41, 0x54, 0x53, 0x5a, 0x5d, 9 | 0xe0, 0xe7, 0xee, 0xe9, 0xfc, 0xfb, 0xf2, 0xf5, 10 | 0xd8, 0xdf, 0xd6, 0xd1, 0xc4, 0xc3, 0xca, 0xcd, 11 | 0x90, 0x97, 0x9e, 0x99, 0x8c, 0x8b, 0x82, 0x85, 12 | 0xa8, 0xaf, 0xa6, 0xa1, 0xb4, 0xb3, 0xba, 0xbd, 13 | 0xc7, 0xc0, 0xc9, 0xce, 0xdb, 0xdc, 0xd5, 0xd2, 14 | 0xff, 0xf8, 0xf1, 0xf6, 0xe3, 0xe4, 0xed, 0xea, 15 | 0xb7, 0xb0, 0xb9, 0xbe, 0xab, 0xac, 0xa5, 0xa2, 16 | 0x8f, 0x88, 0x81, 0x86, 0x93, 0x94, 0x9d, 0x9a, 17 | 0x27, 0x20, 0x29, 0x2e, 0x3b, 0x3c, 0x35, 0x32, 18 | 0x1f, 0x18, 0x11, 0x16, 0x03, 0x04, 0x0d, 0x0a, 19 | 0x57, 0x50, 0x59, 0x5e, 0x4b, 0x4c, 0x45, 0x42, 20 | 0x6f, 0x68, 0x61, 0x66, 0x73, 0x74, 0x7d, 0x7a, 21 | 0x89, 0x8e, 0x87, 0x80, 0x95, 0x92, 0x9b, 0x9c, 22 | 0xb1, 0xb6, 0xbf, 0xb8, 0xad, 0xaa, 0xa3, 0xa4, 23 | 0xf9, 0xfe, 0xf7, 0xf0, 0xe5, 0xe2, 0xeb, 0xec, 24 | 0xc1, 0xc6, 0xcf, 0xc8, 0xdd, 0xda, 0xd3, 0xd4, 25 | 0x69, 0x6e, 0x67, 0x60, 0x75, 0x72, 0x7b, 0x7c, 26 | 0x51, 0x56, 0x5f, 0x58, 0x4d, 0x4a, 0x43, 0x44, 27 | 0x19, 0x1e, 0x17, 0x10, 0x05, 0x02, 0x0b, 0x0c, 28 | 0x21, 0x26, 0x2f, 0x28, 0x3d, 0x3a, 0x33, 0x34, 29 | 0x4e, 0x49, 0x40, 0x47, 0x52, 0x55, 0x5c, 0x5b, 30 | 0x76, 0x71, 0x78, 0x7f, 0x6a, 0x6d, 0x64, 0x63, 31 | 0x3e, 0x39, 0x30, 0x37, 0x22, 0x25, 0x2c, 0x2b, 32 | 0x06, 0x01, 0x08, 0x0f, 0x1a, 0x1d, 0x14, 0x13, 33 | 0xae, 0xa9, 0xa0, 0xa7, 0xb2, 0xb5, 0xbc, 0xbb, 34 | 0x96, 0x91, 0x98, 0x9f, 0x8a, 0x8d, 0x84, 0x83, 35 | 0xde, 0xd9, 0xd0, 0xd7, 0xc2, 0xc5, 0xcc, 0xcb, 36 | 0xe6, 0xe1, 0xe8, 0xef, 0xfa, 0xfd, 0xf4, 0xf3]; 37 | 38 | 39 | TableStore.plainBufferCrc8 = { 40 | 41 | update: function (crc, bytes_) { 42 | return TableStore.plainBufferCrc8._update(crc, bytes_); 43 | }, 44 | _update: function (crc, bytes_) { 45 | var bytesBuffer = new Buffer(bytes_); 46 | for (var i = 0; i < bytesBuffer.length; i++) { 47 | crc = CRC8_TABLE[((crc & 0xff) ^ bytesBuffer[i])]; 48 | } 49 | return crc 50 | }, 51 | 52 | crcString: function (crc, bytes_) { 53 | return this.update(crc, bytes_); 54 | }, 55 | 56 | crcInt8: function (crc, byte_) { 57 | return CRC8_TABLE[((crc & 0xff) ^ byte_)]; 58 | }, 59 | 60 | crcInt32: function (crc, byte_) { 61 | for (var i = 0; i < 4; i++) { 62 | crc = this.crcInt8(crc, (byte_ >> (i * 8)) & 0xff); 63 | } 64 | return crc; 65 | }, 66 | 67 | crcInt64Buf: function (crc, bufs) { 68 | crc = this.crcInt8(crc, bufs[0] & 0xFF); 69 | crc = this.crcInt8(crc, bufs[1] & 0xFF); 70 | crc = this.crcInt8(crc, bufs[2] & 0xFF); 71 | crc = this.crcInt8(crc, bufs[3] & 0xFF); 72 | crc = this.crcInt8(crc, bufs[4] & 0xFF); 73 | crc = this.crcInt8(crc, bufs[5] & 0xFF); 74 | crc = this.crcInt8(crc, bufs[6] & 0xFF); 75 | crc = this.crcInt8(crc, bufs[7] & 0xFF); 76 | 77 | return crc; 78 | 
} 79 | }; -------------------------------------------------------------------------------- /lib/protocol/plain_buffer_stream.js: -------------------------------------------------------------------------------- 1 | var TableStore = require('../core'); 2 | var int64buffer = require("int64-buffer"); 3 | var Buffer = TableStore.util.Buffer; 4 | var inherit = TableStore.util.inherit; 5 | 6 | 7 | TableStore.PlainBufferInputStream = inherit({ 8 | constructor: function (dataBuffer) { 9 | if (!TableStore.util.Buffer.isBuffer(dataBuffer.buffer)) { 10 | this.buffer = new TableStore.util.Buffer(dataBuffer.buffer); 11 | } else { 12 | this.buffer = dataBuffer.buffer; 13 | } 14 | this.bufferLimit = dataBuffer.limit; 15 | this.curPos = dataBuffer.offset; 16 | this.lastTag = 0; 17 | }, 18 | 19 | isAtEnd: function () { 20 | return this.bufferLimit === this.curPos; 21 | }, 22 | 23 | readTag: function () { 24 | if (this.isAtEnd()) { 25 | this.lastTag = 0; 26 | return 0; 27 | } 28 | 29 | this.lastTag = this.readRawByte(); 30 | return this.lastTag; 31 | }, 32 | 33 | checkLastTagWas: function (tag) { 34 | return this.lastTag === tag; 35 | }, 36 | 37 | getLastTag: function () { 38 | return this.lastTag; 39 | }, 40 | 41 | readRawByte: function () { 42 | if (this.isAtEnd()) { 43 | throw new Error('Read raw byte encountered EOF.'); 44 | } 45 | var pos = this.curPos; 46 | this.curPos += 1; 47 | return this.buffer[pos]; 48 | }, 49 | 50 | readRawLittleEndian64: function () { 51 | var b1 = this.readRawByte(); 52 | var b2 = this.readRawByte(); 53 | var b3 = this.readRawByte(); 54 | var b4 = this.readRawByte(); 55 | var b5 = this.readRawByte(); 56 | var b6 = this.readRawByte(); 57 | var b7 = this.readRawByte(); 58 | var b8 = this.readRawByte(); 59 | 60 | var buf = new Buffer([b1, b2, b3, b4, b5, b6, b7, b8]); 61 | return buf; 62 | }, 63 | 64 | readRawLittleEndian32: function () { 65 | var b1 = this.readRawByte(); 66 | var b2 = this.readRawByte(); 67 | var b3 = this.readRawByte(); 68 | var b4 = this.readRawByte(); 69 | return (((b1) & 0xff)) | (((b2) & 0xff) << 8) | (((b3) & 0xff) << 16) | (((b4) & 0xff) << 24); 70 | }, 71 | 72 | readBoolean: function () { 73 | return this.readRawByte() != 0; 74 | }, 75 | 76 | readDoubleAndInt64: function () { 77 | var buf = this.readRawLittleEndian64(); 78 | var doubleVal = buf.readDoubleLE(0); 79 | var int64LE = new int64buffer.Int64LE(buf); 80 | 81 | return { doubleVal: doubleVal, int64LE: int64LE }; 82 | }, 83 | 84 | readInt32: function () { 85 | return this.readRawLittleEndian32(); 86 | }, 87 | 88 | readInt64: function () { 89 | var buf = this.readRawLittleEndian64(); 90 | //https://www.npmjs.com/package/int64-buffer 91 | var int64LE = new int64buffer.Int64LE(buf); 92 | return int64LE; 93 | }, 94 | 95 | readBytes: function (size) { 96 | if (this.buffer.length - this.curPos < size) { 97 | throw new Error('Read bytes encountered EOF.'); 98 | } 99 | 100 | var start = this.curPos; 101 | this.curPos += size; 102 | var rtBuffer = new Buffer(size); 103 | this.buffer.copy(rtBuffer, 0, start, this.curPos); 104 | return rtBuffer; 105 | }, 106 | 107 | readUtfString: function (size) { 108 | if (this.buffer.length - this.curPos < size) { 109 | throw new Error('Read UTF string encountered EOF.'); 110 | } 111 | var utf_str = this.buffer.toString('utf8', this.curPos, this.curPos + size); 112 | this.curPos += size; 113 | return utf_str; 114 | } 115 | 116 | }); 117 | 118 | TableStore.PlainBufferOutputStream = inherit({ 119 | constructor: function (capacity) { 120 | this.buffer = new Buffer(capacity); 
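// note: the Buffer constructor is deprecated in modern Node.js; Buffer.alloc(capacity) would allocate an already zero-filled buffer, which the explicit fill(0) below ensures here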
121 | this.buffer.fill(0); 122 | this.capacity = capacity; 123 | this.pos = 0; 124 | }, 125 | 126 | getBuffer: function () { 127 | return this.buffer; 128 | }, 129 | 130 | isFull: function () { 131 | return this.pos === this.capacity; 132 | }, 133 | 134 | count: function () { 135 | return this.pos; 136 | }, 137 | 138 | remain: function () { 139 | return this.capacity - this.pos; 140 | }, 141 | 142 | clear: function () { 143 | this.pos = 0; 144 | this.buffer = []; 145 | }, 146 | 147 | writeRawByte: function (value) { 148 | if (this.isFull()) { 149 | throw new Error('The buffer is full'); 150 | } 151 | this.buffer[this.pos++] = value; 152 | }, 153 | 154 | writeRawLittleEndian32: function (value) { 155 | this.writeRawByte((value) & 0xFF); 156 | this.writeRawByte((value >> 8) & 0xFF); 157 | this.writeRawByte((value >> 16) & 0xFF); 158 | this.writeRawByte((value >> 24) & 0xFF); 159 | }, 160 | 161 | writeRawLittleEndian64: function (buf) { 162 | for (var i = 0; i < buf.length; i++) { 163 | var number = buf[i] > 127 ? buf[i] - 256 : buf[i]; 164 | this.writeRawByte(number & 0xFF); 165 | } 166 | }, 167 | 168 | writeInt64LE: function (int64LE) { 169 | var buf = int64LE.toBuffer(); 170 | this.writeRawLittleEndian64(buf); 171 | }, 172 | 173 | writeDouble: function (value) { 174 | var buf = TableStore.util.Int64.doubleToRawLongBits(value); 175 | this.writeRawLittleEndian64(buf.toBuffer()); 176 | }, 177 | 178 | writeBoolean: function (value) { 179 | if (value) { 180 | this.writeRawByte(1); 181 | } else { 182 | this.writeRawByte(0); 183 | } 184 | }, 185 | 186 | writeBytes: function (value) { 187 | var bytes = null; 188 | if (value instanceof Buffer) { 189 | bytes = value; 190 | } else if (typeof (value) === 'string') { 191 | bytes = new Buffer(value); 192 | } 193 | 194 | if (this.pos + bytes.length > this.capacity) { 195 | throw Error('The buffer is full.'); 196 | } 197 | 198 | if (value instanceof Buffer) { 199 | value.copy(this.buffer, this.pos); 200 | } else if (typeof (value) === 'string') { 201 | this.buffer.write(value, this.pos); 202 | } else { 203 | throw new Error('expect Buffer or string,but it was:' + typeof (value)); 204 | } 205 | 206 | this.pos += bytes.length; 207 | } 208 | }); 209 | 210 | -------------------------------------------------------------------------------- /lib/protocol/plian_buffer_builder.js: -------------------------------------------------------------------------------- 1 | var TableStore = require('../core'); 2 | var Int64buf = require("int64-buffer"); 3 | 4 | TableStore.PlainBufferBuilder = { 5 | computePrimaryKeyValueSize: function (value) { 6 | var size = 1; // # TAG_CELL_VALUE 7 | size += TableStore.plainBufferConsts.LITTLE_ENDIAN_32_SIZE + 1; // length + type 8 | 9 | if ((value === TableStore.INF_MIN) || (value === TableStore.INF_MAX) || (value === TableStore.PK_AUTO_INCR)) { 10 | size += 1; 11 | return size; 12 | } 13 | if (value instanceof Int64buf.Int64LE) { 14 | size += 8; 15 | } else if (typeof (value) === 'string') { 16 | size += TableStore.plainBufferConsts.LITTLE_ENDIAN_32_SIZE; 17 | size += TableStore.util.string.byteLength(value); 18 | } else if (value instanceof Buffer) { 19 | size += TableStore.plainBufferConsts.LITTLE_ENDIAN_32_SIZE; 20 | size += value.length; 21 | } else { 22 | throw new Error("Unsupported primary key type:" + typeof (value)); 23 | } 24 | return size; 25 | }, 26 | 27 | computeVariantValueSize: function (value) { 28 | return this.computePrimaryKeyValueSize(value) - TableStore.plainBufferConsts.LITTLE_ENDIAN_32_SIZE - 1; 29 | }, 30 | 31 | 
computePrimaryKeyColumnSize: function (pk_name, pk_value) {
32 | var size = 1;
33 | size += 1 + TableStore.plainBufferConsts.LITTLE_ENDIAN_32_SIZE;
34 | size += pk_name.length;
35 | size += this.computePrimaryKeyValueSize(pk_value);
36 | size += 2;
37 | return size;
38 | },
39 |
40 | computeColumnValueSize: function (value) {
41 | var size = 1;
42 | size += TableStore.plainBufferConsts.LITTLE_ENDIAN_32_SIZE + 1;
43 |
44 | if (typeof (value) === 'number') {
45 | size += TableStore.plainBufferConsts.LITTLE_ENDIAN_64_SIZE;
46 | } else if (typeof (value) === 'string') {
47 | size += TableStore.plainBufferConsts.LITTLE_ENDIAN_32_SIZE;
48 | size += TableStore.util.string.byteLength(value);
49 | } else if (value instanceof Buffer) {
50 | size += TableStore.plainBufferConsts.LITTLE_ENDIAN_32_SIZE;
51 | size += value.length;
52 | } else if (typeof (value) === 'boolean') {
53 | size += 1;
54 | } else if (value instanceof Int64buf.Int64LE) {
55 | size += TableStore.plainBufferConsts.LITTLE_ENDIAN_64_SIZE;
56 | } else {
57 | throw new Error("Unsupported column type: " + typeof (value));
58 | }
59 | return size;
60 | },
61 |
62 | computeVariantValueSize: function (columnValue) {
63 | return this.computeColumnValueSize(columnValue) - TableStore.plainBufferConsts.LITTLE_ENDIAN_32_SIZE - 1;
64 | },
65 |
66 | computeColumnSize: function (columnName, columnValue, timestamp) {
67 | var size = 1;
68 | size += 1 + TableStore.plainBufferConsts.LITTLE_ENDIAN_32_SIZE;
69 | size += TableStore.util.string.byteLength(columnName);
70 | if (columnValue !== null) {
71 | size += this.computeColumnValueSize(columnValue);
72 | }
73 | if (timestamp !== undefined) {
74 | size += 1 + TableStore.plainBufferConsts.LITTLE_ENDIAN_64_SIZE;
75 | }
76 | size += 2;
77 | return size;
78 | },
79 |
80 | computeUpdateColumnSize: function (columnName, columnValue, updateType) {
81 | var size = this.computeColumnSize(columnName, columnValue);
82 | if (updateType === TableStore.UpdateType.DELETE
83 | || updateType === TableStore.UpdateType.DELETE_ALL
84 | || updateType === TableStore.UpdateType.INCREMENT
85 | ) {
86 | size += 2;
87 | }
88 | return size;
89 | },
90 |
91 | computePrimaryKeySize: function (primaryKeys) {
92 | var size = 1;
93 | for (var i = 0; i < primaryKeys.length; i++) {
94 | for (var key in primaryKeys[i]) {
95 | size += this.computePrimaryKeyColumnSize(key, primaryKeys[i][key]);
96 | }
97 | }
98 | return size;
99 | },
100 |
101 | computePutRowSize: function (primaryKey, attributeColumns) {
102 | var size = TableStore.plainBufferConsts.LITTLE_ENDIAN_32_SIZE;
103 | size += this.computePrimaryKeySize(primaryKey);
104 | if (attributeColumns && attributeColumns.length != 0) {
105 | size += 1;
106 | for (var i = 0; i < attributeColumns.length; i++) {
107 | if (attributeColumns[i].timestamp === undefined) {
108 | for (var k in attributeColumns[i]) {
109 | size += this.computeColumnSize(k, attributeColumns[i][k]);
110 | }
111 | } else {
112 | for (var k in attributeColumns[i]) {
113 | size += this.computeColumnSize(k, attributeColumns[i][k], attributeColumns[i].timestamp);
114 | break; // note: this break must not be removed
115 | }
116 | }
117 | }
118 | }
119 | size += 2;
120 | return size;
121 | },
122 |
123 | computeUpdateRowSize: function (primaryKey, attributeColumns) {
124 | var size = TableStore.plainBufferConsts.LITTLE_ENDIAN_32_SIZE;
125 | size += this.computePrimaryKeySize(primaryKey);
126 |
127 | if (attributeColumns.length != 0) {
128 | size += 1;
129 |
130 | for (var i = 0; i < attributeColumns.length; i++) {
131 | for (var updateType in
attributeColumns[i]) { 132 | var columns = attributeColumns[i][updateType]; 133 | if (!columns instanceof Array) { 134 | throw new Error("Unsupported column type:" + typeof (columns)); 135 | } 136 | for (var obj in columns) { 137 | if (updateType === TableStore.UpdateType.DELETE_ALL) { 138 | size += this.computeUpdateColumnSize(columns[obj], null, updateType); 139 | } else if ( 140 | updateType === TableStore.UpdateType.PUT 141 | || updateType === TableStore.UpdateType.DELETE 142 | || updateType === TableStore.UpdateType.INCREMENT 143 | ) { 144 | for (var o in columns[obj]) { 145 | size += this.computeUpdateColumnSize(o, columns[obj][o], updateType); 146 | } 147 | } else { 148 | throw new Error('Expect TableStore.UpdateType but it was:' + updateType); 149 | } 150 | } 151 | break; 152 | } 153 | } 154 | } 155 | size += 2; 156 | return size; 157 | }, 158 | 159 | computeDeleteRowSize: function (primaryKey) { 160 | size = TableStore.plainBufferConsts.LITTLE_ENDIAN_32_SIZE 161 | size += this.computePrimaryKeySize(primaryKey) 162 | size += 3 163 | return size 164 | }, 165 | 166 | serializePrimaryKeyValue: function (value) { 167 | var bufSize = this.computeVariantValueSize(value); 168 | var stream = new TableStore.PlainBufferOutputStream(bufSize); 169 | var coded_stream = new TableStore.PlainBufferCodedOutputStream(stream); 170 | 171 | coded_stream.writePrimaryKeyValue(value); 172 | return stream.getBuffer(); 173 | }, 174 | 175 | serializeColumnValue: function (value) { 176 | var bufSize = this.computeVariantValueSize(value); 177 | var stream = new TableStore.PlainBufferOutputStream(bufSize); 178 | var coded_stream = new TableStore.PlainBufferCodedOutputStream(stream); 179 | 180 | coded_stream.writeColumnValue(value); 181 | return stream.getBuffer(); 182 | }, 183 | 184 | serializePrimaryKey: function (primaryKey) { 185 | var bufSize = TableStore.plainBufferConsts.LITTLE_ENDIAN_32_SIZE; 186 | bufSize += this.computePrimaryKeySize(primaryKey); 187 | bufSize += 2; 188 | 189 | var outputStream = new TableStore.PlainBufferOutputStream(bufSize); 190 | var codedOutputStream = new TableStore.PlainBufferCodedOutputStream(outputStream); 191 | 192 | var rowCheckSum = 0; 193 | codedOutputStream.writeHeader(); 194 | 195 | rowCheckSum = codedOutputStream.writePrimaryKey(primaryKey, rowCheckSum); 196 | rowCheckSum = TableStore.plainBufferCrc8.crcInt8(rowCheckSum, 0); 197 | codedOutputStream.writeRowChecksum(rowCheckSum); 198 | return outputStream.getBuffer(); 199 | }, 200 | 201 | serializeForPutRow: function (primaryKey, attributeColumns) { 202 | var bufSize = this.computePutRowSize(primaryKey, attributeColumns); 203 | var outputStream = new TableStore.PlainBufferOutputStream(bufSize); 204 | var codedOutputStream = new TableStore.PlainBufferCodedOutputStream(outputStream); 205 | 206 | var rowCheckSum = 0; 207 | codedOutputStream.writeHeader(); 208 | rowCheckSum = codedOutputStream.writePrimaryKey(primaryKey, rowCheckSum); 209 | rowCheckSum = codedOutputStream.writeColumns(attributeColumns, rowCheckSum); 210 | rowCheckSum = TableStore.plainBufferCrc8.crcInt8(rowCheckSum, 0); 211 | codedOutputStream.writeRowChecksum(rowCheckSum); 212 | return outputStream.getBuffer(); 213 | }, 214 | 215 | serializeForUpdateRow: function (primaryKey, attributeColumns) { 216 | var bufSize = this.computeUpdateRowSize(primaryKey, attributeColumns); 217 | var outputStream = new TableStore.PlainBufferOutputStream(bufSize); 218 | var codedOutputStream = new TableStore.PlainBufferCodedOutputStream(outputStream); 219 | 220 | var 
rowCheckSum = 0; 221 | codedOutputStream.writeHeader(); 222 | rowCheckSum = codedOutputStream.writePrimaryKey(primaryKey, rowCheckSum); 223 | rowCheckSum = codedOutputStream.writeUpdateColumns(attributeColumns, rowCheckSum); 224 | rowCheckSum = TableStore.plainBufferCrc8.crcInt8(rowCheckSum, 0); 225 | codedOutputStream.writeRowChecksum(rowCheckSum); 226 | return outputStream.getBuffer(); 227 | }, 228 | 229 | serializeForDeleteRow: function (primaryKey) { 230 | var bufSize = this.computeDeleteRowSize(primaryKey); 231 | var outputStream = new TableStore.PlainBufferOutputStream(bufSize); 232 | var codedOutputStream = new TableStore.PlainBufferCodedOutputStream(outputStream); 233 | 234 | var rowCheckSum = 0; 235 | codedOutputStream.writeHeader(); 236 | rowCheckSum = codedOutputStream.writePrimaryKey(primaryKey, rowCheckSum); 237 | rowCheckSum = codedOutputStream.writeDeleteMarker(rowCheckSum); 238 | codedOutputStream.writeRowChecksum(rowCheckSum); 239 | return outputStream.getBuffer(); 240 | }, 241 | serializeSearchValue: function (value, field) { 242 | if (value === undefined || value === null) { 243 | throw new Error('Expect [' + field + '] but it was: ' + value); 244 | } 245 | var bufSize = this.computeVariantValueSize(value); 246 | var stream = new TableStore.PlainBufferOutputStream(bufSize); 247 | var coded_stream = new TableStore.PlainBufferCodedOutputStream(stream); 248 | 249 | coded_stream.writeSearchValue(value); 250 | return stream.getBuffer(); 251 | }, 252 | }; 253 | -------------------------------------------------------------------------------- /lib/protocol/sql.fbs: -------------------------------------------------------------------------------- 1 | enum DataType:byte {NONE = 0, LONG = 1, BOOLEAN = 2, DOUBLE = 3, STRING = 4, BINARY = 5, STRING_RLE = 6} 2 | 3 | table BytesValue { 4 | value: [byte]; 5 | } 6 | 7 | // rle(run-length encoding) format, [a, a, a, b, c, d, a, a] would encode as 8 | // array: [a, b, c, d, a] 9 | // index_mapping: [0, 0, 0, 1, 2, 3, 4, 4] 10 | table RLEStringValues { 11 | array: [string]; 12 | index_mapping: [int32]; 13 | } 14 | 15 | table ColumnValues { 16 | is_nullvalues: [bool]; 17 | long_values: [long]; 18 | bool_values: [bool]; 19 | double_values: [double]; 20 | string_values: [string]; 21 | binary_values: [BytesValue]; 22 | rle_string_values: RLEStringValues; 23 | } 24 | 25 | table SQLResponseColumn { 26 | column_name: string; 27 | column_type: DataType; 28 | column_value: ColumnValues; 29 | } 30 | 31 | table SQLResponseColumns{ 32 | columns: [SQLResponseColumn]; 33 | row_count: int64; 34 | } 35 | 36 | root_type SQLResponseColumns; -------------------------------------------------------------------------------- /lib/protocol/tablestore_filter.proto: -------------------------------------------------------------------------------- 1 | package filter.proto; 2 | 3 | enum FilterType { 4 | FT_SINGLE_COLUMN_VALUE = 1; 5 | FT_COMPOSITE_COLUMN_VALUE = 2; 6 | FT_COLUMN_PAGINATION = 3; 7 | } 8 | 9 | enum ComparatorType { 10 | CT_EQUAL = 1; 11 | CT_NOT_EQUAL = 2; 12 | CT_GREATER_THAN = 3; 13 | CT_GREATER_EQUAL = 4; 14 | CT_LESS_THAN = 5; 15 | CT_LESS_EQUAL = 6; 16 | } 17 | 18 | message SingleColumnValueFilter { 19 | required ComparatorType comparator = 1; 20 | required string column_name = 2; 21 | required bytes column_value = 3; // Serialized SQLVariant 22 | required bool filter_if_missing = 4; 23 | required bool latest_version_only = 5; 24 | } 25 | 26 | enum LogicalOperator { 27 | LO_NOT = 1; 28 | LO_AND = 2; 29 | LO_OR = 3; 30 | } 31 | 32 | message 
CompositeColumnValueFilter { 33 | required LogicalOperator combinator = 1; 34 | repeated Filter sub_filters = 2; 35 | } 36 | 37 | message ColumnPaginationFilter { 38 | required int32 offset = 1; 39 | required int32 limit = 2; 40 | } 41 | 42 | message Filter { 43 | required FilterType type = 1; 44 | required bytes filter = 2; // Serialized string of filter of the type 45 | } 46 | -------------------------------------------------------------------------------- /lib/request.js: -------------------------------------------------------------------------------- 1 | var TableStore = require('./core'); 2 | var inherit = TableStore.util.inherit; 3 | 4 | var hardErrorStates = { success: 1, error: 1, complete: 1 }; 5 | 6 | function isTerminalState(machine) { 7 | return Object.prototype.hasOwnProperty.call(hardErrorStates, machine._asm.currentState); 8 | } 9 | 10 | function AcceptorStateMachine(states, state) { 11 | this.currentState = state || null; 12 | this.states = states || {}; 13 | } 14 | 15 | AcceptorStateMachine.prototype.runTo = async function runTo(finalState, done, bindObject, inputError) { 16 | if (typeof finalState === 'function') { 17 | inputError = bindObject; bindObject = done; 18 | done = finalState; finalState = null; 19 | } 20 | 21 | try { 22 | var self = this; 23 | var state = self.states[self.currentState]; 24 | await state.fn.call(bindObject || self, inputError, async (err) => { 25 | if (err) { 26 | if (bindObject.logger) bindObject.logger.log(self.currentState, '->', state.fail, err); 27 | if (state.fail) self.currentState = state.fail; 28 | else return done ? done(err) : null; 29 | } else { 30 | if (bindObject.logger) bindObject.logger.log(self.currentState, '->', state.accept); 31 | if (state.accept) self.currentState = state.accept; 32 | else return done ? done() : null; 33 | } 34 | if (self.currentState === finalState) return done ? 
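// the requested final state has been reached: stop and report any error from the last transition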
done(err) : null; 35 | 36 | await self.runTo(finalState, done, bindObject, err); 37 | }); 38 | } catch (err) { 39 | if (done) done(err); 40 | } 41 | }; 42 | 43 | AcceptorStateMachine.prototype.addState = function addState(name, acceptState, failState, fn) { 44 | if (typeof acceptState === 'function') { 45 | fn = acceptState; acceptState = null; failState = null; 46 | } else if (typeof failState === 'function') { 47 | fn = failState; failState = null; 48 | } 49 | 50 | if (!this.currentState) this.currentState = name; 51 | this.states[name] = { accept: acceptState, fail: failState, fn: fn }; 52 | return this; 53 | }; 54 | 55 | var fsm = new AcceptorStateMachine(); 56 | fsm.setupStates = function () { 57 | var transition = async function transition(_, done) { 58 | var self = this; 59 | var origError = self.response.error; 60 | try { 61 | await self.emit(self._asm.currentState, async (err) => { 62 | if (err) { 63 | if (isTerminalState(self)) { 64 | throw err; 65 | } else { 66 | self.response.error = err; 67 | done(err); 68 | } 69 | } else { 70 | done(self.response.error); 71 | } 72 | }); 73 | } catch (err) { 74 | if (isTerminalState(self)) { 75 | throw err; 76 | } else { 77 | self.response.error = err; 78 | done(err); 79 | } 80 | } 81 | }; 82 | 83 | this.addState('restart', 'build', 'error', async function (err, done) { 84 | err = this.response.error; 85 | if (!err) return done(); 86 | err.retryable = TableStore.DefaultRetryPolicy.shouldRetry(this.response.retryCount, this.response.error, this.response.request.operation); 87 | if (!err.retryable) return done(err); 88 | 89 | if (this.response.retryCount < TableStore.DefaultRetryPolicy.maxRetryTimes) { 90 | this.response.retryCount++; 91 | done(); 92 | } else { 93 | done(err); 94 | } 95 | }); 96 | this.addState('build', 'afterBuild', 'restart', transition); 97 | this.addState('afterBuild', 'sign', 'restart', transition); 98 | this.addState('sign', 'send', 'retry', transition); 99 | this.addState('retry', 'afterRetry', 'afterRetry', transition); 100 | this.addState('afterRetry', 'sign', 'error', transition); 101 | this.addState('send', 'validateResponse', 'retry', transition); 102 | this.addState('validateResponse', 'extractData', 'extractError', transition); 103 | this.addState('extractError', 'extractData', 'retry', transition); 104 | this.addState('extractData', 'success', 'retry', transition); 105 | this.addState('success', 'complete', 'complete', transition); 106 | this.addState('error', 'complete', 'complete', transition); 107 | this.addState('complete', null, null, transition); 108 | }; 109 | fsm.setupStates(); 110 | 111 | TableStore.Request = inherit({ 112 | 113 | /** 114 | * Creates a request for an operation on a given service with 115 | * a set of input parameters. 116 | * 117 | * @param config [TableStore.Config] the config to perform the operation on 118 | * @param operation [String] the operation to perform on the service 119 | * @param params [Object] parameters to send to the operation. 120 | * See the operation's documentation for the format of the 121 | * parameters. 
122 | */ 123 | constructor: function Request(config, operation, params) { 124 | var endpoint = new TableStore.Endpoint(config.endpoint); 125 | var region = config.region; 126 | this.config = config; 127 | if (config.maxRetries !== undefined) { 128 | TableStore.DefaultRetryPolicy.maxRetryTimes = config.maxRetries; 129 | } 130 | this.operation = operation; 131 | this.params = params || {}; 132 | this.httpRequest = new TableStore.HttpRequest(endpoint, region); 133 | this.startTime = TableStore.util.date.getDate(); 134 | 135 | this.response = new TableStore.Response(this); 136 | this.restartCount = 0; 137 | this._asm = new AcceptorStateMachine(fsm.states, 'build'); 138 | 139 | TableStore.SequentialExecutor.call(this); 140 | this.emit = this.emitEvent; 141 | }, 142 | 143 | /** 144 | * @!group Sending a Request 145 | */ 146 | 147 | /** 148 | * @overload send(callback = null) 149 | * Sends the request object. 150 | * 151 | * @callback callback function(err, data) 152 | * If a callback is supplied, it is called when a response is returned 153 | * from the service. 154 | * @param err [Error] the error object returned from the request. 155 | * Set to `null` if the request is successful. 156 | * @param data [Object] the de-serialized data returned from 157 | * the request. Set to `null` if a request error occurs. 158 | * @example Sending a request with a callback 159 | * request = client.listTable({Bucket: 'bucket', Key: 'key'}); 160 | * request.send(function(err, data) { console.log(err, data); }); 161 | */ 162 | send: async function send(callback) { 163 | if (callback) { 164 | this.on('complete', function (resp) { 165 | callback.call(resp, resp.error, resp.data); 166 | }); 167 | } 168 | await this.runTo(); 169 | 170 | return this.response; 171 | }, 172 | 173 | build: async function build(callback) { 174 | await this.runTo('send', callback); 175 | }, 176 | 177 | runTo: async function runTo(state, done) { 178 | await this._asm.runTo(state, done, this); 179 | return this; 180 | }, 181 | 182 | /** 183 | * @param [Array,Response] args This should be the response object, 184 | * or an array of args to send to the event. 
185 | * @api private
186 | */
187 | emitEvent: async function emit(eventName, args, done) {
188 | if (typeof args === 'function') { done = args; args = null; }
189 | if (!done) done = function () { };
190 | if (!args) args = this.eventParameters(eventName, this.response);
191 | var origEmit = TableStore.SequentialExecutor.prototype.emit;
192 | try {
193 | await origEmit.call(this, eventName, args, async (err) => {
194 | if (err) this.response.error = err;
195 | done.call(this, err);
196 | });
197 | } catch (err) {
198 | if (done) done(err);
199 | }
200 | },
201 |
202 | /**
203 | * @api private
204 | */
205 | eventParameters: function eventParameters(eventName) {
206 | switch (eventName) {
207 | case 'validate':
208 | case 'sign':
209 | case 'build':
210 | case 'afterBuild':
211 | return [this];
212 | case 'error':
213 | return [this.response.error, this.response];
214 | default:
215 | return [this.response];
216 | }
217 | }
218 | });
219 |
220 | TableStore.util.mixin(TableStore.Request, TableStore.SequentialExecutor);
221 |
222 | TableStore.Response = inherit({
223 |
224 | /**
225 | * @api private
226 | */
227 | constructor: function Response(request) {
228 | this.request = request;
229 | this.data = null;
230 | this.error = null;
231 | this.retryCount = 0;
232 | this.httpResponse = new TableStore.HttpResponse();
233 | }
234 |
235 | });
-------------------------------------------------------------------------------- /lib/retry/default_retry_policy.js: --------------------------------------------------------------------------------
1 | var TableStore = require('../core');
2 |
3 | TableStore.DefaultRetryPolicy = {
4 | /*
5 | Default retry policy.
6 | The maximum number of retries is 20 and the maximum retry interval is 3 seconds. Throttling errors, as well as server-side internal errors of read-related operations, are retried.
7 | */
8 |
9 | // maximum number of retries
10 | maxRetryTimes: 20,
11 |
12 | // maximum retry interval, in milliseconds
13 | maxRetryDelay: 3000,
14 |
15 | // factor by which the retry interval grows on each retry
16 | scaleFactor: 2,
17 |
18 | // initial retry intervals for the two error classes, in milliseconds
19 | serverThrottlingExceptionDelayFactor: 500,
20 | stabilityExceptionDelayFactor: 200,
21 |
22 | _maxRetryTimeReached: function (retryTimes, exception, apiName) {
23 | return retryTimes >= TableStore.DefaultRetryPolicy.maxRetryTimes;
24 | },
25 |
26 | isRepeatableApi: function (apiName) {
27 | return TableStore.RetryUtil.isRepeatableApi(apiName);
28 | },
29 |
30 | _canRetry: function (retryTimes, exception, apiName) {
31 |
32 | if (TableStore.RetryUtil.shouldRetryNoMatterWhichApi(exception)) {
33 | return true;
34 | }
35 |
36 | if (TableStore.DefaultRetryPolicy.isRepeatableApi(apiName)
37 | && TableStore.RetryUtil.shouldRetryWhenApiRepeatable(retryTimes, exception, apiName)) {
38 | return true;
39 | }
40 |
41 | return false;
42 | },
43 |
44 | getRetryDelay: function (retryTimes, exception) {
45 | var delayFactor;
46 | if (TableStore.RetryUtil.isServerThrottlingException(exception)) {
47 | delayFactor = TableStore.DefaultRetryPolicy.serverThrottlingExceptionDelayFactor;
48 | } else {
49 | delayFactor = TableStore.DefaultRetryPolicy.stabilityExceptionDelayFactor;
50 | }
51 |
52 | var delayLimit = delayFactor * Math.pow(TableStore.DefaultRetryPolicy.scaleFactor, retryTimes);
53 |
54 | if (delayLimit >= TableStore.DefaultRetryPolicy.maxRetryDelay) {
55 | delayLimit = TableStore.DefaultRetryPolicy.maxRetryDelay;
56 | }
57 |
58 | var realDelay = delayLimit * 0.5 + delayLimit * 0.5 * Math.random();
59 | return realDelay;
60 | },
61 |
62 | shouldRetry: function (retryTimes, exception, apiName) {
63 |
64 | if (TableStore.DefaultRetryPolicy._maxRetryTimeReached(retryTimes, exception, apiName)) {
65 | return false;
66 | }
67 |
68 | if
(TableStore.DefaultRetryPolicy._canRetry(retryTimes, exception, apiName)) { 69 | return true; 70 | } 71 | 72 | return false; 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /lib/retry/retry_util.js: -------------------------------------------------------------------------------- 1 | var TableStore = require('../core'); 2 | 3 | TableStore.RetryUtil = { 4 | 5 | shouldRetryNoMatterWhichApi: function (exception) { 6 | var errorCode = exception.code; 7 | var errorMessage = exception.message; 8 | 9 | if (errorCode == "OTSRowOperationConflict" || 10 | errorCode == "OTSNotEnoughCapacityUnit" || 11 | errorCode == "OTSTableNotReady" || 12 | errorCode == "OTSPartitionUnavailable" || 13 | errorCode == "OTSServerBusy" || 14 | errorCode == "OTSOperationThrottled") { 15 | return true; 16 | } 17 | 18 | if (errorCode == "OTSQuotaExhausted" && errorMessage == "Too frequent table operations.") { 19 | return true; 20 | } 21 | 22 | return false; 23 | }, 24 | 25 | isRepeatableApi: function (apiName) { 26 | apiName = TableStore.util.string.upperFirst(apiName); 27 | const repeatableApi = [ 28 | 'ListTable', 29 | 'DescribeTable', 30 | 'GetRow', 31 | 'BatchGetRow', 32 | 'GetRange', 33 | 'DescribeSearchIndex', 34 | 'ListSearchIndex', 35 | 'ComputeSplits', 36 | 'ParallelScan', 37 | 'Search', 38 | 'SQLQuery', 39 | ]; 40 | for (const i in repeatableApi) { 41 | if (repeatableApi[i] === apiName) { 42 | return true; 43 | } 44 | } 45 | }, 46 | 47 | shouldRetryWhenApiRepeatable: function (retry_times, exception, api_name) { 48 | var errorCode = exception.code; 49 | var errorMessage = exception.message; 50 | 51 | if (errorCode == "OTSTimeout" || 52 | errorCode == "OTSInternalServerError" || 53 | errorCode == "OTSServerUnavailable" || 54 | errorCode == "NetworkingError") { 55 | return true; 56 | } 57 | 58 | if (errorCode == 500 || errorCode == 502 || errorCode == 503) { 59 | return true; 60 | } 61 | 62 | return false; 63 | }, 64 | 65 | isServerThrottlingException: function (exception) { 66 | var errorCode = exception.code 67 | var errorMessage = exception.message 68 | 69 | if (errorCode == "OTSServerBusy" || 70 | errorCode == "OTSNotEnoughCapacityUnit" || 71 | errorCode == "OTSOperationThrottled") { 72 | return true; 73 | } 74 | 75 | if (errorCode == "OTSQuotaExhausted" && errorMessage == "Too frequent table operations.") { 76 | return true; 77 | } 78 | 79 | return false; 80 | } 81 | } -------------------------------------------------------------------------------- /lib/search.js: -------------------------------------------------------------------------------- 1 | var TableStore = require('./core'); 2 | var inherit = TableStore.util.inherit; 3 | 4 | TableStore.QueryType = { 5 | MATCH_QUERY: 1, 6 | MATCH_PHRASE_QUERY: 2, 7 | TERM_QUERY: 3, 8 | RANGE_QUERY: 4, 9 | PREFIX_QUERY: 5, 10 | BOOL_QUERY: 6, 11 | CONST_SCORE_QUERY: 7, 12 | FUNCTION_SCORE_QUERY: 8, 13 | NESTED_QUERY: 9, 14 | WILDCARD_QUERY: 10, 15 | MATCH_ALL_QUERY: 11, 16 | GEO_BOUNDING_BOX_QUERY: 12, 17 | GEO_DISTANCE_QUERY: 13, 18 | GEO_POLYGON_QUERY: 14, 19 | TERMS_QUERY: 15, 20 | EXISTS_QUERY: 16, 21 | KNN_VECTOR_QUERY: 17, 22 | FUNCTIONS_SCORE_QUERY: 18, 23 | }; 24 | 25 | TableStore.ScoreMode = { 26 | SCORE_MODE_NONE: 1, 27 | SCORE_MODE_AVG: 2, 28 | SCORE_MODE_MAX: 3, 29 | SCORE_MODE_TOTAL: 4, 30 | SCORE_MODE_MIN: 5, 31 | } 32 | 33 | TableStore.SortOrder = { 34 | SORT_ORDER_ASC: 0, 35 | SORT_ORDER_DESC: 1, 36 | }; 37 | 38 | TableStore.SortMode = { 39 | SORT_MODE_MIN: 0, 40 | SORT_MODE_MAX: 1, 41 | SORT_MODE_AVG: 
2, 42 | }; 43 | 44 | TableStore.FieldType = { 45 | LONG: 1, 46 | DOUBLE: 2, 47 | BOOLEAN: 3, 48 | KEYWORD: 4, 49 | TEXT: 5, 50 | NESTED: 6, 51 | GEO_POINT: 7, 52 | DATE: 8, 53 | VECTOR: 9, 54 | }; 55 | 56 | TableStore.ColumnReturnType = { 57 | RETURN_ALL: 1, 58 | RETURN_SPECIFIED: 2, 59 | RETURN_NONE: 3, 60 | RETURN_ALL_FROM_INDEX: 4, 61 | }; 62 | 63 | TableStore.GeoDistanceType = { 64 | GEO_DISTANCE_ARC: 0, 65 | GEO_DISTANCE_PLANE: 1 66 | }; 67 | 68 | TableStore.IndexOptions = { 69 | DOCS: 1, 70 | FREQS: 2, 71 | POSITIONS: 3, 72 | OFFSETS: 4 73 | }; 74 | 75 | TableStore.QueryOperator = { 76 | OR: 1, 77 | AND: 2 78 | } 79 | 80 | TableStore.AggregationType = { 81 | AGG_AVG: 1, 82 | AGG_MAX: 2, 83 | AGG_MIN: 3, 84 | AGG_SUM: 4, 85 | AGG_COUNT: 5, 86 | AGG_DISTINCT_COUNT: 6, 87 | AGG_TOP_ROWS: 7, 88 | AGG_PERCENTILES: 8, 89 | } 90 | 91 | TableStore.GroupByType = { 92 | GROUP_BY_FIELD: 1, 93 | GROUP_BY_RANGE: 2, 94 | GROUP_BY_FILTER: 3, 95 | GROUP_BY_GEO_DISTANCE: 4, 96 | GROUP_BY_HISTOGRAM: 5, 97 | GROUP_BY_DATE_HISTOGRAM: 6, 98 | GROUP_BY_GEO_GRID: 7, 99 | GROUP_BY_COMPOSITE: 8, 100 | } 101 | 102 | TableStore.DateTimeUnit = { 103 | YEAR: 1, 104 | QUARTER_YEAR: 2, // 一个季度 105 | MONTH: 3, 106 | WEEK: 4, 107 | DAY: 5, 108 | HOUR: 6, 109 | MINUTE: 7, 110 | SECOND: 8, 111 | MILLISECOND: 9, 112 | } 113 | 114 | TableStore.GeoHashPrecision = { 115 | GHP_5009KM_4992KM_1: 1, 116 | GHP_1252KM_624KM_2: 2, 117 | GHP_156KM_156KM_3: 3, 118 | GHP_39KM_19KM_4: 4, 119 | GHP_4900M_4900M_5: 5, 120 | GHP_1200M_609M_6: 6, 121 | GHP_152M_152M_7: 7, 122 | GHP_38M_19M_8: 8, 123 | GHP_480CM_480CM_9: 9, 124 | GHP_120CM_595MM_10: 10, 125 | GHP_149MM_149MM_11: 11, 126 | GHP_37MM_19MM_12: 12, 127 | } 128 | 129 | TableStore.FSMScoreMode = { 130 | FSM_AVG: 1, 131 | FSM_MAX: 2, 132 | FSM_SUM: 3, 133 | FSM_MIN: 4, 134 | FSM_MULTIPLY: 5, 135 | FSM_FIRST: 6, 136 | } 137 | 138 | TableStore.FSMCombineMode = { 139 | FCM_MULTIPLY: 1, 140 | FCM_AVG: 2, 141 | FCM_MAX: 3, 142 | FCM_SUM: 4, 143 | FCM_MIN:5 , 144 | FCM_REPLACE: 6, 145 | } 146 | 147 | TableStore.MathFunction = { 148 | GAUSS: 1, 149 | EXP: 2, 150 | LINEAR:3, 151 | } 152 | 153 | TableStore.MultiValueMode = { 154 | MVM_MAX: 1, 155 | MVM_MIN: 2, 156 | MVM_SUM: 3, 157 | MVM_AVG: 4, 158 | } 159 | 160 | TableStore.DecayFuncParamType = { 161 | DF_DATE_PARAM: 1, 162 | DF_NUMERIC_PARAM: 2, 163 | DF_GEO_PARAM: 3, 164 | } 165 | 166 | TableStore.FunctionModifier = { 167 | FM_NONE: 1, 168 | FM_LOG: 2, 169 | FM_LOG1P: 3, 170 | FM_LOG2P: 4, 171 | FM_LN: 5, 172 | FM_LN1P: 6, 173 | FM_LN2P: 7, 174 | FM_SQUARE: 8, 175 | FM_SQRT: 9, 176 | FM_RECIPROCAL: 10, 177 | } 178 | 179 | TableStore.VectorDataType = { 180 | VD_FLOAT_32: 2, 181 | } 182 | 183 | TableStore.VectorMetricType = { 184 | VM_EUCLIDEAN: 0, 185 | VM_COSINE: 1, 186 | VM_DOT_PRODUCT: 2, 187 | } 188 | 189 | TableStore.HighlightFragmentOrder = { 190 | TEXT_SEQUENCE: 1, 191 | SCORE: 2, 192 | } 193 | 194 | TableStore.HighlightEncoder = { 195 | PLAIN_MODE: 1, 196 | HTML_MODE: 2, 197 | } 198 | 199 | TableStore.IndexStatus = { 200 | PENDING: 'pending', 201 | FAILED: 'failed', 202 | RUNNING: 'running', 203 | UNKNOWN: 'unknown', 204 | } 205 | 206 | TableStore.SyncPhase = { 207 | INCR: 'INCR', 208 | FULL: "FULL" 209 | } -------------------------------------------------------------------------------- /lib/sequential_executor.js: -------------------------------------------------------------------------------- 1 | var TableStore = require('./core'); 2 | 3 | /** 4 | * @!method on(eventName, callback) 5 | * Registers an event listener callback 
for the event given by `eventName`. 6 | * Parameters passed to the callback function depend on the individual event 7 | * being triggered. See the event documentation for those parameters. 8 | * 9 | * @param eventName [String] the event name to register the listener for 10 | * @param callback [Function] the listener callback function 11 | * @return [TableStore.SequentialExecutor] the same object for chaining 12 | */ 13 | TableStore.SequentialExecutor = TableStore.util.inherit({ 14 | 15 | constructor: function SequentialExecutor() { 16 | this._events = {}; 17 | }, 18 | 19 | /** 20 | * @api private 21 | */ 22 | listeners: function listeners(eventName) { 23 | return this._events[eventName] ? this._events[eventName].slice(0) : []; 24 | }, 25 | 26 | on: function on(eventName, listener) { 27 | if (this._events[eventName]) { 28 | this._events[eventName].push(listener); 29 | } else { 30 | this._events[eventName] = [listener]; 31 | } 32 | return this; 33 | }, 34 | 35 | onAsync: function onAsync(eventName, listener) { 36 | listener._isAsync = true; 37 | return this.on(eventName, listener); 38 | }, 39 | 40 | removeListener: function removeListener(eventName, listener) { 41 | var listeners = this._events[eventName]; 42 | if (listeners) { 43 | var length = listeners.length; 44 | var position = -1; 45 | for (var i = 0; i < length; ++i) { 46 | if (listeners[i] === listener) { 47 | position = i; 48 | } 49 | } 50 | if (position > -1) { 51 | listeners.splice(position, 1); 52 | } 53 | } 54 | return this; 55 | }, 56 | 57 | removeAllListeners: function removeAllListeners(eventName) { 58 | if (eventName) { 59 | delete this._events[eventName]; 60 | } else { 61 | this._events = {}; 62 | } 63 | return this; 64 | }, 65 | 66 | /** 67 | * @api private 68 | */ 69 | emit: async function emit(eventName, eventArgs, doneCallback) { 70 | if (!doneCallback) doneCallback = this.unhandledErrorCallback; 71 | 72 | if (process.env.DEBUG == 'aliyun') { 73 | console.log('emit', eventName); 74 | } 75 | 76 | var listeners = this.listeners(eventName); 77 | var count = listeners.length; 78 | 79 | try { 80 | await this.callListeners(listeners, eventArgs, doneCallback); 81 | } catch (err) { 82 | doneCallback.call(this, err); 83 | } 84 | 85 | return count > 0; 86 | }, 87 | 88 | /** 89 | * @api private 90 | */ 91 | callListeners: async function callListeners(listeners, args, doneCallback) { 92 | if (listeners.length === 0) { 93 | doneCallback.call(this); 94 | } else { 95 | var listener = listeners.shift(); 96 | if (listener._isAsync) { 97 | 98 | // asynchronous listener 99 | try { 100 | await new Promise((resolve, reject) => { 101 | listener.apply(this, args.concat([function(err) { 102 | if (err) { 103 | reject(err); 104 | } else { 105 | resolve(); 106 | } 107 | }])); 108 | }); 109 | await this.callListeners(listeners, args, doneCallback); 110 | } catch (err) { 111 | doneCallback.call(this, err); 112 | } 113 | 114 | } else { 115 | 116 | // synchronous listener 117 | try { 118 | listener.apply(this, args); 119 | await this.callListeners(listeners, args, doneCallback); 120 | } catch (err) { 121 | doneCallback.call(this, err); 122 | } 123 | 124 | } 125 | } 126 | }, 127 | 128 | /** 129 | * Adds or copies a set of listeners from another list of 130 | * listeners or SequentialExecutor object. 131 | * 132 | * @param listeners [map>, TableStore.SequentialExecutor] 133 | * a list of events and callbacks, or an event emitter object 134 | * containing listeners to add to this emitter object. 
135 | * @return [TableStore.SequentialExecutor] the emitter object, for chaining. 136 | * @example Adding listeners from a map of listeners 137 | * emitter.addListeners({ 138 | * event1: [function() { ... }, function() { ... }], 139 | * event2: [function() { ... }] 140 | * }); 141 | * emitter.emit('event1'); // emitter has event1 142 | * emitter.emit('event2'); // emitter has event2 143 | * @example Adding listeners from another emitter object 144 | * var emitter1 = new TableStore.SequentialExecutor(); 145 | * emitter1.on('event1', function() { ... }); 146 | * emitter1.on('event2', function() { ... }); 147 | * var emitter2 = new TableStore.SequentialExecutor(); 148 | * emitter2.addListeners(emitter1); 149 | * emitter2.emit('event1'); // emitter2 has event1 150 | * emitter2.emit('event2'); // emitter2 has event2 151 | */ 152 | addListeners: function addListeners(listeners) { 153 | var self = this; 154 | 155 | // extract listeners if parameter is an SequentialExecutor object 156 | if (listeners._events) listeners = listeners._events; 157 | 158 | TableStore.util.each(listeners, function(event, callbacks) { 159 | if (typeof callbacks === 'function') callbacks = [callbacks]; 160 | TableStore.util.arrayEach(callbacks, function(callback) { 161 | self.on(event, callback); 162 | }); 163 | }); 164 | 165 | return self; 166 | }, 167 | 168 | /** 169 | * Registers an event with {on} and saves the callback handle function 170 | * as a property on the emitter object using a given `name`. 171 | * 172 | * @param name [String] the property name to set on this object containing 173 | * the callback function handle so that the listener can be removed in 174 | * the future. 175 | * @param (see on) 176 | * @return (see on) 177 | * @example Adding a named listener DATA_CALLBACK 178 | * var listener = function() { doSomething(); }; 179 | * emitter.addNamedListener('DATA_CALLBACK', 'data', listener); 180 | * 181 | * // the following prints: true 182 | * console.log(emitter.DATA_CALLBACK == listener); 183 | */ 184 | addNamedListener: function addNamedListener(name, eventName, callback) { 185 | this[name] = callback; 186 | this.addListener(eventName, callback); 187 | return this; 188 | }, 189 | 190 | /** 191 | * @api private 192 | */ 193 | addNamedAsyncListener: function addNamedAsyncListener(name, eventName, callback) { 194 | callback._isAsync = true; 195 | return this.addNamedListener(name, eventName, callback); 196 | }, 197 | 198 | /** 199 | * Helper method to add a set of named listeners using 200 | * {addNamedListener}. The callback contains a parameter 201 | * with a handle to the `addNamedListener` method. 202 | * 203 | * @callback callback function(add) 204 | * The callback function is called immediately in order to provide 205 | * the `add` function to the block. This simplifies the addition of 206 | * a large group of named listeners. 207 | * @param add [Function] the {addNamedListener} function to call 208 | * when registering listeners. 209 | * @example Adding a set of named listeners 210 | * emitter.addNamedListeners(function(add) { 211 | * add('DATA_CALLBACK', 'data', function() { ... }); 212 | * add('OTHER', 'otherEvent', function() { ... }); 213 | * add('LAST', 'lastEvent', function() { ... 
}); 214 | * }); 215 | * 216 | * // these properties are now set: 217 | * emitter.DATA_CALLBACK; 218 | * emitter.OTHER; 219 | * emitter.LAST; 220 | */ 221 | addNamedListeners: function addNamedListeners(callback) { 222 | var self = this; 223 | callback( 224 | function() { 225 | self.addNamedListener.apply(self, arguments); 226 | }, 227 | function() { 228 | self.addNamedAsyncListener.apply(self, arguments); 229 | } 230 | ); 231 | return this; 232 | } 233 | }); 234 | 235 | /** 236 | * {on} is the preferred method. 237 | * @api private 238 | */ 239 | TableStore.SequentialExecutor.prototype.addListener = TableStore.SequentialExecutor.prototype.on; 240 | TableStore.SequentialExecutor.prototype.addAsyncListener = TableStore.SequentialExecutor.prototype.onAsync; 241 | -------------------------------------------------------------------------------- /lib/signer.js: -------------------------------------------------------------------------------- 1 | var TableStore = require('./core'); 2 | var inherit = TableStore.util.inherit; 3 | 4 | /** 5 | * @api private 6 | */ 7 | TableStore.Signer = inherit({ 8 | constructor: function Signer(request) { 9 | this.request = request; 10 | }, 11 | 12 | addAuthorization: function addAuthorization(credentials, date) { 13 | this.request.headers['x-ots-date'] = TableStore.util.date.iso8601(date); 14 | this.request.headers['x-ots-accesskeyid'] = credentials.accessKeyId; 15 | if(credentials.securityToken){ 16 | this.request.headers['x-ots-ststoken'] = credentials.securityToken; 17 | } 18 | delete this.request.headers['x-ots-signature']; 19 | var signature = this.sign(credentials.secretAccessKey, this.stringToSign()); 20 | this.request.headers['x-ots-signature'] = signature; 21 | }, 22 | 23 | stringToSign: function stringToSign() { 24 | var r = this.request; 25 | 26 | var parts = []; 27 | 28 | parts.push(r.path); 29 | 30 | parts.push(r.method + '\n'); 31 | 32 | var headers = this.canonicalizedHeaders(); 33 | if (headers) parts.push(headers); 34 | 35 | return parts.join('\n') + '\n'; 36 | }, 37 | 38 | canonicalizedHeaders: function canonicalizedHeaders() { 39 | 40 | var headers = []; 41 | 42 | TableStore.util.each(this.request.headers, function (name) { 43 | if (name.match(/^x-ots-/i)) 44 | headers.push(name); 45 | }); 46 | 47 | headers.sort(function (a, b) { 48 | return a.toLowerCase() < b.toLowerCase() ? 
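// sort the canonical x-ots header names case-insensitively in ascending order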
-1 : 1; 49 | }); 50 | 51 | var parts = []; 52 | TableStore.util.arrayEach.call(this, headers, function (name) { 53 | parts.push(name.toLowerCase() + ':' + String(this.request.headers[name])); 54 | }); 55 | 56 | return parts.join('\n'); 57 | 58 | }, 59 | 60 | sign: function sign(secret, string) { 61 | if(process.env.DEBUG == 'aliyun') { 62 | console.log('----------- sign string start -----------'); 63 | console.log(string); 64 | console.log('----------- sign string end -----------'); 65 | } 66 | return TableStore.util.crypto.hmac(secret, string, 'base64', 'sha1'); 67 | } 68 | }); 69 | 70 | module.exports = TableStore.Signer; 71 | -------------------------------------------------------------------------------- /lib/sql.js: -------------------------------------------------------------------------------- 1 | const TableStore = require('./core'); 2 | const {DataType, DataTypeName} = require('./protocol/sql_generated.js') 3 | 4 | TableStore.SQLPayloadVersion = { 5 | SQL_FLAT_BUFFERS: 2, 6 | }; 7 | 8 | TableStore.SQLStatementType = { 9 | SQL_SELECT: 1, 10 | SQL_CREATE_TABLE: 2, 11 | SQL_SHOW_TABLE: 3, 12 | SQL_DESCRIBE_TABLE: 4, 13 | SQL_DROP_TABLE: 5, 14 | SQL_ALTER_TABLE: 6, 15 | }; 16 | 17 | TableStore.SQLDataType = DataType 18 | TableStore.SQLDataTypeName = DataTypeName 19 | 20 | TableStore.SQLRows = function (columns) { 21 | this.rowCount = columns.rowCount(); 22 | this.columnCount = columns.columnsLength(); 23 | this._columnNames = []; 24 | this._columnTypes = []; 25 | this._columnTypeNames = []; 26 | this._columnValues = []; 27 | this._rleStringValues = []; 28 | for (let i = 0; i < columns.columnsLength(); i++) { 29 | let column = columns.columns(i); 30 | this._columnNames[i] = column.columnName(); 31 | this._columnTypes[i] = column.columnType(); 32 | this._columnTypeNames[i] = TableStore.SQLDataTypeName[column.columnType()]; 33 | this._columnValues[i] = column.columnValue(); 34 | this._rleStringValues[i] = this._columnValues[i].rleStringValues(); 35 | } 36 | this.get = function (rowIndex, columnIndex) { 37 | if (rowIndex >= this.rowCount || rowIndex < 0) { 38 | throw new Error("Row index " + rowIndex + " out of range"); 39 | } 40 | if (columnIndex >= this.columnCount || columnIndex < 0) { 41 | throw new Error("Column index " + columnIndex + " out of range"); 42 | } 43 | let columnType = this._columnTypes[columnIndex]; 44 | let columnValue = this._columnValues[columnIndex]; 45 | switch (columnType) { 46 | case TableStore.SQLDataType.LONG: 47 | if (columnValue.isNullvalues(rowIndex)) { 48 | return null; 49 | } else { 50 | return columnValue.longValues(rowIndex); 51 | } 52 | case TableStore.SQLDataType.BOOLEAN: 53 | if (columnValue.isNullvalues(rowIndex)) { 54 | return null; 55 | } else { 56 | return columnValue.boolValues(rowIndex); 57 | } 58 | case TableStore.SQLDataType.DOUBLE: 59 | if (columnValue.isNullvalues(rowIndex)) { 60 | return null; 61 | } else { 62 | return columnValue.doubleValues(rowIndex); 63 | } 64 | case TableStore.SQLDataType.STRING: 65 | if (columnValue.isNullvalues(rowIndex)) { 66 | return null; 67 | } else { 68 | return columnValue.stringValues(rowIndex); 69 | } 70 | case TableStore.SQLDataType.BINARY: 71 | if (columnValue.isNullvalues(rowIndex)) { 72 | return null; 73 | } else { 74 | return columnValue.binaryValues(rowIndex); 75 | } 76 | case TableStore.SQLDataType.STRING_RLE: 77 | if (columnValue.isNullvalues(rowIndex)) { 78 | return null; 79 | } else { 80 | let rleStringValue = this._rleStringValues[columnIndex]; 81 | return this._resolveRLEString(rleStringValue, 
rowIndex); 82 | } 83 | default: 84 | throw new Error("not supported column type in flatBuffers: " + columnType); 85 | } 86 | }; 87 | 88 | this._resolveRLEString = function (rleStringValue, rowIndex) { 89 | return rleStringValue.array(rleStringValue.indexMapping(rowIndex)); 90 | } 91 | 92 | this._resolveSQLTableMetaFromColumns = function () { 93 | let schemas = []; 94 | let columnsMap = {}; 95 | for (let i = 0; i < this.columnCount; i++) { 96 | let schema = { 97 | name: this._columnNames[i], 98 | type: this._columnTypes[i], 99 | typeName: this._columnTypeNames[i], 100 | 101 | }; 102 | schemas.push(schema); 103 | columnsMap[this._columnNames[i]] = i; 104 | } 105 | return { 106 | schemas: schemas, 107 | columnsMap: columnsMap, 108 | }; 109 | }; 110 | this.sqlTableMeta = this._resolveSQLTableMetaFromColumns(); 111 | } 112 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "tablestore", 3 | "main": "index.js", 4 | "description": "TableStore SDK for JavaScript", 5 | "version": "5.6.0", 6 | "devDependencies": { 7 | "mocha": "^3.2.0", 8 | "coveralls": "^2.13.1", 9 | "mocha-lcov-reporter": "^1.3.0", 10 | "istanbul": "^0.4.5" 11 | }, 12 | "dependencies": { 13 | "buffer": "4.9.1", 14 | "flatbuffers": "^1.5.0", 15 | "int64-buffer": "0.1.9", 16 | "protobufjs": "^6.11.3" 17 | }, 18 | "directories": { 19 | "doc": "doc", 20 | "test": "test" 21 | }, 22 | "scripts": { 23 | "test": "mocha test", 24 | "coveralls": "istanbul cover ./node_modules/mocha/bin/_mocha --report lcovonly -- -R spec && cat ./coverage/lcov.info | ./node_modules/coveralls/bin/coveralls.js && rm -rf ./coverage" 25 | }, 26 | "repository": { 27 | "type": "git", 28 | "url": "https://github.com/aliyun/aliyun-tablestore-nodejs-sdk.git" 29 | }, 30 | "keywords": [ 31 | "tablestore", 32 | "ots" 33 | ], 34 | "browser": { 35 | "./lib/util.js": "./lib/util-browser.js" 36 | }, 37 | "author": "boxiao.wpl", 38 | "license": "Apache-2.0" 39 | } 40 | -------------------------------------------------------------------------------- /samples-async(node6)/client.js: -------------------------------------------------------------------------------- 1 | var TableStore = require('../index.js'); 2 | 3 | var accessKeyId = process.env.accessKeyId; 4 | var secretAccessKey = process.env.secretAccessKey; 5 | var stsToken = process.env.stsToken; 6 | var endpoint = process.env.endpoint; 7 | var instancename = process.env.instancename; 8 | 9 | var client = new TableStore.Client({ 10 | accessKeyId, // , 11 | secretAccessKey, // , 12 | stsToken, // , /*When you use the STS authorization, you need to fill in. ref:https://help.aliyun.com/document_detail/27364.html*/ 13 | endpoint, // , 14 | instancename, // 15 | }); 16 | 17 | module.exports = client; 18 | -------------------------------------------------------------------------------- /samples-async(node6)/createTable.js: -------------------------------------------------------------------------------- 1 | var client = require('./client'); 2 | 3 | var params = { 4 | tableMeta: { 5 | tableName: 'sampleTable', 6 | primaryKey: [ 7 | { 8 | name: 'gid', 9 | type: 'INTEGER' 10 | }, 11 | { 12 | name: 'uid', 13 | type: 'INTEGER' 14 | } 15 | ] 16 | }, 17 | reservedThroughput: { 18 | capacityUnit: { 19 | read: 0, 20 | write: 0 21 | } 22 | }, 23 | tableOptions: { 24 | timeToLive: -1,// 数据的过期时间, 单位秒, -1代表永不过期. 假如设置过期时间为一年, 即为 365 * 24 * 3600. 
25 | maxVersions: 1// 保存的最大版本数, 设置为1即代表每列上最多保存一个版本(保存最新的版本). 26 | } 27 | }; 28 | (async () => { 29 | try { 30 | const data = await client.createTable(params); 31 | console.log(data); 32 | } catch (error) { 33 | console.log(error); 34 | } 35 | })() -------------------------------------------------------------------------------- /samples-async(node6)/searchPaginateByToken.js: -------------------------------------------------------------------------------- 1 | var client = require('../samples/client'); 2 | var TableStore = require('../index.js'); 3 | 4 | 5 | var params = { 6 | tableName: "nestedTag", 7 | indexName: "testIndex", 8 | searchQuery: { 9 | offset: 0, 10 | limit: 1, 11 | query: { 12 | queryType: TableStore.QueryType.MATCH_ALL_QUERY 13 | }, 14 | sort: { 15 | sorters: [ 16 | { 17 | fieldSort: { 18 | fieldName: "pic_id", 19 | order: TableStore.SortOrder.SORT_ORDER_DESC, 20 | } 21 | } 22 | ] 23 | }, 24 | getTotalCount: true 25 | }, 26 | columnToGet: { 27 | returnType: TableStore.ColumnReturnType.RETURN_NONE, 28 | returnNames: ["pic_tag", "pic_description", "time_stamp", "pos"] 29 | } 30 | }; 31 | 32 | (async () => { 33 | try { 34 | var data = await client.search(params); 35 | console.log(data); 36 | delete params.searchQuery.sort.sorts; 37 | 38 | while (data.nextToken) { 39 | var nextToken = data.nextToken.toString("base64", data.nextToken.offset, data.nextToken.limit); 40 | var token = new Buffer(nextToken, "base64"); 41 | params.searchQuery.token = token; 42 | data = await client.search(params); 43 | console.log(data); 44 | } 45 | } catch (error) { 46 | console.log(error); 47 | } 48 | })() 49 | -------------------------------------------------------------------------------- /samples-async(node6)/token.js: -------------------------------------------------------------------------------- 1 | /** 2 | * 使用token翻页示例(同步+异步)。 3 | * 4 | */ 5 | var client = require('./client'); 6 | var TableStore = require('../index.js'); 7 | var params = { 8 | tableName: "actable", //设置数据表名称。 9 | indexName: "actable_index001", //设置多元索引名称。 10 | searchQuery: { 11 | offset: 0, 12 | limit: 2, 13 | token: null,//获取nextToken作为下一页起点(数据类型为字节流)。 14 | query: { 15 | queryType: TableStore.QueryType.MATCH_ALL_QUERY 16 | }, 17 | getTotalCount: true 18 | }, 19 | columnToGet: { 20 | returnType: TableStore.ColumnReturnType.RETURN_SPECIFIED,//RETURN_NONE 21 | returnNames: ["monitor", "tearch", "name", "className"] 22 | } 23 | }; 24 | 25 | /** 26 | * 使用token翻页示例(同步)。 27 | */ 28 | (async () => { //同步示例代码。 29 | try { 30 | var data = await client.search(params); 31 | console.log(data); 32 | 33 | while (data.nextToken && data.nextToken.length) { //当存在nextToken时,表示还有未读取的数据。 34 | console.log("Origin: ", data.nextToken); 35 | // var nextToken = data.nextToken.toString("base64", data.nextToken.offset, data.nextToken.length + data.nextToken.offset); 36 | var nextToken = data.nextToken.toString("base64"); 37 | var token = Buffer.from(nextToken, "base64"); 38 | 39 | console.log("After: ", token) 40 | params.searchQuery.token = token;//翻页更新token值。 41 | data = await client.search(params); 42 | console.log(data); 43 | 44 | } 45 | } catch (error) { 46 | console.log(error); 47 | } 48 | })() 49 | -------------------------------------------------------------------------------- /samples-async(node6)/transaction.js: -------------------------------------------------------------------------------- 1 | const TableStore = require('../index.js'); 2 | const client = require('./client'); 3 | 4 | const tableName = "sample" 5 | const primaryKey = [{ 6 | 
"id": "a6ef32e3-e058-4b71-b39b-16ad2f6b1afb" 7 | }] 8 | 9 | async function init() { 10 | await client.putRow({ 11 | tableName, 12 | condition: new TableStore.Condition(TableStore.RowExistenceExpectation.IGNORE, null), 13 | primaryKey, 14 | attributeColumns: [{ 15 | col: 'inited' 16 | }] 17 | }) 18 | } 19 | 20 | async function update(transactionId) { 21 | await client.putRow({ 22 | tableName, 23 | condition: new TableStore.Condition(TableStore.RowExistenceExpectation.IGNORE, null), 24 | primaryKey, 25 | attributeColumns: [{ 26 | col: 'updated' 27 | }], 28 | transactionId 29 | }) 30 | } 31 | 32 | (async () => { 33 | try { 34 | // 写入初始数据 35 | await init() 36 | 37 | // 创建局部事务 38 | const request = await client.startLocalTransaction({ 39 | tableName, 40 | primaryKey 41 | }) 42 | const transactionId = request.transactionId 43 | 44 | // 数据操作 45 | // ... 46 | await update(transactionId) 47 | 48 | // 提交事务 49 | await client.commitTransaction({ 50 | transactionId 51 | }) 52 | // or client.commitTransaction(transactionId) 53 | 54 | // 或丢弃事务 55 | // await client.abortTransaction({ 56 | // transactionId 57 | // }) 58 | // or client.abortTransaction(transactionId) 59 | } catch (e) { 60 | console.error(e) 61 | } 62 | })() 63 | -------------------------------------------------------------------------------- /samples/AutoIncrement.js: -------------------------------------------------------------------------------- 1 | var TableStore = require('../index.js'); 2 | var Long = TableStore.Long; 3 | var client = require('./client'); 4 | 5 | var tableName = "autoIncTable"; 6 | var pk1 = "stringPK"; 7 | var pk2 = "autoIncPK"; 8 | 9 | //创建一个带自增主键的表 10 | function createTableWithAutoIncrementPk() { 11 | var createParams = { 12 | tableMeta: { 13 | tableName: tableName, 14 | primaryKey: [ 15 | { 16 | name: pk1, 17 | type: 'STRING' 18 | }, 19 | { 20 | name: pk2, 21 | type: 'INTEGER', 22 | option: 'AUTO_INCREMENT'//自增列,指定otpion为AUTO_INCREMENT 23 | }, 24 | ] 25 | }, 26 | reservedThroughput: { 27 | capacityUnit: { 28 | read: 0, 29 | write: 0 30 | } 31 | }, 32 | tableOptions: { 33 | timeToLive: -1,// 数据的过期时间, 单位秒, -1代表永不过期. 假如设置过期时间为一年, 即为 365 * 24 * 3600. 34 | maxVersions: 1// 保存的最大版本数, 设置为1即代表每列上最多保存一个版本(保存最新的版本). 
35 | }, 36 | }; 37 | 38 | client.createTable(createParams, function (err, data) { 39 | if (err) { 40 | console.log('error:', err); 41 | return; 42 | } 43 | console.log('create table success'); 44 | }); 45 | } 46 | 47 | //插入数据,自增列的值指定为:TableStore.PK_AUTO_INCR 即可 48 | function putRow() { 49 | var putParams = { 50 | tableName: tableName, 51 | condition: new TableStore.Condition(TableStore.RowExistenceExpectation.IGNORE, null), 52 | primaryKey: [ 53 | { stringPK: 'pk1' }, 54 | { autoIncPK: TableStore.PK_AUTO_INCR } 55 | ], 56 | attributeColumns: [ 57 | { 'col1': 'col1val' } 58 | ], 59 | returnContent: { returnType: TableStore.ReturnType.Primarykey } 60 | }; 61 | 62 | client.putRow(putParams, function (err, data) { 63 | if (err) { 64 | console.log('error:', err); 65 | return; 66 | } 67 | 68 | console.log('put row success,autoIncrement pk value:' + JSON.stringify(data.row.primaryKey)); 69 | }); 70 | 71 | } 72 | 73 | // createTableWithAutoIncrementPk(); 74 | // putRow(); 75 | 76 | -------------------------------------------------------------------------------- /samples/batchGetRow.js: -------------------------------------------------------------------------------- 1 | var client = require('./client'); 2 | var TableStore = require('../index.js'); 3 | var Long = TableStore.Long; 4 | 5 | var params = { 6 | tables: [{ 7 | tableName: 'sampleTable', 8 | primaryKey: [ 9 | [{ 'gid': Long.fromNumber(20013) }, { 'uid': Long.fromNumber(20013) }], 10 | [{ 'gid': Long.fromNumber(20015) }, { 'uid': Long.fromNumber(20015) }] 11 | ], 12 | startColumn: "col2", 13 | endColumn: "col4" 14 | }, 15 | // { 16 | // tableName: 'notExistTable', 17 | // primaryKey: [ 18 | // [{ 'gid': Long.fromNumber(10001) }, { 'uid': Long.fromNumber(10001) }] 19 | // ] 20 | // } 21 | ], 22 | }; 23 | 24 | var maxRetryTimes = 3; 25 | var retryCount = 0; 26 | 27 | function batchGetRow(params) { 28 | client.batchGetRow(params, function (err, data) { 29 | if (err) { 30 | console.log('error:', err); 31 | return; 32 | } 33 | 34 | var isAllSuccess = true; 35 | var retryRequest = { tables: [] }; 36 | for (var i = 0; i < data.tables.length; i++) { 37 | var faildRequest = { tableName: data.tables[i][0].tableName, primaryKey: [] }; 38 | 39 | for (var j = 0; j < data.tables[i].length; j++) { 40 | if (!data.tables[i][j].isOk && null != data.tables[i][j].primaryKey) { 41 | isAllSuccess = false; 42 | var pks = []; 43 | for (var k in data.tables[i][j].primaryKey) { 44 | var name = data.tables[i][j].primaryKey[k].name; 45 | var value = data.tables[i][j].primaryKey[k].value; 46 | var kp = {}; 47 | kp[name] = value; 48 | pks.push(kp); 49 | } 50 | faildRequest.primaryKey.push(pks); 51 | 52 | } else { 53 | // get success data 54 | } 55 | } 56 | 57 | if (faildRequest.primaryKey.length > 0) { 58 | retryRequest.tables.push(faildRequest); 59 | } 60 | } 61 | 62 | if (!isAllSuccess && retryCount++ < maxRetryTimes) { 63 | batchGetRow(retryRequest); 64 | } 65 | 66 | console.log('success:', JSON.stringify(data, null, 4)); 67 | }); 68 | } 69 | 70 | batchGetRow(params, maxRetryTimes); 71 | 72 | 73 | -------------------------------------------------------------------------------- /samples/batchWriteRow.js: -------------------------------------------------------------------------------- 1 | var client = require('./client'); 2 | var TableStore = require('../index.js'); 3 | var Long = TableStore.Long; 4 | 5 | var params = { 6 | 7 | tables: [ 8 | { 9 | tableName: 'sampleTable', 10 | rows: [ 11 | { 12 | type: 'UPDATE', 13 | condition: new 
TableStore.Condition(TableStore.RowExistenceExpectation.IGNORE, null), 14 | primaryKey: [{ 'gid': Long.fromNumber(8) }, { 'uid': Long.fromNumber(80) }], 15 | attributeColumns: [{ 'PUT': [{ 'attrCol1': 'test3' }, { 'attrCol2': 'test4' }] }], 16 | returnContent: { returnType: 1 } 17 | }, 18 | { 19 | type: 'PUT', 20 | condition: new TableStore.Condition(TableStore.RowExistenceExpectation.IGNORE, null), 21 | primaryKey: [{ 'gid': Long.fromNumber(8) }, { 'uid': Long.fromNumber(81) }], 22 | attributeColumns: [{ 'attrCol1': 'test1' }, { 'attrCol2': 'test2' }], 23 | returnContent: { returnType: TableStore.ReturnType.Primarykey } 24 | } 25 | ] 26 | } 27 | ], 28 | }; 29 | 30 | client.batchWriteRow(params, function (err, data) { 31 | 32 | if (err) { 33 | console.log('error:', err); 34 | return; 35 | } 36 | 37 | console.log('success:', data); 38 | }); 39 | -------------------------------------------------------------------------------- /samples/browser/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | tablestore-js-sdk demo 7 | 8 | 9 | 10 | 11 | 32 | 33 | 34 | 35 | -------------------------------------------------------------------------------- /samples/client.js: -------------------------------------------------------------------------------- 1 | var TableStore = require('../index.js'); 2 | 3 | var accessKeyId = process.env.accessKeyId; 4 | var secretAccessKey = process.env.secretAccessKey; 5 | var endpoint = process.env.endpoint; 6 | var instancename = process.env.instancename; 7 | 8 | var client = new TableStore.Client({ 9 | accessKeyId: accessKeyId, 10 | secretAccessKey: secretAccessKey, 11 | endpoint: endpoint, 12 | instancename: instancename 13 | }); 14 | 15 | module.exports = client; 16 | -------------------------------------------------------------------------------- /samples/computeSplits.js: -------------------------------------------------------------------------------- 1 | const client = require('./client'); 2 | 3 | client.computeSplits({ 4 | tableName: "tableName", 5 | searchIndexSplitsOptions: { 6 | indexName: "indexName", 7 | } 8 | }, function (err, data) { 9 | if (err) { 10 | console.log('computeSplits error:', err.toString()); 11 | } else { 12 | console.log('computeSplits success:', data); 13 | } 14 | }) -------------------------------------------------------------------------------- /samples/conditionUpdateRow.js: -------------------------------------------------------------------------------- 1 | var TableStore = require('../index.js'); 2 | var Long = TableStore.Long; 3 | var client = require('./client'); 4 | 5 | var params = { 6 | tableName: "sampleTable", 7 | primaryKey: [{ 'gid': Long.fromNumber(20013) }, { 'uid': Long.fromNumber(20013) }], 8 | updateOfAttributeColumns: [{ 'PUT': [{ 'col1': 'test6' }] }] 9 | }; 10 | 11 | /* 12 | 条件更新使用说明: 13 | 1、期望行是否存在 14 | RowExistenceExpectation.IGNORE 表示不管此行是否已经存在,都会插入新数据,如果之前有会被覆盖。 15 | RowExistenceExpectation.EXPECT_EXIST 表示只有此行存在时,才会插入新数据,此时,原有数据也会被覆盖。 16 | RowExistenceExpectation.EXPECT_NOT_EXIST 表示只有此行不存在时,才会插入数据,否则不执行。 17 | 2、条件组合 18 | TableStore.SingleColumnCondition 只有一个条件的时候使用 19 | TableStore.CompositeCondition 有多个条件的时候使用 20 | */ 21 | 22 | //示例:指定条件 期望行存在,并且name=john,addr=china 23 | var condition = new TableStore.CompositeCondition(TableStore.LogicalOperator.AND); 24 | condition.addSubCondition(new TableStore.SingleColumnCondition('name', 'john', TableStore.ComparatorType.EQUAL)); 25 | condition.addSubCondition(new TableStore.SingleColumnCondition('addr', 'china', 
TableStore.ComparatorType.EQUAL)); 26 | 27 | params.condition = new TableStore.Condition(TableStore.RowExistenceExpectation.EXPECT_EXIST, condition); 28 | 29 | client.updateRow(params, 30 | function (err, data) { 31 | if (err) { 32 | console.log('error:', err); 33 | return; 34 | } 35 | console.log('success:', data); 36 | }); 37 | 38 | -------------------------------------------------------------------------------- /samples/createGlobalIndex.js: -------------------------------------------------------------------------------- 1 | var client = require('./client'); 2 | var TableStore = require('../index.js'); 3 | 4 | client.createIndex({ 5 | mainTableName: "sdkGlobalTest", 6 | includeBaseData: false, 7 | indexMeta: { 8 | name: "noBase", 9 | primaryKey: ["col1"], 10 | definedColumn: ["col2"], 11 | indexUpdateMode: TableStore.IndexUpdateMode.IUM_ASYNC_INDEX,//默认增量 12 | indexType: TableStore.IndexType.IT_GLOBAL_INDEX,//IT_GLOBAL_INDEX 13 | } 14 | }, function (err, data) { 15 | if (err) { 16 | console.log('error:', err); 17 | return; 18 | } 19 | console.log('success:', JSON.stringify(data, null, 2)); 20 | }); 21 | 22 | client.createIndex({ 23 | mainTableName: "sdkGlobalTest", 24 | includeBaseData: true, 25 | indexMeta: { 26 | name: "hasBase", 27 | primaryKey: ["col1"], 28 | definedColumn: ["col2"], 29 | indexUpdateMode: TableStore.IndexUpdateMode.IUM_ASYNC_INDEX,//默认增量 30 | indexType: TableStore.IndexType.IT_GLOBAL_INDEX,//IT_GLOBAL_INDEX 31 | } 32 | }, function (err, data) { 33 | if (err) { 34 | console.log('error:', err); 35 | return; 36 | } 37 | console.log('success:', JSON.stringify(data, null, 2)); 38 | }); 39 | 40 | -------------------------------------------------------------------------------- /samples/createSearchIndex.js: -------------------------------------------------------------------------------- 1 | var client = require('./client'); 2 | var TableStore = require('../index.js'); 3 | 4 | client.createSearchIndex({ 5 | tableName: "nestedTag",// 设置表名 6 | indexName: "testIndex",// 设置索引名 7 | schema: { 8 | fieldSchemas: [ 9 | { 10 | fieldName: "pic_id", 11 | fieldType: TableStore.FieldType.KEYWORD,// 设置字段名、类型 12 | index: true,// 设置开启索引 13 | enableSortAndAgg: true,// 设置开启排序和统计功能 14 | store: false, 15 | isAnArray: false 16 | }, 17 | { 18 | fieldName: "count", 19 | fieldType: TableStore.FieldType.LONG, 20 | index: true, 21 | enableSortAndAgg: true, 22 | store: true, 23 | isAnArray: false 24 | }, 25 | { 26 | fieldName: "time_stamp", 27 | fieldType: TableStore.FieldType.LONG, 28 | index: true, 29 | enableSortAndAgg: false, 30 | store: true, 31 | isAnArray: false, 32 | }, 33 | { 34 | fieldName: "pic_description", 35 | fieldType: TableStore.FieldType.TEXT, 36 | index: true, 37 | enableSortAndAgg: false, 38 | store: true, 39 | isAnArray: false, 40 | }, 41 | { 42 | fieldName: "pos", 43 | fieldType: TableStore.FieldType.GEO_POINT, 44 | index: true, 45 | enableSortAndAgg: true, 46 | store: true, 47 | isAnArray: false, 48 | }, 49 | { 50 | fieldName: "pic_tag", 51 | fieldType: TableStore.FieldType.NESTED, 52 | index: false, 53 | enableSortAndAgg: false, 54 | store: false, 55 | fieldSchemas: [ 56 | { 57 | fieldName: "sub_tag_name", 58 | fieldType: TableStore.FieldType.KEYWORD, 59 | index: true, 60 | enableSortAndAgg: true, 61 | store: false, 62 | }, 63 | { 64 | fieldName: "tag_name", 65 | fieldType: TableStore.FieldType.KEYWORD, 66 | index: true, 67 | enableSortAndAgg: true, 68 | store: false, 69 | } 70 | ] 71 | }, 72 | { 73 | fieldName: "date", 74 | fieldType: TableStore.FieldType.DATE, 75 | index: 
true, 76 | enableSortAndAgg: true, 77 | store: true, 78 | isAnArray: false, 79 | dateFormats: ["yyyy-MM-dd'T'HH:mm:ss.SSSSSS"], 80 | }, 81 | { 82 | fieldName: "analyzer_single_word", 83 | fieldType: TableStore.FieldType.TEXT, 84 | analyzer: "single_word", 85 | index: true, 86 | enableSortAndAgg: false, 87 | store: true, 88 | isAnArray: false, 89 | analyzerParameter: { 90 | caseSensitive: true, 91 | delimitWord: false, 92 | } 93 | }, 94 | { 95 | fieldName: "analyzer_split", 96 | fieldType: TableStore.FieldType.TEXT, 97 | analyzer: "split", 98 | index: true, 99 | enableSortAndAgg: false, 100 | store: true, 101 | isAnArray: false, 102 | analyzerParameter: { 103 | delimiter: ",", 104 | } 105 | }, 106 | { 107 | fieldName: "analyzer_fuzzy", 108 | fieldType: TableStore.FieldType.TEXT, 109 | analyzer: "fuzzy", 110 | index: true, 111 | enableSortAndAgg: false, 112 | store: true, 113 | isAnArray: false, 114 | analyzerParameter: { 115 | minChars: 1, 116 | maxChars: 5, 117 | } 118 | }, 119 | ], 120 | indexSetting: {//optional 121 | "routingFields": ["count", "pic_id"],//仅支持主键 122 | "routingPartitionSize": null 123 | }, 124 | // indexSort: {//不支持含含NESTED的索引 125 | // sorters: [ 126 | // // {//不设置时默认PrimaryKeySort(正序) 127 | // // primaryKeySort: { 128 | // // order: TableStore.SortOrder.SORT_ORDER_DESC 129 | // // } 130 | // // }, 131 | // { 132 | // fieldSort: { 133 | // fieldName: "pic_id", 134 | // order: TableStore.SortOrder.SORT_ORDER_DESC 135 | // } 136 | // } 137 | // ] 138 | // } 139 | } 140 | }, function (err, data) { 141 | if (err) { 142 | console.log('error:', err); 143 | return; 144 | } 145 | console.log('success:', JSON.stringify(data, null, 2)); 146 | }); 147 | 148 | -------------------------------------------------------------------------------- /samples/createTable.js: -------------------------------------------------------------------------------- 1 | var client = require('./client'); 2 | 3 | var params = { 4 | tableMeta: { 5 | tableName: 'sampleTable', 6 | primaryKey: [ 7 | { 8 | name: 'gid', 9 | type: 'INTEGER' 10 | }, 11 | { 12 | name: 'uid', 13 | type: 'INTEGER' 14 | } 15 | ] 16 | }, 17 | reservedThroughput: { 18 | capacityUnit: { 19 | read: 0, 20 | write: 0 21 | } 22 | }, 23 | tableOptions: { 24 | timeToLive: -1,// 数据的过期时间, 单位秒, -1代表永不过期. 假如设置过期时间为一年, 即为 365 * 24 * 3600. 25 | maxVersions: 1// 保存的最大版本数, 设置为1即代表每列上最多保存一个版本(保存最新的版本). 
26 | }, 27 | streamSpecification: { 28 | enableStream: true, //开启Stream 29 | expirationTime: 24 //Stream的过期时间,单位是小时,最长为168,设置完以后不能修改 30 | } 31 | }; 32 | 33 | client.createTable(params, function (err, data) { 34 | if (err) { 35 | console.log('error:', err); 36 | return; 37 | } 38 | console.log('success:', data); 39 | }); 40 | 41 | -------------------------------------------------------------------------------- /samples/createTableWithGlobalIndex.js: -------------------------------------------------------------------------------- 1 | var client = require('./client'); 2 | var TableStore = require('../index.js'); 3 | 4 | var params = { 5 | tableMeta: { 6 | tableName: 'sdkGlobalTest', 7 | primaryKey: [ 8 | { 9 | name: 'pk1', 10 | type: TableStore.PrimaryKeyType.INTEGER 11 | }, 12 | { 13 | name: 'pk2', 14 | type: TableStore.PrimaryKeyType.INTEGER 15 | } 16 | ], 17 | definedColumn: [ 18 | { 19 | "name": "col1", 20 | "type": TableStore.DefinedColumnType.DCT_INTEGER 21 | }, 22 | { 23 | "name": "col2", 24 | "type": TableStore.DefinedColumnType.DCT_INTEGER 25 | } 26 | ], 27 | }, 28 | reservedThroughput: { 29 | capacityUnit: { 30 | read: 0, 31 | write: 0 32 | } 33 | }, 34 | tableOptions: { 35 | timeToLive: -1,// 数据的过期时间, 单位秒, -1代表永不过期. 假如设置过期时间为一年, 即为 365 * 24 * 3600. 36 | maxVersions: 1// 保存的最大版本数, 设置为1即代表每列上最多保存一个版本(保存最新的版本). 37 | }, 38 | streamSpecification: { 39 | enableStream: false, //globalIndex不支持开启Stream 40 | }, 41 | indexMetas: [ 42 | { 43 | name: "sdkIndex1", 44 | primaryKey: ["pk2"], 45 | definedColumn: ["col1", "col2"] 46 | }, 47 | { 48 | name: "sdkIndex2", 49 | primaryKey: ["col1"], 50 | definedColumn: ["col2"] 51 | } 52 | ] 53 | }; 54 | 55 | client.createTable(params, function (err, data) { 56 | if (err) { 57 | console.log('error:', err); 58 | return; 59 | } 60 | console.log('success:', data); 61 | }); 62 | 63 | -------------------------------------------------------------------------------- /samples/deleteGlobalIndex.js: -------------------------------------------------------------------------------- 1 | var client = require('./client'); 2 | 3 | client.dropIndex({ 4 | mainTableName: "sdkGlobalTest", 5 | indexName: "sdkIndex1" 6 | }, function (err, data) { 7 | if (err) { 8 | console.log('error:', err); 9 | return; 10 | } 11 | console.log('success:', JSON.stringify(data, null, 2)); 12 | }); 13 | 14 | -------------------------------------------------------------------------------- /samples/deleteRow.js: -------------------------------------------------------------------------------- 1 | var TableStore = require('../index.js'); 2 | var Long = TableStore.Long; 3 | var client = require('./client'); 4 | 5 | var params = { 6 | tableName: "sampleTable", 7 | condition: new TableStore.Condition(TableStore.RowExistenceExpectation.IGNORE, null), 8 | primaryKey: [{ 'gid': Long.fromNumber(20013) }, { 'uid': Long.fromNumber(20013) }] 9 | }; 10 | 11 | client.deleteRow(params, function (err, data) { 12 | if (err) { 13 | console.log('error:', err); 14 | return; 15 | } 16 | 17 | console.log('success:', data); 18 | }); 19 | -------------------------------------------------------------------------------- /samples/deleteSearchIndex.js: -------------------------------------------------------------------------------- 1 | var client = require('./client'); 2 | 3 | client.deleteSearchIndex({ 4 | tableName: "nestedTag", 5 | indexName: "testIndex" 6 | }, function (err, data) { 7 | if (err) { 8 | console.log('error:', err); 9 | return; 10 | } 11 | console.log('success:', data); 12 | }); 13 | 14 | 
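The callback-style samples in this directory all follow the standard Node.js (err, data) convention, so they can also be awaited without the hand-rolled Promise wrappers used in samples/domainError.js and the samples-async(node6) directory. A minimal sketch, assuming only that convention; the wrapper names and the 'sampleTable' value are illustrative, not part of the SDK:

var util = require('util');
var client = require('./client');

// util.promisify turns client.method(params, callback) into a Promise-returning call;
// bind(client) keeps the client object as the receiver inside the SDK method.
var listTableAsync = util.promisify(client.listTable).bind(client);
var describeTableAsync = util.promisify(client.describeTable).bind(client);

(async function () {
    try {
        console.log('tables:', await listTableAsync({}));
        console.log('meta:', JSON.stringify(await describeTableAsync({ tableName: 'sampleTable' }), null, 2));
    } catch (err) {
        console.log('error:', err);
    }
})();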
-------------------------------------------------------------------------------- /samples/deleteTable.js: -------------------------------------------------------------------------------- 1 | var client = require('./client'); 2 | 3 | var params = { 4 | tableName: 'nestedTag' 5 | }; 6 | 7 | client.deleteTable(params, function (err, data) { 8 | if (err) { 9 | console.log('error:', err); 10 | return; 11 | } 12 | console.log('success:', data); 13 | }); 14 | 15 | -------------------------------------------------------------------------------- /samples/describeSearchIndex.js: -------------------------------------------------------------------------------- 1 | var client = require('./client'); 2 | 3 | client.describeSearchIndex({ 4 | tableName: "nestedTag", 5 | indexName: "testIndex", 6 | }, function (err, data) { 7 | if (err) { 8 | console.log('error:', err); 9 | return; 10 | } 11 | console.log('success:', JSON.stringify(data, null, 2)); 12 | }); 13 | 14 | -------------------------------------------------------------------------------- /samples/describeTable.js: -------------------------------------------------------------------------------- 1 | var client = require('./client'); 2 | 3 | var params = { 4 | tableName: 'sampleTable' 5 | }; 6 | 7 | client.describeTable(params, function (err, data) { 8 | if (err) { 9 | console.log('error:', err); 10 | return; 11 | } 12 | console.log('success:', data); 13 | }); 14 | 15 | -------------------------------------------------------------------------------- /samples/domainError.js: -------------------------------------------------------------------------------- 1 | const client = require('./client'); 2 | 3 | const listTablePromise = (params) => { 4 | return new Promise((resolve, reject) => { 5 | client.listTable(params, (err, data) => { 6 | if (err) return reject(err); 7 | resolve(data); 8 | }); 9 | }); 10 | }; 11 | 12 | async function run() { 13 | try { 14 | const data = await listTablePromise({}); 15 | console.log('Data:', data); 16 | throw new Error('test'); 17 | } catch (err) { 18 | console.log('error:', err); 19 | console.log('Exit your application!'); 20 | process.exit(1); 21 | } 22 | } 23 | 24 | process.on('unhandledRejection', (err) => { 25 | console.error('Unhandled Promise Rejection:', err); 26 | process.exit(1); 27 | }); 28 | 29 | process.on('uncaughtException', (err) => { 30 | console.error('Uncaught Exception:', err); 31 | process.exit(1); 32 | }); 33 | 34 | run(); -------------------------------------------------------------------------------- /samples/getRange.js: -------------------------------------------------------------------------------- 1 | var TableStore = require('../index.js'); 2 | var Long = TableStore.Long; 3 | var client = require('./client'); 4 | 5 | var params = { 6 | tableName: "sampleTable", 7 | direction: TableStore.Direction.FORWARD, 8 | maxVersions: 10, 9 | inclusiveStartPrimaryKey: [{ "gid": TableStore.INF_MIN }, { "uid": TableStore.INF_MIN }], 10 | exclusiveEndPrimaryKey: [{ "gid": TableStore.INF_MAX }, { "uid": TableStore.INF_MAX }], 11 | limit: 2 12 | }; 13 | 14 | var resultRows = [] 15 | 16 | var getRange = function () { 17 | client.getRange(params, function (err, data) { 18 | if (err) { 19 | console.log('error:', err); 20 | return; 21 | } 22 | console.log(JSON.stringify(data, null, 4)); 23 | resultRows = resultRows.concat(data.rows) 24 | 25 | //如果data.next_start_primary_key不为空,说明需要继续读取 26 | if (data.nextStartPrimaryKey) { 27 | params.inclusiveStartPrimaryKey = [ 28 | { "gid": data.nextStartPrimaryKey[0].value }, 29 | { 
"uid": data.nextStartPrimaryKey[1].value } 30 | ]; 31 | getRange() 32 | } else { 33 | console.log(JSON.stringify(resultRows)); 34 | } 35 | }); 36 | } 37 | 38 | getRange() 39 | -------------------------------------------------------------------------------- /samples/getRow.js: -------------------------------------------------------------------------------- 1 | var TableStore = require('../index.js'); 2 | var Long = TableStore.Long; 3 | var client = require('./client'); 4 | 5 | //公用参数 6 | var params = { 7 | tableName: "sampleTable", 8 | primaryKey: [{ 'gid': Long.fromNumber(20013) }, { 'uid': Long.fromNumber(20013) }], 9 | }; 10 | 11 | 12 | //示例1:读取一行,设置读取最新版本,设置ColumsToGet 13 | function getRowSample1() { 14 | //设置读取最新版本,默认为1 15 | params.maxVersions = 1; 16 | //设置读取指定的列 17 | params.columnsToGet = ["col1", "col2"]; 18 | client.getRow(params, function (err, data) { 19 | if (err) { 20 | console.log('error:', err); 21 | return; 22 | } 23 | console.log('success:', data); 24 | }); 25 | } 26 | 27 | //示例2:设置过滤器 28 | /* 29 | TableStore.LogicalOperator支持的操作包括:AND、NOT、OR 30 | TableStore.ComparatorType支持的操作包括:EQUAL、NOT_EQUAL、GREATER_THAN、GREATER_EQUAL、LESS_THAN、LESS_EQUAL 31 | */ 32 | function getRowSample2() { 33 | //设置过滤器,当name = john 而且 addr = china 时返回该行 34 | var condition = new TableStore.CompositeCondition(TableStore.LogicalOperator.AND); 35 | condition.addSubCondition(new TableStore.SingleColumnCondition('col1', '表格存储', TableStore.ComparatorType.EQUAL)); 36 | condition.addSubCondition(new TableStore.SingleColumnCondition('col5', Long.fromNumber(123456789), TableStore.ComparatorType.EQUAL)); 37 | 38 | params.columnFilter = condition; 39 | 40 | //设置按列进行翻页,用于读取宽行,两个参数为:limit,offset 41 | //params.columnFilter = new TableStore.ColumnPaginationFilter(2, 0); 42 | client.getRow(params, function (err, data) { 43 | if (err) { 44 | console.log('error:', err); 45 | return; 46 | } 47 | console.log('success:', data); 48 | }); 49 | } 50 | 51 | getRowSample2(); 52 | -------------------------------------------------------------------------------- /samples/getRowByGlobalIndex.js: -------------------------------------------------------------------------------- 1 | var TableStore = require('../index.js'); 2 | var Long = TableStore.Long; 3 | var client = require('./client'); 4 | 5 | var params = { 6 | tableName: "index1", 7 | primaryKey: [ {'pk2': Long.fromNumber(2)}, {'pk1': Long.fromNumber(1)}] 8 | }; 9 | 10 | client.getRow(params, function (err, data) { 11 | if (err) { 12 | console.log('error:', err); 13 | return; 14 | } 15 | console.log('success:', JSON.stringify(data, null, 2)); 16 | }); 17 | -------------------------------------------------------------------------------- /samples/increment.js: -------------------------------------------------------------------------------- 1 | var TableStore = require('../index.js'); 2 | var Long = TableStore.Long; 3 | var client = require('./client'); 4 | 5 | var params = { 6 | tableName: "orderHistory", 7 | condition: new TableStore.Condition(TableStore.RowExistenceExpectation.EXPECT_EXIST, null), 8 | primaryKey: [{ 'order_id': "order_id_001" }], 9 | updateOfAttributeColumns: [ 10 | { 'INCREMENT': [{ 'increment': Long.fromNumber(1)}] } 11 | ], 12 | returnContent: { 13 | returnColumns: ["increment"], 14 | returnType: TableStore.ReturnType.AfterModify 15 | } 16 | }; 17 | client.updateRow(params, 18 | function (err, data) { 19 | if (err) { 20 | console.log('error:', err); 21 | return; 22 | } 23 | 24 | console.log('success:', JSON.stringify(data, null, 2)); 25 | }); 26 | 27 | 28 
| var batchParams = { 29 | tables: [ 30 | { 31 | tableName: 'orderHistory', 32 | rows: [ 33 | { 34 | type: 'UPDATE', 35 | condition: new TableStore.Condition(TableStore.RowExistenceExpectation.EXPECT_EXIST, null), 36 | primaryKey: [{ 'order_id': "order_id_001" }], 37 | attributeColumns: [{ 'INCREMENT': [{ 'increment': Long.fromNumber(1)}] }], 38 | returnContent: { 39 | returnColumns: ["increment"], 40 | returnType: TableStore.ReturnType.AfterModify 41 | } 42 | } 43 | ] 44 | }, 45 | ], 46 | }; 47 | 48 | client.batchWriteRow(batchParams, 49 | function (err, data) { 50 | if (err) { 51 | console.log('error:', err); 52 | return; 53 | } 54 | 55 | console.log('success:', JSON.stringify(data, null, 2)); 56 | }); 57 | -------------------------------------------------------------------------------- /samples/listSearchIndex.js: -------------------------------------------------------------------------------- 1 | var client = require('./client'); 2 | 3 | client.listSearchIndex({ 4 | tableName: "nestedTag" 5 | }, function (err, data) { 6 | if (err) { 7 | console.log('error:', err); 8 | return; 9 | } 10 | console.log('success:', JSON.stringify(data, null, 2)); 11 | }); 12 | 13 | -------------------------------------------------------------------------------- /samples/listTable.js: -------------------------------------------------------------------------------- 1 | var client = require('./client'); 2 | 3 | client.listTable({}, function (err, data) { 4 | if (err) { 5 | console.log('error:', err); 6 | return; 7 | } 8 | console.log('success:', data); 9 | }); 10 | 11 | -------------------------------------------------------------------------------- /samples/multiVersion.js: -------------------------------------------------------------------------------- 1 | var TableStore = require('../index.js'); 2 | var Long = TableStore.Long; 3 | var client = require('./client'); 4 | 5 | var tableName = 'maxVersionsTestTable'; 6 | var primaryKey = [{ 'pk1': 'pk1val' }, { 'pk2': 'pk2val' }]; 7 | 8 | var getRow = function () { 9 | var getRowParams = { 10 | tableName: tableName, 11 | primaryKey: primaryKey, 12 | timeRange: { 13 | startTime: '0', 14 | endTime: new Date().getTime().toString() 15 | }, 16 | maxVersions: 10 17 | }; 18 | client.getRow(getRowParams, function (err, data) { 19 | 20 | }); 21 | }; 22 | 23 | var batchWriteRow = function () { 24 | var params = { 25 | tables: [{ 26 | tableName: tableName, 27 | rows: [], 28 | }], 29 | }; 30 | 31 | for (var i = 0; i < 10; i++) { 32 | params.tables[0].rows.push({ 33 | type: 'UPDATE', 34 | condition: new TableStore.Condition(TableStore.RowExistenceExpectation.IGNORE, null), 35 | primaryKey: primaryKey, 36 | attributeColumns: [{ "PUT": [{ 'multiVersionCol': '第' + i + '次更新', 'timestamp': Long.fromNumber(new Date().getTime() + i) }] }] 37 | }); 38 | } 39 | client.batchWriteRow(params, function (err, data) { 40 | if (err) { 41 | console.log('error:', err); 42 | return; 43 | } 44 | 45 | getRow(); 46 | }); 47 | 48 | }; 49 | 50 | var putRow = function () { 51 | var putRowParams = { 52 | tableName: tableName, 53 | condition: new TableStore.Condition(TableStore.RowExistenceExpectation.IGNORE, null), 54 | primaryKey: primaryKey, 55 | attributeColumns: [{ 'multiVersionCol': '插入原始数据' }] 56 | }; 57 | 58 | client.putRow(putRowParams, function (err, data) { 59 | if (err) { 60 | console.log('error:', err); 61 | return; 62 | } 63 | batchWriteRow(); 64 | }); 65 | }; 66 | 67 | var createTable = function () { 68 | var params = { 69 | tableMeta: { 70 | tableName: tableName, 71 | primaryKey: [{ 
name: 'pk1', type: 'STRING' }, { name: 'pk2', type: 'STRING' }] 72 | }, 73 | reservedThroughput: { 74 | capacityUnit: { read: 0, write: 0 } 75 | }, 76 | tableOptions: { 77 | timeToLive: -1,// 数据的过期时间, 单位秒, -1代表永不过期. 假如设置过期时间为一年, 即为 365 * 24 * 3600. 78 | maxVersions: 10// 保存的最大版本数, 设置为1即代表每列上最多保存一个版本(保存最新的版本). 79 | } 80 | }; 81 | 82 | client.createTable(params, function (err, data) { 83 | putRow(); 84 | }); 85 | 86 | }; 87 | 88 | //createTable(); 89 | getRow(); 90 | 91 | 92 | -------------------------------------------------------------------------------- /samples/parallelScan.js: -------------------------------------------------------------------------------- 1 | const client = require('./client'); 2 | const TableStore = require("../index"); 3 | 4 | 5 | // 1. 获取sessionId 6 | let computeSplits = await new Promise((resolve, reject) => { 7 | client.computeSplits({ 8 | tableName: tableName, 9 | searchIndexSplitsOptions: { 10 | indexName: indexName, 11 | } 12 | }, function (err, data) { 13 | if (err) { 14 | console.log('computeSplits error:', err.toString()); 15 | reject(err); 16 | } else { 17 | console.log('computeSplits success:', data); 18 | resolve(data) 19 | } 20 | }) 21 | }) 22 | 23 | // 2. 构造query 24 | const scanQuery = { 25 | query: { 26 | queryType: TableStore.QueryType.MATCH_ALL_QUERY, 27 | }, 28 | limit: 1000, 29 | aliveTime: 30, 30 | token: undefined, 31 | currentParallelId: 0, 32 | maxParallel: 1, 33 | } 34 | 35 | // 3. 构造ParallelScan请求(该示例为了方便用同步请求进行展示,业务可改为异步) 36 | const parallelScanPromise = function () { 37 | return new Promise(function (resolve, reject) { 38 | client.parallelScan({ 39 | tableName: tableName, 40 | indexName: indexName, 41 | columnToGet: { 42 | returnType: TableStore.ColumnReturnType.RETURN_ALL_FROM_INDEX, 43 | }, 44 | sessionId: computeSplits.sessionId, 45 | scanQuery: scanQuery, 46 | }, function (err, data) { 47 | if (err) { 48 | console.log('parallelScan error:', err.toString()); 49 | reject(err); 50 | } else { 51 | console.log("parallelScan, rows:", data.rows.length) 52 | resolve(data) 53 | } 54 | }); 55 | }) 56 | } 57 | let totalCount = 0 58 | let parallelScanResponse = await parallelScanPromise() 59 | totalCount = totalCount + parallelScanResponse.rows.length 60 | // 4. 
迭代拉取数据,直到拉取所有数据结束 61 | while (parallelScanResponse.nextToken !== null && parallelScanResponse.nextToken.length > 0) { 62 | scanQuery.token = parallelScanResponse.nextToken 63 | parallelScanResponse = await parallelScanPromise() 64 | totalCount += parallelScanResponse.rows.length 65 | } 66 | console.log("total rows:", totalCount) -------------------------------------------------------------------------------- /samples/primarykey.js: -------------------------------------------------------------------------------- 1 | var TableStore = require('../index.js'); 2 | var Long = TableStore.Long; 3 | var client = require('./client'); 4 | 5 | function getRow(pks) { 6 | 7 | var params = { 8 | tableName: "autoIncTable", 9 | primaryKey: [ 10 | { 'stringPK': pks[0]['value'] }, 11 | { 'integerPK': pks[1]['value'] }, 12 | { 'binaryPK': pks[2]['value'] }, 13 | { 'autoIncPK': pks[3]['value'] }], 14 | }; 15 | 16 | client.getRow(params, function (err, data) { 17 | if (err) { 18 | console.log('error:', err); 19 | return; 20 | } 21 | console.log('get row success:', data); 22 | }); 23 | } 24 | 25 | function putRow() { 26 | var putParams = { 27 | tableName: "autoIncTable", 28 | condition: new TableStore.Condition(TableStore.RowExistenceExpectation.IGNORE, null), 29 | primaryKey: [ 30 | { 'stringPK': 'pk1' }, 31 | { 'integerPK': Long.fromNumber(1) }, 32 | { 'binaryPK': new Buffer('test') }, 33 | { 'autoIncPK': TableStore.PK_AUTO_INCR } 34 | ], 35 | attributeColumns: [ 36 | { 'col1': 'col1val' } 37 | ], 38 | returnContent: { returnType: TableStore.ReturnType.Primarykey } 39 | }; 40 | 41 | client.putRow(putParams, function (err, data) { 42 | if (err) { 43 | console.log('error:', err); 44 | return; 45 | } 46 | 47 | console.log('put row success'); 48 | 49 | getRow(data.row.primaryKey) 50 | }); 51 | 52 | } 53 | 54 | 55 | function createTable() { 56 | var createParams = { 57 | tableMeta: { 58 | tableName: 'autoIncTable', 59 | primaryKey: [ 60 | { 61 | name: 'stringPK', 62 | type: 'STRING' 63 | }, 64 | { 65 | name: 'integerPK', 66 | type: 'INTEGER' 67 | }, 68 | { 69 | name: 'binaryPK', 70 | type: 'BINARY' 71 | }, 72 | { 73 | name: 'autoIncPK', 74 | type: 'INTEGER', 75 | option: 'AUTO_INCREMENT'//自增列,指定otpion为AUTO_INCREMENT 76 | }, 77 | ] 78 | }, 79 | reservedThroughput: { 80 | capacityUnit: { 81 | read: 0, 82 | write: 0 83 | } 84 | }, 85 | tableOptions: { 86 | timeToLive: -1,// 数据的过期时间, 单位秒, -1代表永不过期. 假如设置过期时间为一年, 即为 365 * 24 * 3600. 87 | maxVersions: 1// 保存的最大版本数, 设置为1即代表每列上最多保存一个版本(保存最新的版本). 
88 | } 89 | }; 90 | 91 | client.createTable(createParams, function (err, data) { 92 | 93 | if (err) { 94 | console.log('error:', err); 95 | return; 96 | } 97 | console.log('create table success'); 98 | putRow() 99 | }); 100 | } 101 | 102 | createTable() 103 | 104 | 105 | 106 | -------------------------------------------------------------------------------- /samples/putRow.js: -------------------------------------------------------------------------------- 1 | var TableStore = require('../index.js'); 2 | var Long = TableStore.Long; 3 | var client = require('./client'); 4 | 5 | var currentTimeStamp = Date.now(); 6 | var params = { 7 | tableName: "sampleTable", 8 | //不管此行是否已经存在,都会插入新数据,如果之前有会被覆盖。condition的详细使用说明,请参考conditionUpdateRow.js 9 | condition: new TableStore.Condition(TableStore.RowExistenceExpectation.IGNORE, null), 10 | primaryKey: [{ 'gid': Long.fromNumber(20013) }, { 'uid': Long.fromNumber(20013) }], 11 | attributeColumns: [ 12 | { 'col1': '表格存储' }, 13 | //客户端可以自己指定版本号(时间戳) 14 | { 'col2': '2', 'timestamp': currentTimeStamp }, 15 | { 'col3': 3.1 }, 16 | { 'col4': -0.32 }, 17 | { 'col5': Long.fromNumber(123456789) } 18 | ], 19 | returnContent: { returnType: TableStore.ReturnType.Primarykey } 20 | }; 21 | 22 | client.putRow(params, function (err, data) { 23 | if (err) { 24 | console.log('error:', err); 25 | return; 26 | } 27 | 28 | console.log('success:', data); 29 | }); 30 | -------------------------------------------------------------------------------- /samples/search.js: -------------------------------------------------------------------------------- 1 | var client = require('./client'); 2 | var TableStore = require('../index.js'); 3 | var Long = TableStore.Long; 4 | 5 | 6 | var testQueryMap = { 7 | MATCH_QUERY: {//1 8 | queryType: TableStore.QueryType.MATCH_QUERY, 9 | query: { 10 | fieldName: "pic_id", 11 | text: "pic_id_5" 12 | } 13 | }, 14 | MATCH_QUERY_OR: {//1 15 | queryType: TableStore.QueryType.MATCH_QUERY, 16 | query: { 17 | fieldName: "pic_description", 18 | text: "some info", 19 | minimumShouldMatch: 2, 20 | operator: TableStore.QueryOperator.OR 21 | } 22 | }, 23 | MATCH_PHRASE_QUERY: {//2 24 | queryType: TableStore.QueryType.MATCH_PHRASE_QUERY, 25 | query: { 26 | fieldName: "pic_id", 27 | text: "pic_id_5" 28 | } 29 | }, 30 | TERM_QUERY: {//3 31 | queryType: TableStore.QueryType.TERM_QUERY, 32 | query: { 33 | fieldName: "pic_id", 34 | term: "pic_id_5" 35 | } 36 | }, 37 | RANGE_QUERY: {//4 38 | queryType: TableStore.QueryType.RANGE_QUERY, 39 | query: { 40 | fieldName: "pic_id", 41 | rangeFrom: "pic_id_10", 42 | includeLower: true, 43 | rangeTo: "pic_id_11", 44 | includeUpper: true, 45 | } 46 | }, 47 | PREFIX_QUERY: {//5 48 | queryType: TableStore.QueryType.PREFIX_QUERY, 49 | query: { 50 | fieldName: "pic_id", 51 | prefix: "pic_id_2" 52 | } 53 | }, 54 | BOOL_QUERY: {//6 55 | queryType: TableStore.QueryType.BOOL_QUERY, 56 | query: { 57 | mustQueries: [ 58 | { 59 | queryType: TableStore.QueryType.PREFIX_QUERY, 60 | query: { 61 | fieldName: "pic_id", 62 | prefix: "pic_id_" 63 | } 64 | }, 65 | { 66 | queryType: TableStore.QueryType.RANGE_QUERY, 67 | query: { 68 | fieldName: "pic_id", 69 | rangeFrom: "pic_id_0", 70 | includeLower: true, 71 | rangeTo: "pic_id_20", 72 | includeUpper: true, 73 | } 74 | } 75 | ], 76 | mustNotQueries: [ 77 | { 78 | queryType: TableStore.QueryType.PREFIX_QUERY, 79 | query: { 80 | fieldName: "pic_id", 81 | prefix: "pic_id_24" 82 | } 83 | }, 84 | { 85 | queryType: TableStore.QueryType.RANGE_QUERY, 86 | query: { 87 | fieldName: "pic_id", 88 | 
rangeFrom: "pic_id_10", 89 | includeLower: true, 90 | // rangeTo: "pic_id_20", 91 | // includeUpper: true, 92 | } 93 | }, 94 | ], 95 | filterQueries: [ 96 | { 97 | queryType: TableStore.QueryType.PREFIX_QUERY, 98 | query: { 99 | fieldName: "pic_id", 100 | prefix: "pic_id_" 101 | } 102 | }, 103 | { 104 | queryType: TableStore.QueryType.RANGE_QUERY, 105 | query: { 106 | fieldName: "pic_id", 107 | rangeFrom: "pic_id_0", 108 | includeLower: true, 109 | rangeTo: "pic_id_20", 110 | includeUpper: true, 111 | } 112 | } 113 | ], 114 | shouldQueries: [ 115 | { 116 | queryType: TableStore.QueryType.PREFIX_QUERY, 117 | query: { 118 | fieldName: "pic_id", 119 | prefix: "pic_id_2" 120 | } 121 | }, 122 | { 123 | queryType: TableStore.QueryType.RANGE_QUERY, 124 | query: { 125 | fieldName: "pic_id", 126 | rangeFrom: "pic_id_18", 127 | includeLower: true, 128 | rangeTo: "pic_id_20", 129 | includeUpper: true, 130 | } 131 | } 132 | ], 133 | minimumShouldMatch: 0 134 | } 135 | 136 | }, 137 | CONST_SCORE_QUERY: {//7 138 | queryType: TableStore.QueryType.CONST_SCORE_QUERY, 139 | query: { 140 | filter: { 141 | queryType: TableStore.QueryType.PREFIX_QUERY, 142 | query: { 143 | fieldName: "pic_id", 144 | prefix: "pic_id_21" 145 | } 146 | } 147 | } 148 | }, 149 | FUNCTION_SCORE_QUERY: {//8 150 | queryType: TableStore.QueryType.FUNCTION_SCORE_QUERY, 151 | query: { 152 | query: { 153 | queryType: TableStore.QueryType.PREFIX_QUERY, 154 | query: { 155 | fieldName: "pic_id", 156 | prefix: "pic_id_" 157 | } 158 | }, 159 | fieldValueFactor: { 160 | fieldName: "time_stamp"//数值字段 161 | } 162 | } 163 | }, 164 | NESTED_QUERY: {//9 165 | queryType: TableStore.QueryType.NESTED_QUERY, 166 | query: { 167 | path: "pic_tag", 168 | query: { 169 | queryType: TableStore.QueryType.MATCH_ALL_QUERY, 170 | query: { 171 | fieldName: "pic_tag.tag_name", 172 | term: "车" 173 | } 174 | }, 175 | } 176 | }, 177 | WILDCARD_QUERY: {//10 178 | queryType: TableStore.QueryType.WILDCARD_QUERY, 179 | query: { 180 | fieldName: "pic_id", 181 | value: "pic_id_*1" 182 | } 183 | }, 184 | MATCH_ALL_QUERY: {//11 185 | queryType: TableStore.QueryType.MATCH_ALL_QUERY, 186 | }, 187 | GEO_BOUNDING_BOX_QUERY: {//12 188 | queryType: TableStore.QueryType.GEO_BOUNDING_BOX_QUERY, 189 | query: { 190 | fieldName: "pos", 191 | topLeft: "1,0", // 设置矩形左上角(纬度,经度) 192 | bottomRight: "0,1" // 设置矩形右下角(纬度,经度) 193 | } 194 | }, 195 | GEO_DISTANCE_QUERY: {//13 196 | queryType: TableStore.QueryType.GEO_DISTANCE_QUERY, 197 | query: { 198 | fieldName: "pos", 199 | centerPoint: "1,1",// 设置中心点 200 | distance: 200000//单位米 201 | } 202 | }, 203 | GEO_POLYGON_QUERY: {//14 204 | queryType: TableStore.QueryType.GEO_POLYGON_QUERY, 205 | query: { 206 | fieldName: "pos", 207 | points: ["0,0", "0,1", "-1,-1", "1,0"] 208 | } 209 | }, 210 | TERMS_QUERY: {//15 211 | queryType: TableStore.QueryType.TERMS_QUERY, 212 | query: { 213 | fieldName: "pic_id", 214 | terms: ["pic_id_1", "pic_id_5"] 215 | } 216 | }, 217 | EXISTS_QUERY: {//16 218 | queryType: TableStore.QueryType.EXISTS_QUERY, 219 | query: { 220 | fieldName: "pic_id" 221 | } 222 | } 223 | }; 224 | 225 | client.search({ 226 | tableName: "nestedTag", 227 | indexName: "testIndex", 228 | searchQuery: { 229 | offset: 0, 230 | limit: 10, 231 | query: testQueryMap.NESTED_QUERY, 232 | getTotalCount: true, 233 | sort: { 234 | sorters: [ 235 | { 236 | fieldSort: { 237 | fieldName: "count", 238 | order: TableStore.SortOrder.SORT_ORDER_DESC, 239 | // mode: TableStore.SortMode.SORT_MODE_AVG,//for nested 240 | // nestedFilter: { 241 | // path: "pic_tag", 242 
| // filter: { 243 | // queryType: TableStore.QueryType.MATCH_ALL_QUERY, 244 | // } 245 | // }, 246 | 247 | }, 248 | // scoreSort: { 249 | // order: TableStore.SortOrder.SORT_ORDER_ASC 250 | // }, 251 | // geoDistanceSort: { 252 | // fieldName: "pos", 253 | // points: ["0,0"], 254 | // order: TableStore.SortOrder.SORT_ORDER_ASC, 255 | // distanceType: TableStore.GeoDistanceType.GEO_DISTANCE_ARC, 256 | // // mode: TableStore.SortMode.SORT_MODE_MIN, 257 | // // nestedFilter: { 258 | // // path: "pos", 259 | // // filter: { 260 | // // queryType: TableStore.QueryType.MATCH_ALL_QUERY, 261 | // // } 262 | // // }, 263 | // 264 | // } 265 | } 266 | ] 267 | }, 268 | // collapse: { 269 | // fieldName: "col_keyword", 270 | // }, 271 | }, 272 | columnToGet: { 273 | returnType: TableStore.ColumnReturnType.RETURN_NONE, 274 | returnNames: ["pic_tag", "pic_description", "time_stamp", "pos"] 275 | }, 276 | routingValues: [ 277 | [{count: Long.fromNumber(0), pic_id: "pic_id_0"}],//pk顺序与创建index时routingFields一致 278 | [{count: Long.fromNumber(3), pic_id: "pic_id_3"}], 279 | ], 280 | timeoutMs: 300000, 281 | }, function (err, data) { 282 | if (err) { 283 | console.log('error:', err); 284 | return; 285 | } 286 | console.log('success:', JSON.stringify(data, null, 2)); 287 | }); 288 | 289 | -------------------------------------------------------------------------------- /samples/searchPaginateByToken.js: -------------------------------------------------------------------------------- 1 | var client = require('./client'); 2 | var TableStore = require('../index.js'); 3 | 4 | 5 | client.search({ 6 | tableName: "nestedTag", 7 | indexName: "testIndex", 8 | searchQuery: { 9 | offset: 0, 10 | limit: 1, 11 | query: { 12 | queryType: TableStore.QueryType.MATCH_ALL_QUERY 13 | }, 14 | sort: { 15 | sorters: [ 16 | { 17 | fieldSort: { 18 | fieldName: "pic_id", 19 | order: TableStore.SortOrder.SORT_ORDER_DESC, 20 | } 21 | } 22 | ] 23 | }, 24 | getTotalCount: true 25 | }, 26 | columnToGet: { 27 | returnType: TableStore.ColumnReturnType.RETURN_NONE, 28 | returnNames: ["pic_tag", "pic_description", "time_stamp", "pos"] 29 | } 30 | }, function (err, data) { 31 | var nextToken = data.nextToken.toString("base64"); 32 | var token = new Buffer(nextToken, "base64"); 33 | 34 | console.log('success:', JSON.stringify(data, null, 2)); 35 | console.log('token:', nextToken); 36 | 37 | client.search({ 38 | tableName: "nestedTag", 39 | indexName: "testIndex", 40 | searchQuery: { 41 | offset: 0, 42 | limit: 10, 43 | query: { 44 | queryType: TableStore.QueryType.MATCH_ALL_QUERY 45 | }, 46 | getTotalCount: true, 47 | token: token//nextToken包含sort+searchAfter信息,后续不需要提供 48 | }, 49 | columnToGet: { 50 | returnType: TableStore.ColumnReturnType.RETURN_NONE, 51 | returnNames: ["pic_tag", "pic_description", "time_stamp", "pos"] 52 | } 53 | }, function (err, data) { 54 | var nextToken = data.nextToken.toString("base64"); 55 | var token = new Buffer(nextToken, "base64"); 56 | 57 | console.log('token success:', JSON.stringify(data, null, 2)); 58 | console.log('token:', nextToken); 59 | }) 60 | }); 61 | 62 | -------------------------------------------------------------------------------- /samples/sql.js: -------------------------------------------------------------------------------- 1 | const client = require('./client'); 2 | 3 | const params = { 4 | query: "select * from test_table", 5 | } 6 | 7 | client.sqlQuery(params, function (err, resp) { 8 | if (err) { 9 | console.log('sqlQuery error:', err.toString()); 10 | } else { 11 | console.log('sqlQuery 
success:', resp); 12 | console.log(resp.sqlRows.rowCount.toFloat64()); 13 | console.log(resp.sqlRows.columnCount); 14 | console.log(resp.sqlRows.sqlTableMeta) 15 | for (let i = 0; i < resp.sqlRows.rowCount.toFloat64(); i++) { 16 | for (let j = 0; j < resp.sqlRows.columnCount; j++) { 17 | let data = resp.sqlRows.get(i, j); 18 | // handle BINARY columns 19 | if (resp.sqlRows.sqlTableMeta.schemas[j].typeName === "BINARY") { 20 | let int8Array = data.valueArray(); 21 | console.log(int8Array); 22 | } 23 | // handle LONG columns 24 | if (resp.sqlRows.sqlTableMeta.schemas[j].typeName === "LONG") { 25 | console.log(data.toFloat64()); 26 | } 27 | console.log("i:" + i, ", j:" + j + ":" + data); 28 | } 29 | } 30 | } 31 | }); -------------------------------------------------------------------------------- /samples/stsTokenClient.js: -------------------------------------------------------------------------------- 1 | /* 2 | 1. Use the Alibaba Cloud STS SDK to generate the accessKeyId, secretAccessKey and stsToken. SDK docs: https://help.aliyun.com/document_detail/28786.html 3 | 2. For the detailed authorization workflow, see https://help.aliyun.com/document_detail/27364.html 4 | 3. The generated credentials look like the sample values below. 5 | */ 6 | var TableStore = require('../index.js'); 7 | var stsTokenClient = new TableStore.Client({ 8 | accessKeyId: 'STS.G1rT1R8UKu1XRL3BNDxe6mfkd', 9 | secretAccessKey: '4tJRTBXoaSY797sPJwzakVWsHrzxb1H754eKnPVzBc2H', 10 | stsToken: 'CAISnwJ1q6Ft5B2yfSjIpvLHH4vm1YpqwvOzUGqCpk4RdOoagqPAhjz2IHtKeHhhAeAfsf40nW5V6vsSlqB6T55OSAmcNZIoaHnsHo7iMeT7oMWQweEumv/MQBqOaXPS2MvVfJ/aLrf0ceusbFbpjzJ6xaCAGxypQ12iN+/S6/tgcs9FdACkZjppCcsURG5ltNRIGXbKPuysOBOo4ArXEFE6lQdgywEe7rikkOmd8Qac9iqYrNUYvIPsOJOpQtBxNZNkKbeP0fdxa7DK3Vw7iXEI1t8v1vQUpm+b7oHMXgMPu0rZCYeOrI0zdj0eT7MhBqtJoML7kfBFoeHJn+z1sU0WY70EAnyPHd/6m5KaRb/0ZsxWbqrgJ3nWzsBa8CFRI4LeCxqAAVrEjVoVtBxkF/kmhNeGFkgzbq6hLBmstxMcUcqLC0pjR1wiL95dvbUI+umLSooomhx9KyWQEzK5h1BZVAbrSMbgdJEUI0l9XTdWARydIwViMLZ0G7vzddiFGsyFcE3yr5OODg5IXWHp+elMT68mpRkXdv7QB3y3YPOca+Z4Ih+I', 11 | endpoint: ' ', 12 | instancename: '' 13 | }); 14 | 15 | -------------------------------------------------------------------------------- /samples/updateRow.js: -------------------------------------------------------------------------------- 1 | var TableStore = require('../index.js'); 2 | var Long = TableStore.Long; 3 | var client = require('./client'); 4 | 5 | var params = { 6 | tableName: "sampleTable", 7 | condition: new TableStore.Condition(TableStore.RowExistenceExpectation.IGNORE, null), 8 | primaryKey: [{ 'gid': Long.fromNumber(9) }, { 'uid': Long.fromNumber(90) }], 9 | updateOfAttributeColumns: [ 10 | { 'PUT': [{ 'col4': Long.fromNumber(4) }, { 'col5': '5' }, { 'col6': Long.fromNumber(7) }] }, 11 | { 'DELETE': [{ 'col1': Long.fromNumber(1496826473186) }] }, 12 | { 'DELETE_ALL': ['col2'] } 13 | ], 14 | returnContent: { returnType: TableStore.ReturnType.Primarykey } 15 | }; 16 | 17 | client.updateRow(params, 18 | function (err, data) { 19 | if (err) { 20 | console.log('error:', err); 21 | return; 22 | } 23 | 24 | console.log('success:', data); 25 | }); 26 | -------------------------------------------------------------------------------- /samples/updateSearchIndex.js: -------------------------------------------------------------------------------- 1 | const client = require('./client'); 2 | const tableName = "nestedTag"; // the sample table used by createSearchIndex.js; replace with your own table name 3 | const indexName = "testIndex"; // the sample search index; replace with your own index name 4 | let params = { 5 | tableName: tableName, 6 | indexName: indexName, 7 | timeToLive: 8000000, 8 | } 9 | client.updateSearchIndex(params, function (err, data) { 10 | if (err) { 11 | console.log('updateSearchIndex error:', err.toString()); 12 | } else { 13 | console.log('updateSearchIndex success:', data); 14 | } 15 | }); 
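The parallelScan sample earlier in this directory (samples/parallelScan.js) uses await at the top level of a CommonJS script, which Node.js rejects, and it reads tableName/indexName without defining them. A self-contained sketch of the same flow wrapped in an async IIFE; the table and index names are placeholders, and everything else reuses only the computeSplits/parallelScan calls already shown in that sample:

const client = require('./client');
const TableStore = require('../index.js');

const tableName = 'nestedTag';  // placeholder: your table name
const indexName = 'testIndex';  // placeholder: your search index name

(async () => {
    try {
        // 1. Obtain a sessionId for the scan.
        const computeSplits = await new Promise((resolve, reject) => {
            client.computeSplits({
                tableName: tableName,
                searchIndexSplitsOptions: { indexName: indexName }
            }, (err, data) => err ? reject(err) : resolve(data));
        });

        // 2. Build the scan query.
        const scanQuery = {
            query: { queryType: TableStore.QueryType.MATCH_ALL_QUERY },
            limit: 1000,
            aliveTime: 30,
            token: undefined,
            currentParallelId: 0,
            maxParallel: 1,
        };

        // 3. One page of the scan, as a Promise.
        const scanOnce = () => new Promise((resolve, reject) => {
            client.parallelScan({
                tableName: tableName,
                indexName: indexName,
                columnToGet: { returnType: TableStore.ColumnReturnType.RETURN_ALL_FROM_INDEX },
                sessionId: computeSplits.sessionId,
                scanQuery: scanQuery,
            }, (err, data) => err ? reject(err) : resolve(data));
        });

        // 4. Keep pulling pages until nextToken is exhausted.
        let totalCount = 0;
        let resp = await scanOnce();
        totalCount += resp.rows.length;
        while (resp.nextToken !== null && resp.nextToken.length > 0) {
            scanQuery.token = resp.nextToken;
            resp = await scanOnce();
            totalCount += resp.rows.length;
        }
        console.log('total rows:', totalCount);
    } catch (err) {
        console.log('parallelScan error:', err.toString());
    }
})();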
-------------------------------------------------------------------------------- /samples/updateTable.js: -------------------------------------------------------------------------------- 1 | var client = require('./client'); 2 | 3 | var params = { 4 | tableName: 'sampleTable', 5 | reservedThroughput: { 6 | capacityUnit: { 7 | read: 0, 8 | write: 0 9 | } 10 | }, 11 | tableOptions: { 12 | maxVersions: 10, 13 | allowUpdate: true, // 是否允许"UpdateRow"相关写入操作 14 | } 15 | }; 16 | 17 | client.updateTable(params, function (err, data) { 18 | if (err) { 19 | console.log('error:', err); 20 | return; 21 | } 22 | console.log('success:', data); 23 | }); 24 | 25 | -------------------------------------------------------------------------------- /test/data_opration.test.js: -------------------------------------------------------------------------------- 1 | var TableStore = require('../index.js'); 2 | var client = require('../samples/client'); 3 | var Long = TableStore.Long; 4 | var assert = require("assert"); 5 | 6 | describe('data_opration', function () { 7 | it('getrow data should equal putrow data', function (done) { 8 | this.timeout(30000); 9 | var tableName = 'dataTestTable'; 10 | var binaryData = new Buffer('中华人民共和国'); 11 | 12 | var primaryKeyData = [{ 'stringPK': '表格存储' }, { 'integerPK': Long.fromNumber(10) }, { 'binaryPK': binaryData }]; 13 | 14 | const getRange = function () { 15 | const params = { 16 | tableName: tableName, 17 | direction: TableStore.Direction.FORWARD, 18 | maxVersions: 10, 19 | inclusiveStartPrimaryKey: [{ "stringPK": TableStore.INF_MIN }, { "integerPK": TableStore.INF_MIN }, { "binaryPK": TableStore.INF_MIN }], 20 | exclusiveEndPrimaryKey: [{ "stringPK": TableStore.INF_MAX }, { "integerPK": TableStore.INF_MAX }, { "binaryPK": TableStore.INF_MAX }], 21 | limit: 2 22 | }; 23 | client.getRange(params, function (err, data) { 24 | if (err) { 25 | console.log('error:', err); 26 | done(); 27 | } 28 | console.log(JSON.stringify(data, null, 4)); 29 | done(); 30 | }); 31 | }; 32 | 33 | var getRow = function () { 34 | var getRowParams = { 35 | tableName: tableName, 36 | primaryKey: primaryKeyData 37 | }; 38 | client.getRow(getRowParams, function (err, data) { 39 | assert.equal(err, undefined); 40 | console.log(data); 41 | getRange(); 42 | }); 43 | }; 44 | 45 | var putRow = function () { 46 | var putRowParams = { 47 | tableName: tableName, 48 | condition: new TableStore.Condition(TableStore.RowExistenceExpectation.IGNORE, null), 49 | primaryKey: primaryKeyData, 50 | attributeColumns: [ 51 | { 'col1': '表格存储' }, 52 | { 'col2': '2' }, 53 | { 'col3': 3.1 }, 54 | { 'col4': -0.32 }, 55 | { 'col5': Long.fromNumber(123456789) } 56 | ] 57 | }; 58 | 59 | client.putRow(putRowParams, function (err, data) { 60 | assert.equal(err, undefined); 61 | getRow(); 62 | }); 63 | }; 64 | 65 | var createTableParams = { 66 | tableMeta: { 67 | tableName: tableName, 68 | primaryKey: [ 69 | { 70 | name: 'stringPK', 71 | type: 'STRING' 72 | }, 73 | { 74 | name: 'integerPK', 75 | type: 'INTEGER' 76 | }, 77 | { 78 | name: 'binaryPK', 79 | type: 'BINARY' 80 | }, 81 | ] 82 | }, 83 | reservedThroughput: { 84 | capacityUnit: { 85 | read: 0, 86 | write: 0 87 | } 88 | }, 89 | tableOptions: { 90 | maxVersions: 1, 91 | timeToLive: -1, 92 | }, 93 | }; 94 | client.deleteTable({ 95 | tableName: tableName, 96 | }, function () { 97 | client.createTable(createTableParams, function (err, data) { 98 | assert.equal(err, undefined); 99 | setTimeout(() => { 100 | putRow(); 101 | }, 2000); 102 | }); 103 | }) 104 | }) 105 | }) 
-------------------------------------------------------------------------------- /test/globalIndex.test.js: -------------------------------------------------------------------------------- 1 | var TableStore = require('../index.js'); 2 | var client = require('../samples/client'); 3 | var Long = TableStore.Long; 4 | var assert = require("assert"); 5 | 6 | describe('#GlobalIndex Test:', function () { 7 | 8 | it('a)create table with globalIndex -> success', function (done) { 9 | var createTableParams = { 10 | tableMeta: { 11 | tableName: 'sdkGlobalTest', 12 | primaryKey: [ 13 | { 14 | name: 'pk1', 15 | type: TableStore.PrimaryKeyType.INTEGER 16 | }, 17 | { 18 | name: 'pk2', 19 | type: TableStore.PrimaryKeyType.INTEGER 20 | } 21 | ], 22 | definedColumn: [ 23 | { 24 | "name": "col1", 25 | "type": TableStore.DefinedColumnType.DCT_INTEGER 26 | }, 27 | { 28 | "name": "col2", 29 | "type": TableStore.DefinedColumnType.DCT_INTEGER 30 | } 31 | ], 32 | }, 33 | reservedThroughput: { 34 | capacityUnit: { 35 | read: 0, 36 | write: 0 37 | } 38 | }, 39 | tableOptions: { 40 | timeToLive: -1,// 数据的过期时间, 单位秒, -1代表永不过期. 假如设置过期时间为一年, 即为 365 * 24 * 3600. 41 | maxVersions: 1// 保存的最大版本数, 设置为1即代表每列上最多保存一个版本(保存最新的版本). 42 | }, 43 | streamSpecification: { 44 | enableStream: false, //globalIndex不支持开启Stream 45 | }, 46 | indexMetas: [ 47 | { 48 | name: "sdkIndex1", 49 | primaryKey: ["pk2"], 50 | definedColumn: ["col1", "col2"] 51 | } 52 | ] 53 | }; 54 | 55 | client.createTable(createTableParams, function (createTableErr, createTableData) { 56 | assert.equal(createTableErr, undefined); 57 | done(); 58 | }); 59 | }); 60 | 61 | it('b)after create table -> table and index exist 1', function (done) { 62 | var describeTableParams = { 63 | tableName: 'sdkGlobalTest' 64 | }; 65 | client.describeTable(describeTableParams, function (descriveTableErr, descriveTableData) { 66 | assert.equal(descriveTableData.indexMetas.length, 1); 67 | done(); 68 | }); 69 | }); 70 | 71 | it('c)create globalIndex -> success', function (done) { 72 | var createGlobalIndexParams = { 73 | mainTableName: "sdkGlobalTest", 74 | indexMeta: { 75 | name: "sdkIndex2", 76 | primaryKey: ["col1"], 77 | definedColumn: ["col2"] 78 | } 79 | }; 80 | client.createIndex(createGlobalIndexParams, function (createIndexErr, createIndexData) { 81 | assert.equal(createIndexErr, undefined); 82 | done(); 83 | }) 84 | }); 85 | 86 | it('d)after create index -> index exist 2', function (done) { 87 | var describeTableParams = { 88 | tableName: 'sdkGlobalTest' 89 | }; 90 | client.describeTable(describeTableParams, function (descriveTableErr, descriveTableData) { 91 | assert.equal(descriveTableData.indexMetas.length, 2); 92 | done(); 93 | }); 94 | }); 95 | 96 | it('e)delete 2 globalIndex -> success', function (done) { 97 | var dropGlobalIndexParams = { 98 | mainTableName: "sdkGlobalTest", 99 | indexName: "sdkIndex1" 100 | }; 101 | 102 | client.dropIndex(dropGlobalIndexParams, function (dropIndexErr, dropIndexData) { 103 | assert.equal(dropIndexErr, undefined); 104 | var dropGlobalIndexParams = { 105 | mainTableName: "sdkGlobalTest", 106 | indexName: "sdkIndex2" 107 | }; 108 | 109 | client.dropIndex(dropGlobalIndexParams, function (dropIndexErr, dropIndexData) { 110 | assert.equal(dropIndexErr, undefined); 111 | done(); 112 | }); 113 | }); 114 | }); 115 | 116 | it('f)after delete globalIndex -> not exist', function (done) { 117 | var describeTableParams = { 118 | tableName: 'sdkGlobalTest' 119 | }; 120 | client.describeTable(describeTableParams, function (describeTableErr, 
describeTableData) { 121 | assert.equal(describeTableData.indexMetas.length, 0); 122 | done(); 123 | }) 124 | }); 125 | 126 | 127 | it('g)delete table -> success', function (done) { 128 | var deleteTableParams = { 129 | tableName: 'sdkGlobalTest' 130 | }; 131 | client.deleteTable(deleteTableParams, function (deleteTableErr, deleteTableData) { 132 | assert.equal(deleteTableErr, undefined); 133 | done(); 134 | }); 135 | }); 136 | 137 | it('h)afater delete table -> not exist', function (done) { 138 | var describeTableParams = { 139 | tableName: 'sdkGlobalTest' 140 | }; 141 | client.listTable(describeTableParams, function (describeTableErr, listTableData) { 142 | assert.equal(listTableData.tableNames.indexOf(describeTableParams.tableName), -1); 143 | done(); 144 | }) 145 | }) 146 | }); 147 | -------------------------------------------------------------------------------- /test/long.test.js: -------------------------------------------------------------------------------- 1 | var TableStore = require('../index.js'); 2 | var Long = TableStore.Long; 3 | var assert = require("assert"); 4 | 5 | describe('Long', function () { 6 | describe('#fromNumber', function () { 7 | it('int64.toNumber() should equal num', function () { 8 | var num = 123456789; 9 | var int64 = Long.fromNumber(num); 10 | assert.equal(int64.toNumber(), num); 11 | }) 12 | }) 13 | }) -------------------------------------------------------------------------------- /test/ots_test_utils.js: -------------------------------------------------------------------------------- 1 | const client = require("../samples/client"); 2 | const TableStore = require("../index"); 3 | const assert = require("assert"); 4 | const {performance} = require('perf_hooks'); 5 | 6 | otsTestUtils = { 7 | testSimpleSchema: { 8 | fieldSchemas: [{ 9 | fieldName: "col_keyword", 10 | fieldType: TableStore.FieldType.KEYWORD, 11 | index: true, 12 | enableSortAndAgg: true, 13 | store: true, 14 | isAnArray: false 15 | }, { 16 | fieldName: "col_long", 17 | fieldType: TableStore.FieldType.LONG, 18 | index: true, 19 | enableSortAndAgg: true, 20 | store: true, 21 | isAnArray: false 22 | }, { 23 | fieldName: "col_text", 24 | fieldType: TableStore.FieldType.TEXT, 25 | analyzer: "fuzzy", 26 | index: true, 27 | enableSortAndAgg: false, 28 | store: true, 29 | isAnArray: false, 30 | analyzerParameter: { 31 | minChars: 1, 32 | maxChars: 5, 33 | } 34 | }, { 35 | fieldName: "col_geo", 36 | fieldType: TableStore.FieldType.GEO_POINT, 37 | index: true, 38 | enableSortAndAgg: true, 39 | store: true, 40 | isAnArray: false, 41 | }, { 42 | fieldName: "col_nested", 43 | fieldType: TableStore.FieldType.NESTED, 44 | index: false, 45 | enableSortAndAgg: false, 46 | store: false, 47 | fieldSchemas: [ 48 | { 49 | fieldName: "sub_keyword", 50 | fieldType: TableStore.FieldType.KEYWORD, 51 | index: true, 52 | enableSortAndAgg: true, 53 | store: false, 54 | }, 55 | { 56 | fieldName: "tag_long", 57 | fieldType: TableStore.FieldType.LONG, 58 | index: true, 59 | enableSortAndAgg: true, 60 | store: false, 61 | } 62 | ] 63 | }, { 64 | fieldName: "date", 65 | fieldType: TableStore.FieldType.DATE, 66 | index: true, 67 | enableSortAndAgg: true, 68 | store: true, 69 | isAnArray: false, 70 | dateFormats: ["yyyy-MM-dd'T'HH:mm:ss.SSSSSS"], 71 | }, { 72 | fieldName: "col_double", 73 | fieldType: TableStore.FieldType.DOUBLE, 74 | index: true, 75 | enableSortAndAgg: true, 76 | store: false, 77 | isAnArray: false 78 | }, { 79 | fieldName: "col_vector", 80 | fieldType: TableStore.FieldType.VECTOR, 81 | index: true, 82 | 
isAnArray: false, 83 | vectorOptions: { 84 | dataType: TableStore.VectorDataType.VD_FLOAT_32, 85 | dimension: 4, 86 | metricType: TableStore.VectorMetricType.VM_COSINE, 87 | } 88 | }, { 89 | fieldName: "col_text2", 90 | fieldType: TableStore.FieldType.TEXT, 91 | index: true, 92 | enableHighlighting: true, 93 | }, { 94 | fieldName: "col_nested2 ", 95 | fieldType: TableStore.FieldType.NESTED, 96 | index: false, 97 | enableSortAndAgg: false, 98 | store: false, 99 | fieldSchemas: [ 100 | { 101 | fieldName: "Level1_Col1_Text", 102 | fieldType: TableStore.FieldType.TEXT, 103 | index: true, 104 | enableHighlighting: true, 105 | enableSortAndAgg: false, 106 | store: true, 107 | }, 108 | { 109 | fieldName: "Level1_Col2_Nested", 110 | fieldType: TableStore.FieldType.NESTED, 111 | index: false, 112 | enableSortAndAgg: false, 113 | store: false, 114 | fieldSchemas: [ 115 | { 116 | fieldName: "Level2_Col1_Text", 117 | fieldType: TableStore.FieldType.TEXT, 118 | index: true, 119 | enableHighlighting: true, 120 | enableSortAndAgg: false, 121 | store: true, 122 | }, 123 | ] 124 | } 125 | ] 126 | },{ 127 | fieldName: "col_long_sec", 128 | fieldType: TableStore.FieldType.LONG, 129 | index: true, 130 | enableSortAndAgg: true, 131 | }, { 132 | fieldName: "col_date", 133 | fieldType: TableStore.FieldType.DATE, 134 | index: true, 135 | enableSortAndAgg: false, 136 | store: false, 137 | isAnArray: false, 138 | dateFormats: ["yyyy-MM-dd HH:mm:ss"], 139 | },{ 140 | fieldName: "col_keyword2", 141 | fieldType: TableStore.FieldType.KEYWORD, 142 | index: true, 143 | enableSortAndAgg: true, 144 | store: true, 145 | isAnArray: false 146 | },], 147 | }, 148 | emptyPromise: new Promise(function (resolve) { 149 | resolve() 150 | }), 151 | sleep: async function (ms) { 152 | return new Promise(resolve => setTimeout(resolve, ms)) 153 | }, 154 | deleteTable: async function (tableName) { 155 | const params = { 156 | tableName: tableName 157 | }; 158 | return new Promise(function (resolve, reject) { 159 | client.deleteTable(params, function (err, data) { 160 | if (err) { 161 | console.log('deleteTable error:', err.toString()); 162 | if (err.toString().includes("OTSObjectNotExist")) { 163 | resolve(data) 164 | return 165 | } 166 | reject(err); 167 | } else { 168 | console.log('deleteTable success:', data); 169 | resolve(data) 170 | } 171 | }); 172 | }); 173 | }, 174 | deleteTableAndSearchIndex: async function (tableName) { 175 | let listResp = await this.listSearchIndex(tableName); 176 | for (const item of listResp.indices) { 177 | console.log("start delete index", item.indexName); 178 | await this.deleteSearchIndex(tableName, item.indexName); 179 | } 180 | await this.deleteTable(tableName) 181 | }, 182 | listSearchIndex: async function (tableName) { 183 | return new Promise(function (resolve, reject) { 184 | client.listSearchIndex({ 185 | tableName: tableName, 186 | }, function (err, data) { 187 | if (err) { 188 | if (err.toString().includes("does not exist")) { 189 | resolve({ 190 | indices: [], 191 | }) 192 | return 193 | } 194 | console.log('listSearchIndex error:', err.toString()); 195 | reject(err); 196 | } else { 197 | console.log('listSearchIndex success:', data); 198 | resolve(data) 199 | } 200 | }); 201 | }) 202 | }, 203 | deleteSearchIndex: async function (tableName, indexName) { 204 | return new Promise(function (resolve, reject) { 205 | client.deleteSearchIndex({ 206 | tableName: tableName, indexName: indexName, 207 | }, function (err, data) { 208 | if (err) { 209 | console.log('deleteSearchIndex error:', 
err.toString()); 210 | if (err.toString().includes("OTSObjectNotExist")) { 211 | resolve(data) 212 | return 213 | } 214 | reject(err); 215 | } else { 216 | console.log('deleteSearchIndex success:', data); 217 | resolve(data) 218 | } 219 | }); 220 | }) 221 | }, 222 | createTable: async function (tableName, tableOptions = {}) { 223 | const params = { 224 | tableMeta: { 225 | tableName: tableName, primaryKey: [{ 226 | name: 'pk1', type: 'STRING' 227 | }] 228 | }, reservedThroughput: { 229 | capacityUnit: { 230 | read: 0, write: 0 231 | } 232 | }, tableOptions: { 233 | timeToLive: tableOptions.timeToLive === undefined ? -1 : tableOptions.timeToLive, 234 | maxVersions: tableOptions.maxVersions === undefined ? 1 : tableOptions.maxVersions, 235 | allowUpdate: tableOptions.allowUpdate === undefined ? true : tableOptions.allowUpdate, 236 | }, 237 | }; 238 | return new Promise(function (resolve, reject) { 239 | client.createTable(params, function (err, data) { 240 | if (err) { 241 | console.log('createTable error:', err.toString()); 242 | reject(err); 243 | } else { 244 | console.log('createTable success:', data); 245 | resolve(data) 246 | } 247 | }) 248 | }) 249 | }, 250 | createSearchIndex: async function (tableName, indexName, schema, sourceIndexName, timeToLive) { 251 | return new Promise(function (resolve, reject) { 252 | let params = { 253 | tableName: tableName, indexName: indexName, schema: schema, 254 | } 255 | if (sourceIndexName !== undefined) { 256 | params.sourceIndexName = sourceIndexName 257 | } 258 | if (timeToLive !== undefined) { 259 | params.timeToLive = timeToLive 260 | } 261 | client.createSearchIndex(params, function (err, data) { 262 | if (err) { 263 | console.log('createSearchIndex error:', err.toString()); 264 | reject(err); 265 | } else { 266 | console.log('createSearchIndex success:', data); 267 | resolve(data) 268 | } 269 | }); 270 | }) 271 | }, 272 | search: async function (tableName, indexName, searchQuery, columnToGet = { 273 | returnType: TableStore.ColumnReturnType.RETURN_ALL_FROM_INDEX, 274 | returnNames: ["col_long"], 275 | }, timeoutMs = 30000) { 276 | return new Promise(function (resolve, reject) { 277 | let params = { 278 | tableName: tableName, 279 | indexName: indexName, 280 | searchQuery: searchQuery, 281 | columnToGet: columnToGet, 282 | timeoutMs: timeoutMs, 283 | } 284 | client.search(params, function (err, data) { 285 | if (err) { 286 | console.log('search error:', err.toString()); 287 | reject(err); 288 | } else { 289 | console.log('search success:', data); 290 | resolve(data) 291 | } 292 | }); 293 | }) 294 | }, 295 | waitSearchSync: async function (tableName, indexName, expectTotalCount, timeoutSecond) { 296 | let start = performance.now(); 297 | while (true) { 298 | let searchResp = await this.search(tableName, indexName, { 299 | offset: 0, 300 | limit: 0, 301 | query: { 302 | queryType: TableStore.QueryType.MATCH_ALL_QUERY, 303 | }, 304 | getTotalCount: true, 305 | }, 306 | { 307 | returnType: TableStore.ColumnReturnType.RETURN_NONE, 308 | }) 309 | let getTotalCount = searchResp.totalCounts >> 0 310 | if (getTotalCount === expectTotalCount) { 311 | console.log("TotalHit: " + expectTotalCount); 312 | console.log("DataSyncTimeInMs:" + (performance.now() - start)); 313 | break; 314 | } else if (getTotalCount !== 0) { 315 | console.log("TotalHit: " + getTotalCount + ", Expect: " + expectTotalCount); 316 | } 317 | if (performance.now() - start > timeoutSecond * 1000) { 318 | assert.fail("等待同步数据超时") 319 | } 320 | await this.sleep(1000) 321 | } 322 | return 
this.emptyPromise 323 | }, 324 | createTableAndSearchIndex: async function (tableName, indexName, schema, tableOptions = {}, sourceIndexName, timeToLive) { 325 | await this.createTable(tableName, tableOptions) 326 | await this.sleep(1000) 327 | await this.createSearchIndex(tableName, indexName, schema, sourceIndexName, timeToLive) 328 | }, 329 | computeSplits: async function (tableName, indexName) { 330 | return new Promise((resolve, reject) => { 331 | client.computeSplits({ 332 | tableName: tableName, 333 | searchIndexSplitsOptions: { 334 | indexName: indexName, 335 | } 336 | }, function (err, data) { 337 | if (err) { 338 | console.log('computeSplits error:', err.toString()); 339 | reject(err); 340 | } else { 341 | console.log('computeSplits success:', data); 342 | assert.ok(data.splitsSize >= 1) 343 | resolve(data) 344 | } 345 | }) 346 | }) 347 | }, 348 | putRow: async function (tableName, pk1, attributeColumns) { 349 | let params = { 350 | tableName: tableName, 351 | condition: new TableStore.Condition(TableStore.RowExistenceExpectation.IGNORE, null), 352 | primaryKey: [{'pk1': pk1}], 353 | attributeColumns: attributeColumns, 354 | returnContent: {returnType: TableStore.ReturnType.Primarykey} 355 | }; 356 | return new Promise(function (resolve, reject) { 357 | client.putRow(params, function (err, data) { 358 | if (err) { 359 | console.log('putRow error:', err.toString()); 360 | reject(err); 361 | } else { 362 | console.log('putRow success:', pk1); 363 | resolve(data) 364 | } 365 | }); 366 | }); 367 | }, 368 | describeTable: async function (tableName) { 369 | const params = { 370 | tableName: tableName 371 | }; 372 | return new Promise(function (resolve, reject) { 373 | client.describeTable(params, function (err, data) { 374 | if (err) { 375 | console.log('describeTable error:', err.toString()); 376 | reject(err); 377 | } else { 378 | console.log('describeTable success:', data); 379 | resolve(data) 380 | } 381 | }); 382 | }); 383 | }, 384 | describeSearchIndex: async function (tableName, indexName) { 385 | const params = { 386 | tableName: tableName, indexName: indexName, 387 | }; 388 | return new Promise(function (resolve, reject) { 389 | client.describeSearchIndex(params, function (err, data) { 390 | if (err) { 391 | console.log('describeSearchIndex error:', err.toString()); 392 | reject(err); 393 | } else { 394 | console.log('describeSearchIndex success:', data); 395 | resolve(data) 396 | } 397 | }); 398 | }); 399 | }, 400 | sqlQuery: async function (sql, version) { 401 | const params = { 402 | query: sql, 403 | version: version, 404 | }; 405 | return new Promise(function (resolve, reject) { 406 | client.sqlQuery(params, function (err, data) { 407 | if (err) { 408 | console.log('sqlQuery error:', err.toString()); 409 | if (err.toString().includes("already exists")) { 410 | resolve(data) 411 | return 412 | } 413 | reject(err); 414 | } else { 415 | console.log('sqlQuery success:', data); 416 | resolve(data) 417 | } 418 | }); 419 | }); 420 | }, 421 | printSearchHit: function (searchHits, prefix) { 422 | TableStore.util.arrayEach(searchHits, function (searchHit) { 423 | if (searchHit.highlightResultItem != null) { 424 | console.log(prefix + "Highlight: \n"); 425 | var strBuilder = "" 426 | for (const [key,val] of searchHit.highlightResultItem.highlightFields.entries()) { 427 | strBuilder += key + ":["; 428 | strBuilder += val.fragments.join(",") + "]\n"; 429 | console.log(strBuilder); 430 | } 431 | } 432 | for (const [key,val] of searchHit.searchInnerHits.entries()) { 433 | 
console.log(prefix + "Path: " + key + "\n"); 434 | console.log(prefix + "InnerHit: \n"); 435 | otsTestUtils.printSearchHit(val.subSearchHits, prefix + " "); 436 | } 437 | }); 438 | } 439 | } 440 | 441 | 442 | module.exports = otsTestUtils -------------------------------------------------------------------------------- /test/performance.js: -------------------------------------------------------------------------------- 1 | var client = require('../samples/client'); 2 | 3 | // 测试的时候可以将这个值调大 4 | var testTimes = 1 5 | var roundTimes = testTimes; 6 | var completeTimes = 0; 7 | 8 | var start = new Date().getTime(); 9 | 10 | while (roundTimes > 0) { 11 | client.listTable({}, function (err, data) { 12 | completeTimes++; 13 | if (completeTimes >= testTimes) { 14 | var end = new Date().getTime(); 15 | completeTime = end - start; 16 | console.log('completeTime:' + completeTime); 17 | } 18 | }); 19 | roundTimes--; 20 | } 21 | 22 | var requestEnd = new Date().getTime(); 23 | var requestTime = requestEnd - start; 24 | 25 | console.log('requestTime:' + requestTime); 26 | -------------------------------------------------------------------------------- /test/protocol/plain_buffer.test.js: -------------------------------------------------------------------------------- 1 | var TableStore = require('../../index.js'); 2 | var Long = TableStore.Long; 3 | var assert = require("assert"); 4 | 5 | describe('plainBuffer', function () { 6 | describe('primaryKey_serialize_deserialize', function () { 7 | it('deserialized result should equal primaryKey', function () { 8 | var primaryKey = [{ 'gid': Long.fromNumber(20004) }]; 9 | var buffer = TableStore.PlainBufferBuilder.serializePrimaryKey(primaryKey); 10 | 11 | var dataBuffer = { 12 | buffer: buffer, 13 | offset: 0, 14 | limit: buffer.length 15 | }; 16 | 17 | var inputStream = new TableStore.PlainBufferInputStream(dataBuffer); 18 | var codedInputStream = new TableStore.PlainBufferCodedInputStream(inputStream); 19 | var decodedPK = codedInputStream.readRow(); 20 | 21 | assert.equal(decodedPK.primaryKey[0].name, 'gid'); 22 | assert.equal(decodedPK.primaryKey[0].value.toNumber(), 20004); 23 | }); 24 | }); 25 | 26 | describe('attributeColumn_serialize_deserialize', function () { 27 | it('deserialized result should equal attributeColumns', function () { 28 | var primaryKey = [{ 'gid': Long.fromNumber(20004) }]; 29 | var attributeColumns = [{ 'attrCol1': '表格存储' }, { 'attrCol2': 'NodeJS SDK' }]; 30 | var buffer = TableStore.PlainBufferBuilder.serializeForPutRow(primaryKey, attributeColumns); 31 | 32 | var dataBuffer = { 33 | buffer: buffer, 34 | offset: 0, 35 | limit: buffer.length 36 | }; 37 | 38 | var inputStream = new TableStore.PlainBufferInputStream(dataBuffer); 39 | var codedInputStream = new TableStore.PlainBufferCodedInputStream(inputStream); 40 | var decodedAttrs = codedInputStream.readRow(); 41 | 42 | assert.equal(decodedAttrs.attributes[0].columnName, 'attrCol1'); 43 | assert.equal(decodedAttrs.attributes[0].columnValue, '表格存储'); 44 | 45 | assert.equal(decodedAttrs.attributes[1].columnName, 'attrCol2'); 46 | assert.equal(decodedAttrs.attributes[1].columnValue, 'NodeJS SDK'); 47 | }); 48 | }); 49 | }); -------------------------------------------------------------------------------- /test/sql.test.js: -------------------------------------------------------------------------------- 1 | const assert = require("assert"); 2 | const otsTestUtils = require("./ots_test_utils"); 3 | const TableStore = require("../index"); 4 | const Long = TableStore.Long; 5 | 6 | 7 | 
describe('sql', function () {
8 |     this.timeout(30000);
9 |     const tableName = "js_sql";
10 |     it('1) createTable', async function () {
11 |         await otsTestUtils.deleteTable(tableName);
12 |         await otsTestUtils.createTable(tableName);
13 |         await otsTestUtils.sleep(2000);
14 |         return otsTestUtils.emptyPromise;
15 |     });
16 | 
17 |     it('2) putSomeData', async function () {
18 |         // write a few rows of test data
19 |         for (let i = 0; i < 30; i++) {
20 |             await otsTestUtils.putRow(tableName, i + "", [
21 |                 {'col_keyword': i % 4 + "_aaa"},
22 |                 {'col_long': Long.fromNumber(i)},
23 |                 {'col_bool': i % 2 !== 0},
24 |                 {'col_double': i * 0.3},
25 |                 {'col_binary': Buffer.from('binary')},
26 |             ]);
27 |         }
28 |         return otsTestUtils.emptyPromise;
29 |     });
30 | 
31 |     it('3) sql:drop table', async function () {
32 |         {
33 |             const query = "drop MAPPING table " + tableName + " ;";
34 |             const resp = await otsTestUtils.sqlQuery(query, TableStore.SQLPayloadVersion.SQL_FLAT_BUFFERS)
35 |             console.log(JSON.stringify(resp))
36 |         }
37 |         return otsTestUtils.emptyPromise;
38 |     });
39 | 
40 |     it('4) sql:create table', async function () {
41 |         {
42 |             const query = "create table " + tableName + " (pk1 varchar(1024), col_long bigint, col_keyword mediumtext, col_bool bool, col_double double, col_binary MEDIUMBLOB, primary key(pk1))";
43 |             const resp = await otsTestUtils.sqlQuery(query, TableStore.SQLPayloadVersion.SQL_FLAT_BUFFERS)
44 |             console.log(JSON.stringify(resp))
45 |         }
46 |         return otsTestUtils.emptyPromise;
47 |     });
48 | 
49 |     it('5) sql:show tables', async function () {
50 |         {
51 |             const query = "show tables";
52 |             const resp = await otsTestUtils.sqlQuery(query, TableStore.SQLPayloadVersion.SQL_FLAT_BUFFERS)
53 |             console.log(JSON.stringify(resp, null, "\t"));
54 |         }
55 |         return otsTestUtils.emptyPromise;
56 |     });
57 | 
58 |     it('6) sql:describe test_table', async function () {
59 |         {
60 |             const query = "describe " + tableName;
61 |             const resp = await otsTestUtils.sqlQuery(query)
62 |             console.log(JSON.stringify(resp, null, "\t"));
63 |         }
64 |         return otsTestUtils.emptyPromise;
65 |     });
66 | 
67 |     it('7) sql: sql', async function () {
68 |         {
69 |             const query = "select * from " + tableName + " where col_long>5 limit 4";
70 |             const resp = await otsTestUtils.sqlQuery(query)
71 |             assert.equal(4, resp.sqlRows.rowCount.toFloat64());
72 |             assert.equal(6, resp.sqlRows.columnCount);
73 |             assert.ok(resp.sqlRows.sqlTableMeta.schemas.length === 6)
74 |             for (let i = 0; i < resp.sqlRows.rowCount.toFloat64(); i++) {
75 |                 for (let j = 0; j < resp.sqlRows.columnCount; j++) {
76 |                     let data = resp.sqlRows.get(i, j);
77 |                     // handle BINARY values
78 |                     if (resp.sqlRows.sqlTableMeta.schemas[j].typeName === "BINARY") {
79 |                         let int8Array = data.valueArray();
80 |                         console.log(int8Array);
81 |                     }
82 |                     // handle LONG values
83 |                     if (resp.sqlRows.sqlTableMeta.schemas[j].typeName === "LONG") {
84 |                         console.log(data.toFloat64());
85 |                     }
86 |                     console.log("i:" + i, ", j:" + j + ":" + data);
87 |                 }
88 |             }
89 |             console.log(JSON.stringify(resp.sqlRows.sqlTableMeta, null, "\t"));
90 |         }
91 |         return otsTestUtils.emptyPromise;
92 |     });
93 | 
94 |     it('8) sql:only support fbs', async function () {
95 |         try {
96 |             const query = "show tables";
97 |             const resp = await otsTestUtils.sqlQuery(query, 1)
98 |             console.log(JSON.stringify(resp, null, "\t"));
99 |             assert.fail("expected to fail")
100 |         } catch (e) {
101 |             console.log(e);
102 |             assert.ok(e.toString().includes("SQL payload version only support: 'TableStore.SQLPayloadVersion.SQL_FLAT_BUFFERS'"))
103 |         }
104 |         return otsTestUtils.emptyPromise;
105 |     });
106 | });
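// --- Added sketch, not part of the original tests ---
// Collects a sqlQuery response into plain JavaScript arrays, using only the
// accessors exercised above (rowCount, columnCount, get, schemas[j].typeName,
// toFloat64, valueArray). The helper name is illustrative only.
function sqlRowsToArrays(resp) {
    const rows = [];
    for (let i = 0; i < resp.sqlRows.rowCount.toFloat64(); i++) {
        const row = [];
        for (let j = 0; j < resp.sqlRows.columnCount; j++) {
            let cell = resp.sqlRows.get(i, j);
            const typeName = resp.sqlRows.sqlTableMeta.schemas[j].typeName;
            if (typeName === "LONG") {
                cell = cell.toFloat64(); // LONG cells are Long-like objects
            } else if (typeName === "BINARY") {
                cell = Buffer.from(cell.valueArray()); // BINARY cells expose an Int8Array
            }
            row.push(cell);
        }
        rows.push(row);
    }
    return rows;
}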
107 | 
108 | 
109 | 
--------------------------------------------------------------------------------
/test/table.test.js:
--------------------------------------------------------------------------------
1 | const client = require('../samples/client');
2 | const assert = require("assert");
3 | const otsTestUtils = require("./ots_test_utils");
4 | 
5 | describe('table_test', () => {
6 |     it('allowUpdate', async function () {
7 |         this.timeout(30000);
8 |         const tableName = "js_test_table"
9 |         await otsTestUtils.deleteTable(tableName)
10 | 
11 |         const params = {
12 |             tableMeta: {
13 |                 tableName: tableName,
14 |                 primaryKey: [
15 |                     {
16 |                         name: 'gid',
17 |                         type: 'INTEGER'
18 |                     },
19 |                     {
20 |                         name: 'uid',
21 |                         type: 'INTEGER'
22 |                     }
23 |                 ]
24 |             },
25 |             reservedThroughput: {
26 |                 capacityUnit: {
27 |                     read: 0,
28 |                     write: 0
29 |                 }
30 |             },
31 |             tableOptions: {
32 |                 timeToLive: -1,
33 |                 maxVersions: 1,
34 |                 allowUpdate: false,
35 |             },
36 |         };
37 |         // create the table with allowUpdate = false
38 |         await new Promise(function (resolve, reject) {
39 |             client.createTable(params, function (err, data) {
40 |                 if (err) {
41 |                     console.log('error:', err);
42 |                     reject(err);
43 |                 } else {
44 |                     console.log('success:', data);
45 |                     resolve(data)
46 |                 }
47 |             })
48 |         })
49 | 
50 |         {
51 |             let info = await otsTestUtils.describeTable(tableName)
52 |             console.log(info)
53 |             assert.equal(false, info.tableOptions.allowUpdate)
54 |         }
55 | 
56 |         // update the table to allowUpdate = true
57 |         await new Promise(function (resolve, reject) {
58 |             client.updateTable({
59 |                 tableName: tableName,
60 |                 tableOptions: {
61 |                     allowUpdate: true,
62 |                 }
63 |             }, function (err, data) {
64 |                 if (err) {
65 |                     console.log('error:', err);
66 |                     reject(err);
67 |                 } else {
68 |                     console.log('success:', data);
69 |                     resolve(data)
70 |                 }
71 |             })
72 |         })
73 |         {
74 |             let info = await otsTestUtils.describeTable(tableName)
75 |             console.log(info)
76 |             assert.equal(true, info.tableOptions.allowUpdate)
77 |         }
78 |         return otsTestUtils.emptyPromise
79 |     });
80 | });
81 | 
82 | 
83 | 
84 | 
--------------------------------------------------------------------------------
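// --- Added sketch, not part of the original tests ---
// The inline Promise wrappers in test/table.test.js can be factored into a small
// helper for toggling table options such as allowUpdate; the helper name is
// illustrative only.
function updateTableAsync(tableName, tableOptions) {
    return new Promise(function (resolve, reject) {
        client.updateTable({tableName: tableName, tableOptions: tableOptions}, function (err, data) {
            if (err) {
                reject(err);
            } else {
                resolve(data);
            }
        });
    });
}
// Example: await updateTableAsync('js_test_table', {allowUpdate: true});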