├── .gitattributes ├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.yml │ ├── config.yml │ ├── feature_request.yml │ └── question_ask.yml └── workflows │ ├── ci.yml │ └── stale.yml ├── .gitignore ├── .travis.yml ├── LICENSE ├── README.md ├── assembly ├── bin │ ├── backup.sh │ ├── clear.sh │ ├── deploy.sh │ ├── hugegraph │ ├── keystore │ ├── release.sh │ ├── schedule-backup.sh │ ├── start-all.sh │ ├── stop-all.sh │ ├── util.sh │ └── version-map.yaml ├── conf │ └── hugegraph.truststore ├── descriptor │ └── assembly.xml └── travis │ ├── conf │ ├── graphs │ │ └── hugegraph.properties │ ├── gremlin-server.yaml │ └── rest-server.properties │ └── install-hugegraph-from-source.sh ├── build.sh ├── checkstyle.xml ├── ci.yml ├── pom.xml └── src ├── main ├── java │ └── com │ │ └── baidu │ │ └── hugegraph │ │ ├── base │ │ ├── Directory.java │ │ ├── HdfsDirectory.java │ │ ├── LocalDirectory.java │ │ ├── Printer.java │ │ ├── RetryManager.java │ │ ├── ToolClient.java │ │ └── ToolManager.java │ │ ├── cmd │ │ ├── HugeGraphCommand.java │ │ └── SubCommands.java │ │ ├── constant │ │ ├── AuthRestoreConflictStrategy.java │ │ └── Constants.java │ │ ├── exception │ │ ├── ExitException.java │ │ └── ToolsException.java │ │ ├── formatter │ │ ├── Formatter.java │ │ ├── JsonFormatter.java │ │ └── kgdumper │ │ │ ├── ComputeSign.java │ │ │ ├── DumpKGFormatter.java │ │ │ └── SignFS64.java │ │ ├── manager │ │ ├── AuthBackupRestoreManager.java │ │ ├── BackupManager.java │ │ ├── BackupRestoreBaseManager.java │ │ ├── DumpGraphManager.java │ │ ├── GraphsManager.java │ │ ├── GremlinManager.java │ │ ├── RestoreManager.java │ │ └── TasksManager.java │ │ ├── structure │ │ └── JsonGraph.java │ │ └── util │ │ └── ToolUtil.java └── resources │ └── log4j2.xml └── test ├── java └── com │ └── baidu │ └── hugegraph │ └── test │ ├── functional │ ├── AuthBackupTest.java │ ├── AuthRestoreTest.java │ ├── AuthTest.java │ ├── CommandTest.java │ └── FuncTestSuite.java │ └── util │ └── FileUtil.java └── resources └── auth ├── auth_accesses.txt ├── auth_belongs.txt ├── auth_groups.txt ├── auth_targets.txt ├── auth_users.txt └── auth_users_conflict.txt /.gitattributes: -------------------------------------------------------------------------------- 1 | * text=auto !eol 2 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.yml: -------------------------------------------------------------------------------- 1 | name: Bug report (反馈 Bug) 2 | description: Create a bug report to help HugeGraph improve 3 | title: '[Bug] describe the main problem' 4 | labels: 5 | - bug 6 | 7 | body: 8 | - type: markdown 9 | attributes: 10 | value: >- 11 | ### Note (特别注意) : 12 | 13 | > 1. 请先**搜索**现有的[Server-Issues](https://github.com/hugegraph/hugegraph/issues) 与 14 | [Tools-Issues](https://github.com/hugegraph/hugegraph-tools/issues) 中没有与您相同 15 | / 相关的问题 (请勿重复提交) 16 | 17 | > 2. 我们需要尽可能**详细**的信息来**复现**问题, 越详细的信息 (包括**日志 / 截图 / 配置**等) 18 | 会**越快**被响应和处理 19 | 20 | > 3. Issue 标题请保持原有模板分类(例如:`[Bug]`), 长段描述之间可以增加`空行`或使用`序号`标记, 保持排版清晰 21 | 22 | > 4. 
请在对应的模块提交 issue, 缺乏有效信息 / 长时间 (> 14 天) 没有回复的 issue 可能会被 **关闭** 23 | (更新时会再开启) 24 | 25 | - type: dropdown 26 | attributes: 27 | label: Bug Type (问题类型) 28 | options: 29 | - exception / error (异常报错) 30 | - data inconsistency (备份 / 迁移后不合预期) 31 | - logic (逻辑设计问题) 32 | - others (please edit later) 33 | 34 | - type: checkboxes 35 | attributes: 36 | label: Before submit 37 | options: 38 | - label: 我已经确认现有的 [Server-Issues](https://github.com/hugegraph/hugegraph/issues) 与 [Tools-Issues](https://github.com/hugegraph/hugegraph-tools/issues) 中没有相同 / 重复问题 39 | required: true 40 | 41 | - type: textarea 42 | attributes: 43 | label: Environment (环境信息) 44 | description: | 45 | > server version could get from [rest-api](https://hugegraph.github.io/hugegraph-doc/clients/restful-api/other.html) (http://localhost:8080/versions) 46 | value: | 47 | - Server Version: v0.11.x 48 | - Tools Version: v1.x 49 | - Data Size: xx vertices, xx edges 50 | validations: 51 | required: true 52 | 53 | - type: textarea 54 | attributes: 55 | label: Expected & Actual behavior (期望与实际表现) 56 | description: | 57 | > we can refer [How to create a minimal reproducible Example](https://stackoverflow.com/help/minimal-reproducible-example) (如何提供最简的可复现用例) 58 | > if possible, please provide screenshots or GIF (请提供清晰的截图, 动图录屏更佳) 59 | placeholder: | 60 | type the main problem here 61 | 62 | ```java 63 | // Exception / Error info (尽可能详细的日志 + 完整异常栈) 64 | 65 | ``` 66 | validations: 67 | required: true 68 | 69 | - type: textarea 70 | attributes: 71 | label: Vertex/Edge example (问题点 / 边数据举例) 72 | description: | 73 | > 如果问题与具体的点 / 边数据相关, 请提供完整的`原始数据 + REST-API 查询结果` 74 | placeholder: | 75 | // Origin data 76 | tom 18 boy HK 77 | 78 | // JSON of Vertex / Edge 79 | { 80 | "vertex": { "id": "xxx" } 81 | } 82 | render: javascript 83 | 84 | - type: textarea 85 | attributes: 86 | label: Schema [VertexLabel, EdgeLabel, IndexLabel] (元数据结构) 87 | description: | 88 | > 如果问题与具体的点类型 / 边类型 / 索引类型相关, 请提供完整的 `Schema 返回 JSON 结果` 89 | placeholder: | 90 | // Query URL 91 | GET http://localhost:8080/graphs/hugegraph/schema/vertexlabels 92 | 93 | // JSON of GraphSchema 94 | { 95 | "vertex": { "id": "xxx" } 96 | } 97 | render: javascript 98 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | blank_issues_enabled: false 2 | 3 | # 设置提 issue 前的参考文档 4 | contact_links: 5 | - name: HugeGraph Tools Doc 6 | url: https://hugegraph.github.io/hugegraph-doc/quickstart/hugegraph-tools.html 7 | about: Please search question here before opening a new issue 8 | - name: HugeGraph API Doc 9 | url: https://hugegraph.github.io/hugegraph-doc/clients/hugegraph-api.html 10 | about: Please search usage here before opening a new issue 11 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.yml: -------------------------------------------------------------------------------- 1 | name: Feature request (新需求 / 功能) 2 | description: Give an idea for HugeGraph 3 | title: '[Feature] describe the new feature' 4 | labels: 5 | - feature 6 | 7 | body: 8 | - type: textarea 9 | attributes: 10 | label: Feature Description (功能描述) 11 | description: | 12 | > 请简要描述新功能 / 需求的使用场景或上下文, 最好能给个具体的例子说明 13 | placeholder: type the feature description here 14 | validations: 15 | required: true 16 | -------------------------------------------------------------------------------- 
/.github/ISSUE_TEMPLATE/question_ask.yml: -------------------------------------------------------------------------------- 1 | name: Ask question (提问) 2 | description: Question about usage or configs in HugeGraph 3 | title: '[Question] describe your problem' 4 | 5 | body: 6 | - type: markdown 7 | attributes: 8 | value: >- 9 | ### Note (特别注意) : 10 | 11 | > 1. 请先**搜索**现有的[Server-Issues](https://github.com/hugegraph/hugegraph/issues) 与 12 | [Tools-Issues](https://github.com/hugegraph/hugegraph-tools/issues) 中没有与您相同 13 | / 相关的问题 (请勿重复提交) 14 | 15 | > 2. 我们需要尽可能**详细**的信息来**复现**问题, 越详细的信息 (包括**日志 / 截图 / 配置**等) 16 | 会**越快**被响应和处理 17 | 18 | > 3. Issue 标题请保持原有模板分类(例如:`[Bug]`), 长段描述之间可以增加`空行`或使用`序号`标记, 保持排版清晰 19 | 20 | > 4. 请在对应的模块提交 issue, 缺乏有效信息 / 长时间 (> 14 天) 没有回复的 issue 可能会被 **关闭** 21 | (更新时会再开启) 22 | 23 | - type: dropdown 24 | attributes: 25 | label: Problem Type (问题类型) 26 | options: 27 | - exception / error (异常报错) 28 | - data inconsistency (备份 / 迁移后不合预期) 29 | - logic (逻辑设计问题) 30 | - configs (配置项 / 文档相关) 31 | - performence (性能优化) 32 | - others (please edit later) 33 | 34 | - type: checkboxes 35 | attributes: 36 | label: Before submit 37 | options: 38 | - label: 我已经确认现有的 [Server-Issues](https://github.com/hugegraph/hugegraph/issues) 与 [Tools-Issues](https://github.com/hugegraph/hugegraph-tools/issues) 中没有相同 / 重复问题 39 | required: true 40 | 41 | - type: textarea 42 | attributes: 43 | label: Environment (环境信息) 44 | description: | 45 | > server version could get from [rest-api](https://hugegraph.github.io/hugegraph-doc/clients/restful-api/other.html) (http://localhost:8080/versions) 46 | value: | 47 | - Server Version: v0.11.x 48 | - Tools Version: v1.x 49 | - Data Size: xx vertices, xx edges 50 | validations: 51 | required: true 52 | 53 | - type: textarea 54 | attributes: 55 | label: Your Question (问题描述) 56 | description: | 57 | > 图使用 / 配置相关问题,请优先参考 [REST-API 文档](https://hugegraph.github.io/hugegraph-doc/clients/hugegraph-api.html), 以及 [Server 配置文档](https://hugegraph.github.io/hugegraph-doc/config/config-option.html) 58 | > if possible, please provide screenshots or GIF (请提供清晰的截图, 动图录屏更佳) 59 | placeholder: | 60 | type the main problem here 61 | 62 | ```java 63 | // Exception / Error info (尽可能详细的日志 + 完整异常栈) 64 | 65 | ``` 66 | validations: 67 | required: true 68 | 69 | - type: textarea 70 | attributes: 71 | label: Vertex/Edge example (问题点 / 边数据举例) 72 | description: | 73 | > 如果问题与具体的点 / 边数据相关, 请提供完整的`原始数据 + REST-API 查询结果` 74 | placeholder: | 75 | // Origin data 76 | tom 18 boy HK 77 | 78 | // JSON of Vertex / Edge 79 | { 80 | "vertex": { "id": "xxx" } 81 | } 82 | render: javascript 83 | 84 | - type: textarea 85 | attributes: 86 | label: Schema [VertexLabel, EdgeLabel, IndexLabel] (元数据结构) 87 | description: | 88 | > 如果问题与具体的点类型 / 边类型 / 索引类型相关, 请提供完整的 `Schema 返回 JSON 结果` 89 | placeholder: | 90 | // Query URL 91 | GET http://localhost:8080/graphs/hugegraph/schema/vertexlabels 92 | 93 | // JSON of GraphSchema 94 | { 95 | "vertex": { "id": "xxx" } 96 | } 97 | render: javascript 98 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: hugegraph-tools ci 2 | on: 3 | push: 4 | branches: 5 | - master 6 | - /^release-.*$/ 7 | - /^test-.*$/ 8 | pull_request: 9 | branches: 10 | - master 11 | - /^release-.*$/ 12 | - /^test-.*$/ 13 | jobs: 14 | build: 15 | runs-on: ubuntu-20.04 16 | env: 17 | TRAVIS_DIR: assembly/travis 18 | COMMIT_ID: 
1d031c5905cbef008dd5fb468576b0e6a9445181 19 | steps: 20 | - name: Install JDK 8 21 | uses: actions/setup-java@v2 22 | with: 23 | java-version: '8' 24 | distribution: 'zulu' 25 | - name: Cache Maven packages 26 | uses: actions/cache@v2 27 | with: 28 | path: ~/.m2 29 | key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} 30 | restore-keys: ${{ runner.os }}-m2 31 | - name: Checkout 32 | uses: actions/checkout@v2 33 | with: 34 | fetch-depth: 2 35 | - name: Compile 36 | run: | 37 | mvn compile -Dmaven.javadoc.skip=true 38 | - name: Prepare env and service 39 | run: | 40 | $TRAVIS_DIR/install-hugegraph-from-source.sh $COMMIT_ID 41 | - name: Run test 42 | run: | 43 | mvn test -Dtest=FuncTestSuite 44 | - name: Upload coverage to Codecov 45 | uses: codecov/codecov-action@v1 46 | with: 47 | file: target/jacoco.xml 48 | -------------------------------------------------------------------------------- /.github/workflows/stale.yml: -------------------------------------------------------------------------------- 1 | name: Mark stale issues and pull requests 2 | 3 | on: 4 | schedule: 5 | - cron: "0 21 * * *" 6 | 7 | jobs: 8 | stale: 9 | 10 | runs-on: ubuntu-latest 11 | permissions: 12 | issues: write 13 | pull-requests: write 14 | 15 | steps: 16 | - uses: actions/stale@v3 17 | with: 18 | repo-token: ${{ secrets.GITHUB_TOKEN }} 19 | stale-issue-message: 'Due to the lack of activity, the current issue is marked as stale and will be closed after 20 days, any update will remove the stale label' 20 | stale-pr-message: 'Due to the lack of activity, the current pr is marked as stale and will be closed after 180 days, any update will remove the stale label' 21 | stale-issue-label: 'inactive' 22 | stale-pr-label: 'inactive' 23 | exempt-issue-labels: 'feature,bug,enhancement,improvement,wontfix,todo' 24 | 25 | days-before-issue-stale: 15 26 | days-before-issue-close: 20 27 | days-before-pr-stale: 30 28 | days-before-pr-close: 180 29 | operations-per-run: 10 30 | start-date: '2018-12-01T00:00:00Z' 31 | 32 | exempt-all-assignees: true 33 | remove-stale-when-updated: true 34 | exempt-all-pr-milestones: true 35 | delete-branch: false 36 | enable-statistics: true 37 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # maven ignore 2 | target/ 3 | output/ 4 | # *.jar 5 | *.war 6 | *.zip 7 | *.tar 8 | *.tar.gz 9 | tree.txt 10 | 11 | # eclipse ignore 12 | .settings/ 13 | .project 14 | .classpath 15 | 16 | # idea ignore 17 | .idea/ 18 | *.ipr 19 | *.iml 20 | *.iws 21 | 22 | # temp ignore 23 | *.log 24 | *.cache 25 | *.diff 26 | *.patch 27 | *.tmp 28 | 29 | # system ignore 30 | .DS_Store 31 | Thumbs.db 32 | 33 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: java 2 | 3 | jdk: 4 | - openjdk8 5 | 6 | sudo: required 7 | 8 | branches: 9 | only: 10 | - master 11 | - /^release-.*$/ 12 | - /^test-.*$/ 13 | 14 | install: mvn compile -Dmaven.javadoc.skip=true | grep -v "Downloading\|Downloaded" 15 | 16 | before_script: 17 | - $TRAVIS_DIR/install-hugegraph.sh $TRAVIS_BRANCH | grep -v "Downloading\|Downloaded" 18 | 19 | script: 20 | - mvn test -Dtest=FuncTestSuite 21 | 22 | after_success: 23 | - bash <(curl -s https://codecov.io/bash) 24 | 25 | env: 26 | global: 27 | - TRAVIS_DIR=assembly/travis 28 | 
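The GitHub Actions workflow above compiles the tools, installs a HugeGraph server built from source at a pinned commit, and runs the functional test suite; the legacy .travis.yml follows the same pattern. A minimal sketch of reproducing those steps locally, assuming JDK 8, Maven and a checked-out working copy; the commit-id placeholder stands for the value pinned as COMMIT_ID in ci.yml:

# Compile the tools without generating javadoc, as the workflow does
mvn compile -Dmaven.javadoc.skip=true

# Build and start a HugeGraph server from source at the pinned commit
TRAVIS_DIR=assembly/travis
$TRAVIS_DIR/install-hugegraph-from-source.sh <pinned-commit-id>

# Run the functional test suite against the started server
mvn test -Dtest=FuncTestSuite
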
-------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 
61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 
179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright {yyyy} {name of copyright owner} 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | 203 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # HugeGraph-Tools 2 | 3 | HugeGraph-Tools is a customizable command line utility for deploying, managing and backing up/restoring graphs from HugeGraph database. 4 | 5 | ## Main Functions 6 | 7 | - Deploy and clear HugeGraph-Server and HugeGraph-Studio automatically. 8 | - Manage graphs and query with Gremlin from multiple HugeGraph databases essily. 9 | - Backup/restore graph schema and graph data from/to HugeGraph databases conveniently, also support backup periodically 10 | 11 | ## Learn More 12 | 13 | The [project homepage](https://hugegraph.github.io/hugegraph-doc/quickstart/hugegraph-tools.html) contains more information about HugeGraph-Tools. 14 | 15 | ## License 16 | 17 | HugeGraph-Tools is licensed under Apache 2.0 License. -------------------------------------------------------------------------------- /assembly/bin/backup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | function abs_path() { 4 | SOURCE="${BASH_SOURCE[0]}" 5 | while [ -h "$SOURCE" ]; do 6 | DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )" 7 | SOURCE="$(readlink "$SOURCE")" 8 | [[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" 9 | done 10 | echo "$( cd -P "$( dirname "$SOURCE" )" && pwd )" 11 | } 12 | 13 | BIN=`abs_path` 14 | cd $BIN 15 | 16 | # do backup 17 | bash $BIN/hugegraph ${*:1:$(($#-2))}"/hugegraph-backup-`date +%y%m%d%H%M`/" 18 | 19 | DIR=`eval echo '${'$(($#-2))'}'` 20 | NUM=`eval echo '${'$#'}'` 21 | # delete redundant backups if needed 22 | for i in `ls -lt $DIR | grep -v "total" | grep "hugegraph-backup-" | awk -v awkNum="$NUM" '{if(NR>awkNum){print $9}}'` 23 | do 24 | rm -fr "$DIR/$i" 25 | done 26 | -------------------------------------------------------------------------------- /assembly/bin/clear.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | INSTALL_PATH="" 4 | 5 | function print_usage() { 6 | echo "USAGE: $0 -p {install-path}" 7 | echo "eg : $0 -p ." 8 | } 9 | 10 | while getopts "p:" arg; do 11 | case ${arg} in 12 | p) INSTALL_PATH="$OPTARG" ;; 13 | ?) 
print_usage && exit 1 ;; 14 | esac 15 | done 16 | 17 | if [ "$INSTALL_PATH" = "" ]; then 18 | print_usage 19 | exit 1 20 | fi 21 | 22 | # Check path exist 23 | if [ ! -d "${INSTALL_PATH}" ]; then 24 | echo "Package storage directory '${INSTALL_PATH}' doesn't exist" 25 | fi 26 | # Check for write permission 27 | if [ ! -w "${INSTALL_PATH}" ]; then 28 | echo "No write permission on directory '${INSTALL_PATH}'" 29 | exit 1 30 | fi 31 | 32 | INSTALL_PATH="$(cd ${INSTALL_PATH} && pwd)" 33 | 34 | function abs_path() { 35 | SOURCE="${BASH_SOURCE[0]}" 36 | while [ -h "$SOURCE" ]; do 37 | DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )" 38 | SOURCE="$(readlink "$SOURCE")" 39 | [[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" 40 | done 41 | echo "$( cd -P "$( dirname "$SOURCE" )" && pwd )" 42 | } 43 | 44 | BIN=`abs_path` 45 | . ${BIN}/util.sh 46 | 47 | cd ${BIN} 48 | 49 | SERVER_RELEASE_PREFIX="hugegraph" 50 | STUDIO_RELEASE_PREFIX="hugegraph-studio" 51 | 52 | function ensure_no_process() { 53 | local path=$1 54 | local prefix=$2 55 | 56 | for file in `ls ${path}`; do 57 | file=${path}/${file} 58 | if [[ -d "${file}" && "${file}" =~ "${prefix}" ]]; then 59 | p_name=${file} 60 | process_status "${p_name}" >/dev/null 61 | if [ $? -eq 0 ]; then 62 | echo "Exist process corresponding to the directory '${file}', please stop it before clearing" 63 | exit 1 64 | fi 65 | fi 66 | done 67 | } 68 | 69 | ensure_no_process ${INSTALL_PATH} ${SERVER_RELEASE_PREFIX} 70 | ensure_no_process ${INSTALL_PATH} ${STUDIO_RELEASE_PREFIX} 71 | 72 | for file in ${INSTALL_PATH}/${SERVER_RELEASE_PREFIX}*; do 73 | remove_with_prompt "${file}" 74 | done 75 | 76 | for file in ${INSTALL_PATH}/${STUDIO_RELEASE_PREFIX}*; do 77 | remove_with_prompt "${file}" 78 | done 79 | -------------------------------------------------------------------------------- /assembly/bin/deploy.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | VERSION="" 4 | INSTALL_PATH="" 5 | DOWNLOAD_LINK_PREFIX="" 6 | DEFAULT_DOWNLOAD_LINK_PREFIX="https://github.com/hugegraph" 7 | DOWNLOAD_LINK_PREFIX_CONFIG=`env | grep ^HOME= | cut -c 6-`"/hugegraph-download-url-prefix" 8 | 9 | function print_usage() { 10 | echo "USAGE: $0 -v {hugegraph-version} -p {install-path} [-u {download-path-prefix}]" 11 | echo "eg : $0 -v 0.8 -p ./ [-u http://xxx]" 12 | } 13 | 14 | while getopts "v:p:u:" arg; do 15 | case ${arg} in 16 | v) VERSION="$OPTARG" ;; 17 | p) INSTALL_PATH="$OPTARG" ;; 18 | u) DOWNLOAD_LINK_PREFIX="$OPTARG" ;; 19 | ?) print_usage && exit 1 ;; 20 | esac 21 | done 22 | 23 | if [[ "$VERSION" = "" || "$INSTALL_PATH" = "" ]]; then 24 | print_usage 25 | exit 1 26 | fi 27 | 28 | if [[ "$DOWNLOAD_LINK_PREFIX" = "" ]]; then 29 | if [ -f ${DOWNLOAD_LINK_PREFIX_CONFIG} ]; then 30 | DOWNLOAD_LINK_PREFIX=`sed -n "1p" ${DOWNLOAD_LINK_PREFIX_CONFIG}` 31 | else 32 | DOWNLOAD_LINK_PREFIX=${DEFAULT_DOWNLOAD_LINK_PREFIX} 33 | fi 34 | else 35 | echo ${DOWNLOAD_LINK_PREFIX} > ${DOWNLOAD_LINK_PREFIX_CONFIG} 36 | fi 37 | 38 | function abs_path() { 39 | SOURCE="${BASH_SOURCE[0]}" 40 | while [ -h "$SOURCE" ]; do 41 | DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )" 42 | SOURCE="$(readlink "$SOURCE")" 43 | [[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" 44 | done 45 | echo "$( cd -P "$( dirname "$SOURCE" )" && pwd )" 46 | } 47 | 48 | BIN=`abs_path` 49 | . 
${BIN}/util.sh 50 | 51 | `ensure_path_writable ${INSTALL_PATH}` 52 | 53 | # Convert to absolute path 54 | INSTALL_PATH="$(cd ${INSTALL_PATH} && pwd)" 55 | 56 | cd ${BIN} 57 | 58 | # Check input version can be found in version-map.yaml 59 | OPTIONAL_VERSIONS=`cat version-map.yaml | grep 'version' | awk -F ':' '{print $1}' | xargs` 60 | if [[ ! "$OPTIONAL_VERSIONS" =~ "$VERSION" ]]; then 61 | echo "Invalid version '${VERSION}' for hugegraph, the optional values are [$OPTIONAL_VERSIONS]" 62 | exit 1 63 | fi 64 | 65 | # Parse module version from 'version-map.yaml' 66 | SERVER_VERSION=`parse_yaml version-map.yaml "${VERSION}" "server"` 67 | if [ "$SERVER_VERSION" = "" ]; then 68 | echo "Not found the key '$VERSION.server' in version-map.yaml" 69 | exit 1 70 | fi 71 | STUDIO_VERSION=`parse_yaml version-map.yaml "${VERSION}" "studio"` 72 | if [ "$STUDIO_VERSION" = "" ]; then 73 | echo "Not found the key '$VERSION.studio' in version-map.yaml" 74 | exit 1 75 | fi 76 | 77 | # Download and unzip 78 | ARCHIVE_FORMAT=".tar.gz" 79 | 80 | # HugeGraphServer dir and tar package name 81 | SERVER_DIR="hugegraph-${SERVER_VERSION}" 82 | SERVER_TAR=${SERVER_DIR}${ARCHIVE_FORMAT} 83 | 84 | # HugeGraphStudio dir and tar package name 85 | STUDIO_DIR="hugegraph-studio-${STUDIO_VERSION}" 86 | STUDIO_TAR=${STUDIO_DIR}${ARCHIVE_FORMAT} 87 | 88 | SERVER_DOWNLOAD_URL="${DOWNLOAD_LINK_PREFIX}/hugegraph/releases/download/v${SERVER_VERSION}/${SERVER_TAR}" 89 | STUDIO_DOWNLOAD_URL="${DOWNLOAD_LINK_PREFIX}/hugegraph-studio/releases/download/v${STUDIO_VERSION}/${STUDIO_TAR}" 90 | 91 | ensure_package_exist $INSTALL_PATH $SERVER_DIR $SERVER_TAR $SERVER_DOWNLOAD_URL 92 | ensure_package_exist $INSTALL_PATH $STUDIO_DIR $STUDIO_TAR $STUDIO_DOWNLOAD_URL 93 | 94 | IP=`get_ip` 95 | 96 | function config_hugegraph_server() { 97 | local rest_server_conf="$SERVER_DIR/conf/rest-server.properties" 98 | local server_url="http://$IP:8080" 99 | 100 | write_property $rest_server_conf "restserver\.url" $server_url 101 | } 102 | 103 | function config_hugegraph_studio() { 104 | local studio_server_conf="$STUDIO_DIR/conf/hugegraph-studio.properties" 105 | 106 | write_property $studio_server_conf "studio\.server\.host" $IP 107 | write_property $studio_server_conf "graph\.server\.host" $IP 108 | } 109 | 110 | cd ${INSTALL_PATH} 111 | config_hugegraph_server 112 | config_hugegraph_studio 113 | 114 | ${SERVER_DIR}/bin/init-store.sh 115 | 116 | ${BIN}/start-all.sh -v ${VERSION} -p ${INSTALL_PATH} 117 | -------------------------------------------------------------------------------- /assembly/bin/hugegraph: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Set environment here if needed 4 | #export HUGEGRAPH_URL= 5 | #export HUGEGRAPH_GRAPH= 6 | #export HUGEGRAPH_USERNAME= 7 | #export HUGEGRAPH_PASSWORD= 8 | #export HUGEGRAPH_TIMEOUT= 9 | #export HUGEGRAPH_TRUST_STORE_FILE= 10 | #export HUGEGRAPH_TRUST_STORE_PASSWORD= 11 | 12 | abs_path() { 13 | SOURCE="${BASH_SOURCE[0]}" 14 | while [ -h "$SOURCE" ]; do 15 | DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )" 16 | SOURCE="$(readlink "$SOURCE")" 17 | [[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" 18 | done 19 | echo "$( cd -P "$( dirname "$SOURCE" )" && pwd )" 20 | } 21 | 22 | BIN=`abs_path` 23 | TOP="$(cd $BIN/../ && pwd)" 24 | LIB=$TOP/lib:$JAVA_HOME/lib/ext:$JAVA_HOME/jre/lib/ext 25 | 26 | . 
$BIN/util.sh 27 | 28 | MAX_MEM=$[96*1024] 29 | MIN_MEM=512 30 | 31 | if [ -n "$JAVA_HOME" ]; then 32 | JAVA="$JAVA_HOME"/bin/java 33 | else 34 | JAVA=java 35 | echo "Warning: please set JAVA_HOME variable, otherwise some libraries related to https may be missing" 36 | fi 37 | 38 | if [ "$1" == "deploy" ]; then 39 | shift 40 | bash $BIN/deploy.sh $@ 41 | exit $? 42 | fi 43 | 44 | if [ "$1" == "start-all" ]; then 45 | shift 46 | bash $BIN/start-all.sh $@ 47 | exit $? 48 | fi 49 | 50 | if [ "$1" == "clear" ]; then 51 | shift 52 | bash $BIN/clear.sh $@ 53 | exit $? 54 | fi 55 | 56 | if [ "$1" == "stop-all" ]; then 57 | bash $BIN/stop-all.sh 58 | exit $? 59 | fi 60 | 61 | # Set default environment variables if not exist 62 | echo $* | grep "\--url" > /dev/null 2>&1 63 | if [ $? -ne 0 -a -n "$HUGEGRAPH_URL" ]; then 64 | URL_ARG="--url $HUGEGRAPH_URL" 65 | else 66 | URL_ARG="" 67 | fi 68 | 69 | echo $* | grep "\--graph " > /dev/null 2>&1 70 | if [ $? -ne 0 -a -n "$HUGEGRAPH_GRAPH" ]; then 71 | GRAPH_ARG="--graph $HUGEGRAPH_GRAPH" 72 | else 73 | GRAPH_ARG="" 74 | fi 75 | 76 | echo $* | grep "\--user" > /dev/null 2>&1 77 | if [ $? -ne 0 -a -n "$HUGEGRAPH_USERNAME" ]; then 78 | USER_ARG="--user $HUGEGRAPH_USERNAME" 79 | else 80 | USER_ARG="" 81 | fi 82 | 83 | echo $* | grep "\--password" > /dev/null 2>&1 84 | if [ $? -ne 0 -a -n "$HUGEGRAPH_PASSWORD" ]; then 85 | PASSWORD_ARG="--password $HUGEGRAPH_PASSWORD" 86 | else 87 | PASSWORD_ARG="" 88 | fi 89 | 90 | echo $* | grep "\--timeout" > /dev/null 2>&1 91 | if [ $? -ne 0 -a -n "$HUGEGRAPH_TIMEOUT" ]; then 92 | TIMEOUT_ARG="--timeout $HUGEGRAPH_TIMEOUT" 93 | else 94 | TIMEOUT_ARG="" 95 | fi 96 | 97 | echo $* | grep "\--trust-store-file" > /dev/null 2>&1 98 | if [ $? -ne 0 -a -n "$HUGEGRAPH_TRUST_STORE_FILE" ]; then 99 | TRUST_STORE_FILE_ARG="--trust-store-file $HUGEGRAPH_TRUST_STORE_FILE" 100 | else 101 | TRUST_STORE_FILE_ARG="" 102 | fi 103 | 104 | echo $* | grep "\--trust-store-password" > /dev/null 2>&1 105 | if [ $? -ne 0 -a -n "$HUGEGRAPH_TRUST_STORE_PASSWORD" ]; then 106 | TRUST_STORE_PASSWORD_ARG="--trust-store-password $HUGEGRAPH_TRUST_STORE_PASSWORD" 107 | else 108 | TRUST_STORE_PASSWORD_ARG="" 109 | fi 110 | 111 | for ((i=1;i<=$#;i++)); 112 | do 113 | if [ "`eval echo '$'"$i"`" == "schedule-backup" ]; then 114 | ARGS=${*//schedule-backup/} 115 | bash $BIN/schedule-backup.sh $URL_ARG $GRAPH_ARG $USER_ARG $PASSWORD_ARG $TIMEOUT_ARG $TRUST_STORE_FILE_ARG $TRUST_STORE_PASSWORD_ARG ${ARGS//'*'/'\*'} 116 | exit $? 117 | fi 118 | done 119 | 120 | # Set Java options 121 | if [ "$JAVA_OPTIONS" = "" ] ; then 122 | XMX=`calc_xmx $MIN_MEM $MAX_MEM` 123 | if [ $? 
-ne 0 ]; then 124 | echo "Failed to run bin/hugegraph, requires at least ${MIN_MEM}m free memory" 125 | exit 1 126 | fi 127 | JAVA_OPTIONS="-Xms${MIN_MEM}m -Xmx${XMX}m" 128 | fi 129 | 130 | exec $JAVA -Dtools.home.path=${TOP} $JAVA_OPTIONS -cp $LIB/hugegraph-tools-*.jar -Djava.ext.dirs=$LIB/ \ 131 | com.baidu.hugegraph.cmd.HugeGraphCommand $URL_ARG $GRAPH_ARG $USER_ARG $PASSWORD_ARG $TIMEOUT_ARG $TRUST_STORE_FILE_ARG $TRUST_STORE_PASSWORD_ARG "$@" 132 | -------------------------------------------------------------------------------- /assembly/bin/keystore: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/apache/incubator-hugegraph-tools/668333a877106a9028174ca239655e491dcb2c5f/assembly/bin/keystore -------------------------------------------------------------------------------- /assembly/bin/release.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | BASE_BRANCH="master" 4 | 5 | # Checkout a new release-branch and bump base branch version 6 | function print_usage() { 7 | echo "USAGE: $0 -d {project-dir} [-o {base-branch}] -n {release-branch}" 8 | echo "eg : $0 -d ~/workspace/hugegraph [-o master2] -n release-1.6" 9 | } 10 | 11 | while getopts "d:n:o:" arg; do 12 | case ${arg} in 13 | d) PROJECT_DIR="$OPTARG" ;; 14 | n) RELEASE_BRANCH="$OPTARG" ;; 15 | o) BASE_BRANCH="$OPTARG" ;; 16 | ?) print_usage && exit 1 ;; 17 | esac 18 | done 19 | 20 | if [[ "$PROJECT_DIR" = "" || "$RELEASE_BRANCH" = "" || "$BASE_BRANCH" = "" ]]; then 21 | print_usage 22 | exit 1 23 | fi 24 | 25 | function abs_path() { 26 | SOURCE="${BASH_SOURCE[0]}" 27 | while [ -h "$SOURCE" ]; do 28 | DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )" 29 | SOURCE="$(readlink "$SOURCE")" 30 | [[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" 31 | done 32 | echo "$( cd -P "$( dirname "$SOURCE" )" && pwd )" 33 | } 34 | 35 | function ensure_path_writable() { 36 | local path=$1 37 | # Ensure input path exist 38 | if [ ! -d "${path}" ]; then 39 | mkdir -p ${path} 40 | if [ $? -ne 0 ]; then 41 | echo "Failed to mkdir $path" 42 | exit 1 43 | fi 44 | fi 45 | # Check for write permission 46 | if [ ! -w "${path}" ]; then 47 | echo "No write permission on directory ${path}" 48 | exit 1 49 | fi 50 | } 51 | 52 | function replace() { 53 | file=$1 54 | from=$2 55 | to=$3 56 | 57 | local os=`uname` 58 | case $os in 59 | Darwin) sed -i '' "s!$from!$to!g" "$file" >/dev/null 2>&1 ;; 60 | *) sed -i "s!$from!$to!g" "$file" >/dev/null 2>&1 ;; 61 | esac 62 | } 63 | 64 | # Ensure the project dir is valid 65 | ensure_path_writable $PROJECT_DIR 66 | 67 | cd $PROJECT_DIR 68 | 69 | ################################################################################ 70 | # Checkout release branch 71 | ################################################################################ 72 | git checkout $BASE_BRANCH >/dev/null 2>&1 73 | if [ $? -ne 0 ]; then 74 | echo "Failed to checkout old branch $BASE_BRANCH" 75 | exit 1 76 | fi 77 | 78 | git diff --quiet HEAD 79 | if [ $? -ne 0 ]; then 80 | echo "There are uncommited changes in branch $BASE_BRANCH" 81 | exit 1 82 | fi 83 | 84 | # Check new branch exist? 85 | git rev-parse --verify $RELEASE_BRANCH >/dev/null 2>&1 86 | if [ $? -eq 0 ]; then 87 | git checkout $RELEASE_BRANCH >/dev/null 2>&1 88 | else 89 | git checkout -b $RELEASE_BRANCH >/dev/null 2>&1 90 | fi 91 | 92 | if [ $? 
-ne 0 ]; then 93 | echo "Failed to checkout new branch $RELEASE_BRANCH" 94 | exit 1 95 | fi 96 | 97 | ################################################################################ 98 | # Modify README.md 99 | ################################################################################ 100 | README="README.md" 101 | RELEASE_VERSION=`cat "pom.xml" | grep "" | head -1 | awk -F '<|>' '{print $3}'` 102 | 103 | function update_readme_maven_version() { 104 | # Append version to maven repository link 105 | # Extract the Maven Central line, then split the badge and dependency link 106 | MAVEN_LINK_LINE=`cat $README | grep 'Maven Central'` 107 | OLD_MAVEN_BADGE_LINK=`echo $MAVEN_LINK_LINE | awk -F '[\\(|\\)]' '{print $2}'` 108 | OLD_MAVEN_DEPEN_LINK=`echo $MAVEN_LINK_LINE | awk -F '[\\(|\\)]' '{print $4}'` 109 | 110 | # Replace or append the branch name in maven badge link 111 | if [[ "$OLD_MAVEN_BADGE_LINK" =~ .*\?version=.* ]]; then 112 | NEW_MAVEN_BADGE_LINK=${OLD_MAVEN_BADGE_LINK/version=*/version=${RELEASE_VERSION}} 113 | else 114 | NEW_MAVEN_BADGE_LINK="$OLD_MAVEN_BADGE_LINK?version=$RELEASE_VERSION" 115 | fi 116 | 117 | # Replace or append the version in maven dependency link 118 | if [[ "$OLD_MAVEN_DEPEN_LINK" =~ .*/[0-9]\.[0-9]\.[0-9]$ ]]; then 119 | NEW_MAVEN_DEPEN_LINK=${OLD_MAVEN_DEPEN_LINK/[0-9]\.[0-9]\.[0-9]/${RELEASE_VERSION}} 120 | else 121 | NEW_MAVEN_DEPEN_LINK="$OLD_MAVEN_DEPEN_LINK/$RELEASE_VERSION" 122 | fi 123 | 124 | replace $README "$OLD_MAVEN_BADGE_LINK" "$NEW_MAVEN_BADGE_LINK" 125 | replace $README "$OLD_MAVEN_DEPEN_LINK" "$NEW_MAVEN_DEPEN_LINK" 126 | } 127 | 128 | function check_update_readme_status() { 129 | local tag=$1 130 | local result=$2 131 | 132 | if [ $result -eq 0 ]; then 133 | echo "Modify $README '$tag' successfully" 134 | else 135 | echo "Modify $README '$tag' failed" 136 | exit 1 137 | fi 138 | } 139 | 140 | if [ ! -f "$README" ]; then 141 | echo "Skipping modify $README" 142 | else 143 | echo "Checkout to branch $RELEASE_BRANCH, ready to modify $README" 144 | if [ `grep -c "Build Status" "$README"` -eq 1 ]; then 145 | # Replace old branch with new 146 | replace $README "branch=$BASE_BRANCH" "branch=$RELEASE_BRANCH" 147 | check_update_readme_status "Build Status" $? 148 | fi 149 | if [ `grep -c "codecov" "$README"` -eq 1 ]; then 150 | # Replace old branch with new 151 | replace $README "branch/$BASE_BRANCH" "branch/$RELEASE_BRANCH" 152 | check_update_readme_status "codecov" $? 153 | fi 154 | if [ `grep -c "Maven Central" "$README"` -eq 1 ]; then 155 | update_readme_maven_version 156 | check_update_readme_status "Maven Central" $? 157 | fi 158 | fi 159 | 160 | git diff --quiet HEAD 161 | if [ $? -ne 0 ]; then 162 | # Git commit in release branch 163 | git commit -a -m "HugeGraph-1358: Release $RELEASE_VERSION" >/dev/null 2>&1 || exit 1 164 | echo "Add a commit in branch $RELEASE_BRANCH, remember to push" 165 | else 166 | echo "Nothing modified for branch $RELEASE_BRANCH" 167 | fi 168 | 169 | ################################################################################ 170 | # Checkout to base branch and bump version 171 | ################################################################################ 172 | function update_hugegraph_version() { 173 | # Second digit plus 1 174 | BUMP_VERSION=`echo $RELEASE_VERSION | awk -F '.' 
'{prefix=$2}END{print $1"."prefix+1".0"}'` 175 | CORE_POM_XML_FILE="$PROJECT_DIR/hugegraph-core/pom.xml" 176 | CORE_VERSION_JAVA_FILE="$PROJECT_DIR/hugegraph-core/src/main/java/com/baidu/hugegraph/version/CoreVersion.java" 177 | API_VERSION_JAVA_FILE="$PROJECT_DIR/hugegraph-api/src/main/java/com/baidu/hugegraph/version/ApiVersion.java" 178 | # Replace Implementation-Version in core pom.xml 179 | replace $CORE_POM_XML_FILE ".*" \ 180 | "$BUMP_VERSION.0" || return 1 181 | # Replace version in CoreVersion.java 182 | replace $CORE_VERSION_JAVA_FILE "Version.of(CoreVersion.class, \".*\")" \ 183 | "Version.of(CoreVersion.class, \"$BUMP_VERSION\")" || return 1 184 | # Replace version in ApiVersion.java 185 | # Extract the first two digits of the version number 186 | MIN_VERSION=`echo $BUMP_VERSION | awk -F '.' '{print $1"."$2}'` 187 | # Second digit plus 1 188 | MAX_VERSION=`echo $BUMP_VERSION | awk -F '.' '{prefix=$2}END{print $1"."prefix+1}'` 189 | replace $API_VERSION_JAVA_FILE "VersionUtil.check(CoreVersion.VERSION, \".*\", \".*\", CoreVersion.NAME);" \ 190 | "VersionUtil.check(CoreVersion.VERSION, \"$MIN_VERSION\", \"$MAX_VERSION\", CoreVersion.NAME);" || return 1 191 | } 192 | 193 | function update_general_component_version() { 194 | # Third digit plus 1 195 | BUMP_VERSION=`echo $RELEASE_VERSION | awk -F '.' '{prefix=$3}END{print $1"."$2"."prefix+1}'` 196 | POM_XML_FILE="$PROJECT_DIR/pom.xml" 197 | # Replace Implementation-Version in pom.xml 198 | replace $POM_XML_FILE ".*" \ 199 | "$BUMP_VERSION.0" || return 1 200 | } 201 | 202 | function check_update_version_status() { 203 | local artifact=$1 204 | local result=$2 205 | 206 | if [ $result -eq 0 ]; then 207 | echo "Bump up artifact '$artifact' implementation version successfully" 208 | else 209 | echo "Bump up artifact '$artifact' implementation version failed" 210 | exit 1 211 | fi 212 | } 213 | 214 | git checkout $BASE_BRANCH >/dev/null 2>&1 || exit 1 215 | echo "Checkout to branch $BASE_BRANCH, ready to bump pom version" 216 | 217 | if [ -f "pom.xml" ]; then 218 | ARTIFACT=`cat "pom.xml" | grep "" | head -1 | awk -F '<|>' '{print $3}'` 219 | # Bump up maven implementation version 220 | if [ "$ARTIFACT" = "hugegraph" ]; then 221 | update_hugegraph_version 222 | check_update_version_status $ARTIFACT $? 223 | else 224 | update_general_component_version 225 | check_update_version_status $ARTIFACT $? 226 | fi 227 | 228 | # Bump up maven version 229 | mvn versions:set -DnewVersion=$BUMP_VERSION >/dev/null 2>&1 && mvn versions:commit >/dev/null 2>&1 230 | if [ $? -eq 0 ]; then 231 | echo "Bump up artifact '$ARTIFACT' version successfully" 232 | else 233 | echo "Bump up artifact '$ARTIFACT' version failed" 234 | exit 1 235 | fi 236 | fi 237 | 238 | git diff --quiet HEAD 239 | if [ $? 
-ne 0 ]; then 240 | # Git commit in base branch 241 | git commit -a -m "HugeGraph-622: Bump up to version $BUMP_VERSION" >/dev/null 2>&1 || exit 1 242 | echo "Add a commit in branch $BASE_BRANCH, remember to push" 243 | else 244 | echo "Nothing modified for branch $BASE_BRANCH" 245 | fi 246 | -------------------------------------------------------------------------------- /assembly/bin/schedule-backup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | function abs_path() { 4 | SOURCE="${BASH_SOURCE[0]}" 5 | while [ -h "$SOURCE" ]; do 6 | DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )" 7 | SOURCE="$(readlink "$SOURCE")" 8 | [[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" 9 | done 10 | echo "$( cd -P "$( dirname "$SOURCE" )" && pwd )" 11 | } 12 | 13 | BIN=`abs_path` 14 | TOP=`dirname $BIN` 15 | cd $BIN 16 | 17 | if [ -n "$JAVA_HOME" ]; then 18 | JAVA="$JAVA_HOME"/bin/java 19 | else 20 | JAVA=java 21 | fi 22 | 23 | function parse_interval() { 24 | INTERVAL="${1//\\/} ${2//\\/} ${3//\\/} ${4//\\/} ${5//\\/}" 25 | } 26 | 27 | for((i=1;i<=$#;i+=2)); 28 | do 29 | current=`eval echo '${'$i'}'` 30 | case "$current" in 31 | "--url") 32 | URL=`eval echo '${'$(($i+1))'}'` 33 | URL_ARG="--url "$URL 34 | ;; 35 | "--graph") 36 | GRAPH=`eval echo '${'$(($i+1))'}'` 37 | GRAPH_ARG="--graph "$GRAPH 38 | ;; 39 | "--user") 40 | USERNAME=`eval echo '${'$(($i+1))'}'` 41 | USERNAME_ARG="--user "$USERNAME 42 | ;; 43 | "--password") 44 | PASSWORD=`eval echo '${'$(($i+1))'}'` 45 | PASSWORD_ARG="--password "$PASSWORD 46 | ;; 47 | "--timeout") 48 | TIMEOUT=`eval echo '${'$(($i+1))'}'` 49 | TIMEOUT_ARG="--timeout "$TIMEOUT 50 | ;; 51 | "--interval") 52 | position=$(($i+1)) 53 | INTERVAL=${@:$position:5} 54 | INTERVAL=${INTERVAL//\\/} 55 | let i+=4 56 | ;; 57 | "--backup-num") 58 | NUM=`eval echo '${'$(($i+1))'}'` 59 | ;; 60 | "--directory"|"-d") 61 | DIR=`eval echo '${'$(($i+1))'}'` 62 | ;; 63 | *) 64 | echo "Invalid argument: $current" 65 | bash $BIN/hugegraph 66 | exit 1 67 | esac 68 | done 69 | 70 | if [ -z "$DIR" ]; then 71 | echo "Must provide backup directory" 72 | exit 1 73 | else 74 | if [ ${DIR:0:1} != "/" ]; then 75 | DIR=$TOP"/"$DIR 76 | fi 77 | fi 78 | 79 | DIR=`dirname $DIR`/`basename $DIR` 80 | 81 | if [ -z "$GRAPH" ]; then 82 | GRAPH="hugegraph" 83 | fi 84 | GRAPH_DIR=$DIR/$GRAPH 85 | 86 | if [ -d $GRAPH_DIR -o -f $GRAPH_DIR ]; then 87 | echo "Error: Directory/file $GRAPH already exists in $DIR" 88 | exit 1 89 | fi 90 | 91 | mkdir "$GRAPH_DIR" 92 | if [ $? -ne 0 ]; then 93 | echo "Failed to create directory $GRAPH_DIR" 94 | exit 1 95 | fi 96 | 97 | if [ -z "$NUM" ]; then 98 | NUM=3 99 | elif [ ! "$NUM" -gt 0 ] 2>/dev/null ;then 100 | echo "Number of backups must be positive number." 101 | exit 1 102 | fi 103 | 104 | if [ -z "$INTERVAL" ]; then 105 | INTERVAL="0 0 * * *" 106 | fi 107 | 108 | CRONTAB_JOB="$INTERVAL export JAVA_HOME=$JAVA_HOME && bash $BIN/backup.sh $URL_ARG $GRAPH_ARG $USERNAME_ARG $PASSWORD_ARG $TIMEOUT_ARG backup -t all -d $GRAPH_DIR --backup-num $NUM" 109 | 110 | . $BIN/util.sh 111 | 112 | crontab_append "$CRONTAB_JOB" 113 | -------------------------------------------------------------------------------- /assembly/bin/start-all.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | VERSION="" 4 | INSTALL_PATH="" 5 | 6 | function print_usage() { 7 | echo "USAGE: $0 -v {hugegraph-version} -p {install-path}" 8 | echo "eg : $0 -v 0.8 -p ." 
9 | } 10 | 11 | while getopts "v:p:" arg; do 12 | case ${arg} in 13 | v) VERSION="$OPTARG" ;; 14 | p) INSTALL_PATH="$OPTARG" ;; 15 | ?) print_usage && exit 1 ;; 16 | esac 17 | done 18 | 19 | if [[ "$VERSION" = "" || "$INSTALL_PATH" = "" ]]; then 20 | print_usage 21 | exit 1 22 | fi 23 | 24 | function abs_path() { 25 | SOURCE="${BASH_SOURCE[0]}" 26 | while [ -h "$SOURCE" ]; do 27 | DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )" 28 | SOURCE="$(readlink "$SOURCE")" 29 | [[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" 30 | done 31 | echo "$( cd -P "$( dirname "$SOURCE" )" && pwd )" 32 | } 33 | 34 | BIN=`abs_path` 35 | . ${BIN}/util.sh 36 | 37 | `ensure_path_writable ${INSTALL_PATH}` 38 | 39 | # Convert to absolute path 40 | INSTALL_PATH="$(cd ${INSTALL_PATH} && pwd)" 41 | 42 | cd ${BIN} 43 | 44 | # Check input version can be found in version-map.yaml 45 | OPTIONAL_VERSIONS=`cat version-map.yaml | grep 'version' | awk -F ':' '{print $1}' | xargs` 46 | if [[ ! "$OPTIONAL_VERSIONS" =~ "$VERSION" ]]; then 47 | echo "Invalid version '${VERSION}' for hugegraph, the optional values are [$OPTIONAL_VERSIONS]" 48 | exit 1 49 | fi 50 | 51 | # Parse module version from 'version-map.yaml' 52 | SERVER_VERSION=`parse_yaml version-map.yaml "${VERSION}" "server"` 53 | if [ "$SERVER_VERSION" = "" ]; then 54 | echo "Please check the format and content of file 'version-map.yaml' is normal" 55 | exit 1 56 | fi 57 | STUDIO_VERSION=`parse_yaml version-map.yaml "${VERSION}" "studio"` 58 | if [ "$STUDIO_VERSION" = "" ]; then 59 | echo "Please check the format and content of file 'version-map.yaml' is normal" 60 | exit 1 61 | fi 62 | 63 | SERVER_DIR="${INSTALL_PATH}/hugegraph-${SERVER_VERSION}" 64 | STUDIO_DIR="${INSTALL_PATH}/hugegraph-studio-${STUDIO_VERSION}" 65 | 66 | if [ ! -d "${SERVER_DIR}" ]; then 67 | echo "The server dir ${SERVER_DIR} doesn't exist" 68 | exit 1 69 | fi 70 | 71 | if [ ! -d "${STUDIO_DIR}" ]; then 72 | echo "The studio dir ${STUDIO_DIR} doesn't exist" 73 | exit 1 74 | fi 75 | 76 | function start_hugegraph_server() { 77 | $SERVER_DIR/bin/start-hugegraph.sh 78 | if [ $? -ne 0 ]; then 79 | echo "Failed to start HugeGraphServer, please check the logs under '$SERVER_DIR/logs' for details" 80 | exit 1 81 | fi 82 | } 83 | 84 | function start_hugegraph_studio() { 85 | # TODO: Let hugegraph-studio.sh can execute in any directory instead of $STUDIO_DIR 86 | cd $STUDIO_DIR 87 | 88 | local server_host=`read_property "conf/hugegraph-studio.properties" "studio.server.host"` 89 | local server_port=`read_property "conf/hugegraph-studio.properties" "studio.server.port"` 90 | local server_url="http://${server_host}:${server_port}" 91 | local start_timeout_s=20 92 | 93 | echo "Starting HugeGraphStudio..." 94 | bin/hugegraph-studio.sh >/dev/null 2>&1 & 95 | 96 | pid="$!" 97 | trap '$BIN/stop-all.sh; exit' SIGHUP SIGINT SIGQUIT SIGTERM 98 | 99 | wait_for_startup 'HugeGraphStudio' "$server_url" $start_timeout_s || { 100 | echo "Failed to start HugeGraphStudio, please check the logs under '$STUDIO_DIR/logs' for details" 101 | $SERVER_DIR/bin/stop-hugegraph.sh 102 | exit 1 103 | } 104 | cd .. 
105 | } 106 | 107 | start_hugegraph_server 108 | start_hugegraph_studio 109 | 110 | echo "[OK] Started HugeGraphServer and HugeGraphStudio" 111 | -------------------------------------------------------------------------------- /assembly/bin/stop-all.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | function abs_path() { 4 | SOURCE="${BASH_SOURCE[0]}" 5 | while [ -h "$SOURCE" ]; do 6 | DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )" 7 | SOURCE="$(readlink "$SOURCE")" 8 | [[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" 9 | done 10 | echo "$( cd -P "$( dirname "$SOURCE" )" && pwd )" 11 | } 12 | 13 | BIN=`abs_path` 14 | cd $BIN 15 | 16 | . util.sh 17 | 18 | SLEEP_INTERVAL_S=2 19 | SHUTDOWN_TIMEOUT_S=10 20 | 21 | kill_process 'HugeGraphServer' 22 | wait_for_shutdown 'HugeGraphServer' $SHUTDOWN_TIMEOUT_S 23 | kill_process 'HugeGraphStudio' 24 | wait_for_shutdown 'HugeGraphStudio' $SHUTDOWN_TIMEOUT_S 25 | -------------------------------------------------------------------------------- /assembly/bin/util.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # TODO: Need to check all command before using? 4 | 5 | function command_available() { 6 | local cmd=$1 7 | command -v $cmd >/dev/null 2>&1 8 | return $? 9 | } 10 | 11 | # read a property from .properties file 12 | function read_property() { 13 | # file path 14 | file_name=$1 15 | # replace "." to "\." 16 | property_name=`echo $2 | sed 's/\./\\\./g'` 17 | cat $file_name | sed -n -e "s/^[ ]*//g;/^#/d;s/^$property_name=//p" | tail -1 18 | } 19 | 20 | function write_property() { 21 | local file=$1 22 | local key=$2 23 | local value=$3 24 | 25 | local os=`uname` 26 | case $os in 27 | # Note: in mac os should use sed -i '' "xxx" to replace string, 28 | # otherwise prompt 'command c expects \ followed by text'. 29 | # See http://www.cnblogs.com/greedy-day/p/5952899.html 30 | Darwin) sed -i '' "s!$key=.*!$key=$value!g" "$file" ;; 31 | *) sed -i "s!$key=.*!$key=$value!g" "$file" ;; 32 | esac 33 | } 34 | 35 | function parse_yaml() { 36 | local file=$1 37 | local version=$2 38 | local module=$3 39 | 40 | cat $file | tr -d '\n {}'| awk -F',+|:' '''{ 41 | pre=""; 42 | for(i=1; i<=NF; ) { 43 | if(match($i, /version/)) { 44 | pre=$i; 45 | i+=1 46 | } else { 47 | result[pre"-"$i] = $(i+1); 48 | i+=2 49 | } 50 | } 51 | } END {for(e in result) {print e": "result[e]}}''' \ 52 | | grep "$version-$module" | awk -F':' '{print $2}' | tr -d ' ' && echo 53 | } 54 | 55 | function process_num() { 56 | num=`ps -ef | grep $1 | grep -v grep | wc -l` 57 | return $num 58 | } 59 | 60 | function process_id() { 61 | pid=`ps -ef | grep $1 | grep -v grep | awk '{print $2}'` 62 | return $pid 63 | } 64 | 65 | # check the port of rest server is occupied 66 | function check_port() { 67 | local port=`echo $1 | awk -F':' '{print $3}'` 68 | if ! command_available "lsof"; then 69 | echo "Required lsof but it is unavailable" 70 | exit 1 71 | fi 72 | lsof -i :$port >/dev/null 73 | # Centos 7 74 | # ss -lnp | grep $port >/dev/null 75 | if [ $? -eq 0 ]; then 76 | echo "The port $port has already been used" 77 | exit 1 78 | fi 79 | } 80 | 81 | function crontab_append() { 82 | local job="$1" 83 | crontab -l | grep -F "$job" >/dev/null 2>&1 84 | if [ $? 
-eq 0 ]; then 85 | return 1 86 | fi 87 | (crontab -l ; echo "$job") | crontab - 88 | } 89 | 90 | function crontab_remove() { 91 | local job="$1" 92 | # check exist before remove 93 | crontab -l | grep -F "$job" >/dev/null 2>&1 94 | if [ $? -eq 1 ]; then 95 | return 0 96 | fi 97 | 98 | crontab -l | grep -Fv "$job" | crontab - 99 | 100 | # Check exist after remove 101 | crontab -l | grep -F "$job" >/dev/null 2>&1 102 | if [ $? -eq 0 ]; then 103 | return 1 104 | else 105 | return 0 106 | fi 107 | } 108 | 109 | # wait_for_startup friendly_name host port timeout_s 110 | function wait_for_startup() { 111 | local server_name="$1" 112 | local server_url="$2" 113 | local timeout_s="$3" 114 | 115 | local now_s=`date '+%s'` 116 | local stop_s=$(( $now_s + $timeout_s )) 117 | 118 | local status 119 | 120 | echo -n "Connecting to $server_name ($server_url)" 121 | while [ $now_s -le $stop_s ]; do 122 | echo -n . 123 | status=`curl -o /dev/null -s -w %{http_code} $server_url` 124 | if [ $status -eq 200 ]; then 125 | echo "OK" 126 | return 0 127 | fi 128 | sleep 2 129 | now_s=`date '+%s'` 130 | done 131 | 132 | echo "The operation timed out when attempting to connect to $server_url" >&2 133 | return 1 134 | } 135 | 136 | function free_memory() { 137 | local free="" 138 | local os=`uname` 139 | if [ "$os" == "Linux" ]; then 140 | local mem_free=`cat /proc/meminfo | grep -w "MemFree" | awk '{print $2}'` 141 | local mem_buffer=`cat /proc/meminfo | grep -w "Buffers" | awk '{print $2}'` 142 | local mem_cached=`cat /proc/meminfo | grep -w "Cached" | awk '{print $2}'` 143 | if [[ "$mem_free" == "" || "$mem_buffer" == "" || "$mem_cached" == "" ]]; then 144 | echo "Failed to get free memory" 145 | exit 1 146 | fi 147 | free=`expr $mem_free + $mem_buffer + $mem_cached` 148 | free=`expr $free / 1024` 149 | elif [ "$os" == "Darwin" ]; then 150 | local pages_free=`vm_stat | awk '/Pages free/{print $0}' | awk -F'[:.]+' '{print $2}' | tr -d " "` 151 | local pages_inactive=`vm_stat | awk '/Pages inactive/{print $0}' | awk -F'[:.]+' '{print $2}' | tr -d " "` 152 | local pages_available=`expr $pages_free + $pages_inactive` 153 | free=`expr $pages_available \* 4096 / 1024 / 1024` 154 | else 155 | echo "Unsupported operating system $os" 156 | exit 1 157 | fi 158 | echo $free 159 | } 160 | 161 | function calc_xmx() { 162 | local min_mem=$1 163 | local max_mem=$2 164 | # Get machine available memory 165 | local free=`free_memory` 166 | local half_free=$[free/2] 167 | 168 | local xmx=$min_mem 169 | if [[ "$free" -lt "$min_mem" ]]; then 170 | exit 1 171 | elif [[ "$half_free" -ge "$max_mem" ]]; then 172 | xmx=$max_mem 173 | elif [[ "$half_free" -lt "$min_mem" ]]; then 174 | xmx=$min_mem 175 | else 176 | xmx=$half_free 177 | fi 178 | echo $xmx 179 | } 180 | 181 | function remove_with_prompt() { 182 | local path=$1 183 | local tips="" 184 | 185 | if [ -d "$path" ]; then 186 | tips="Remove directory '$path' and all sub files [y/n]?" 187 | elif [ -f "$path" ]; then 188 | tips="Remove file '$path' [y/n]?" 189 | else 190 | return 0 191 | fi 192 | 193 | read -p "$tips " yn 194 | case $yn in 195 | [Yy]* ) rm -rf "$path";; 196 | * ) ;; 197 | esac 198 | } 199 | 200 | function ensure_path_writable() { 201 | local path=$1 202 | # Ensure input path exist 203 | if [ ! -d "${path}" ]; then 204 | mkdir -p ${path} 205 | fi 206 | # Check for write permission 207 | if [ ! 
-w "${path}" ]; then 208 | echo "No write permission on directory ${path}" 209 | exit 1 210 | fi 211 | } 212 | 213 | function get_ip() { 214 | local os=`uname` 215 | local loopback="127.0.0.1" 216 | local ip="" 217 | case $os in 218 | Linux) 219 | if command_available "ifconfig"; then 220 | ip=`ifconfig | grep 'inet addr:' | grep -v "$loopback" | cut -d: -f2 | awk '{ print $1}'` 221 | elif command_available "ip"; then 222 | ip=`ip addr | grep 'state UP' -A2 | tail -n1 | awk '{print $2}' | awk -F"/" '{print $1}'` 223 | else 224 | ip=$loopback 225 | fi 226 | ;; 227 | FreeBSD|OpenBSD|Darwin) 228 | if command_available "ifconfig"; then 229 | ip=`ifconfig | grep -E 'inet.[0-9]' | grep -v "$loopback" | awk '{ print $2}'` 230 | else 231 | ip=$loopback 232 | fi 233 | ;; 234 | SunOS) 235 | if command_available "ifconfig"; then 236 | ip=`ifconfig -a | grep inet | grep -v "$loopback" | awk '{ print $2} '` 237 | else 238 | ip=$loopback 239 | fi 240 | ;; 241 | *) ip=$loopback;; 242 | esac 243 | echo $ip 244 | } 245 | 246 | function download() { 247 | local link_url=$1 248 | local path=$2 249 | local tar=$3 250 | 251 | if command_available "wget"; then 252 | wget --help | grep -q '\--show-progress' && progress_opt="-q --show-progress" || progress_opt="" 253 | wget ${link_url} -P ${path} $progress_opt --no-check-certificate 254 | elif command_available "curl"; then 255 | curl -L -k ${link_url} -o ${path}/${tar} 256 | else 257 | echo "Required wget or curl but they are unavailable" 258 | exit 1 259 | fi 260 | } 261 | 262 | function ensure_package_exist() { 263 | local path=$1 264 | local dir=$2 265 | local tar=$3 266 | local link_url=$4 267 | 268 | if [ ! -d ${path}/${dir} ]; then 269 | if [ ! -f ${path}/${tar} ]; then 270 | echo "Downloading the compressed package '${tar}'" 271 | download ${link_url} ${path} ${tar} 272 | if [ $? -ne 0 ]; then 273 | echo "Failed to download, please ensure the network is available and link is valid" 274 | exit 1 275 | fi 276 | echo "[OK] Finished download" 277 | fi 278 | echo "Unzip the compressed package '$tar'" 279 | tar -zxvf ${path}/${tar} -C ${path} >/dev/null 2>&1 280 | if [ $? -ne 0 ]; then 281 | echo "Failed to unzip, please check the compressed package" 282 | exit 1 283 | fi 284 | echo "[OK] Finished unzip" 285 | fi 286 | } 287 | 288 | ########################################################################### 289 | 290 | function wait_for_shutdown() { 291 | local p_name="$1" 292 | local timeout_s="$2" 293 | 294 | local now_s=`date '+%s'` 295 | local stop_s=$(( $now_s + $timeout_s )) 296 | 297 | while [ $now_s -le $stop_s ]; do 298 | process_status "$p_name" >/dev/null 299 | if [ $? -eq 1 ]; then 300 | # Class not found in the jps output. Assume that it stopped. 
301 | return 0 302 | fi 303 | sleep 2 304 | now_s=`date '+%s'` 305 | done 306 | 307 | echo "$p_name shutdown timeout(exceeded $timeout_s seconds)" >&2 308 | return 1 309 | } 310 | 311 | function process_status() { 312 | local p=`ps -ef | grep "$1" | grep -v grep | awk '{print $2}'` 313 | if [ -n "$p" ]; then 314 | echo "$1 is running with pid $p" 315 | return 0 316 | else 317 | echo "The process $1 does not exist" 318 | return 1 319 | fi 320 | } 321 | 322 | function kill_process() { 323 | local pids=`ps -ef | grep "$1" | grep -v grep | awk '{print $2}' | xargs` 324 | 325 | if [ "$pids" = "" ]; then 326 | echo "There is no $1 process" 327 | fi 328 | 329 | for pid in ${pids[@]} 330 | do 331 | if [ -z "$pid" ]; then 332 | echo "The process $1 does not exist" 333 | return 334 | fi 335 | echo "Killing $1 (pid $pid)..." >&2 336 | case "`uname`" in 337 | CYGWIN*) taskkill /F /PID "$pid" ;; 338 | *) kill "$pid" ;; 339 | esac 340 | done 341 | } 342 | -------------------------------------------------------------------------------- /assembly/bin/version-map.yaml: -------------------------------------------------------------------------------- 1 | version-0.6: { 2 | server: 0.6.1, 3 | studio: 0.6.1 4 | }, 5 | version-0.7: { 6 | server: 0.7.4, 7 | studio: 0.7.0 8 | }, 9 | version-0.8: { 10 | server: 0.8.0, 11 | studio: 0.8.0 12 | }, 13 | version-0.9: { 14 | server: 0.9.2, 15 | studio: 0.9.0 16 | }, 17 | version-0.10: { 18 | server: 0.10.4, 19 | studio: 0.10.0 20 | } 21 | -------------------------------------------------------------------------------- /assembly/conf/hugegraph.truststore: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/apache/incubator-hugegraph-tools/668333a877106a9028174ca239655e491dcb2c5f/assembly/conf/hugegraph.truststore -------------------------------------------------------------------------------- /assembly/descriptor/assembly.xml: -------------------------------------------------------------------------------- 1 | 4 | 5 | distribution 6 | false 7 | 8 | 9 | dir 10 | 11 | 12 | 13 | 14 | ${assembly.dir}/bin 15 | bin 16 | 17 | * 18 | 19 | 755 20 | 21 | 22 | ${assembly.dir}/conf 23 | conf 24 | 25 | * 26 | 27 | 755 28 | 29 | 30 | ${project.build.directory} 31 | lib 32 | 33 | *.jar 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | /lib 42 | false 43 | runtime 44 | false 45 | 46 | *:*:jar:* 47 | 48 | 49 | 50 | 51 | -------------------------------------------------------------------------------- /assembly/travis/conf/graphs/hugegraph.properties: -------------------------------------------------------------------------------- 1 | # gremlin entrence to create graph 2 | gremlin.graph=com.baidu.hugegraph.auth.HugeFactoryAuthProxy 3 | 4 | # cache config 5 | #schema.cache_capacity=100000 6 | # vertex-cache default is 1000w, 10min expired 7 | #vertex.cache_capacity=10000000 8 | #vertex.cache_expire=600 9 | # edge-cache default is 100w, 10min expired 10 | #edge.cache_capacity=1000000 11 | #edge.cache_expire=600 12 | 13 | 14 | # schema illegal name template 15 | #schema.illegal_name_regex=\s+|~.* 16 | 17 | #vertex.default_label=vertex 18 | 19 | backend=rocksdb 20 | serializer=binary 21 | 22 | store=hugegraph 23 | 24 | search.text_analyzer=jieba 25 | search.text_analyzer_mode=INDEX 26 | 27 | # rocksdb backend config 28 | #rocksdb.data_path=/path/to/disk 29 | #rocksdb.wal_path=/path/to/disk 30 | 31 | 32 | # cassandra backend config 33 | cassandra.host=localhost 34 | cassandra.port=9042 35 | cassandra.username= 36 | cassandra.password= 37 | 
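# (Editor's note, descriptive only: the cassandra.* options in this section are
#  read by the cassandra backend; with backend=rocksdb selected above they are
#  expected to have no effect.)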
#cassandra.connect_timeout=5 38 | #cassandra.read_timeout=20 39 | #cassandra.keyspace.strategy=SimpleStrategy 40 | #cassandra.keyspace.replication=3 41 | 42 | # hbase backend config 43 | #hbase.hosts=localhost 44 | #hbase.port=2181 45 | #hbase.znode_parent=/hbase 46 | #hbase.threads_max=64 47 | 48 | # mysql backend config 49 | #jdbc.driver=com.mysql.jdbc.Driver 50 | #jdbc.url=jdbc:mysql://127.0.0.1:3306 51 | #jdbc.username=root 52 | #jdbc.password= 53 | #jdbc.reconnect_max_times=3 54 | #jdbc.reconnect_interval=3 55 | #jdbc.sslmode=false 56 | 57 | # palo backend config 58 | #palo.host=127.0.0.1 59 | #palo.poll_interval=10 60 | #palo.temp_dir=./palo-data 61 | #palo.file_limit_size=32 62 | -------------------------------------------------------------------------------- /assembly/travis/conf/gremlin-server.yaml: -------------------------------------------------------------------------------- 1 | # host and port of gremlin server, need to be consistent with host and port in rest-server.properties 2 | #host: 127.0.0.1 3 | #port: 8182 4 | 5 | # timeout in ms of gremlin query 6 | scriptEvaluationTimeout: 30000 7 | 8 | channelizer: org.apache.tinkerpop.gremlin.server.channel.WsAndHttpChannelizer 9 | # don't set graph at here, this happens after support for dynamically adding graph 10 | graphs: { 11 | } 12 | scriptEngines: { 13 | gremlin-groovy: { 14 | plugins: { 15 | com.baidu.hugegraph.plugin.HugeGraphGremlinPlugin: {}, 16 | org.apache.tinkerpop.gremlin.server.jsr223.GremlinServerGremlinPlugin: {}, 17 | org.apache.tinkerpop.gremlin.jsr223.ImportGremlinPlugin: { 18 | classImports: [ 19 | java.lang.Math, 20 | com.baidu.hugegraph.backend.id.IdGenerator, 21 | com.baidu.hugegraph.type.define.Directions, 22 | com.baidu.hugegraph.type.define.NodeRole, 23 | com.baidu.hugegraph.traversal.algorithm.CollectionPathsTraverser, 24 | com.baidu.hugegraph.traversal.algorithm.CountTraverser, 25 | com.baidu.hugegraph.traversal.algorithm.CustomizedCrosspointsTraverser, 26 | com.baidu.hugegraph.traversal.algorithm.CustomizePathsTraverser, 27 | com.baidu.hugegraph.traversal.algorithm.FusiformSimilarityTraverser, 28 | com.baidu.hugegraph.traversal.algorithm.HugeTraverser, 29 | com.baidu.hugegraph.traversal.algorithm.JaccardSimilarTraverser, 30 | com.baidu.hugegraph.traversal.algorithm.KneighborTraverser, 31 | com.baidu.hugegraph.traversal.algorithm.KoutTraverser, 32 | com.baidu.hugegraph.traversal.algorithm.MultiNodeShortestPathTraverser, 33 | com.baidu.hugegraph.traversal.algorithm.NeighborRankTraverser, 34 | com.baidu.hugegraph.traversal.algorithm.PathsTraverser, 35 | com.baidu.hugegraph.traversal.algorithm.PersonalRankTraverser, 36 | com.baidu.hugegraph.traversal.algorithm.SameNeighborTraverser, 37 | com.baidu.hugegraph.traversal.algorithm.ShortestPathTraverser, 38 | com.baidu.hugegraph.traversal.algorithm.SingleSourceShortestPathTraverser, 39 | com.baidu.hugegraph.traversal.algorithm.SubGraphTraverser, 40 | com.baidu.hugegraph.traversal.algorithm.TemplatePathsTraverser, 41 | com.baidu.hugegraph.traversal.algorithm.steps.EdgeStep, 42 | com.baidu.hugegraph.traversal.algorithm.steps.RepeatEdgeStep, 43 | com.baidu.hugegraph.traversal.algorithm.steps.WeightedEdgeStep, 44 | com.baidu.hugegraph.traversal.optimize.Text, 45 | com.baidu.hugegraph.traversal.optimize.TraversalUtil, 46 | com.baidu.hugegraph.util.DateUtil 47 | ], 48 | methodImports: [java.lang.Math#*] 49 | }, 50 | org.apache.tinkerpop.gremlin.jsr223.ScriptFileGremlinPlugin: { 51 | files: [scripts/empty-sample.groovy] 52 | } 53 | } 54 | } 55 | } 56 | serializers: 
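# (Editor's note, descriptive only: each serializer entry below registers
# HugeGraphIoRegistry, which lets HugeGraph-specific types travel over the
# GraphBinary / GraphSON wire formats used by Gremlin clients.)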
57 | - { className: org.apache.tinkerpop.gremlin.driver.ser.GraphBinaryMessageSerializerV1, 58 | config: { 59 | serializeResultToString: false, 60 | ioRegistries: [com.baidu.hugegraph.io.HugeGraphIoRegistry] 61 | } 62 | } 63 | - { className: org.apache.tinkerpop.gremlin.driver.ser.GraphSONMessageSerializerV1d0, 64 | config: { 65 | serializeResultToString: false, 66 | ioRegistries: [com.baidu.hugegraph.io.HugeGraphIoRegistry] 67 | } 68 | } 69 | - { className: org.apache.tinkerpop.gremlin.driver.ser.GraphSONMessageSerializerV2d0, 70 | config: { 71 | serializeResultToString: false, 72 | ioRegistries: [com.baidu.hugegraph.io.HugeGraphIoRegistry] 73 | } 74 | } 75 | - { className: org.apache.tinkerpop.gremlin.driver.ser.GraphSONMessageSerializerV3d0, 76 | config: { 77 | serializeResultToString: false, 78 | ioRegistries: [com.baidu.hugegraph.io.HugeGraphIoRegistry] 79 | } 80 | } 81 | metrics: { 82 | consoleReporter: {enabled: false, interval: 180000}, 83 | csvReporter: {enabled: false, interval: 180000, fileName: ./metrics/gremlin-server-metrics.csv}, 84 | jmxReporter: {enabled: false}, 85 | slf4jReporter: {enabled: false, interval: 180000}, 86 | gangliaReporter: {enabled: false, interval: 180000, addressingMode: MULTICAST}, 87 | graphiteReporter: {enabled: false, interval: 180000} 88 | } 89 | maxInitialLineLength: 4096 90 | maxHeaderSize: 8192 91 | maxChunkSize: 8192 92 | maxContentLength: 65536 93 | maxAccumulationBufferComponents: 1024 94 | resultIterationBatchSize: 64 95 | writeBufferLowWaterMark: 32768 96 | writeBufferHighWaterMark: 65536 97 | ssl: { 98 | enabled: false 99 | } 100 | authentication: { 101 | authenticator: com.baidu.hugegraph.auth.StandardAuthenticator, 102 | authenticationHandler: com.baidu.hugegraph.auth.WsAndHttpBasicAuthHandler, 103 | config: {tokens: conf/rest-server.properties} 104 | } 105 | -------------------------------------------------------------------------------- /assembly/travis/conf/rest-server.properties: -------------------------------------------------------------------------------- 1 | # bind url 2 | restserver.url=http://127.0.0.1:8080 3 | # gremlin server url, need to be consistent with host and port in gremlin-server.yaml 4 | #gremlinserver.url=http://127.0.0.1:8182 5 | 6 | graphs=./conf/graphs 7 | 8 | # The maximum thread ratio for batch writing, only take effect if the batch.max_write_threads is 0 9 | batch.max_write_ratio=80 10 | batch.max_write_threads=0 11 | 12 | # authentication configs 13 | # choose 'com.baidu.hugegraph.auth.StandardAuthenticator' or 'com.baidu.hugegraph.auth.ConfigAuthenticator' 14 | auth.authenticator=com.baidu.hugegraph.auth.StandardAuthenticator 15 | 16 | # for StandardAuthenticator mode 17 | #auth.graph_store=hugegraph 18 | # auth client config 19 | #auth.remote_url=127.0.0.1:8899,127.0.0.1:8898,127.0.0.1:8897 20 | 21 | # for ConfigAuthenticator mode 22 | #auth.admin_token= 23 | #auth.user_tokens=[] 24 | 25 | # rpc group configs of multi graph servers 26 | # rpc server configs 27 | rpc.server_host=127.0.0.1 28 | rpc.server_port=8090 29 | #rpc.server_timeout=30 30 | 31 | # rpc client configs (like enable to keep cache consistency) 32 | rpc.remote_url=127.0.0.1:8090 33 | #rpc.client_connect_timeout=20 34 | #rpc.client_reconnect_period=10 35 | #rpc.client_read_timeout=40 36 | #rpc.client_retries=3 37 | #rpc.client_load_balancer=consistentHash 38 | 39 | # lightweight load balancing (beta) 40 | server.id=server-1 41 | server.role=master 42 | -------------------------------------------------------------------------------- 
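A minimal smoke-test sketch (editorial example, not a file in the repository): with a server started against the rest-server.properties above, reachability can be probed the same way util.sh's wait_for_startup does. The endpoint paths and the admin:pa credentials are assumptions — "pa" only mirrors the value piped into init-store.sh by the travis install script below — so adjust both for a real deployment.

    # probe the REST endpoint; wait_for_startup uses the same curl pattern,
    # and with StandardAuthenticator enabled an unauthenticated call may answer 401
    curl -o /dev/null -s -w '%{http_code}\n' http://127.0.0.1:8080/apis
    # an authenticated call, e.g. listing graphs (credentials are assumed, see note above)
    curl -s -u admin:pa http://127.0.0.1:8080/apis/graphs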
/assembly/travis/install-hugegraph-from-source.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -ev 3 | if [[ $# -ne 1 ]]; then 4 | echo "Must pass commit id of hugegraph repo" 5 | exit 1 6 | fi 7 | 8 | COMMIT_ID=$1 9 | HUGEGRAPH_GIT_URL="https://github.com/hugegraph/hugegraph.git" 10 | GIT_DIR=hugegraph 11 | 12 | # download code and compile 13 | git clone --depth 100 ${HUGEGRAPH_GIT_URL} 14 | cd "${GIT_DIR}" 15 | git checkout ${COMMIT_ID} 16 | mvn package -DskipTests 17 | 18 | TAR=$(echo hugegraph-*.tar.gz) 19 | tar -zxvf "${TAR}" -C ../ 20 | cd ../ 21 | rm -rf "${GIT_DIR}" 22 | 23 | HTTP_SERVER_DIR=$(echo hugegraph-*) 24 | HTTPS_SERVER_DIR="hugegraph_https" 25 | 26 | cp -r "${HTTP_SERVER_DIR}" "${HTTPS_SERVER_DIR}" 27 | 28 | # config auth options just for http server (must keep '/.') 29 | cp -rf "${TRAVIS_DIR}"/conf/. "${HTTP_SERVER_DIR}"/conf/ 30 | 31 | # start HugeGraphServer with http protocol 32 | cd "${HTTP_SERVER_DIR}" 33 | echo -e "pa" | bin/init-store.sh || exit 1 34 | bin/start-hugegraph.sh || exit 1 35 | 36 | # config options for https server 37 | cd ../"${HTTPS_SERVER_DIR}" 38 | REST_SERVER_CONFIG="conf/rest-server.properties" 39 | GREMLIN_SERVER_CONFIG="conf/gremlin-server.yaml" 40 | sed -i "s?http://127.0.0.1:8080?https://127.0.0.1:8443?g" "$REST_SERVER_CONFIG" 41 | sed -i "s/#port: 8182/port: 8282/g" "$GREMLIN_SERVER_CONFIG" 42 | echo "gremlinserver.url=http://127.0.0.1:8282" >> ${REST_SERVER_CONFIG} 43 | # start HugeGraphServer with https protocol 44 | bin/init-store.sh 45 | bin/start-hugegraph.sh 46 | cd ../ 47 | -------------------------------------------------------------------------------- /build.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | export MAVEN_HOME="/home/scmtools/buildkit/maven/apache-maven-3.3.9/" 4 | export JAVA_HOME="/home/scmtools/buildkit/java/jdk1.8.0_25/" 5 | export PATH="$JAVA_HOME/bin:$MAVEN_HOME/bin:$PATH" 6 | 7 | mvn clean compile -------------------------------------------------------------------------------- /checkstyle.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | -------------------------------------------------------------------------------- /ci.yml: -------------------------------------------------------------------------------- 1 | Global: 2 | tool : build_submitter 3 | languages: 4 | # Java Version: 1.7, 1.8, 9, 10 5 | - language : java 6 | version : 1.8 7 | envs: 8 | # Maven Version: 3.0.4, 3.1.1, 3.2.5, 3.3.9, 3.5.3 9 | - env : maven 10 | version : 3.3.9 11 | Default: 12 | profile : [dev] 13 | Profiles: 14 | - profile: 15 | name : dev 16 | env : CENTOS6U3 17 | command : sh ./build.sh 18 | release : false 19 | - profile: 20 | name : publish 21 | env : cmc_standard 22 | command : mvn -U clean deploy -Prelease 23 | release : true -------------------------------------------------------------------------------- /src/main/java/com/baidu/hugegraph/base/Directory.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 
Copyright 2017 HugeGraph Authors 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with this 6 | * work for additional information regarding copyright ownership. The ASF 7 | * licenses this file to You under the Apache License, Version 2.0 (the 8 | * "License"); you may not use this file except in compliance with the License. 9 | * You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 15 | * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 16 | * License for the specific language governing permissions and limitations 17 | * under the License. 18 | */ 19 | 20 | package com.baidu.hugegraph.base; 21 | 22 | import java.io.Closeable; 23 | import java.io.InputStream; 24 | import java.io.OutputStream; 25 | import java.util.List; 26 | 27 | import com.baidu.hugegraph.util.E; 28 | 29 | public abstract class Directory { 30 | 31 | private final String directory; 32 | 33 | public Directory(String directory) { 34 | E.checkArgument(directory != null && !directory.isEmpty(), 35 | "Directory can't be null or empty"); 36 | this.directory = directory; 37 | } 38 | 39 | public String directory() { 40 | return this.directory; 41 | } 42 | 43 | public abstract List files(); 44 | 45 | public abstract String suffix(boolean compress); 46 | 47 | public abstract void ensureDirectoryExist(boolean create); 48 | 49 | public abstract void removeDirectory(); 50 | 51 | public abstract InputStream inputStream(String path); 52 | 53 | public abstract OutputStream outputStream(String path, boolean compress, 54 | boolean override); 55 | 56 | public static void closeAndIgnoreException(Closeable stream) { 57 | if (stream == null) { 58 | return; 59 | } 60 | try { 61 | stream.close(); 62 | } catch (Exception ignored) { 63 | // Ignore 64 | } 65 | } 66 | } 67 | -------------------------------------------------------------------------------- /src/main/java/com/baidu/hugegraph/base/HdfsDirectory.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 HugeGraph Authors 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with this 6 | * work for additional information regarding copyright ownership. The ASF 7 | * licenses this file to You under the Apache License, Version 2.0 (the 8 | * "License"); you may not use this file except in compliance with the License. 9 | * You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 15 | * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 16 | * License for the specific language governing permissions and limitations 17 | * under the License. 
18 | */ 19 | 20 | package com.baidu.hugegraph.base; 21 | 22 | import java.io.IOException; 23 | import java.io.InputStream; 24 | import java.io.OutputStream; 25 | import java.util.ArrayList; 26 | import java.util.List; 27 | import java.util.Map; 28 | import java.util.zip.ZipEntry; 29 | import java.util.zip.ZipInputStream; 30 | import java.util.zip.ZipOutputStream; 31 | 32 | import org.apache.hadoop.conf.Configuration; 33 | import org.apache.hadoop.fs.FSDataInputStream; 34 | import org.apache.hadoop.fs.FSDataOutputStream; 35 | import org.apache.hadoop.fs.FileStatus; 36 | import org.apache.hadoop.fs.FileSystem; 37 | import org.apache.hadoop.fs.Path; 38 | 39 | import com.baidu.hugegraph.exception.ToolsException; 40 | import com.baidu.hugegraph.rest.ClientException; 41 | import com.baidu.hugegraph.util.E; 42 | 43 | public class HdfsDirectory extends Directory { 44 | 45 | public static final String HDFS_FS_DEFAULT_NAME = "fs.default.name"; 46 | 47 | private final Map conf; 48 | 49 | public HdfsDirectory(String directory, Map conf) { 50 | super(directory); 51 | this.conf = conf; 52 | } 53 | 54 | private FileSystem fileSystem() { 55 | Configuration conf = new Configuration(); 56 | for (Map.Entry entry : this.conf.entrySet()) { 57 | conf.set(entry.getKey(), entry.getValue()); 58 | } 59 | try { 60 | return FileSystem.get(conf); 61 | } catch (IOException e) { 62 | throw new ClientException("Failed to access HDFS with " + 63 | "configuration %s", this.conf, e); 64 | } 65 | } 66 | 67 | @Override 68 | public List files() { 69 | FileSystem fs = this.fileSystem(); 70 | FileStatus[] statuses; 71 | try { 72 | statuses = fs.listStatus(new Path(this.directory())); 73 | } catch (IOException e) { 74 | throw new ToolsException("Failed to get file list in directory " + 75 | "'%s'", e, this.directory()); 76 | } 77 | List files = new ArrayList<>(); 78 | for (FileStatus status : statuses) { 79 | if (status.isFile()) { 80 | files.add(status.getPath().getName()); 81 | } 82 | } 83 | return files; 84 | } 85 | 86 | @Override 87 | public void ensureDirectoryExist(boolean create) { 88 | FileSystem fs = this.fileSystem(); 89 | Path path = new Path(this.directory()); 90 | try { 91 | if (fs.exists(path)) { 92 | E.checkState(fs.getFileStatus(path).isDirectory(), 93 | "Can't use directory '%s' because " + 94 | "a file with same name exists.", this.directory()); 95 | } else { 96 | if (create) { 97 | E.checkState(fs.mkdirs(path), 98 | "The directory does not exist and created " + 99 | "failed: '%s'", path.toString()); 100 | } else { 101 | E.checkState(false, 102 | "The directory does not exist: '%s'", 103 | path.toString()); 104 | } 105 | } 106 | } catch (IOException e) { 107 | throw new ToolsException("Invalid directory '%s'", 108 | e, this.directory()); 109 | } 110 | } 111 | 112 | @Override 113 | public void removeDirectory() { 114 | FileSystem fs = this.fileSystem(); 115 | Path path = new Path(this.directory()); 116 | try { 117 | E.checkState(fs.exists(path) && 118 | fs.getFileStatus(path).isDirectory(), 119 | "The directory does not exist: '%s'", 120 | this.directory()); 121 | fs.delete(path, true); 122 | } catch (IOException e) { 123 | throw new ToolsException("Failed to delete directory '%s'", path); 124 | } 125 | } 126 | 127 | @Override 128 | public String suffix(boolean compress) { 129 | return compress ? 
".zip" : ""; 130 | } 131 | 132 | @Override 133 | public InputStream inputStream(String file) { 134 | String path = this.path(file); 135 | FileSystem fs = this.fileSystem(); 136 | FSDataInputStream is = null; 137 | ZipInputStream zis; 138 | Path source = new Path(path); 139 | try { 140 | is = fs.open(source); 141 | zis = new ZipInputStream(is); 142 | E.checkState(zis.getNextEntry() != null, 143 | "Invalid zip file '%s'", file); 144 | } catch (IOException e) { 145 | closeAndIgnoreException(is); 146 | throw new ClientException("Failed to read from %s", e, path); 147 | } 148 | return zis; 149 | } 150 | 151 | @Override 152 | public OutputStream outputStream(String file, boolean compress, 153 | boolean override) { 154 | String path = this.path(file + this.suffix(compress)); 155 | FileSystem fs = this.fileSystem(); 156 | FSDataOutputStream os = null; 157 | ZipOutputStream zos = null; 158 | Path dest = new Path(path); 159 | try { 160 | if (override) { 161 | os = fs.create(dest, true); 162 | } else { 163 | os = fs.append(dest); 164 | } 165 | if (!compress) { 166 | return os; 167 | } 168 | zos = new ZipOutputStream(os); 169 | ZipEntry entry = new ZipEntry(file); 170 | zos.putNextEntry(entry); 171 | } catch (IOException e) { 172 | closeAndIgnoreException(zos); 173 | closeAndIgnoreException(os); 174 | throw new ClientException("Failed to write to %s", e, path); 175 | } 176 | return zos; 177 | } 178 | 179 | public static HdfsDirectory constructDir(String directory, String graph, 180 | Map hdfsConf) { 181 | String hdfsFs = hdfsConf.get(HDFS_FS_DEFAULT_NAME); 182 | E.checkArgument(hdfsFs != null && !hdfsFs.isEmpty(), 183 | "'%s' can not be null or empty " + 184 | "when try to backup to HDFS", HDFS_FS_DEFAULT_NAME); 185 | if (directory == null || directory.isEmpty()) { 186 | if (hdfsFs.endsWith("/")) { 187 | directory = hdfsFs + graph; 188 | } else { 189 | directory = hdfsFs + "/" + graph; 190 | } 191 | } 192 | return new HdfsDirectory(directory, hdfsConf); 193 | } 194 | 195 | private String path(String file) { 196 | if (this.directory().endsWith(Path.SEPARATOR)) { 197 | return this.directory() + file; 198 | } else { 199 | return this.directory() + Path.SEPARATOR + file; 200 | } 201 | } 202 | } 203 | -------------------------------------------------------------------------------- /src/main/java/com/baidu/hugegraph/base/LocalDirectory.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 HugeGraph Authors 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with this 6 | * work for additional information regarding copyright ownership. The ASF 7 | * licenses this file to You under the Apache License, Version 2.0 (the 8 | * "License"); you may not use this file except in compliance with the License. 9 | * You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 15 | * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 16 | * License for the specific language governing permissions and limitations 17 | * under the License. 
18 | */ 19 | 20 | package com.baidu.hugegraph.base; 21 | 22 | import java.io.File; 23 | import java.io.FileInputStream; 24 | import java.io.FileOutputStream; 25 | import java.io.IOException; 26 | import java.io.InputStream; 27 | import java.io.OutputStream; 28 | import java.nio.file.Paths; 29 | import java.util.ArrayList; 30 | import java.util.List; 31 | import java.util.zip.ZipEntry; 32 | import java.util.zip.ZipInputStream; 33 | import java.util.zip.ZipOutputStream; 34 | 35 | import org.apache.commons.io.FileUtils; 36 | 37 | import com.baidu.hugegraph.exception.ToolsException; 38 | import com.baidu.hugegraph.rest.ClientException; 39 | import com.baidu.hugegraph.util.E; 40 | import com.google.common.collect.ImmutableList; 41 | 42 | public class LocalDirectory extends Directory { 43 | 44 | public LocalDirectory(String directory) { 45 | super(directory); 46 | } 47 | 48 | @Override 49 | public List files() { 50 | List fileList = new ArrayList<>(8); 51 | File dir = new File(this.directory()); 52 | String[] files = dir.list(); 53 | if (files == null) { 54 | return ImmutableList.of(); 55 | } 56 | for (String f : files) { 57 | File file = Paths.get(dir.getAbsolutePath(), f).toFile(); 58 | if (file.isFile()) { 59 | fileList.add(file.getName()); 60 | } 61 | } 62 | return fileList; 63 | } 64 | 65 | @Override 66 | public void removeDirectory() { 67 | removeDirectory(this.directory()); 68 | } 69 | 70 | @Override 71 | public void ensureDirectoryExist(boolean create) { 72 | ensureDirectoryExist(this.directory(), create); 73 | } 74 | 75 | @Override 76 | public String suffix(boolean compress) { 77 | return compress ? ".zip" : ""; 78 | } 79 | 80 | @Override 81 | public InputStream inputStream(String file) { 82 | if (file.endsWith(this.suffix(true))) { 83 | return this.zipInputStream(file); 84 | } 85 | // Keep compatible with version before 1.3.0, which backup data no zip 86 | return this.textInputStream(file); 87 | } 88 | 89 | private ZipInputStream zipInputStream(String file) { 90 | String path = Paths.get(this.directory(), file).toString(); 91 | InputStream is = null; 92 | ZipInputStream zis; 93 | try { 94 | is = new FileInputStream(path); 95 | zis = new ZipInputStream(is); 96 | E.checkState(zis.getNextEntry() != null, 97 | "Invalid zip file '%s'", file); 98 | } catch (IOException | IllegalStateException e) { 99 | closeAndIgnoreException(is); 100 | throw new ClientException("Failed to read from local file: %s", 101 | e, path); 102 | } 103 | return zis; 104 | } 105 | 106 | private InputStream textInputStream(String file) { 107 | String path = Paths.get(this.directory(), file).toString(); 108 | try { 109 | return new FileInputStream(path); 110 | } catch (IOException e) { 111 | throw new ClientException("Failed to read from local file: %s", 112 | e, path); 113 | } 114 | } 115 | 116 | @Override 117 | public OutputStream outputStream(String file, boolean compress, 118 | boolean override) { 119 | String path = Paths.get(this.directory(), file + this.suffix(compress)) 120 | .toString(); 121 | FileOutputStream os = null; 122 | ZipOutputStream zos = null; 123 | try { 124 | os = new FileOutputStream(path, !override); 125 | if (!compress) { 126 | return os; 127 | } 128 | zos = new ZipOutputStream(os); 129 | ZipEntry entry = new ZipEntry(file); 130 | zos.putNextEntry(entry); 131 | } catch (IOException e) { 132 | closeAndIgnoreException(zos); 133 | closeAndIgnoreException(os); 134 | throw new ClientException("Failed to write to local file: %s", 135 | e, path); 136 | } 137 | return zos; 138 | } 139 | 140 | public 
static LocalDirectory constructDir(String directory, String graph) { 141 | if (directory == null || directory.isEmpty()) { 142 | directory = "./" + graph; 143 | } 144 | return new LocalDirectory(directory); 145 | } 146 | 147 | public static void ensureDirectoryExist(String directory) { 148 | ensureDirectoryExist(directory, true); 149 | } 150 | 151 | private static void ensureDirectoryExist(String directory, boolean create) { 152 | File file = new File(directory); 153 | if (file.exists()) { 154 | E.checkState(file.isDirectory(), 155 | "Can't use directory '%s' because a file with " + 156 | "same name exists.", file.getAbsolutePath()); 157 | } else { 158 | if (create) { 159 | E.checkState(file.mkdirs(), 160 | "The directory does not exist and created " + 161 | "failed: '%s'", file.getAbsolutePath()); 162 | } else { 163 | E.checkState(false, "The directory does not exist: '%s'", 164 | file.getAbsolutePath()); 165 | } 166 | } 167 | } 168 | 169 | private static void removeDirectory(String directory) { 170 | File dir = new File(directory); 171 | E.checkState(dir.exists() && dir.isDirectory(), 172 | "The directory does not exist: '%s'", 173 | dir.getAbsolutePath()); 174 | try { 175 | FileUtils.deleteDirectory(dir); 176 | } catch (IOException e) { 177 | throw new ToolsException("Failed to delete directory '%s'", 178 | dir.getAbsolutePath()); 179 | } 180 | } 181 | } 182 | -------------------------------------------------------------------------------- /src/main/java/com/baidu/hugegraph/base/Printer.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 HugeGraph Authors 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with this 6 | * work for additional information regarding copyright ownership. The ASF 7 | * licenses this file to You under the Apache License, Version 2.0 (the 8 | * "License"); you may not use this file except in compliance with the License. 9 | * You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 15 | * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 16 | * License for the specific language governing permissions and limitations 17 | * under the License. 18 | */ 19 | 20 | package com.baidu.hugegraph.base; 21 | 22 | import java.util.List; 23 | import java.util.Map; 24 | import java.util.Set; 25 | 26 | public class Printer { 27 | 28 | public static void print(String content) { 29 | System.out.println(content); 30 | } 31 | 32 | public static void print(String content, Object ... 
objects) { 33 | System.out.println(String.format(content, objects)); 34 | } 35 | 36 | public static void printKV(String key, Object value) { 37 | System.out.println(key + ": " + value); 38 | } 39 | 40 | public static void printSet(String name, Set set) { 41 | System.out.println(name + ": ("); 42 | for (Object object : set) { 43 | System.out.println("\t" + object + ","); 44 | } 45 | System.out.println(")"); 46 | } 47 | 48 | public static void printList(String name, List list) { 49 | System.out.println(name + ": ["); 50 | for (Object object : list) { 51 | System.out.println("\t" + object + ","); 52 | } 53 | System.out.println("]"); 54 | } 55 | 56 | public static void printMap(String name, Map map) { 57 | System.out.println(name + ": {"); 58 | for (Map.Entry entry : map.entrySet()) { 59 | System.out.println("\t" + entry.getKey() + ": " + 60 | entry.getValue() + ","); 61 | } 62 | System.out.println("}"); 63 | } 64 | 65 | public static void printInBackward(String message) { 66 | System.out.print(message); 67 | printInBackward(0L); 68 | } 69 | 70 | public static void printInBackward(long count) { 71 | System.out.print(String.format("%d%s", count, backward(count))); 72 | } 73 | 74 | public static String backward(long word) { 75 | StringBuilder backward = new StringBuilder(); 76 | for (int i = 0, len = String.valueOf(word).length(); i < len; i++) { 77 | backward.append("\b"); 78 | } 79 | return backward.toString(); 80 | } 81 | } 82 | -------------------------------------------------------------------------------- /src/main/java/com/baidu/hugegraph/base/RetryManager.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 HugeGraph Authors 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with this 6 | * work for additional information regarding copyright ownership. The ASF 7 | * licenses this file to You under the Apache License, Version 2.0 (the 8 | * "License"); you may not use this file except in compliance with the License. 9 | * You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 15 | * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 16 | * License for the specific language governing permissions and limitations 17 | * under the License. 
18 | */ 19 | 20 | package com.baidu.hugegraph.base; 21 | 22 | import java.util.Queue; 23 | import java.util.concurrent.ConcurrentLinkedQueue; 24 | import java.util.concurrent.ExecutionException; 25 | import java.util.concurrent.ExecutorService; 26 | import java.util.concurrent.Executors; 27 | import java.util.concurrent.Future; 28 | import java.util.concurrent.TimeUnit; 29 | import java.util.function.Supplier; 30 | 31 | import com.baidu.hugegraph.exception.ToolsException; 32 | 33 | public class RetryManager extends ToolManager { 34 | 35 | private int CPUS = Runtime.getRuntime().availableProcessors(); 36 | private int threadsNum = Math.min(10, Math.max(4, CPUS / 2)); 37 | private ExecutorService pool; 38 | private final Queue> futures = new ConcurrentLinkedQueue<>(); 39 | private int retry = 0; 40 | 41 | public RetryManager(ToolClient.ConnectionInfo info, String type) { 42 | super(info, type); 43 | } 44 | 45 | public void initExecutors() { 46 | Printer.print("Init %s executors", this.threadsNum); 47 | this.pool = Executors.newFixedThreadPool(this.threadsNum); 48 | } 49 | 50 | public R retry(Supplier supplier, String description) { 51 | int retries = 0; 52 | R r = null; 53 | do { 54 | try { 55 | r = supplier.get(); 56 | } catch (Exception e) { 57 | if (retries == this.retry) { 58 | throw new ToolsException( 59 | "Exception occurred while %s(after %s retries)", 60 | e, description, this.retry); 61 | } 62 | // Ignore exception and retry 63 | continue; 64 | } 65 | break; 66 | } while (retries++ < this.retry); 67 | return r; 68 | } 69 | 70 | public void submit(Runnable task) { 71 | this.futures.add(this.pool.submit(task)); 72 | } 73 | 74 | public void awaitTasks() { 75 | Future future; 76 | while ((future = this.futures.poll()) != null) { 77 | try { 78 | future.get(); 79 | } catch (InterruptedException | ExecutionException e) { 80 | e.printStackTrace(); 81 | } 82 | } 83 | } 84 | 85 | public void shutdown(String taskType) { 86 | if (this.pool == null) { 87 | return; 88 | } 89 | this.pool.shutdown(); 90 | try { 91 | this.pool.awaitTermination(24, TimeUnit.HOURS); 92 | } catch (InterruptedException e) { 93 | throw new ToolsException( 94 | "Exception appears in %s threads", e, taskType); 95 | } 96 | } 97 | 98 | public int retry() { 99 | return this.retry; 100 | } 101 | 102 | public void retry(int retry) { 103 | this.retry = retry; 104 | } 105 | 106 | public int threadsNum() { 107 | return this.threadsNum; 108 | } 109 | 110 | public void threadsNum(int threadsNum) { 111 | if (threadsNum > 0) { 112 | this.threadsNum = threadsNum; 113 | } 114 | } 115 | } 116 | -------------------------------------------------------------------------------- /src/main/java/com/baidu/hugegraph/base/ToolClient.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 HugeGraph Authors 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with this 6 | * work for additional information regarding copyright ownership. The ASF 7 | * licenses this file to You under the Apache License, Version 2.0 (the 8 | * "License"); you may not use this file except in compliance with the License. 
9 | * You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 15 | * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 16 | * License for the specific language governing permissions and limitations 17 | * under the License. 18 | */ 19 | 20 | package com.baidu.hugegraph.base; 21 | 22 | import java.nio.file.Paths; 23 | 24 | import org.apache.commons.lang3.StringUtils; 25 | 26 | import com.baidu.hugegraph.driver.AuthManager; 27 | import com.baidu.hugegraph.driver.GraphManager; 28 | import com.baidu.hugegraph.driver.GremlinManager; 29 | import com.baidu.hugegraph.driver.HugeClient; 30 | import com.baidu.hugegraph.driver.SchemaManager; 31 | import com.baidu.hugegraph.driver.TaskManager; 32 | import com.baidu.hugegraph.driver.TraverserManager; 33 | import com.baidu.hugegraph.util.E; 34 | import com.fasterxml.jackson.databind.ObjectMapper; 35 | 36 | public class ToolClient { 37 | 38 | private static final String DEFAULT_TRUST_STORE_FILE = 39 | "conf/hugegraph.truststore"; 40 | private static final String DEFAULT_TRUST_STORE_PASSWORD = "hugegraph"; 41 | 42 | private HugeClient client; 43 | private ObjectMapper mapper; 44 | 45 | public ToolClient(ConnectionInfo info) { 46 | if (info.username == null) { 47 | info.username = ""; 48 | info.password = ""; 49 | } 50 | String trustStoreFile, trustStorePassword; 51 | if (info.url.startsWith("https")) { 52 | if (info.trustStoreFile == null || info.trustStoreFile.isEmpty()) { 53 | trustStoreFile = Paths.get(homePath(), DEFAULT_TRUST_STORE_FILE) 54 | .toString(); 55 | trustStorePassword = DEFAULT_TRUST_STORE_PASSWORD; 56 | } else { 57 | E.checkArgumentNotNull(info.trustStorePassword, 58 | "The trust store password can't be " + 59 | "null when use https"); 60 | trustStoreFile = info.trustStoreFile; 61 | trustStorePassword = info.trustStorePassword; 62 | } 63 | } else { 64 | assert info.url.startsWith("http"); 65 | E.checkArgument(info.trustStoreFile == null, 66 | "Can't set --trust-store-file when use http"); 67 | E.checkArgument(info.trustStorePassword == null, 68 | "Can't set --trust-store-password when use http"); 69 | trustStoreFile = info.trustStoreFile; 70 | trustStorePassword = info.trustStorePassword; 71 | } 72 | this.client = HugeClient.builder(info.url, info.graph) 73 | .configUser(info.username, info.password) 74 | .configTimeout(info.timeout) 75 | .configSSL(trustStoreFile, trustStorePassword) 76 | .build(); 77 | 78 | this.mapper = new ObjectMapper(); 79 | } 80 | 81 | public TraverserManager traverser() { 82 | return this.client.traverser(); 83 | } 84 | 85 | public GraphManager graph() { 86 | return this.client.graph(); 87 | } 88 | 89 | public SchemaManager schema() { 90 | return this.client.schema(); 91 | } 92 | 93 | public com.baidu.hugegraph.driver.GraphsManager graphs() { 94 | return this.client.graphs(); 95 | } 96 | 97 | public TaskManager tasks() { 98 | return this.client.task(); 99 | } 100 | 101 | public GremlinManager gremlin() { 102 | return this.client.gremlin(); 103 | } 104 | 105 | public ObjectMapper mapper() { 106 | return this.mapper; 107 | } 108 | 109 | public static String homePath() { 110 | String homePath = System.getProperty("tools.home.path"); 111 | E.checkArgument(StringUtils.isNotEmpty(homePath), 112 | "The system property 'tools.home.path' " + 113 | "can't be empty when enable https protocol"); 114 | 
return homePath; 115 | } 116 | 117 | public AuthManager authManager() { 118 | return this.client.auth(); 119 | } 120 | 121 | public void close() { 122 | if (this.client != null) { 123 | this.client.close(); 124 | } 125 | } 126 | 127 | public static class ConnectionInfo { 128 | 129 | private String url; 130 | private String graph; 131 | private String username; 132 | private String password; 133 | private Integer timeout; 134 | private String trustStoreFile; 135 | private String trustStorePassword; 136 | 137 | public ConnectionInfo(String url, String graph, 138 | String username, String password, 139 | Integer timeout, 140 | String trustStoreFile, 141 | String trustStorePassword) { 142 | this.url = url; 143 | this.graph = graph; 144 | this.username = username; 145 | this.password = password; 146 | this.timeout = timeout; 147 | this.trustStoreFile = trustStoreFile; 148 | this.trustStorePassword = trustStorePassword; 149 | } 150 | } 151 | } 152 | -------------------------------------------------------------------------------- /src/main/java/com/baidu/hugegraph/base/ToolManager.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 HugeGraph Authors 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with this 6 | * work for additional information regarding copyright ownership. The ASF 7 | * licenses this file to You under the Apache License, Version 2.0 (the 8 | * "License"); you may not use this file except in compliance with the License. 9 | * You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 15 | * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 16 | * License for the specific language governing permissions and limitations 17 | * under the License. 
18 | */ 19 | 20 | package com.baidu.hugegraph.base; 21 | 22 | import java.io.IOException; 23 | import java.util.List; 24 | 25 | import com.baidu.hugegraph.rest.SerializeException; 26 | import com.fasterxml.jackson.databind.JavaType; 27 | import com.fasterxml.jackson.databind.JsonNode; 28 | import com.fasterxml.jackson.databind.ObjectMapper; 29 | 30 | public class ToolManager { 31 | 32 | protected final ToolClient client; 33 | private final String type; 34 | 35 | public ToolManager(ToolClient.ConnectionInfo info, String type) { 36 | this.client = new ToolClient(info); 37 | this.type = type; 38 | } 39 | 40 | protected String type() { 41 | return this.type; 42 | } 43 | 44 | protected String graph() { 45 | return this.client.graph().graph(); 46 | } 47 | 48 | @SuppressWarnings("unchecked") 49 | protected List readList(String key, Class clazz, 50 | String content) { 51 | ObjectMapper mapper = this.client.mapper(); 52 | try { 53 | JsonNode root = mapper.readTree(content); 54 | JsonNode element = root.get(key); 55 | if(element == null) { 56 | throw new SerializeException( 57 | "Can't find value of the key: %s in json.", key); 58 | } else { 59 | JavaType t = mapper.getTypeFactory() 60 | .constructParametricType(List.class, clazz); 61 | return (List) mapper.readValue(element.toString(), t); 62 | } 63 | } catch (IOException e) { 64 | throw new SerializeException( 65 | "Failed to deserialize %s", e, content); 66 | } 67 | } 68 | 69 | public void close () { 70 | this.client.close(); 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /src/main/java/com/baidu/hugegraph/constant/AuthRestoreConflictStrategy.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 HugeGraph Authors 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with this 6 | * work for additional information regarding copyright ownership. The ASF 7 | * licenses this file to You under the Apache License, Version 2.0 (the 8 | * "License"); you may not use this file except in compliance with the License. 9 | * You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 15 | * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 16 | * License for the specific language governing permissions and limitations 17 | * under the License. 
18 | */ 19 | 20 | package com.baidu.hugegraph.constant; 21 | 22 | public enum AuthRestoreConflictStrategy { 23 | 24 | STOP(1, "stop"), 25 | IGNORE(2, "ignore"); 26 | 27 | private int code; 28 | private String name = null; 29 | 30 | AuthRestoreConflictStrategy(int code, String name) { 31 | assert code < 256; 32 | this.code = code; 33 | this.name = name; 34 | } 35 | 36 | public int code() { 37 | return this.code; 38 | } 39 | 40 | public String string() { 41 | return this.name; 42 | } 43 | 44 | public boolean isStopStrategy() { 45 | return this == AuthRestoreConflictStrategy.STOP; 46 | } 47 | 48 | public boolean isIgnoreStrategy() { 49 | return this == AuthRestoreConflictStrategy.IGNORE; 50 | } 51 | 52 | public static boolean matchStrategy(String strategy) { 53 | if (AuthRestoreConflictStrategy.STOP.string().equals(strategy) || 54 | AuthRestoreConflictStrategy.IGNORE.string().equals(strategy)) { 55 | return true; 56 | } 57 | return false; 58 | } 59 | 60 | public static AuthRestoreConflictStrategy fromName(String name) { 61 | AuthRestoreConflictStrategy[] restoreStrategys = AuthRestoreConflictStrategy.values(); 62 | for (AuthRestoreConflictStrategy strategy : restoreStrategys) { 63 | if (strategy.string().equals(name)) { 64 | return strategy; 65 | } 66 | } 67 | return null; 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /src/main/java/com/baidu/hugegraph/constant/Constants.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 HugeGraph Authors 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with this 6 | * work for additional information regarding copyright ownership. The ASF 7 | * licenses this file to You under the Apache License, Version 2.0 (the 8 | * "License"); you may not use this file except in compliance with the License. 9 | * You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 15 | * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 16 | * License for the specific language governing permissions and limitations 17 | * under the License. 18 | */ 19 | 20 | package com.baidu.hugegraph.constant; 21 | 22 | public final class Constants { 23 | 24 | public static final int EXIT_CODE_ERROR = -1; 25 | public static final int EXIT_CODE_NORMAL = 0; 26 | 27 | public static final String INPUT_YES = "yes"; 28 | public static final String INPUT_Y= "y"; 29 | public static final String COMMAND_HELP = "help"; 30 | public static final String COMMAND_THROW_MODE = "--throw-mode"; 31 | } 32 | -------------------------------------------------------------------------------- /src/main/java/com/baidu/hugegraph/exception/ExitException.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 HugeGraph Authors 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with this 6 | * work for additional information regarding copyright ownership. The ASF 7 | * licenses this file to You under the Apache License, Version 2.0 (the 8 | * "License"); you may not use this file except in compliance with the License. 
9 | * You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 15 | * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 16 | * License for the specific language governing permissions and limitations 17 | * under the License. 18 | */ 19 | 20 | package com.baidu.hugegraph.exception; 21 | 22 | import com.baidu.hugegraph.constant.Constants; 23 | 24 | public class ExitException extends RuntimeException { 25 | 26 | private final String details; 27 | private final Integer exitCode; 28 | 29 | public ExitException(String details, String reason) { 30 | super(reason); 31 | this.details = details; 32 | this.exitCode = Constants.EXIT_CODE_NORMAL; 33 | } 34 | 35 | public ExitException(String details, String reason, 36 | Object... args) { 37 | super(String.format(reason, args)); 38 | this.details = details; 39 | this.exitCode = Constants.EXIT_CODE_NORMAL; 40 | } 41 | 42 | public ExitException(Integer exitCode, String details, 43 | String reason) { 44 | super(reason); 45 | this.details = details; 46 | this.exitCode = exitCode; 47 | } 48 | 49 | public ExitException(Integer exitCode, String details, 50 | String reason, Throwable cause) { 51 | super(reason, cause); 52 | this.details = details; 53 | this.exitCode = exitCode; 54 | } 55 | 56 | public ExitException(Integer exitCode, String details, 57 | String reason, Object... args) { 58 | super(String.format(reason, args)); 59 | this.details = details; 60 | this.exitCode = exitCode; 61 | } 62 | 63 | public ExitException(Integer exitCode, String details, 64 | String reason, Throwable cause, 65 | Object... args) { 66 | super(String.format(reason, args), cause); 67 | this.details = details; 68 | this.exitCode = exitCode; 69 | } 70 | 71 | public String details() { 72 | return this.details; 73 | } 74 | 75 | public Integer exitCode() { 76 | return this.exitCode; 77 | } 78 | 79 | public static ExitException exception(String details, String reason, 80 | Object... args) { 81 | return new ExitException(Constants.EXIT_CODE_ERROR, 82 | details, reason, args); 83 | } 84 | 85 | public static ExitException normal(String details, String reason, 86 | Object... args) { 87 | return new ExitException(Constants.EXIT_CODE_NORMAL, 88 | details, reason, args); 89 | } 90 | } 91 | -------------------------------------------------------------------------------- /src/main/java/com/baidu/hugegraph/exception/ToolsException.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 HugeGraph Authors 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with this 6 | * work for additional information regarding copyright ownership. The ASF 7 | * licenses this file to You under the Apache License, Version 2.0 (the 8 | * "License"); you may not use this file except in compliance with the License. 9 | * You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 15 | * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the 16 | * License for the specific language governing permissions and limitations 17 | * under the License. 18 | */ 19 | 20 | package com.baidu.hugegraph.exception; 21 | 22 | public class ToolsException extends RuntimeException { 23 | 24 | public ToolsException(String message) { 25 | super(message); 26 | } 27 | 28 | public ToolsException(String message, Throwable cause) { 29 | super(message, cause); 30 | } 31 | 32 | public ToolsException(String message, Object... args) { 33 | super(String.format(message, args)); 34 | } 35 | 36 | public ToolsException(String message, Throwable cause, Object... args) { 37 | super(String.format(message, args), cause); 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /src/main/java/com/baidu/hugegraph/formatter/Formatter.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 HugeGraph Authors 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with this 6 | * work for additional information regarding copyright ownership. The ASF 7 | * licenses this file to You under the Apache License, Version 2.0 (the 8 | * "License"); you may not use this file except in compliance with the License. 9 | * You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 15 | * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 16 | * License for the specific language governing permissions and limitations 17 | * under the License. 18 | */ 19 | 20 | package com.baidu.hugegraph.formatter; 21 | 22 | import com.baidu.hugegraph.structure.JsonGraph.JsonVertex; 23 | 24 | public interface Formatter { 25 | 26 | // Serialize a vertex(with edge and property) to string 27 | public String dump(JsonVertex vertex) throws Exception; 28 | 29 | public static final String PACKAGE = Formatter.class.getPackage().getName(); 30 | 31 | public static Formatter loadFormatter(String formatter) { 32 | String classPath = String.format("%s.%s", PACKAGE, formatter); 33 | ClassLoader loader = Formatter.class.getClassLoader(); 34 | try { 35 | // Check subclass 36 | Class clazz = loader.loadClass(classPath); 37 | if (!Formatter.class.isAssignableFrom(clazz)) { 38 | throw new RuntimeException("Invalid formatter: " + formatter); 39 | } 40 | // New instance of formatter 41 | return (Formatter) clazz.newInstance(); 42 | } catch (Exception e) { 43 | throw new RuntimeException("Can't load formatter: " + formatter, e); 44 | } 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /src/main/java/com/baidu/hugegraph/formatter/JsonFormatter.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 HugeGraph Authors 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with this 6 | * work for additional information regarding copyright ownership. The ASF 7 | * licenses this file to You under the Apache License, Version 2.0 (the 8 | * "License"); you may not use this file except in compliance with the License. 
9 | * You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 15 | * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 16 | * License for the specific language governing permissions and limitations 17 | * under the License. 18 | */ 19 | 20 | package com.baidu.hugegraph.formatter; 21 | 22 | import com.baidu.hugegraph.structure.JsonGraph.JsonVertex; 23 | import com.fasterxml.jackson.databind.ObjectMapper; 24 | 25 | public class JsonFormatter implements Formatter { 26 | 27 | private final ObjectMapper mapper = new ObjectMapper(); 28 | 29 | @Override 30 | public String dump(JsonVertex vertex) throws Exception { 31 | return this.mapper.writeValueAsString(vertex); 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /src/main/java/com/baidu/hugegraph/formatter/kgdumper/ComputeSign.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 HugeGraph Authors 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with this 6 | * work for additional information regarding copyright ownership. The ASF 7 | * licenses this file to You under the Apache License, Version 2.0 (the 8 | * "License"); you may not use this file except in compliance with the License. 9 | * You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 15 | * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 16 | * License for the specific language governing permissions and limitations 17 | * under the License. 
18 | */ 19 | 20 | package com.baidu.hugegraph.formatter.kgdumper; 21 | 22 | import java.io.File; 23 | import java.io.FileInputStream; 24 | import java.io.IOException; 25 | import java.io.UnsupportedEncodingException; 26 | import java.util.ArrayList; 27 | import java.util.HashMap; 28 | import java.util.LinkedHashMap; 29 | import java.util.List; 30 | import java.util.Map; 31 | 32 | public class ComputeSign { 33 | 34 | private Map<String, Integer> tradeValue2Num; 35 | private LinkedHashMap<String, String> entPlain2Id; 36 | private String inputEncoding; 37 | 38 | public ComputeSign(String tradeValueFile, String inputEncode) 39 | throws IOException { 40 | 41 | File file = new File(tradeValueFile); 42 | Long fileLen = file.length(); 43 | byte[] fileBytes = new byte[fileLen.intValue()]; 44 | try (FileInputStream in = new FileInputStream(file)) { 45 | in.read(fileBytes); 46 | } 47 | 48 | tradeValue2Num = new HashMap<>(); 49 | String fileContent = new String(fileBytes); 50 | String[] lines = fileContent.split("\n"); 51 | for (int i = 0; i < lines.length; i++) { 52 | String[] cols = lines[i].split("\t"); 53 | if (cols.length >= 2) { 54 | tradeValue2Num.put(cols[0], Integer.valueOf(cols[1])); 55 | } 56 | } 57 | inputEncoding = inputEncode; 58 | 59 | final int cacheSize = 1000; 60 | final int capacity = (int) Math.ceil(cacheSize / 0.75f) + 1; 61 | entPlain2Id = new LinkedHashMap<String, String>(capacity, 0.75f, true) { 62 | @Override 63 | protected boolean removeEldestEntry(Map.Entry<String, String> e) { 64 | return size() > cacheSize; 65 | } 66 | }; 67 | } 68 | 69 | public synchronized String computeSeqNum(String entPlain) 70 | throws UnsupportedEncodingException { 71 | String seqNum = "0"; 72 | if (entPlain2Id.containsKey(entPlain)) { 73 | seqNum = entPlain2Id.get(entPlain); 74 | return seqNum; 75 | } 76 | 77 | String[] entSpa = entPlain.split("__"); 78 | if (entSpa.length != 3) { 79 | return seqNum; 80 | } 81 | List<String> trans = new ArrayList<>(); 82 | if (tradeValue2Num.containsKey(entSpa[0])) { 83 | trans.add(tradeValue2Num.get(entSpa[0]).toString()); 84 | } else { 85 | trans.add(entSpa[0]); 86 | } 87 | trans.add(entSpa[1]); 88 | if (entSpa[1].contains("trade") && 89 | tradeValue2Num.containsKey(entSpa[2])) { 90 | trans.add(tradeValue2Num.get(entSpa[2]).toString()); 91 | } else { 92 | trans.add(entSpa[2]); 93 | } 94 | if (trans.contains("common") && trans.contains("regioncode")) { 95 | seqNum = trans.get(2); 96 | } else { 97 | String text = String.join("__", trans); 98 | seqNum = SignFS64.createSignFs64(text, inputEncoding); 99 | } 100 | entPlain2Id.put(entPlain, seqNum); 101 | return seqNum; 102 | } 103 | } 104 | -------------------------------------------------------------------------------- /src/main/java/com/baidu/hugegraph/formatter/kgdumper/DumpKGFormatter.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 HugeGraph Authors 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with this 6 | * work for additional information regarding copyright ownership. The ASF 7 | * licenses this file to You under the Apache License, Version 2.0 (the 8 | * "License"); you may not use this file except in compliance with the License.
9 | * You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 15 | * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 16 | * License for the specific language governing permissions and limitations 17 | * under the License. 18 | */ 19 | 20 | package com.baidu.hugegraph.formatter.kgdumper; 21 | 22 | import java.io.IOException; 23 | import java.io.UnsupportedEncodingException; 24 | import java.util.ArrayList; 25 | import java.util.List; 26 | import java.util.Map; 27 | import java.util.Set; 28 | 29 | import com.baidu.hugegraph.formatter.Formatter; 30 | import com.baidu.hugegraph.structure.JsonGraph.JsonEdge; 31 | import com.baidu.hugegraph.structure.JsonGraph.JsonVertex; 32 | 33 | public class DumpKGFormatter implements Formatter { 34 | 35 | private ComputeSign cs; 36 | 37 | // entity dump format:"plaint_id value key weight type parent parent_weight 38 | // child child_weight region region_weight\t\t\t" 39 | private static final String ENTITY_FORMAT = 40 | "%s\t%s\t%s\t%s\t%d\t%s\t%s\t%s\t%s\t%s\t%s\t\t\t"; 41 | // mention dump format: "mention descript descript_weight" 42 | private static final String MENTION_FORMAT = "%s\t%s\t%s"; 43 | 44 | public DumpKGFormatter() throws IOException { 45 | cs = new ComputeSign("./trade_value", "GBK"); 46 | } 47 | 48 | @Override 49 | public String dump(JsonVertex vertex) throws Exception { 50 | switch (vertex.getLabel()) { 51 | case "entity": 52 | return this.dumpEntity(vertex); 53 | case "mention": 54 | return this.dumpMemtion(vertex); 55 | default: 56 | return ""; 57 | } 58 | } 59 | 60 | private String dumpEntity(JsonVertex vertex) 61 | throws UnsupportedEncodingException { 62 | if (vertex == null) { 63 | return ""; 64 | } 65 | Set edges = vertex.getEdges(); 66 | Map properties = vertex.properties(); 67 | 68 | String plainId = (String) properties.get("plain_id"); 69 | String seqPlainId = cs.computeSeqNum(plainId); 70 | String value = (String) properties.get("value"); 71 | String key = (String) properties.get("key"); 72 | double weight = (double) properties.get("weight"); 73 | int type = (int) properties.get("type"); 74 | 75 | List parent = new ArrayList<>(); 76 | List parentWeight = new ArrayList<>(); 77 | List child = new ArrayList<>(); 78 | List childWeight = new ArrayList<>(); 79 | List region = new ArrayList<>(); 80 | List regionWeight = new ArrayList<>(); 81 | 82 | for (JsonEdge edge : edges) { 83 | if (edge == null) { 84 | continue; 85 | } 86 | if (!vertex.getId().equals(edge.getSource())) { 87 | continue; 88 | } 89 | 90 | Map props = edge.properties(); 91 | switch (edge.getLabel()) { 92 | case "is": 93 | parent.add(((String) edge.getTarget()).split(":", 2)[1]); 94 | parentWeight.add(props.get("weight").toString()); 95 | break; 96 | case "has": 97 | child.add(((String) edge.getTarget()).split(":", 2)[1]); 98 | childWeight.add(props.get("weight").toString()); 99 | break; 100 | case "region": 101 | region.add(((String) edge.getTarget()).split(":", 2)[1]); 102 | regionWeight.add(props.get("weight").toString()); 103 | break; 104 | } 105 | } 106 | return String.format(ENTITY_FORMAT, 107 | seqPlainId, value, key, weight, type, 108 | String.join("|*|", parent), 109 | String.join("|*|", parentWeight), 110 | String.join("|*|", child), 111 | String.join("|*|", childWeight), 112 | String.join("|*|", region), 113 | String.join("|*|", 
regionWeight)); 114 | } 115 | 116 | private String dumpMemtion(JsonVertex vertex) 117 | throws UnsupportedEncodingException { 118 | if (vertex == null) { 119 | return ""; 120 | } 121 | 122 | Set edges = vertex.getEdges(); 123 | String value = (String) vertex.properties().get("value"); 124 | 125 | List descript = new ArrayList<>(); 126 | List descriptWeight = new ArrayList<>(); 127 | for (JsonEdge edge : edges) { 128 | if (!vertex.getId().equals(edge.getSource())) { 129 | continue; 130 | } 131 | if (!edge.getLabel().equals("describe")) { 132 | continue; 133 | } 134 | 135 | String plainId = ((String) edge.getTarget()).split(":", 2)[1]; 136 | String seqPlainId = cs.computeSeqNum(plainId); 137 | descript.add(seqPlainId); 138 | descriptWeight.add(edge.properties().get("confidence").toString()); 139 | } 140 | 141 | return String.format(MENTION_FORMAT, value, 142 | String.join("|*|", descript), 143 | String.join("|*|", descriptWeight)); 144 | } 145 | } 146 | -------------------------------------------------------------------------------- /src/main/java/com/baidu/hugegraph/formatter/kgdumper/SignFS64.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 HugeGraph Authors 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with this 6 | * work for additional information regarding copyright ownership. The ASF 7 | * licenses this file to You under the Apache License, Version 2.0 (the 8 | * "License"); you may not use this file except in compliance with the License. 9 | * You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 15 | * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 16 | * License for the specific language governing permissions and limitations 17 | * under the License. 
18 | */ 19 | 20 | package com.baidu.hugegraph.formatter.kgdumper; 21 | 22 | import java.io.UnsupportedEncodingException; 23 | import java.math.BigInteger; 24 | import java.nio.ByteBuffer; 25 | import java.util.Arrays; 26 | import java.util.List; 27 | 28 | public class SignFS64 { 29 | 30 | static List _MOD_PRIME_LIST_1 = Arrays.asList(256, 65536, 3, 768, 196608, 9, 2304, 589824, 31 | 27, 6912, 1769472, 81, 20736, 5308416, 243, 62208, 15925248, 729, 186624, 14221318, 2187, 559872, 9109528, 32 | 6561, 1679616, 10551371, 19683, 5038848, 14876900, 59049, 15116544, 11076274, 177147, 11795206, 16451609, 33 | 531441, 1831192, 15800401, 1594323, 5493576, 13846777, 4782969, 16480728, 7985905, 14348907, 15887758, 34 | 7180502, 9492295, 14108848, 4764293, 11699672, 8772118, 14292879, 1544590, 9539141, 9324211, 4633770, 35 | 11840210, 11195420, 13901310, 1966204, 31834, 8149504, 5898612, 95502, 7671299, 918623, 286506, 6236684, 36 | 2755869, 859518, 1932839, 8267607, 2578554, 5798517, 8025608, 7735662, 618338, 7299611, 6429773, 1855014, 37 | 5121620, 2512106, 5565042, 15364860, 7536318, 16695126, 12540154, 5831741, 16530952, 4066036, 718010, 38 | 16038430, 12198108, 2154030, 14560864, 3039898, 6462090, 10128166, 9119694, 2609057, 13607285, 10581869, 39 | 7827171, 7267429, 14968394, 6704300, 5025074, 11350756, 3335687, 15075222, 497842, 10007061, 11671240, 40 | 1493526, 13243970, 1459294, 4480578, 6177484, 4377882, 13441734, 1755239, 13133646, 6770776, 5265717, 41 | 5846512, 3535115, 15797151, 762323, 10605345, 13837027, 2286969, 15038822, 7956655, 6860907, 11562040, 42 | 7092752, 3805508, 1131694, 4501043, 11416524, 3395082, 13503129, 695146, 10185246, 6954961, 2085438, 43 | 13778525, 4087670, 6256314, 7781149, 12263010, 1991729, 6566234, 3234604, 5975187, 2921489, 9703812, 44 | 1148348, 8764467, 12334223, 3445044, 9516188, 3448243, 10335132, 11771351, 10344729, 14228183, 1759627, 45 | 14256974, 9130123, 5278881, 9216496, 10613156, 15836643, 10872275, 15062255, 13955503, 15839612, 11632339, 46 | 8312083, 13964410, 1342591, 8159036, 8338804, 4027773, 7699895, 8239199, 12083319, 6322472, 7940384, 47 | 2695531, 2190203, 7043939, 8086593, 6570609, 4354604, 7482566, 2934614, 13063812, 5670485, 8803842, 5637010, 48 | 234242, 9634313, 133817, 702726, 12125726, 401451, 2108178, 2822752, 1204353, 6324534, 8468256, 3613059, 49 | 2196389, 8627555, 10839177, 6589167, 9105452, 15740318, 2990288, 10539143, 13666528, 8970864, 14840216, 50 | 7445158, 10135379, 10966222, 5558261, 13628924, 16121453, 16674783, 7332346, 14809933, 16469923, 5219825, 51 | 10875373, 15855343, 15659475, 15848906, 14011603, 13423999, 13992292, 8480383, 6717571, 8422450, 8663936, 52 | 3375500, 8490137, 9214595, 10126500, 8693198, 10866572, 13602287, 9302381); 53 | 54 | static List _MOD_PRIME_LIST_2 = Arrays.asList(15824477, 7761153, 7145686, 580925, 14499208, 55 | 4036269, 9875725, 11605750, 1507777, 115335, 12748561, 8855010, 1960695, 15399149, 16317578, 16554616, 56 | 10127548, 8963642, 12993288, 4396326, 1387123, 2782309, 7628746, 6803892, 13744855, 12248289, 15002970, 57 | 15558948, 6894525, 3392505, 12844131, 16543731, 7340988, 246640, 12808243, 7356403, 4192880, 16413743, 58 | 7618458, 4170164, 10598447, 12073393, 3783992, 12401609, 3921293, 13996267, 9500965, 16330384, 3055753, 59 | 10521614, 9181344, 1616204, 11095448, 5088057, 10698269, 4073427, 2610974, 14098583, 2139463, 10832160, 60 | 4795125, 2816473, 16374730, 14408329, 14325643, 9935226, 10060807, 8655145, 1126852, 3261729, 12919873, 61 | 2379285, 5117796, 1534254, 6893447, 
3116537, 9305119, 16525405, 2649532, 7192232, 12496701, 11487646, 62 | 4827551, 11117529, 10740793, 14959571, 4448804, 14821491, 2654722, 8520872, 307362, 11575876, 10637232, 63 | 5225154, 12240703, 13060954, 4941623, 6765563, 3932631, 121596, 14351377, 16523130, 2067132, 9092623, 64 | 12458026, 1586846, 3579800, 10460054, 10199183, 10525003, 10048928, 5614121, 11153061, 3059786, 11554062, 65 | 5052848, 1684765, 11869865, 2012421, 11863806, 461317, 656759, 358314, 7842389, 11164903, 6091338, 15880220, 66 | 5254162, 2889552, 1528556, 5434759, 15567986, 9208253, 8504908, 12997777, 5545510, 10365844, 2858622, 67 | 10387675, 8447358, 15042176, 8818485, 9387494, 4059007, 15696653, 8592607, 1894323, 15185116, 11856727, 68 | 15426292, 6488987, 237971, 10588979, 9649585, 4045507, 12240653, 13048154, 1664823, 6764713, 3715031, 69 | 11524792, 14336927, 12823930, 11372275, 8846973, 16680422, 8779486, 16180949, 15131990, 15033670, 6640949, 70 | 5585845, 3914405, 12232939, 11073370, 16213288, 6633575, 3698101, 7190712, 12107581, 12536120, 4801711, 71 | 4502489, 11787652, 14520291, 9433517, 15840895, 11964161, 9374998, 860031, 2064349, 8380175, 14620527, 72 | 1539535, 8245383, 13668173, 9394896, 5953919, 14255354, 8718441, 553429, 7460232, 13995905, 9408293, 73 | 9383551, 3049599, 8946190, 8525576, 1511586, 1090439, 10717200, 8919763, 1760264, 14420410, 641180, 74 | 13147289, 10266184, 10900060, 5400326, 6753138, 751831, 7919547, 14140152, 12781127, 414707, 5501798, 75 | 15952771, 7050019, 9644571, 2761923, 2409930, 12962916, 13398293, 7414412, 2265985, 9667394, 8604611, 76 | 4967347, 13350907, 12060795, 558904, 8861832, 3707127, 9501368, 16433552, 12689562, 10528465, 10935200, 77 | 14396166, 11211915, 1349211, 9854036, 6053366, 6159388, 16523821, 2244028); 78 | 79 | private static long getsigns_24(String text, 80 | String encode, 81 | List primes) 82 | throws UnsupportedEncodingException { 83 | long sign = 0; 84 | byte[] textBytes = text.getBytes(encode); 85 | int textLen = textBytes.length; 86 | for (int i = 0; i < textLen; i++) { 87 | int c = textBytes[i]; 88 | c = c >= 0 ? c : 256 + c; 89 | sign += c * primes.get(i & 0xFF); 90 | } 91 | return sign & 0xFFFFFFFFL; 92 | } 93 | 94 | private static long getsigns_24_1(String text, String encode) 95 | throws UnsupportedEncodingException { 96 | return getsigns_24(text, encode, _MOD_PRIME_LIST_1); 97 | } 98 | 99 | private static long getsigns_24_2(String text, String encode) 100 | throws UnsupportedEncodingException { 101 | return getsigns_24(text, encode, _MOD_PRIME_LIST_2); 102 | } 103 | 104 | private static String longBytes2String(ByteBuffer buffer) { 105 | BigInteger bi = new BigInteger("0"); 106 | for (int i = 0; i < buffer.capacity(); i++) { 107 | BigInteger tmpBI = new BigInteger("1"); 108 | tmpBI = tmpBI.shiftLeft((8 - i - 1) * 8); 109 | int byteValue = buffer.get(i); 110 | byteValue = byteValue >= 0 ? 
byteValue : 256 + byteValue; 111 | tmpBI = tmpBI.multiply(BigInteger.valueOf(byteValue)); 112 | bi = bi.add(tmpBI); 113 | } 114 | return bi.toString(); 115 | } 116 | 117 | public static String createSignFs64(String text, String encode) 118 | throws UnsupportedEncodingException { 119 | 120 | int byteLen = text.getBytes(encode).length; 121 | 122 | if (byteLen <= 4) { 123 | ByteBuffer buffer = ByteBuffer.allocate(8); 124 | for (int i = 0; i < 4 - byteLen; i++) { 125 | buffer.put("\0".getBytes()); 126 | } 127 | for (int i = byteLen - 1; i >= 0; i--) { 128 | buffer.put(text.getBytes(encode)[i]); 129 | } 130 | buffer.put("\0\0\0\0".getBytes()); 131 | return longBytes2String(buffer); 132 | } else if (byteLen <= 8) { 133 | ByteBuffer buffer = ByteBuffer.allocate(8); 134 | for (int i = 3; i >= 0; i--) { 135 | buffer.put(text.getBytes(encode)[i]); 136 | } 137 | for (int i = 0; i < 8 - byteLen; i++) { 138 | buffer.put("\0".getBytes()); 139 | } 140 | for (int i = byteLen - 1; i >= 4; i--) { 141 | buffer.put(text.getBytes(encode)[i]); 142 | } 143 | return longBytes2String(buffer); 144 | } else { 145 | long left = getsigns_24_1(text, encode); 146 | long right = getsigns_24_2(text, encode); 147 | BigInteger tbi = BigInteger.valueOf(left); 148 | tbi = tbi.shiftLeft(32); 149 | tbi = tbi.add(BigInteger.valueOf(right)); 150 | return tbi.toString(); 151 | } 152 | } 153 | } 154 | -------------------------------------------------------------------------------- /src/main/java/com/baidu/hugegraph/manager/BackupRestoreBaseManager.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 HugeGraph Authors 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with this 6 | * work for additional information regarding copyright ownership. The ASF 7 | * licenses this file to You under the Apache License, Version 2.0 (the 8 | * "License"); you may not use this file except in compliance with the License. 9 | * You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 15 | * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 16 | * License for the specific language governing permissions and limitations 17 | * under the License. 
18 | */ 19 | 20 | package com.baidu.hugegraph.manager; 21 | 22 | import java.io.BufferedReader; 23 | import java.io.ByteArrayOutputStream; 24 | import java.io.IOException; 25 | import java.io.InputStream; 26 | import java.io.InputStreamReader; 27 | import java.io.OutputStream; 28 | import java.util.ArrayList; 29 | import java.util.List; 30 | import java.util.Map; 31 | import java.util.concurrent.ConcurrentHashMap; 32 | import java.util.concurrent.atomic.AtomicLong; 33 | import java.util.function.BiConsumer; 34 | 35 | import com.baidu.hugegraph.api.API; 36 | import com.baidu.hugegraph.base.Directory; 37 | import com.baidu.hugegraph.base.HdfsDirectory; 38 | import com.baidu.hugegraph.base.LocalDirectory; 39 | import com.baidu.hugegraph.base.Printer; 40 | import com.baidu.hugegraph.base.RetryManager; 41 | import com.baidu.hugegraph.base.ToolClient; 42 | import com.baidu.hugegraph.cmd.SubCommands; 43 | import com.baidu.hugegraph.concurrent.KeyLock; 44 | import com.baidu.hugegraph.exception.ToolsException; 45 | import com.baidu.hugegraph.structure.GraphElement; 46 | import com.baidu.hugegraph.structure.constant.HugeType; 47 | import com.baidu.hugegraph.structure.graph.Edge; 48 | import com.baidu.hugegraph.util.E; 49 | import com.google.common.collect.ImmutableMap; 50 | 51 | public class BackupRestoreBaseManager extends RetryManager { 52 | 53 | public static final int BATCH = 500; 54 | protected static final int LBUF_SIZE = 1024; 55 | 56 | protected AtomicLong propertyKeyCounter = new AtomicLong(0); 57 | protected AtomicLong vertexLabelCounter = new AtomicLong(0); 58 | protected AtomicLong edgeLabelCounter = new AtomicLong(0); 59 | protected AtomicLong indexLabelCounter = new AtomicLong(0); 60 | protected AtomicLong vertexCounter = new AtomicLong(0); 61 | protected AtomicLong edgeCounter = new AtomicLong(0); 62 | 63 | private long startTime = 0L; 64 | protected static KeyLock locks = new KeyLock(); 65 | private String logDir; 66 | protected Directory directory; 67 | private Map outputStreams; 68 | private Map inputStreams; 69 | 70 | public BackupRestoreBaseManager(ToolClient.ConnectionInfo info, 71 | String type) { 72 | super(info, type); 73 | this.outputStreams = new ConcurrentHashMap<>(); 74 | this.inputStreams = new ConcurrentHashMap<>(); 75 | } 76 | 77 | public void init(SubCommands.BackupRestore cmd) { 78 | this.threadsNum(cmd.threadsNum()); 79 | assert cmd.retry() > 0; 80 | this.retry(cmd.retry()); 81 | LocalDirectory.ensureDirectoryExist(cmd.logDir()); 82 | this.logDir(cmd.logDir()); 83 | this.directory(cmd.directory(), cmd.hdfsConf()); 84 | this.initExecutors(); 85 | } 86 | 87 | public void logDir(String logDir) { 88 | this.logDir = logDir; 89 | } 90 | 91 | public String logDir() { 92 | return this.logDir; 93 | } 94 | 95 | public Directory directory() { 96 | return this.directory; 97 | } 98 | 99 | protected void ensureDirectoryExist(boolean create) { 100 | this.directory.ensureDirectoryExist(create); 101 | } 102 | 103 | protected void removeDirectory() { 104 | this.directory.removeDirectory(); 105 | } 106 | 107 | protected long write(String path, HugeType type, 108 | List list, boolean compress) { 109 | OutputStream os = this.outputStream(path, compress); 110 | ByteArrayOutputStream baos = new ByteArrayOutputStream(LBUF_SIZE); 111 | try { 112 | String key = String.format("{\"%s\": ", type.string()); 113 | baos.write(key.getBytes(API.CHARSET)); 114 | this.client.mapper().writeValue(baos, list); 115 | baos.write("}\n".getBytes(API.CHARSET)); 116 | os.write(baos.toByteArray()); 117 | } 
catch (Throwable e) { 118 | throw new ToolsException("Failed to serialize %s to %s", 119 | e, type, path); 120 | } 121 | return list.size(); 122 | } 123 | 124 | protected long write(String path, HugeType type, List list, 125 | boolean compress, String format, 126 | String label, boolean allProperties, 127 | List properties) { 128 | // format == null when called by dump 129 | if (format == null || format.equals("json")) { 130 | return this.write(path, type, list, compress); 131 | } 132 | 133 | assert format.equals("text"); 134 | return this.writeText(path, type, list, compress, label, 135 | allProperties, properties); 136 | } 137 | 138 | protected long writeText(String path, HugeType type, List list, 139 | boolean compress, String label, 140 | boolean allProperties, List properties) { 141 | OutputStream os = this.outputStream(path, compress); 142 | ByteArrayOutputStream baos = new ByteArrayOutputStream(LBUF_SIZE); 143 | StringBuilder builder = new StringBuilder(LBUF_SIZE); 144 | long count = 0L; 145 | try { 146 | for (Object e : list) { 147 | GraphElement element = (GraphElement) e; 148 | if (label != null && !label.equals(element.label())) { 149 | continue; 150 | } 151 | count++; 152 | if (type == HugeType.VERTEX) { 153 | builder.append(element.id()).append("\t"); 154 | } else { 155 | Edge edge = (Edge) e; 156 | builder.append(edge.sourceId()).append("\t") 157 | .append(edge.targetId()).append("\t"); 158 | } 159 | if (allProperties) { 160 | for (Object value : element.properties().values()) { 161 | builder.append(value).append(","); 162 | } 163 | } else { 164 | for (String property : properties) { 165 | builder.append(element.property(property)).append(","); 166 | } 167 | } 168 | builder.setCharAt(builder.length() - 1, '\n'); 169 | } 170 | baos.write(builder.toString().getBytes(API.CHARSET)); 171 | os.write(baos.toByteArray()); 172 | } catch (Throwable e) { 173 | throw new ToolsException("Failed to serialize %s to %s", 174 | e, type, path); 175 | } 176 | return count; 177 | } 178 | 179 | protected void read(String file, HugeType type, 180 | BiConsumer consumer) { 181 | InputStream is = this.inputStream(file); 182 | try (InputStreamReader isr = new InputStreamReader(is, API.CHARSET); 183 | BufferedReader reader = new BufferedReader(isr)) { 184 | String line; 185 | while ((line = reader.readLine()) != null) { 186 | consumer.accept(type.string(), line); 187 | } 188 | } catch (IOException e) { 189 | throw new ToolsException("Failed to deserialize %s from %s", 190 | e, type, file); 191 | } 192 | } 193 | 194 | protected OutputStream outputStream(String file, boolean compress) { 195 | OutputStream os = this.outputStreams.get(file); 196 | if (os != null) { 197 | return os; 198 | } 199 | os = this.directory.outputStream(file, compress, true); 200 | OutputStream prev = this.outputStreams.putIfAbsent(file, os); 201 | if (prev != null) { 202 | Directory.closeAndIgnoreException(os); 203 | os = prev; 204 | } 205 | return os; 206 | } 207 | 208 | protected InputStream inputStream(String file) { 209 | InputStream is = this.inputStreams.get(file); 210 | if (is != null) { 211 | return is; 212 | } 213 | is = this.directory.inputStream(file); 214 | InputStream prev = this.inputStreams.putIfAbsent(file, is); 215 | if (prev != null) { 216 | Directory.closeAndIgnoreException(is); 217 | is = prev; 218 | } 219 | return is; 220 | } 221 | 222 | protected void directory(String dir, Map hdfsConf) { 223 | if (hdfsConf == null || hdfsConf.isEmpty()) { 224 | // Local FS directory 225 | this.directory = 
LocalDirectory.constructDir(dir, this.graph()); 226 | } else { 227 | // HDFS directory 228 | this.directory = HdfsDirectory.constructDir(dir, this.graph(), 229 | hdfsConf); 230 | } 231 | } 232 | 233 | protected String fileWithPrefix(HugeType type) { 234 | List files = this.filesWithPrefix(type); 235 | E.checkState(files.size() == 1, 236 | "There should be only one file of '%s', but got '%s'", 237 | type, files.size()); 238 | return files.get(0); 239 | } 240 | 241 | protected List filesWithPrefix(HugeType type) { 242 | List files = new ArrayList<>(); 243 | for (String file : this.directory.files()) { 244 | if (file.startsWith(type.string())) { 245 | files.add(file); 246 | } 247 | } 248 | return files; 249 | } 250 | 251 | public void startTimer() { 252 | this.startTime = System.currentTimeMillis(); 253 | } 254 | 255 | public long elapseSeconds() { 256 | E.checkState(this.startTime != 0, 257 | "Must call startTimer() to set start time, " + 258 | "before call elapse()"); 259 | return (System.currentTimeMillis() - this.startTime) / 1000; 260 | } 261 | 262 | protected void printSummary() { 263 | this.printSummary(this.type()); 264 | } 265 | 266 | protected void printSummary(String type) { 267 | Printer.print("==============================================="); 268 | Map summary = ImmutableMap.builder() 269 | .put("property key number", this.propertyKeyCounter.longValue()) 270 | .put("vertex label number", this.vertexLabelCounter.longValue()) 271 | .put("edge label number", this.edgeLabelCounter.longValue()) 272 | .put("index label number", this.indexLabelCounter.longValue()) 273 | .put("vertex number", this.vertexCounter.longValue()) 274 | .put("edge number", this.edgeCounter.longValue()).build(); 275 | Printer.printMap(type + " summary", summary); 276 | 277 | Printer.printKV("cost time(s)", this.elapseSeconds()); 278 | } 279 | 280 | @Override 281 | public void shutdown(String taskType) { 282 | super.shutdown(taskType); 283 | for (Map.Entry e : this.outputStreams.entrySet()) { 284 | try { 285 | OutputStream os = e.getValue(); 286 | os.close(); 287 | } catch (IOException exception) { 288 | Printer.print("Failed to close file '%s'", e.getKey()); 289 | } 290 | } 291 | for (Map.Entry e : this.inputStreams.entrySet()) { 292 | try { 293 | InputStream is = e.getValue(); 294 | is.close(); 295 | } catch (IOException exception) { 296 | Printer.print("Failed to close file '%s'", e.getKey()); 297 | } 298 | } 299 | } 300 | } 301 | -------------------------------------------------------------------------------- /src/main/java/com/baidu/hugegraph/manager/DumpGraphManager.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 HugeGraph Authors 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with this 6 | * work for additional information regarding copyright ownership. The ASF 7 | * licenses this file to You under the Apache License, Version 2.0 (the 8 | * "License"); you may not use this file except in compliance with the License. 9 | * You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 15 | * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the 16 | * License for the specific language governing permissions and limitations 17 | * under the License. 18 | */ 19 | 20 | package com.baidu.hugegraph.manager; 21 | 22 | import java.io.BufferedOutputStream; 23 | import java.io.OutputStream; 24 | import java.util.Collection; 25 | import java.util.List; 26 | 27 | import com.baidu.hugegraph.api.API; 28 | import com.baidu.hugegraph.base.LocalDirectory; 29 | import com.baidu.hugegraph.base.Printer; 30 | import com.baidu.hugegraph.base.ToolClient; 31 | import com.baidu.hugegraph.cmd.SubCommands; 32 | import com.baidu.hugegraph.formatter.Formatter; 33 | import com.baidu.hugegraph.structure.JsonGraph; 34 | import com.baidu.hugegraph.structure.JsonGraph.JsonVertex; 35 | import com.baidu.hugegraph.structure.constant.HugeType; 36 | import com.baidu.hugegraph.structure.graph.Edge; 37 | import com.baidu.hugegraph.structure.graph.Vertex; 38 | 39 | public class DumpGraphManager extends BackupManager { 40 | 41 | private static final byte[] EOF = "\n".getBytes(); 42 | 43 | private final JsonGraph graph; 44 | 45 | private Formatter dumpFormatter; 46 | 47 | public DumpGraphManager(ToolClient.ConnectionInfo info) { 48 | this(info, "JsonFormatter"); 49 | } 50 | 51 | public DumpGraphManager(ToolClient.ConnectionInfo info, String formatter) { 52 | super(info); 53 | this.graph = new JsonGraph(); 54 | this.dumpFormatter = Formatter.loadFormatter(formatter); 55 | } 56 | 57 | public void dumpFormatter(String formatter) { 58 | this.dumpFormatter = Formatter.loadFormatter(formatter); 59 | } 60 | 61 | public void init(SubCommands.DumpGraph dump) { 62 | assert dump.retry() > 0; 63 | this.retry(dump.retry()); 64 | LocalDirectory.ensureDirectoryExist(dump.logDir()); 65 | this.logDir(dump.logDir()); 66 | this.directory(dump.directory(), dump.hdfsConf()); 67 | this.removeShardsFilesIfExists(); 68 | this.ensureDirectoryExist(true); 69 | this.splitSize(dump.splitSize()); 70 | } 71 | 72 | public void dump() { 73 | this.startTimer(); 74 | try { 75 | // Fetch data to JsonGraph 76 | this.backupVertices(); 77 | this.backupEdges(); 78 | 79 | // Dump to file 80 | for (String table : this.graph.tables()) { 81 | this.submit(() -> dump(table, this.graph.table(table).values())); 82 | } 83 | } catch (Throwable e) { 84 | throw e; 85 | } finally { 86 | this.shutdown(this.type()); 87 | } 88 | 89 | this.printSummary("dump graph"); 90 | } 91 | 92 | private void dump(String file, Collection vertices) { 93 | try (OutputStream os = this.outputStream(file, false); 94 | BufferedOutputStream bos = new BufferedOutputStream(os)) { 95 | for (JsonVertex vertex : vertices) { 96 | String content = this.dumpFormatter.dump(vertex); 97 | bos.write(content.getBytes(API.CHARSET)); 98 | bos.write(EOF); 99 | } 100 | } catch (Throwable e) { 101 | Printer.print("Failed to write vertex: %s", e); 102 | } 103 | } 104 | 105 | @Override 106 | protected long write(String file, HugeType type, 107 | List list, boolean compress) { 108 | switch (type) { 109 | case VERTEX: 110 | for (Object vertex : list) { 111 | this.graph.put((Vertex) vertex); 112 | } 113 | break; 114 | case EDGE: 115 | for (Object edge : list) { 116 | this.graph.put((Edge) edge); 117 | } 118 | break; 119 | default: 120 | throw new AssertionError("Invalid type " + type); 121 | } 122 | return list.size(); 123 | } 124 | } 125 | -------------------------------------------------------------------------------- /src/main/java/com/baidu/hugegraph/manager/GraphsManager.java: -------------------------------------------------------------------------------- 
1 | /* 2 | * Copyright 2017 HugeGraph Authors 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with this 6 | * work for additional information regarding copyright ownership. The ASF 7 | * licenses this file to You under the Apache License, Version 2.0 (the 8 | * "License"); you may not use this file except in compliance with the License. 9 | * You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 15 | * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 16 | * License for the specific language governing permissions and limitations 17 | * under the License. 18 | */ 19 | 20 | package com.baidu.hugegraph.manager; 21 | 22 | import java.util.List; 23 | import java.util.Map; 24 | 25 | import com.baidu.hugegraph.base.ToolClient; 26 | import com.baidu.hugegraph.base.ToolManager; 27 | import com.baidu.hugegraph.structure.constant.GraphMode; 28 | 29 | public class GraphsManager extends ToolManager { 30 | 31 | public GraphsManager(ToolClient.ConnectionInfo info) { 32 | super(info, "graphs"); 33 | } 34 | 35 | public Map create(String name, String config) { 36 | return this.client.graphs().createGraph(name, config); 37 | } 38 | 39 | public Map clone(String name, String cloneGraphName) { 40 | return this.client.graphs().cloneGraph(name, cloneGraphName); 41 | } 42 | 43 | public List list() { 44 | return this.client.graphs().listGraph(); 45 | } 46 | 47 | public Map get(String graph) { 48 | return this.client.graphs().getGraph(graph); 49 | } 50 | 51 | public void clear(String graph, String confirmMessage) { 52 | this.client.graphs().clearGraph(graph, confirmMessage); 53 | } 54 | 55 | public void drop(String graph, String confirmMessage) { 56 | this.client.graphs().dropGraph(graph, confirmMessage); 57 | } 58 | 59 | public void mode(String graph, GraphMode mode) { 60 | this.client.graphs().mode(graph, mode); 61 | } 62 | 63 | public GraphMode mode(String graph) { 64 | return this.client.graphs().mode(graph); 65 | } 66 | } 67 | -------------------------------------------------------------------------------- /src/main/java/com/baidu/hugegraph/manager/GremlinManager.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 HugeGraph Authors 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with this 6 | * work for additional information regarding copyright ownership. The ASF 7 | * licenses this file to You under the Apache License, Version 2.0 (the 8 | * "License"); you may not use this file except in compliance with the License. 9 | * You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 15 | * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 16 | * License for the specific language governing permissions and limitations 17 | * under the License. 
18 | */ 19 | 20 | package com.baidu.hugegraph.manager; 21 | 22 | import java.util.Map; 23 | 24 | import com.baidu.hugegraph.api.gremlin.GremlinRequest; 25 | import com.baidu.hugegraph.base.ToolClient; 26 | import com.baidu.hugegraph.base.ToolManager; 27 | import com.baidu.hugegraph.structure.gremlin.ResultSet; 28 | 29 | public class GremlinManager extends ToolManager { 30 | 31 | public GremlinManager(ToolClient.ConnectionInfo info) { 32 | super(info, "gremlin"); 33 | } 34 | 35 | public ResultSet execute(String gremlin, Map bindings, 36 | String language, Map aliases) { 37 | GremlinRequest.Builder builder = this.client.gremlin().gremlin(gremlin); 38 | for (Map.Entry entry : aliases.entrySet()) { 39 | builder.alias(entry.getKey(), entry.getValue()); 40 | } 41 | for (Map.Entry entry : bindings.entrySet()) { 42 | builder.binding(entry.getKey(), entry.getValue()); 43 | } 44 | builder.language(language); 45 | return builder.execute(); 46 | } 47 | 48 | public long executeAsTask(String gremlin, Map bindings, 49 | String language) { 50 | GremlinRequest.Builder builder = this.client.gremlin().gremlin(gremlin); 51 | for (Map.Entry entry : bindings.entrySet()) { 52 | builder.binding(entry.getKey(), entry.getValue()); 53 | } 54 | builder.language(language); 55 | return builder.executeAsTask(); 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /src/main/java/com/baidu/hugegraph/manager/RestoreManager.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 HugeGraph Authors 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with this 6 | * work for additional information regarding copyright ownership. The ASF 7 | * licenses this file to You under the Apache License, Version 2.0 (the 8 | * "License"); you may not use this file except in compliance with the License. 9 | * You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 15 | * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 16 | * License for the specific language governing permissions and limitations 17 | * under the License. 
18 | */ 19 | 20 | package com.baidu.hugegraph.manager; 21 | 22 | import java.util.HashMap; 23 | import java.util.List; 24 | import java.util.Map; 25 | import java.util.function.BiConsumer; 26 | 27 | import com.baidu.hugegraph.base.Printer; 28 | import com.baidu.hugegraph.base.ToolClient; 29 | import com.baidu.hugegraph.cmd.SubCommands; 30 | import com.baidu.hugegraph.structure.constant.GraphMode; 31 | import com.baidu.hugegraph.structure.constant.HugeType; 32 | import com.baidu.hugegraph.structure.constant.IdStrategy; 33 | import com.baidu.hugegraph.structure.graph.Edge; 34 | import com.baidu.hugegraph.structure.graph.Vertex; 35 | import com.baidu.hugegraph.structure.schema.EdgeLabel; 36 | import com.baidu.hugegraph.structure.schema.IndexLabel; 37 | import com.baidu.hugegraph.structure.schema.PropertyKey; 38 | import com.baidu.hugegraph.structure.schema.VertexLabel; 39 | import com.baidu.hugegraph.util.E; 40 | 41 | public class RestoreManager extends BackupRestoreBaseManager { 42 | 43 | private GraphMode mode = null; 44 | private boolean clean; 45 | 46 | private Map primaryKeyVLs = null; 47 | 48 | public RestoreManager(ToolClient.ConnectionInfo info) { 49 | super(info, "restore"); 50 | } 51 | 52 | public void init(SubCommands.Restore restore) { 53 | super.init(restore); 54 | this.ensureDirectoryExist(false); 55 | this.clean = restore.clean(); 56 | } 57 | 58 | public void mode(GraphMode mode) { 59 | this.mode = mode; 60 | } 61 | 62 | public void restore(List types) { 63 | try { 64 | this.doRestore(types); 65 | } catch (Throwable e) { 66 | throw e; 67 | } finally { 68 | this.shutdown(this.type()); 69 | } 70 | } 71 | 72 | public void doRestore(List types) { 73 | E.checkNotNull(this.mode, "mode"); 74 | this.startTimer(); 75 | for (HugeType type : types) { 76 | switch (type) { 77 | case VERTEX: 78 | this.restoreVertices(type); 79 | break; 80 | case EDGE: 81 | this.restoreEdges(type); 82 | break; 83 | case PROPERTY_KEY: 84 | this.restorePropertyKeys(type); 85 | break; 86 | case VERTEX_LABEL: 87 | this.restoreVertexLabels(type); 88 | break; 89 | case EDGE_LABEL: 90 | this.restoreEdgeLabels(type); 91 | break; 92 | case INDEX_LABEL: 93 | this.restoreIndexLabels(type); 94 | break; 95 | default: 96 | throw new AssertionError(String.format( 97 | "Bad restore type: %s", type)); 98 | } 99 | } 100 | this.printSummary(); 101 | if (this.clean) { 102 | this.removeDirectory(); 103 | } 104 | } 105 | 106 | private void restoreVertices(HugeType type) { 107 | Printer.print("Vertices restore started"); 108 | this.initPrimaryKeyVLs(); 109 | List files = this.filesWithPrefix(HugeType.VERTEX); 110 | printRestoreFiles(type, files); 111 | Printer.printInBackward("Vertices has been restored: "); 112 | BiConsumer consumer = (t, l) -> { 113 | List vertices = this.readList(t, Vertex.class, l); 114 | int size = vertices.size(); 115 | for (int start = 0; start < size; start += BATCH) { 116 | int end = Math.min(start + BATCH, size); 117 | List subVertices = vertices.subList(start, end); 118 | for (Vertex vertex : subVertices) { 119 | if (this.primaryKeyVLs.containsKey(vertex.label())) { 120 | vertex.id(null); 121 | } 122 | } 123 | this.retry(() -> this.client.graph().addVertices(subVertices), 124 | "restoring vertices"); 125 | this.vertexCounter.getAndAdd(end - start); 126 | Printer.printInBackward(this.vertexCounter.get()); 127 | } 128 | }; 129 | for (String file : files) { 130 | this.submit(() -> { 131 | try { 132 | this.restore(type, file, consumer); 133 | } catch (Throwable e) { 134 | Printer.print("When restoring 
vertices in file '%s' " + 135 | "occurs exception '%s'", file, e); 136 | } 137 | }); 138 | } 139 | this.awaitTasks(); 140 | Printer.print("%d", this.vertexCounter.get()); 141 | Printer.print("Vertices restore finished: %d", 142 | this.vertexCounter.get()); 143 | } 144 | 145 | private void restoreEdges(HugeType type) { 146 | Printer.print("Edges restore started"); 147 | this.initPrimaryKeyVLs(); 148 | List files = this.filesWithPrefix(HugeType.EDGE); 149 | printRestoreFiles(type, files); 150 | Printer.printInBackward("Edges has been restored: "); 151 | BiConsumer consumer = (t, l) -> { 152 | List edges = this.readList(t, Edge.class, l); 153 | int size = edges.size(); 154 | for (int start = 0; start < size; start += BATCH) { 155 | int end = Math.min(start + BATCH, size); 156 | List subEdges = edges.subList(start, end); 157 | /* 158 | * Edge id is concat using source and target vertex id and 159 | * vertices of primary key id strategy might have changed 160 | * their id 161 | */ 162 | this.updateVertexIdInEdge(subEdges); 163 | this.retry(() -> this.client.graph().addEdges(subEdges, false), 164 | "restoring edges"); 165 | this.edgeCounter.getAndAdd(end - start); 166 | Printer.printInBackward(this.edgeCounter.get()); 167 | } 168 | }; 169 | for (String file : files) { 170 | this.submit(() -> { 171 | try { 172 | this.restore(type, file, consumer); 173 | } catch (Throwable e) { 174 | Printer.print("When restoring edges in file '%s' " + 175 | "occurs exception '%s'", file, e); 176 | } 177 | }); 178 | } 179 | this.awaitTasks(); 180 | Printer.print("%d", this.edgeCounter.get()); 181 | Printer.print("Edges restore finished: %d", this.edgeCounter.get()); 182 | } 183 | 184 | private void restorePropertyKeys(HugeType type) { 185 | Printer.print("Property key restore started"); 186 | BiConsumer consumer = (t, l) -> { 187 | for (PropertyKey pk : this.readList(t, PropertyKey.class, l)) { 188 | if (this.mode == GraphMode.MERGING) { 189 | pk.resetId(); 190 | pk.checkExist(false); 191 | } 192 | this.client.schema().addPropertyKey(pk); 193 | this.propertyKeyCounter.getAndIncrement(); 194 | } 195 | }; 196 | String path = this.fileWithPrefix(HugeType.PROPERTY_KEY); 197 | this.restore(type, path, consumer); 198 | Printer.print("Property key restore finished: %d", 199 | this.propertyKeyCounter.get()); 200 | } 201 | 202 | private void restoreVertexLabels(HugeType type) { 203 | Printer.print("Vertex label restore started"); 204 | BiConsumer consumer = (t, l) -> { 205 | for (VertexLabel vl : this.readList(t, VertexLabel.class, l)) { 206 | if (this.mode == GraphMode.MERGING) { 207 | vl.resetId(); 208 | vl.checkExist(false); 209 | } 210 | this.client.schema().addVertexLabel(vl); 211 | this.vertexLabelCounter.getAndIncrement(); 212 | } 213 | }; 214 | String path = this.fileWithPrefix(HugeType.VERTEX_LABEL); 215 | this.restore(type, path, consumer); 216 | Printer.print("Vertex label restore finished: %d", 217 | this.vertexLabelCounter.get()); 218 | } 219 | 220 | private void restoreEdgeLabels(HugeType type) { 221 | Printer.print("Edge label restore started"); 222 | BiConsumer consumer = (t, l) -> { 223 | for (EdgeLabel el : this.readList(t, EdgeLabel.class, l)) { 224 | if (this.mode == GraphMode.MERGING) { 225 | el.resetId(); 226 | el.checkExist(false); 227 | } 228 | this.client.schema().addEdgeLabel(el); 229 | this.edgeLabelCounter.getAndIncrement(); 230 | } 231 | }; 232 | String path = this.fileWithPrefix(HugeType.EDGE_LABEL); 233 | this.restore(type, path, consumer); 234 | Printer.print("Edge label restore finished: 
%d", 235 | this.edgeLabelCounter.get()); 236 | } 237 | 238 | private void restoreIndexLabels(HugeType type) { 239 | Printer.print("Index label restore started"); 240 | BiConsumer consumer = (t, l) -> { 241 | for (IndexLabel il : this.readList(t, IndexLabel.class, l)) { 242 | if (this.mode == GraphMode.MERGING) { 243 | il.resetId(); 244 | il.checkExist(false); 245 | } 246 | this.client.schema().addIndexLabel(il); 247 | this.indexLabelCounter.getAndIncrement(); 248 | } 249 | }; 250 | String path = this.fileWithPrefix(HugeType.INDEX_LABEL); 251 | this.restore(type, path, consumer); 252 | Printer.print("Index label restore finished: %d", 253 | this.indexLabelCounter.get()); 254 | } 255 | 256 | private void restore(HugeType type, String file, 257 | BiConsumer consumer) { 258 | this.read(file, type, consumer); 259 | } 260 | 261 | private void initPrimaryKeyVLs() { 262 | if (this.primaryKeyVLs != null) { 263 | return; 264 | } 265 | this.primaryKeyVLs = new HashMap<>(); 266 | List vertexLabels = this.client.schema().getVertexLabels(); 267 | for (VertexLabel vl : vertexLabels) { 268 | if (vl.idStrategy() == IdStrategy.PRIMARY_KEY) { 269 | this.primaryKeyVLs.put(vl.name(), vl.id()); 270 | } 271 | } 272 | } 273 | 274 | private void updateVertexIdInEdge(List edges) { 275 | for (Edge edge : edges) { 276 | edge.sourceId(this.updateVid(edge.sourceLabel(), edge.sourceId())); 277 | edge.targetId(this.updateVid(edge.targetLabel(), edge.targetId())); 278 | } 279 | } 280 | 281 | private Object updateVid(String label, Object id) { 282 | if (this.primaryKeyVLs.containsKey(label)) { 283 | String sid = (String) id; 284 | return this.primaryKeyVLs.get(label) + 285 | sid.substring(sid.indexOf(':')); 286 | } 287 | return id; 288 | } 289 | 290 | private void printRestoreFiles(HugeType type, List files) { 291 | Printer.print("Restoring %s ...", type); 292 | Printer.printList("files", files); 293 | } 294 | } 295 | -------------------------------------------------------------------------------- /src/main/java/com/baidu/hugegraph/manager/TasksManager.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 HugeGraph Authors 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with this 6 | * work for additional information regarding copyright ownership. The ASF 7 | * licenses this file to You under the Apache License, Version 2.0 (the 8 | * "License"); you may not use this file except in compliance with the License. 9 | * You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 15 | * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 16 | * License for the specific language governing permissions and limitations 17 | * under the License. 
18 | */ 19 | 20 | package com.baidu.hugegraph.manager; 21 | 22 | import java.util.List; 23 | import java.util.Set; 24 | 25 | import com.baidu.hugegraph.base.ToolClient; 26 | import com.baidu.hugegraph.base.ToolManager; 27 | import com.baidu.hugegraph.structure.Task; 28 | import com.google.common.collect.ImmutableSet; 29 | 30 | public class TasksManager extends ToolManager { 31 | 32 | public static final Set<String> TASK_STATUSES = ImmutableSet.of( 33 | "UNKNOWN", "NEW", "QUEUED", "RESTORING", "RUNNING", 34 | "SUCCESS", "CANCELLED", "FAILED" 35 | ); 36 | 37 | private static final Set<String> UNCOMPLETED_STATUSES = ImmutableSet.of( 38 | "UNKNOWN", "NEW", "QUEUED", "RESTORING", "RUNNING" 39 | ); 40 | 41 | private static final Set<String> COMPLETED_STATUSES = ImmutableSet.of( 42 | "SUCCESS", "CANCELLED", "FAILED" 43 | ); 44 | 45 | private static long TASK_LIMIT = 10000; 46 | 47 | public TasksManager(ToolClient.ConnectionInfo info) { 48 | super(info, "tasks"); 49 | } 50 | 51 | public List<Task> list(String status, long limit) { 52 | return this.client.tasks().list(status, limit); 53 | } 54 | 55 | public Task get(long taskId) { 56 | return this.client.tasks().get(taskId); 57 | } 58 | 59 | public void delete(long taskId) { 60 | this.client.tasks().delete(taskId); 61 | } 62 | 63 | public void cancel(long taskId) { 64 | this.client.tasks().cancel(taskId); 65 | } 66 | 67 | public void clear(boolean force) { 68 | if (force) { 69 | // Cancel all uncompleted tasks 70 | for (String status : UNCOMPLETED_STATUSES) { 71 | do { 72 | List<Task> tasks = this.list(status, TASK_LIMIT); 73 | tasks.forEach(t -> this.cancel(t.id())); 74 | if (tasks.size() < TASK_LIMIT) { 75 | break; 76 | } 77 | } while (true); 78 | } 79 | } 80 | 81 | // Delete all completed tasks 82 | for (String status : COMPLETED_STATUSES) { 83 | do { 84 | List<Task> tasks = this.list(status, TASK_LIMIT); 85 | tasks.forEach(t -> this.delete(t.id())); 86 | if (tasks.size() < TASK_LIMIT) { 87 | break; 88 | } 89 | } while (true); 90 | } 91 | } 92 | } 93 | -------------------------------------------------------------------------------- /src/main/java/com/baidu/hugegraph/structure/JsonGraph.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 HugeGraph Authors 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with this 6 | * work for additional information regarding copyright ownership. The ASF 7 | * licenses this file to You under the Apache License, Version 2.0 (the 8 | * "License"); you may not use this file except in compliance with the License. 9 | * You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 15 | * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 16 | * License for the specific language governing permissions and limitations 17 | * under the License.
18 | */ 19 | 20 | package com.baidu.hugegraph.structure; 21 | 22 | import java.util.Map; 23 | import java.util.Set; 24 | import java.util.concurrent.ConcurrentHashMap; 25 | 26 | import org.eclipse.jetty.util.ConcurrentHashSet; 27 | 28 | import com.baidu.hugegraph.structure.graph.Edge; 29 | import com.baidu.hugegraph.structure.graph.Vertex; 30 | import com.baidu.hugegraph.util.JsonUtil; 31 | import com.fasterxml.jackson.annotation.JsonRawValue; 32 | 33 | public class JsonGraph { 34 | 35 | private static final int INIT_VERTEX_CAPACITY = 1_000_000; 36 | 37 | private Map<String, Map<Object, JsonVertex>> tables; 38 | 39 | public JsonGraph() { 40 | this.tables = new ConcurrentHashMap<>(); 41 | } 42 | 43 | public Set<String> tables() { 44 | return this.tables.keySet(); 45 | } 46 | 47 | public void put(Vertex vertex) { 48 | // Add vertex to table of `label` 49 | Map<Object, JsonVertex> vertices = this.table(vertex.label()); 50 | vertices.put(vertex.id(), JsonVertex.from(vertex)); 51 | } 52 | 53 | public void put(Edge edge) { 54 | // Find source vertex 55 | Map<Object, JsonVertex> vertices = this.table(edge.sourceLabel()); 56 | assert vertices != null; 57 | JsonVertex source = vertices.get(edge.sourceId()); 58 | if (source == null) { 59 | // Printer.print("Invalid edge without source vertex: %s", edge); 60 | return; 61 | } 62 | 63 | // Find target vertex 64 | vertices = this.table(edge.targetLabel()); 65 | assert vertices != null; 66 | JsonVertex target = vertices.get(edge.targetId()); 67 | if (target == null) { 68 | // Printer.print("Invalid edge without target vertex: %s", edge); 69 | return; 70 | } 71 | 72 | // Add edge to source&target vertex 73 | JsonEdge jsonEdge = JsonEdge.from(edge); 74 | source.addEdge(jsonEdge); 75 | target.addEdge(jsonEdge); 76 | } 77 | 78 | public Map<Object, JsonVertex> table(String table) { 79 | Map<Object, JsonVertex> vertices = this.tables.get(table); 80 | if (vertices == null) { 81 | vertices = new ConcurrentHashMap<>(INIT_VERTEX_CAPACITY); 82 | this.tables.putIfAbsent(table, vertices); 83 | } 84 | return this.tables.get(table); 85 | } 86 | 87 | public static class JsonVertex { 88 | 89 | private Object id; 90 | private String label; 91 | private String properties; 92 | private Set<JsonEdge> edges; 93 | 94 | public JsonVertex() { 95 | this.edges = new ConcurrentHashSet<>(); 96 | } 97 | 98 | public void addEdge(JsonEdge edge) { 99 | this.edges.add(edge); 100 | } 101 | 102 | public Object getId() { 103 | return this.id; 104 | } 105 | 106 | public String getLabel() { 107 | return this.label; 108 | } 109 | 110 | @JsonRawValue 111 | public String getProperties() { 112 | return this.properties; 113 | } 114 | 115 | public Set<JsonEdge> getEdges() { 116 | return this.edges; 117 | } 118 | 119 | @SuppressWarnings("unchecked") 120 | public Map<String, Object> properties() { 121 | return JsonUtil.fromJson(this.properties, Map.class); 122 | } 123 | 124 | public static JsonVertex from(Vertex v) { 125 | JsonVertex vertex = new JsonVertex(); 126 | vertex.id = v.id(); 127 | vertex.label = v.label(); 128 | vertex.properties = JsonUtil.toJson(v.properties()); 129 | return vertex; 130 | } 131 | } 132 | 133 | public static class JsonEdge { 134 | 135 | private String id; 136 | private String label; 137 | private Object source; 138 | private Object target; 139 | private String properties; 140 | 141 | public String getId() { 142 | return this.id; 143 | } 144 | 145 | public String getLabel() { 146 | return this.label; 147 | } 148 | 149 | public Object getSource() { 150 | return this.source; 151 | } 152 | 153 | public Object getTarget() { 154 | return this.target; 155 | } 156 | 157 | @JsonRawValue 158 | public String getProperties() { 159 |
return this.properties; 160 | } 161 | 162 | @SuppressWarnings("unchecked") 163 | public Map<String, Object> properties() { 164 | return JsonUtil.fromJson(this.properties, Map.class); 165 | } 166 | 167 | public static JsonEdge from(Edge e) { 168 | JsonEdge edge = new JsonEdge(); 169 | edge.id = e.id(); 170 | edge.label = e.label(); 171 | edge.source = e.sourceId(); 172 | edge.target = e.targetId(); 173 | edge.properties = JsonUtil.toJson(e.properties()); 174 | return edge; 175 | } 176 | } 177 | } 178 | -------------------------------------------------------------------------------- /src/main/java/com/baidu/hugegraph/util/ToolUtil.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 HugeGraph Authors 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with this 6 | * work for additional information regarding copyright ownership. The ASF 7 | * licenses this file to You under the Apache License, Version 2.0 (the 8 | * "License"); you may not use this file except in compliance with the License. 9 | * You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 15 | * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 16 | * License for the specific language governing permissions and limitations 17 | * under the License. 18 | */ 19 | 20 | package com.baidu.hugegraph.util; 21 | 22 | import java.util.Map; 23 | import java.util.Scanner; 24 | 25 | import com.baidu.hugegraph.base.Printer; 26 | import com.baidu.hugegraph.constant.Constants; 27 | import com.baidu.hugegraph.exception.ExitException; 28 | import com.beust.jcommander.JCommander; 29 | 30 | public final class ToolUtil { 31 | 32 | public static void printOrThrow(Throwable e, boolean throwMode) { 33 | Printer.print("Failed to execute %s", e.getMessage()); 34 | if (throwMode) { 35 | if (e instanceof RuntimeException) { 36 | throw (RuntimeException) e; 37 | } 38 | throw new RuntimeException(e); 39 | } 40 | printExceptionStackIfNeeded(e); 41 | } 42 | 43 | public static void printExceptionStackIfNeeded(Throwable e) { 44 | System.out.println("Type y(yes) to print exception stack [default n]?"); 45 | Scanner scan = new Scanner(System.in); 46 | String inputInformation = scan.nextLine(); 47 | 48 | if (inputInformation.equalsIgnoreCase(Constants.INPUT_YES) || 49 | inputInformation.equalsIgnoreCase(Constants.INPUT_Y)) { 50 | e.printStackTrace(); 51 | } 52 | } 53 | 54 | public static void exitOrThrow(ExitException e, boolean throwMode) { 55 | if (throwMode) { 56 | throw e; 57 | } 58 | 59 | if (e.exitCode() != Constants.EXIT_CODE_NORMAL) { 60 | Printer.print(e.getMessage()); 61 | } 62 | Printer.print(e.details()); 63 | } 64 | 65 | public static String commandsCategory(JCommander jCommander) { 66 | StringBuffer sb = new StringBuffer(); 67 | sb.append("================================================"); 68 | sb.append("\n"); 69 | sb.append("Warning: must provide one sub-command"); 70 | sb.append("\n"); 71 | sb.append("================================================"); 72 | sb.append("\n"); 73 | sb.append("Here are some sub-commands:"); 74 | sb.append("\n"); 75 | Map<String, JCommander> subCommands = jCommander.getCommands(); 76 | for (String subCommand : subCommands.keySet()) { 77 | sb.append("|"); 78 |
78 |             sb.append(subCommand);
79 |             sb.append("\n");
80 |         }
81 |         sb.append("================================================");
82 |         sb.append("\n");
83 |         sb.append("Please use 'hugegraph help' to get detail help info " +
84 |                   "of all sub-commands or 'hugegraph help {sub-command}' " +
85 |                   "to get detail help info of one sub-command");
86 |         sb.append("\n");
87 |         sb.append("================================================");
88 | 
89 |         return sb.toString();
90 |     }
91 | 
92 |     public static String commandUsage(JCommander jCommander) {
93 |         StringBuilder sb = new StringBuilder();
94 |         jCommander.usage(sb);
95 | 
96 |         return sb.toString();
97 |     }
98 | }
99 | 
-------------------------------------------------------------------------------- /src/main/resources/log4j2.xml: --------------------------------------------------------------------------------
[log4j2 XML configuration omitted: the markup was not preserved in this text export]
-------------------------------------------------------------------------------- /src/test/java/com/baidu/hugegraph/test/functional/AuthBackupTest.java: --------------------------------------------------------------------------------
1 | /*
2 |  * Copyright 2017 HugeGraph Authors
3 |  *
4 |  * Licensed to the Apache Software Foundation (ASF) under one or more
5 |  * contributor license agreements. See the NOTICE file distributed with this
6 |  * work for additional information regarding copyright ownership. The ASF
7 |  * licenses this file to You under the Apache License, Version 2.0 (the
8 |  * "License"); you may not use this file except in compliance with the License.
9 |  * You may obtain a copy of the License at
10 |  *
11 |  * http://www.apache.org/licenses/LICENSE-2.0
12 |  *
13 |  * Unless required by applicable law or agreed to in writing, software
14 |  * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
15 |  * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
16 |  * License for the specific language governing permissions and limitations
17 |  * under the License.
18 | */ 19 | 20 | package com.baidu.hugegraph.test.functional; 21 | 22 | import java.util.List; 23 | 24 | import org.junit.Before; 25 | import org.junit.Test; 26 | 27 | import com.baidu.hugegraph.cmd.HugeGraphCommand; 28 | import com.baidu.hugegraph.test.util.FileUtil; 29 | import com.baidu.hugegraph.testutil.Assert; 30 | 31 | public class AuthBackupTest extends AuthTest { 32 | 33 | @Before 34 | public void init() { 35 | FileUtil.clearDirectories(DEFAULT_URL); 36 | } 37 | 38 | @Test 39 | public void testAuthBackup() { 40 | String[] args = new String[]{ 41 | "--throw-mode", "true", 42 | "--user", USER_NAME, 43 | "--password", USER_PASSWORD, 44 | "auth-backup" 45 | }; 46 | 47 | HugeGraphCommand.main(args); 48 | 49 | Assert.assertTrue(FileUtil.checkFileExists(DEFAULT_URL)); 50 | List fileNames = FileUtil.subdirectories(DEFAULT_URL); 51 | Assert.assertTrue(fileNames.size() == 5); 52 | } 53 | 54 | @Test 55 | public void testAuthBackupByTypes() { 56 | String[] args = new String[]{ 57 | "--throw-mode", "true", 58 | "--user", USER_NAME, 59 | "--password", USER_PASSWORD, 60 | "auth-backup", 61 | "--types", "user,group" 62 | }; 63 | 64 | HugeGraphCommand.main(args); 65 | 66 | Assert.assertTrue(FileUtil.checkFileExists(DEFAULT_URL)); 67 | List fileNames = FileUtil.subdirectories(DEFAULT_URL); 68 | Assert.assertTrue(fileNames.size() == 2); 69 | } 70 | 71 | @Test 72 | public void testAuthBackupWithWrongType() { 73 | String[] args = new String[]{ 74 | "--throw-mode", "true", 75 | "--user", USER_NAME, 76 | "--password", USER_PASSWORD, 77 | "auth-backup", 78 | "--types", "user,group,test" 79 | }; 80 | 81 | Assert.assertThrows(IllegalArgumentException.class, () -> { 82 | HugeGraphCommand.main(args); 83 | }, e -> { 84 | Assert.assertContains("valid value is 'all' or combination of " + 85 | "[user,group,target,belong,access]", 86 | e.getMessage()); 87 | }); 88 | } 89 | 90 | @Test 91 | public void testAuthBackupByDirectory() { 92 | String directory = "./backup"; 93 | String[] args = new String[]{ 94 | "--throw-mode", "true", 95 | "--user", USER_NAME, 96 | "--password", USER_PASSWORD, 97 | "auth-backup", 98 | "--directory", directory 99 | }; 100 | 101 | HugeGraphCommand.main(args); 102 | 103 | Assert.assertTrue(FileUtil.checkFileExists(directory)); 104 | List fileNames = FileUtil.subdirectories(directory); 105 | Assert.assertTrue(fileNames.size() == 5); 106 | } 107 | } 108 | -------------------------------------------------------------------------------- /src/test/java/com/baidu/hugegraph/test/functional/AuthRestoreTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 HugeGraph Authors 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with this 6 | * work for additional information regarding copyright ownership. The ASF 7 | * licenses this file to You under the Apache License, Version 2.0 (the 8 | * "License"); you may not use this file except in compliance with the License. 9 | * You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 15 | * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 16 | * License for the specific language governing permissions and limitations 17 | * under the License. 
18 | */ 19 | 20 | package com.baidu.hugegraph.test.functional; 21 | 22 | import java.util.List; 23 | import java.util.Map; 24 | 25 | import org.apache.commons.collections.CollectionUtils; 26 | import org.junit.Before; 27 | import org.junit.Test; 28 | 29 | import com.baidu.hugegraph.cmd.HugeGraphCommand; 30 | import com.baidu.hugegraph.driver.HugeClient; 31 | import com.baidu.hugegraph.structure.auth.Access; 32 | import com.baidu.hugegraph.structure.auth.Belong; 33 | import com.baidu.hugegraph.structure.auth.Group; 34 | import com.baidu.hugegraph.structure.auth.Target; 35 | import com.baidu.hugegraph.structure.auth.User; 36 | import com.baidu.hugegraph.structure.constant.HugeType; 37 | import com.baidu.hugegraph.test.util.FileUtil; 38 | import com.baidu.hugegraph.testutil.Assert; 39 | import com.beust.jcommander.ParameterException; 40 | import com.google.common.collect.Lists; 41 | import com.google.common.collect.Maps; 42 | 43 | public class AuthRestoreTest extends AuthTest { 44 | 45 | private HugeClient client; 46 | 47 | @Before 48 | public void init() { 49 | client = HugeClient.builder(URL, GRAPH) 50 | .configUser(USER_NAME, USER_PASSWORD) 51 | .configTimeout(TIME_OUT) 52 | .configSSL(TRUST_STORE_FILE, TRUST_STORE_PASSWORD) 53 | .build(); 54 | } 55 | 56 | @Test 57 | public void testAuthRestoreForAllType() { 58 | this.loadData(HugeType.USER, "auth_users.txt"); 59 | this.loadData(HugeType.TARGET, "auth_targets.txt"); 60 | this.loadData(HugeType.GROUP, "auth_groups.txt"); 61 | this.loadData(HugeType.BELONG, "auth_belongs.txt"); 62 | this.loadData(HugeType.ACCESS, "auth_accesses.txt"); 63 | 64 | String[] args = new String[]{ 65 | "--throw-mode", "true", 66 | "--user", USER_NAME, 67 | "--password", USER_PASSWORD, 68 | "auth-restore", 69 | "--directory", DEFAULT_URL, 70 | "--init-password", "123456", 71 | "--strategy", "ignore" 72 | }; 73 | 74 | HugeGraphCommand.main(args); 75 | 76 | List idList = Lists.newArrayList(); 77 | List userList = this.client.auth().listUsers(); 78 | Map userMap = Maps.newHashMap(); 79 | for (User user1 : userList) { 80 | userMap.put(user1.name(), user1); 81 | } 82 | Assert.assertTrue(userMap.containsKey("test_user1")); 83 | idList.add(userMap.get("test_user1").id().toString()); 84 | 85 | List groups = this.client.auth().listGroups(); 86 | Map groupMap = Maps.newHashMap(); 87 | for (Group group : groups) { 88 | groupMap.put(group.name(), group); 89 | } 90 | Assert.assertTrue(groupMap.containsKey("test_group6")); 91 | idList.add(groupMap.get("test_group6").id().toString()); 92 | 93 | List targets = this.client.auth().listTargets(); 94 | Map targetMap = Maps.newHashMap(); 95 | for (Target target : targets) { 96 | targetMap.put(target.name(), target); 97 | } 98 | Assert.assertTrue(targetMap.containsKey("test_target1")); 99 | idList.add(targetMap.get("test_target1").id().toString()); 100 | 101 | List belongs = this.client.auth().listBelongs(); 102 | Assert.assertTrue(CollectionUtils.isNotEmpty(belongs)); 103 | boolean checkUserAndGroup = false; 104 | for (Belong belong : belongs) { 105 | if (idList.contains(belong.user().toString()) && 106 | idList.contains(belong.group().toString())) { 107 | checkUserAndGroup = true; 108 | break; 109 | } 110 | } 111 | Assert.assertTrue(checkUserAndGroup); 112 | 113 | List accesses = this.client.auth().listAccesses(); 114 | Assert.assertTrue(CollectionUtils.isNotEmpty(accesses)); 115 | boolean checkGroupAndTarget = false; 116 | for (Access access : accesses) { 117 | if (idList.contains(access.group().toString()) && 118 | 
idList.contains(access.target().toString())) { 119 | checkGroupAndTarget = true; 120 | break; 121 | } 122 | } 123 | Assert.assertTrue(checkGroupAndTarget); 124 | } 125 | 126 | @Test 127 | public void testAuthRestoreForUser() { 128 | this.loadData(HugeType.USER, "auth_users.txt"); 129 | 130 | String[] args = new String[]{ 131 | "--throw-mode", "true", 132 | "--user", USER_NAME, 133 | "--password", USER_PASSWORD, 134 | "auth-restore", 135 | "--types", "user", 136 | "--directory", DEFAULT_URL, 137 | "--init-password", "123456" 138 | }; 139 | 140 | HugeGraphCommand.main(args); 141 | 142 | List userList = this.client.auth().listUsers(); 143 | Map userMap = Maps.newHashMap(); 144 | for (User user1 : userList) { 145 | userMap.put(user1.name(), user1); 146 | } 147 | 148 | Assert.assertTrue(userMap.containsKey("test_user1")); 149 | } 150 | 151 | @Test 152 | public void testRestoreWithoutInitPassword() { 153 | String[] args = new String[]{ 154 | "--throw-mode", "true", 155 | "--user", USER_NAME, 156 | "--password", USER_PASSWORD, 157 | "auth-restore", 158 | "--types", "user", 159 | "--directory", DEFAULT_URL 160 | }; 161 | 162 | Assert.assertThrows(IllegalStateException.class, () -> { 163 | HugeGraphCommand.main(args); 164 | }, e -> { 165 | String msg = e.getMessage(); 166 | Assert.assertTrue(msg.endsWith("The following option is " + 167 | "required: [--init-password]")); 168 | }); 169 | } 170 | 171 | @Test 172 | public void testAuthRestoreWithConflictAndStopStrategy() { 173 | this.loadData(HugeType.USER, "auth_users_conflict.txt"); 174 | 175 | String[] args = new String[]{ 176 | "--throw-mode", "true", 177 | "--user", USER_NAME, 178 | "--password", USER_PASSWORD, 179 | "auth-restore", 180 | "--types", "user", 181 | "--strategy", "stop", 182 | "--init-password", "123456" 183 | }; 184 | 185 | Assert.assertThrows(IllegalStateException.class, () -> { 186 | HugeGraphCommand.main(args); 187 | }, e -> { 188 | Assert.assertContains("Restore conflict with STOP strategy", 189 | e.getMessage()); 190 | }); 191 | } 192 | 193 | @Test 194 | public void testAuthRestoreWithIgnoreStrategy() { 195 | this.loadData(HugeType.USER, "auth_users_conflict.txt"); 196 | 197 | String[] args = new String[]{ 198 | "--throw-mode", "true", 199 | "--user", USER_NAME, 200 | "--password", USER_PASSWORD, 201 | "auth-restore", 202 | "--types", "user", 203 | "--strategy", "ignore", 204 | "--init-password", "123456" 205 | }; 206 | 207 | HugeGraphCommand.main(args); 208 | 209 | List userList = this.client.auth().listUsers(); 210 | Map userMap = Maps.newHashMap(); 211 | for (User user1 : userList) { 212 | userMap.put(user1.name(), user1); 213 | } 214 | 215 | Assert.assertTrue(userMap.containsKey("admin")); 216 | } 217 | 218 | @Test 219 | public void testAuthRestoreWithWrongDirectory() { 220 | String filePath = "./auth-test-test"; 221 | 222 | String[] args = new String[]{ 223 | "--throw-mode", "true", 224 | "--user", USER_NAME, 225 | "--password", USER_PASSWORD, 226 | "auth-restore", 227 | "--types", "user", 228 | "--strategy", "stop", 229 | "--init-password", "123456", 230 | "--directory", filePath 231 | }; 232 | 233 | Assert.assertThrows(IllegalStateException.class, () -> { 234 | HugeGraphCommand.main(args); 235 | }, e -> { 236 | Assert.assertContains("The directory does not exist", 237 | e.getMessage()); 238 | }); 239 | } 240 | 241 | @Test 242 | public void testAuthRestoreWithWrongType() { 243 | String filePath = "./auth-test-test"; 244 | 245 | String[] args = new String[]{ 246 | "--throw-mode", "true", 247 | "--user", USER_NAME, 248 | 
"--password", USER_PASSWORD, 249 | "auth-restore", 250 | "--types", "user,test", 251 | "--strategy", "stop", 252 | "--init-password", "123456", 253 | "--directory", filePath 254 | }; 255 | 256 | Assert.assertThrows(IllegalArgumentException.class, () -> { 257 | HugeGraphCommand.main(args); 258 | }, e -> { 259 | Assert.assertContains("valid value is 'all' or combination of " + 260 | "[user,group,target,belong,access]", 261 | e.getMessage()); 262 | }); 263 | } 264 | 265 | @Test 266 | public void testAuthRestoreByBelongWithoutDependency() { 267 | String filePath = "./auth-test-test"; 268 | 269 | String[] args = new String[]{ 270 | "--throw-mode", "true", 271 | "--user", USER_NAME, 272 | "--password", USER_PASSWORD, 273 | "auth-restore", 274 | "--types", "belong", 275 | "--strategy", "stop", 276 | "--init-password", "123456", 277 | "--directory", filePath 278 | }; 279 | 280 | Assert.assertThrows(IllegalArgumentException.class, () -> { 281 | HugeGraphCommand.main(args); 282 | }, e -> { 283 | Assert.assertContains("if type contains 'belong' then " + 284 | "'user' and 'group' are required.", 285 | e.getMessage()); 286 | }); 287 | } 288 | 289 | @Test 290 | public void testAuthRestoreByAccessWithoutDependency() { 291 | String filePath = "./auth-test-test"; 292 | 293 | String[] args = new String[]{ 294 | "--throw-mode", "true", 295 | "--user", USER_NAME, 296 | "--password", USER_PASSWORD, 297 | "auth-restore", 298 | "--types", "access", 299 | "--strategy", "stop", 300 | "--init-password", "123456", 301 | "--directory", filePath 302 | }; 303 | 304 | Assert.assertThrows(IllegalArgumentException.class, () -> { 305 | HugeGraphCommand.main(args); 306 | }, e -> { 307 | Assert.assertContains("if type contains 'access' then " + 308 | "'group' and 'target' are required.", 309 | e.getMessage()); 310 | }); 311 | } 312 | 313 | @Test 314 | public void testAuthRestoreWithWrongStrategy() { 315 | String filePath = "./auth-test-test"; 316 | 317 | String[] args = new String[]{ 318 | "--throw-mode", "true", 319 | "--user", USER_NAME, 320 | "--password", USER_PASSWORD, 321 | "auth-restore", 322 | "--types", "user", 323 | "--strategy", "test", 324 | "--init-password", "123456", 325 | "--directory", filePath 326 | }; 327 | 328 | Assert.assertThrows(IllegalArgumentException.class, () -> { 329 | HugeGraphCommand.main(args); 330 | }, e -> { 331 | Assert.assertContains("Invalid --strategy 'test', valid " + 332 | "value is 'stop' or 'ignore", 333 | e.getMessage()); 334 | }); 335 | } 336 | 337 | private void loadData(HugeType hugeType, String dataFilePath) { 338 | String restoreDataPath = DEFAULT_URL + hugeType.string(); 339 | String testRestoreDataPath = DEFAULT_TEST_URL + dataFilePath; 340 | 341 | List list = FileUtil.readTestRestoreData(FileUtil.configPath( 342 | testRestoreDataPath)); 343 | FileUtil.writeTestRestoreData(restoreDataPath, list); 344 | } 345 | } 346 | -------------------------------------------------------------------------------- /src/test/java/com/baidu/hugegraph/test/functional/AuthTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 HugeGraph Authors 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with this 6 | * work for additional information regarding copyright ownership. The ASF 7 | * licenses this file to You under the Apache License, Version 2.0 (the 8 | * "License"); you may not use this file except in compliance with the License. 
9 | * You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 15 | * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 16 | * License for the specific language governing permissions and limitations 17 | * under the License. 18 | */ 19 | 20 | package com.baidu.hugegraph.test.functional; 21 | 22 | public class AuthTest { 23 | 24 | public static final String DEFAULT_URL = "./auth-backup-restore/"; 25 | public static final String DEFAULT_TEST_URL = "/auth/"; 26 | public static final String USER_NAME = "admin"; 27 | public static final String USER_PASSWORD = "pa"; 28 | public static final String URL = "http://127.0.0.1:8080"; 29 | public static final String GRAPH = "hugegraph"; 30 | public static final Integer TIME_OUT = 30; 31 | public static final String TRUST_STORE_FILE = ""; 32 | public static final String TRUST_STORE_PASSWORD = ""; 33 | } 34 | -------------------------------------------------------------------------------- /src/test/java/com/baidu/hugegraph/test/functional/CommandTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 HugeGraph Authors 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with this 6 | * work for additional information regarding copyright ownership. The ASF 7 | * licenses this file to You under the Apache License, Version 2.0 (the 8 | * "License"); you may not use this file except in compliance with the License. 9 | * You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 15 | * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 16 | * License for the specific language governing permissions and limitations 17 | * under the License. 
18 | */ 19 | 20 | package com.baidu.hugegraph.test.functional; 21 | 22 | import org.junit.Test; 23 | 24 | import com.baidu.hugegraph.cmd.HugeGraphCommand; 25 | import com.baidu.hugegraph.exception.ExitException; 26 | import com.baidu.hugegraph.testutil.Assert; 27 | 28 | public class CommandTest extends AuthTest { 29 | 30 | @Test 31 | public void testHelpCommand() { 32 | String[] args = new String[]{ 33 | "--throw-mode", "true", 34 | "--user", USER_NAME, 35 | "--password", USER_PASSWORD, 36 | "help" 37 | }; 38 | 39 | Assert.assertThrows(ExitException.class, () -> { 40 | HugeGraphCommand.main(args); 41 | }, e -> { 42 | ExitException exception = (ExitException) e; 43 | Assert.assertContains("Command : hugegragh help", 44 | exception.getMessage()); 45 | Assert.assertContains("Usage: hugegraph [options] [command]", 46 | exception.details()); 47 | }); 48 | } 49 | 50 | @Test 51 | public void testHelpSubCommand() { 52 | String[] args = new String[]{ 53 | "--throw-mode", "true", 54 | "--user", USER_NAME, 55 | "--password", USER_PASSWORD, 56 | "help", "auth-backup" 57 | }; 58 | 59 | Assert.assertThrows(ExitException.class, () -> { 60 | HugeGraphCommand.main(args); 61 | }, e -> { 62 | ExitException exception = (ExitException) e; 63 | Assert.assertContains("Command : hugegragh help auth-backup", 64 | exception.getMessage()); 65 | Assert.assertContains("Usage: auth-backup [options]", 66 | exception.details()); 67 | }); 68 | } 69 | 70 | @Test 71 | public void testBadHelpSubCommandException() { 72 | String badCommand = "asd"; 73 | String[] args = new String[]{ 74 | "--throw-mode", "true", 75 | "--user", USER_NAME, 76 | "--password", USER_PASSWORD, 77 | "help", badCommand 78 | }; 79 | 80 | Assert.assertThrows(ExitException.class, () -> { 81 | HugeGraphCommand.main(args); 82 | }, e -> { 83 | ExitException exception = (ExitException) e; 84 | Assert.assertContains(String.format( 85 | "Unexpected help sub-command %s", 86 | badCommand), exception.getMessage()); 87 | Assert.assertContains("Here are some sub-command ", 88 | exception.details()); 89 | }); 90 | } 91 | 92 | @Test 93 | public void testEmptyCommandException() { 94 | String[] args = new String[]{ 95 | "--throw-mode", "true", 96 | "--user", USER_NAME, 97 | "--password", USER_PASSWORD 98 | }; 99 | 100 | Assert.assertThrows(ExitException.class, () -> { 101 | HugeGraphCommand.main(args); 102 | }, e -> { 103 | ExitException exception = (ExitException) e; 104 | Assert.assertContains("No sub-command found", 105 | exception.getMessage()); 106 | Assert.assertContains("Warning : must provide one sub-command", 107 | exception.details()); 108 | }); 109 | } 110 | } 111 | -------------------------------------------------------------------------------- /src/test/java/com/baidu/hugegraph/test/functional/FuncTestSuite.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 HugeGraph Authors 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with this 6 | * work for additional information regarding copyright ownership. The ASF 7 | * licenses this file to You under the Apache License, Version 2.0 (the 8 | * "License"); you may not use this file except in compliance with the License. 
9 | * You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 15 | * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 16 | * License for the specific language governing permissions and limitations 17 | * under the License. 18 | */ 19 | 20 | package com.baidu.hugegraph.test.functional; 21 | 22 | import org.junit.runner.RunWith; 23 | import org.junit.runners.Suite; 24 | 25 | @RunWith(Suite.class) 26 | @Suite.SuiteClasses({ 27 | AuthBackupTest.class, 28 | AuthRestoreTest.class, 29 | CommandTest.class 30 | }) 31 | public class FuncTestSuite { 32 | } 33 | -------------------------------------------------------------------------------- /src/test/java/com/baidu/hugegraph/test/util/FileUtil.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 HugeGraph Authors 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with this 6 | * work for additional information regarding copyright ownership. The ASF 7 | * licenses this file to You under the Apache License, Version 2.0 (the 8 | * "License"); you may not use this file except in compliance with the License. 9 | * You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 15 | * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 16 | * License for the specific language governing permissions and limitations 17 | * under the License. 
18 | */ 19 | 20 | package com.baidu.hugegraph.test.util; 21 | 22 | import java.io.BufferedReader; 23 | import java.io.ByteArrayOutputStream; 24 | import java.io.File; 25 | import java.io.FileInputStream; 26 | import java.io.FileOutputStream; 27 | import java.io.IOException; 28 | import java.io.InputStream; 29 | import java.io.InputStreamReader; 30 | import java.nio.file.Paths; 31 | import java.util.List; 32 | 33 | import org.apache.commons.collections.ListUtils; 34 | 35 | import com.baidu.hugegraph.api.API; 36 | import com.baidu.hugegraph.exception.ToolsException; 37 | import com.google.common.collect.Lists; 38 | 39 | public class FileUtil { 40 | 41 | protected static final int LBUF_SIZE = 1024; 42 | protected static final String CONFIG_PATH = "src/test/resources"; 43 | 44 | public static String configPath(String fileName) { 45 | return Paths.get(CONFIG_PATH, fileName).toString(); 46 | } 47 | 48 | public static boolean checkFileExists(String filePath) { 49 | File file = new File(filePath); 50 | if (file.exists()) { 51 | return true; 52 | } 53 | return false; 54 | } 55 | 56 | public static List subdirectories(String filePath) { 57 | File file = new File(filePath); 58 | if (!file.exists()) { 59 | return ListUtils.EMPTY_LIST; 60 | } 61 | String[] files = file.list(); 62 | List list = Lists.newArrayList(); 63 | for (int i = 0; i < files.length; i++) { 64 | File fileDir = new File(file, files[i]); 65 | list.add(fileDir.getName()); 66 | } 67 | 68 | return list; 69 | } 70 | 71 | public static void clearDirectories(String filePath) { 72 | File file = new File(filePath); 73 | if (file.exists()) { 74 | String[] files = file.list(); 75 | for (int i = 0; i < files.length; i++) { 76 | File fileDir = new File(file, files[i]); 77 | fileDir.delete(); 78 | } 79 | } 80 | } 81 | 82 | public static long writeTestRestoreData(String filePath, List list) { 83 | long count = 0L; 84 | try (FileOutputStream os = new FileOutputStream(filePath); 85 | ByteArrayOutputStream baos = new ByteArrayOutputStream(LBUF_SIZE)) { 86 | StringBuilder builder = new StringBuilder(LBUF_SIZE); 87 | for (Object e : list) { 88 | count++; 89 | builder.append(e).append("\n"); 90 | } 91 | baos.write(builder.toString().getBytes(API.CHARSET)); 92 | os.write(baos.toByteArray()); 93 | } catch (IOException e) { 94 | throw new ToolsException("Failed write file path is %s", 95 | e, filePath); 96 | } 97 | 98 | return count; 99 | } 100 | 101 | public static List readTestRestoreData(String filePath) { 102 | List results = Lists.newArrayList(); 103 | try (InputStream is = new FileInputStream(filePath); 104 | InputStreamReader isr = new InputStreamReader(is, API.CHARSET)) { 105 | BufferedReader reader = new BufferedReader(isr); 106 | String line; 107 | while ((line = reader.readLine()) != null) { 108 | results.add(line); 109 | } 110 | } catch (IOException e) { 111 | throw new ToolsException("Failed read file path is %s", 112 | e, filePath); 113 | } 114 | 115 | return results; 116 | } 117 | } 118 | -------------------------------------------------------------------------------- /src/test/resources/auth/auth_accesses.txt: -------------------------------------------------------------------------------- 1 | {"id":"S-66:test_group6>-88>11>S-66:test_target1","group":"-66:test_group6","target":"-66:test_target1","access_permission":"READ","access_description":"test","access_create":"2020-11-11 15:54:54.008","access_update":"2020-11-18 15:01:13.518","access_creator":"admin"} -------------------------------------------------------------------------------- 
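Note: the auth_*.txt fixtures above and below use the auth-backup file format, one JSON object per line, with one file per auth type (access, belong, group, target, user). The snippet below is an illustrative sketch only (it is not part of the repository); it mirrors AuthRestoreTest.loadData() and stages one fixture where the auth-restore command expects it, assuming the ./auth-backup-restore/ directory already exists.

// Sketch only -- mirrors AuthRestoreTest.loadData(), not part of the project sources.
import java.util.List;

import com.baidu.hugegraph.structure.constant.HugeType;
import com.baidu.hugegraph.test.functional.AuthTest;
import com.baidu.hugegraph.test.util.FileUtil;

public class StageAuthFixture {

    public static void main(String[] args) {
        // Read the fixture from src/test/resources/auth/ (one JSON object per line)
        List<String> lines = FileUtil.readTestRestoreData(
                FileUtil.configPath(AuthTest.DEFAULT_TEST_URL + "auth_accesses.txt"));
        // Write it into the directory that auth-restore reads from
        FileUtil.writeTestRestoreData(AuthTest.DEFAULT_URL + HugeType.ACCESS.string(), lines);
    }
}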
/src/test/resources/auth/auth_belongs.txt: -------------------------------------------------------------------------------- 1 | {"id":"S-66:test_user1>-82>>S-66:test_group6","user":"-66:test_user1","group":"-66:test_group6","belong_description":"restore test","belong_create":"2020-12-01 09:44:40.117","belong_update":"2020-12-01 09:44:40.117","belong_creator":"admin"} -------------------------------------------------------------------------------- /src/test/resources/auth/auth_groups.txt: -------------------------------------------------------------------------------- 1 | {"id":"-66:test_group6","group_name":"test_group6","group_description":"user is conflict check user restore test test","group_create":"2020-11-27 20:08:21.270","group_update":"2020-11-27 20:08:21.270","group_creator":"admin"} -------------------------------------------------------------------------------- /src/test/resources/auth/auth_targets.txt: -------------------------------------------------------------------------------- 1 | {"id":"-66:test_target1","target_name":"test_target1","target_graph":"hugegraph","target_url":"127.0.0.1:8080","target_resources":[{"type":"ALL","label":"*","properties":null}],"target_create":"2020-11-11 15:32:01.192","target_update":"2020-11-11 15:32:01.192","target_creator":"admin"} -------------------------------------------------------------------------------- /src/test/resources/auth/auth_users.txt: -------------------------------------------------------------------------------- 1 | {"id":"-66:test_user1","user_name":"test_user1","user_password":"$2a$04$vXkz8UYV7Gwagj6zA1ifNuSQfAmzuYb2tXdqDoWKEG.nYVc186JXO","user_create":"2020-11-30 22:26:42.225","user_update":"2020-11-30 22:26:42.225","user_creator":"admin"} -------------------------------------------------------------------------------- /src/test/resources/auth/auth_users_conflict.txt: -------------------------------------------------------------------------------- 1 | {"id":"-63:admin","user_name":"admin","user_phone":"13255447788","user_password":"$2a$04$1tl1IKTncjcmMojLdt2qO.EAJ1w0TGunAZ5IJXWwBgPLvTPk366Ly","user_create":"2020-11-11 11:41:12.254","user_update":"2020-11-11 11:41:12.254","user_creator":"system"} --------------------------------------------------------------------------------
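End-to-end usage sketch (illustrative only, not part of the repository): the functional tests above drive the CLI through HugeGraphCommand.main, and the same argument style can be used directly. This assumes a HugeGraph server with authentication enabled is reachable at the default address (http://127.0.0.1:8080) and that the admin credentials match the AuthTest constants.

// Sketch only -- back up users and groups, then restore them with the ignore strategy.
import com.baidu.hugegraph.cmd.HugeGraphCommand;

public class AuthBackupRestoreExample {

    public static void main(String[] args) {
        // Back up users and groups into ./backup (one file per auth type)
        HugeGraphCommand.main(new String[]{
                "--throw-mode", "true",
                "--user", "admin",
                "--password", "pa",
                "auth-backup",
                "--types", "user,group",
                "--directory", "./backup"
        });

        // Restore the same types; --strategy ignore skips entities that already exist,
        // and --init-password is required whenever users are restored
        HugeGraphCommand.main(new String[]{
                "--throw-mode", "true",
                "--user", "admin",
                "--password", "pa",
                "auth-restore",
                "--types", "user,group",
                "--directory", "./backup",
                "--init-password", "123456",
                "--strategy", "ignore"
        });
    }
}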