├── .gitignore ├── .travis.yml ├── LICENSE.txt ├── README.md ├── dev-tools ├── release.py └── tests.policy ├── plugin └── plugin-descriptor.properties ├── pom.xml └── src ├── main ├── assemblies │ └── plugin.xml ├── java │ └── org │ │ └── elasticsearch │ │ ├── index │ │ └── analysis │ │ │ ├── HanLpAnalysisBinderProcessor.java │ │ │ ├── HanLpAnalyzerProvider.java │ │ │ ├── HanLpNoOpTokenFilterFactory.java │ │ │ └── HanLpTokenizerTokenizerFactory.java │ │ ├── indices │ │ └── analysis │ │ │ └── hanlp │ │ │ ├── HanLpIndicesAnalysis.java │ │ │ ├── HanLpIndicesAnalysisModule.java │ │ │ └── Settings.java │ │ └── plugin │ │ └── analysis │ │ └── hanlp │ │ └── AnalysisHanLpPlugin.java └── resources │ └── es-plugin.properties └── test ├── java └── org │ └── elasticsearch │ └── index │ └── analysis │ ├── AnalysisHanLpRestIT.java │ └── HanLpAnalysisTests.java └── resources ├── log4j.properties └── rest-api-spec └── test └── analysis_hanlp ├── 10_basic.yaml └── 20_search.yaml /.gitignore: -------------------------------------------------------------------------------- 1 | ### JetBrains template 2 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio 3 | 4 | *.iml 5 | 6 | ## Directory-based project format: 7 | .idea/ 8 | # if you remove the above rule, at least ignore the following: 9 | 10 | # User-specific stuff: 11 | # .idea/workspace.xml 12 | # .idea/tasks.xml 13 | # .idea/dictionaries 14 | 15 | # Sensitive or high-churn files: 16 | # .idea/dataSources.ids 17 | # .idea/dataSources.xml 18 | # .idea/sqlDataSources.xml 19 | # .idea/dynamic.xml 20 | # .idea/uiDesigner.xml 21 | 22 | # Gradle: 23 | # .idea/gradle.xml 24 | # .idea/libraries 25 | 26 | # Mongo Explorer plugin: 27 | # .idea/mongoSettings.xml 28 | 29 | ## File-based project format: 30 | *.ipr 31 | *.iws 32 | 33 | ## Plugin-specific files: 34 | 35 | # IntelliJ 36 | /out/ 37 | 38 | # mpeltonen/sbt-idea plugin 39 | .idea_modules/ 40 | 41 | # JIRA plugin 42 | atlassian-ide-plugin.xml 43 | 44 | # Crashlytics plugin (for Android Studio and IntelliJ) 45 | com_crashlytics_export_strings.xml 46 | crashlytics.properties 47 | crashlytics-build.properties 48 | ### Maven template 49 | target/ 50 | pom.xml.tag 51 | pom.xml.releaseBackup 52 | pom.xml.versionsBackup 53 | pom.xml.next 54 | release.properties 55 | dependency-reduced-pom.xml 56 | buildNumber.properties 57 | .mvn/timing.properties 58 | 59 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | sudo: false 2 | 3 | language: java 4 | 5 | jdk: 6 | - oraclejdk8 7 | - oraclejdk7 8 | - openjdk7 9 | 10 | install: mvn install -U -DskipTests=true 11 | 12 | script: mvn verify -U -Dmaven.javadoc.skip=true 13 | 14 | cache: 15 | directories: 16 | - $HOME/.m2 17 | 18 | branches: 19 | except: 20 | - gh-pages 21 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 
14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 
134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright {yyyy} {name of copyright owner} 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 
193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | 203 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | HanLP Chinese Analysis for Elasticsearch 2 | ======================================== 3 | 4 | [![Build Status](https://travis-ci.org/donbeave/elasticsearch-analysis-hanlp.svg?branch=master)](https://travis-ci.org/donbeave/elasticsearch-analysis-hanlp) 5 | 6 | The HanLP Chinese Analysis plugin integrates the Lucene HanLP Chinese analysis module into Elasticsearch. 7 | 8 | Install 9 | 10 | ```bash 11 | ./bin/plugin --url https://bintray.com/artifact/download/donbeave/maven/org/elasticsearch/elasticsearch-analysis-hanlp/0.1.0/elasticsearch-analysis-hanlp-0.1.0.zip --install analysis-hanlp 12 | ``` 13 | -------------------------------------------------------------------------------- /dev-tools/release.py: -------------------------------------------------------------------------------- 1 | # Licensed to Elasticsearch under one or more contributor 2 | # license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright 4 | # ownership. Elasticsearch licenses this file to you under 5 | # the Apache License, Version 2.0 (the "License"); you may 6 | # not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on 13 | # an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, 14 | # either express or implied. See the License for the specific 15 | # language governing permissions and limitations under the License. 16 | 17 | import datetime 18 | import os 19 | import shutil 20 | import sys 21 | import time 22 | import urllib 23 | import urllib.request 24 | import zipfile 25 | 26 | from os.path import dirname, abspath 27 | 28 | """ 29 | This tool builds a release from a given elasticsearch plugin branch. 30 | 31 | It is basically a wrapper on top of launch_release.py which: 32 | 33 | - tries to get a more recent version of launch_release.py in ...
34 | - download it if needed 35 | - launch it passing all arguments to it, like: 36 | 37 | $ python3 dev_tools/release.py --branch master --publish --remote origin 38 | 39 | Important options: 40 | 41 | # Dry run 42 | $ python3 dev_tools/release.py 43 | 44 | # Dry run without tests 45 | python3 dev_tools/release.py --skiptests 46 | 47 | # Release, publish artifacts and announce 48 | $ python3 dev_tools/release.py --publish 49 | 50 | See full documentation in launch_release.py 51 | """ 52 | env = os.environ 53 | 54 | # Change this if the source repository for your scripts is at a different location 55 | SOURCE_REPO = 'elasticsearch/elasticsearch-plugins-script' 56 | # We define that we should download again the script after 1 days 57 | SCRIPT_OBSOLETE_DAYS = 1 58 | # We ignore in master.zip file the following files 59 | IGNORED_FILES = ['.gitignore', 'README.md'] 60 | 61 | 62 | ROOT_DIR = abspath(os.path.join(abspath(dirname(__file__)), '../')) 63 | TARGET_TOOLS_DIR = ROOT_DIR + '/plugin_tools' 64 | DEV_TOOLS_DIR = ROOT_DIR + '/dev-tools' 65 | BUILD_RELEASE_FILENAME = 'release.zip' 66 | BUILD_RELEASE_FILE = TARGET_TOOLS_DIR + '/' + BUILD_RELEASE_FILENAME 67 | SOURCE_URL = 'https://github.com/%s/archive/master.zip' % SOURCE_REPO 68 | 69 | # Download a recent version of the release plugin tool 70 | try: 71 | os.mkdir(TARGET_TOOLS_DIR) 72 | print('directory %s created' % TARGET_TOOLS_DIR) 73 | except FileExistsError: 74 | pass 75 | 76 | 77 | try: 78 | # we check latest update. If we ran an update recently, we 79 | # are not going to check it again 80 | download = True 81 | 82 | try: 83 | last_download_time = datetime.datetime.fromtimestamp(os.path.getmtime(BUILD_RELEASE_FILE)) 84 | if (datetime.datetime.now()-last_download_time).days < SCRIPT_OBSOLETE_DAYS: 85 | download = False 86 | except FileNotFoundError: 87 | pass 88 | 89 | if download: 90 | urllib.request.urlretrieve(SOURCE_URL, BUILD_RELEASE_FILE) 91 | with zipfile.ZipFile(BUILD_RELEASE_FILE) as myzip: 92 | for member in myzip.infolist(): 93 | filename = os.path.basename(member.filename) 94 | # skip directories 95 | if not filename: 96 | continue 97 | if filename in IGNORED_FILES: 98 | continue 99 | 100 | # copy file (taken from zipfile's extract) 101 | source = myzip.open(member.filename) 102 | target = open(os.path.join(TARGET_TOOLS_DIR, filename), "wb") 103 | with source, target: 104 | shutil.copyfileobj(source, target) 105 | # We keep the original date 106 | date_time = time.mktime(member.date_time + (0, 0, -1)) 107 | os.utime(os.path.join(TARGET_TOOLS_DIR, filename), (date_time, date_time)) 108 | print('plugin-tools updated from %s' % SOURCE_URL) 109 | except urllib.error.HTTPError: 110 | pass 111 | 112 | 113 | # Let see if we need to update the release.py script itself 114 | source_time = os.path.getmtime(TARGET_TOOLS_DIR + '/release.py') 115 | repo_time = os.path.getmtime(DEV_TOOLS_DIR + '/release.py') 116 | if source_time > repo_time: 117 | input('release.py needs an update. 
Press a key to update it...') 118 | shutil.copyfile(TARGET_TOOLS_DIR + '/release.py', DEV_TOOLS_DIR + '/release.py') 119 | 120 | # We can launch the build process 121 | try: 122 | PYTHON = 'python' 123 | # make sure python3 is used if python3 is available 124 | # some systems use python 2 as default 125 | os.system('python3 --version > /dev/null 2>&1') 126 | PYTHON = 'python3' 127 | except RuntimeError: 128 | pass 129 | 130 | release_args = '' 131 | for x in range(1, len(sys.argv)): 132 | release_args += ' ' + sys.argv[x] 133 | 134 | os.system('%s %s/build_release.py %s' % (PYTHON, TARGET_TOOLS_DIR, release_args)) 135 | -------------------------------------------------------------------------------- /dev-tools/tests.policy: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to Elasticsearch under one or more contributor 3 | * license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright 5 | * ownership. Elasticsearch licenses this file to you under 6 | * the Apache License, Version 2.0 (the "License"); you may 7 | * not use this file except in compliance with the License. 8 | * You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, 13 | * software distributed under the License is distributed on an 14 | * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 15 | * KIND, either express or implied. See the License for the 16 | * specific language governing permissions and limitations 17 | * under the License. 18 | */ 19 | 20 | // Policy file to prevent tests from writing outside the test sandbox directory 21 | // PLEASE NOTE: You may need to enable other permissions when new tests are added, 22 | // everything not allowed here is forbidden! 
23 | 24 | grant { 25 | // permissions for file access, write access only to sandbox: 26 | permission java.io.FilePermission "<<ALL FILES>>", "read,execute"; 27 | permission java.io.FilePermission "${junit4.childvm.cwd}", "read,execute,write"; 28 | permission java.io.FilePermission "${junit4.childvm.cwd}${/}-", "read,execute,write,delete"; 29 | permission java.io.FilePermission "${junit4.tempDir}${/}*", "read,execute,write,delete"; 30 | permission groovy.security.GroovyCodeSourcePermission "/groovy/script"; 31 | 32 | // Allow connecting to the internet anywhere 33 | permission java.net.SocketPermission "*", "accept,listen,connect,resolve"; 34 | 35 | // Basic permissions needed for Lucene / Elasticsearch to work: 36 | permission java.util.PropertyPermission "*", "read,write"; 37 | permission java.lang.reflect.ReflectPermission "*"; 38 | permission java.lang.RuntimePermission "*"; 39 | 40 | // These two *have* to be spelled out separately 41 | permission java.lang.management.ManagementPermission "control"; 42 | permission java.lang.management.ManagementPermission "monitor"; 43 | 44 | permission java.net.NetPermission "*"; 45 | permission java.util.logging.LoggingPermission "control"; 46 | permission javax.management.MBeanPermission "*", "*"; 47 | permission javax.management.MBeanServerPermission "*"; 48 | permission javax.management.MBeanTrustPermission "*"; 49 | 50 | // Needed for some things in DNS caching in the JVM 51 | permission java.security.SecurityPermission "getProperty.networkaddress.cache.ttl"; 52 | permission java.security.SecurityPermission "getProperty.networkaddress.cache.negative.ttl"; 53 | 54 | }; 55 | -------------------------------------------------------------------------------- /plugin/plugin-descriptor.properties: -------------------------------------------------------------------------------- 1 | description=HanLP Chinese Analysis for Elasticsearch 2 | version=master 3 | site=false 4 | name=analysis-hanlp 5 | jvm=true 6 | elasticsearch.version=2.3.4 7 | java.version=1.8 8 | classname=org.elasticsearch.plugin.analysis.hanlp.AnalysisHanLpPlugin -------------------------------------------------------------------------------- /pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 4.0.0 6 | 7 | org.elasticsearch 8 | elasticsearch-analysis-hanlp 9 | 2.3.4 10 | jar 11 | 12 | Elasticsearch HanLP Chinese Analysis plugin 13 | 14 | The HanLP Chinese Analysis plugin integrates the Lucene HanLP Chinese analysis module into Elasticsearch. 
15 | 16 | https://github.com/donbeave/elasticsearch-analysis-hanlp/ 17 | 18 | 2015 19 | 20 | 21 | AZ 22 | http://www.zhokhov.com 23 | 24 | 25 | 26 | 27 | The Apache Software License, Version 2.0 28 | http://www.apache.org/licenses/LICENSE-2.0.txt 29 | repo 30 | 31 | 32 | 33 | 34 | 35 | donbeave 36 | Alexey Zhokhov 37 | alexey@zhokhov.com 38 | 39 | 40 | boliza 41 | Ranger Tsao 42 | cao.zhifu@gmail.com 43 | 44 | 45 | 46 | 47 | scm:git:git@github.com:donbeave/elasticsearch-analysis-hanlp.git 48 | scm:git:git@github.com:donbeave/elasticsearch-analysis-hanlp.git 49 | http://github.com/donbeave/elasticsearch-analysis-hanlp 50 | 51 | 52 | 53 | 54 | 5.5.0 55 | org.elasticsearch.plugin.analysis.hanlp.AnalysisHanLpPlugin 56 | analysis_hanlp 57 | false 58 | 59 | 60 | 61 | 62 | org.hamcrest 63 | hamcrest-all 64 | 1.3 65 | 66 | 67 | 68 | junit 69 | junit 70 | 4.11 71 | test 72 | 73 | 74 | 75 | com.carrotsearch.randomizedtesting 76 | randomizedtesting-runner 77 | 2.3.2 78 | 79 | 80 | org.apache.lucene 81 | lucene-test-framework 82 | 5.5.0 83 | 84 | 85 | 86 | org.elasticsearch 87 | elasticsearch 88 | 2.3.4 89 | 90 | 91 | 92 | com.hankcs.nlp 93 | lucene-analyzers-hanlp 94 | 5.5.0_1.2.10 95 | 96 | 97 | 98 | log4j 99 | log4j 100 | 1.2.17 101 | 102 | 103 | 104 | org.elasticsearch 105 | elasticsearch 106 | test-jar 107 | 2.3.4 108 | 109 | 110 | 111 | 112 | 113 | bintray 114 | https://api.bintray.com/maven/donbeave/maven/elasticsearch-analysis-hanlp 115 | 116 | 117 | 118 | 119 | 120 | 121 | src/main/resources 122 | true 123 | 124 | 125 | 126 | 127 | maven-release-plugin 128 | 2.5.3 129 | 130 | false 131 | release 132 | true 133 | 134 | 135 | 136 | org.apache.maven.plugins 137 | maven-compiler-plugin 138 | 139 | 140 | com.carrotsearch.randomizedtesting 141 | junit4-maven-plugin 142 | 143 | 144 | 145 | org.apache.maven.plugins 146 | maven-source-plugin 147 | 148 | 149 | maven-assembly-plugin 150 | 2.3 151 | 152 | false 153 | ${project.build.directory}/releases/ 154 | 155 | ${basedir}/src/main/assemblies/plugin.xml 156 | 157 | 158 | 159 | 160 | package 161 | 162 | single 163 | 164 | 165 | 166 | 167 | 168 | 169 | 170 | 171 | 172 | openmg-release 173 | https://github.com/openmg/mvn-repository/raw/master/releases 174 | 175 | 176 | openmg-snapshot 177 | https://github.com/openmg/mvn-repository/raw/master/snapshots 178 | 179 | 180 | 181 | 182 | 183 | release 184 | 185 | 186 | 187 | maven-source-plugin 188 | 189 | 190 | attach-sources 191 | 192 | jar 193 | 194 | 195 | 196 | 197 | 198 | maven-javadoc-plugin 199 | 200 | 201 | attach-javadocs 202 | 203 | jar 204 | 205 | 206 | 207 | 208 | 209 | 210 | 211 | 212 | 213 | 214 | -------------------------------------------------------------------------------- /src/main/assemblies/plugin.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | plugin 4 | 5 | zip 6 | 7 | false 8 | 9 | 10 | ${project.basedir}/plugin 11 | / 12 | 13 | 14 | 15 | 16 | / 17 | true 18 | true 19 | 20 | org.elasticsearch:elasticsearch 21 | 22 | 23 | 24 | / 25 | true 26 | true 27 | 28 | com.hankcs:hanlp 29 | com.hankcs.nlp:lucene-analyzers-hanlp 30 | 31 | 32 | 33 | -------------------------------------------------------------------------------- /src/main/java/org/elasticsearch/index/analysis/HanLpAnalysisBinderProcessor.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2015 the original author or authors. 
3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package org.elasticsearch.index.analysis; 17 | 18 | /** 19 | * @author Alexey Zhokhov 20 | */ 21 | public class HanLpAnalysisBinderProcessor extends AnalysisModule.AnalysisBinderProcessor { 22 | 23 | @Override 24 | public void processAnalyzers(AnalyzersBindings analyzersBindings) { 25 | analyzersBindings.processAnalyzer("hanlp", HanLpAnalyzerProvider.class); 26 | } 27 | 28 | @Override 29 | public void processTokenizers(TokenizersBindings tokenizersBindings) { 30 | tokenizersBindings.processTokenizer("hanlp_tokenizer", HanLpTokenizerTokenizerFactory.class); 31 | // This is an alias to "hanlp_tokenizer"; it's here for backwards compat 32 | tokenizersBindings.processTokenizer("hanlp_sentence", HanLpTokenizerTokenizerFactory.class); 33 | } 34 | 35 | @Override 36 | public void processTokenFilters(TokenFiltersBindings tokenFiltersBindings) { 37 | // This is a noop token filter; it's here for backwards compat before we had "hanlp_tokenizer" 38 | tokenFiltersBindings.processTokenFilter("hanlp_word", HanLpNoOpTokenFilterFactory.class); 39 | } 40 | 41 | } 42 | -------------------------------------------------------------------------------- /src/main/java/org/elasticsearch/index/analysis/HanLpAnalyzerProvider.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2015 the original author or authors. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | package org.elasticsearch.index.analysis; 17 | 18 | import static org.elasticsearch.indices.analysis.hanlp.Settings.*; 19 | 20 | import com.hankcs.lucene.HanLPAnalyzer; 21 | 22 | import org.elasticsearch.common.inject.Inject; 23 | import org.elasticsearch.common.inject.assistedinject.Assisted; 24 | import org.elasticsearch.common.settings.Settings; 25 | import org.elasticsearch.index.Index; 26 | import org.elasticsearch.index.settings.IndexSettingsService; 27 | 28 | /** 29 | * @author Alexey Zhokhov 30 | */ 31 | public class HanLpAnalyzerProvider extends AbstractIndexAnalyzerProvider<HanLPAnalyzer> { 32 | 33 | private final HanLPAnalyzer analyzer; 34 | 35 | @Inject 36 | public HanLpAnalyzerProvider(Index index, IndexSettingsService indexSettingsService, @Assisted String name, @Assisted Settings settings) { 37 | super(index, indexSettingsService.getSettings(), name, settings); 38 | 39 | boolean indexMode = settings.getAsBoolean(INDEX_MODE, false); 40 | boolean nameRecognize = settings.getAsBoolean(NAME_RECOGNIZE, true); 41 | boolean translatedNameRecognize = settings.getAsBoolean(TRANSLATED_NAME_RECOGNIZE, true); 42 | boolean japaneseNameRecognize = settings.getAsBoolean(JAPANESE_NAME_RECOGNIZE, false); 43 | boolean placeRecognize = settings.getAsBoolean(PLACE_RECOGNIZE, false); 44 | boolean organizationRecognize = settings.getAsBoolean(ORGANIZATION_RECOGNIZE, false); 45 | boolean useCustomDictionary = settings.getAsBoolean(USE_CUSTOM_DICTIONARY, true); // enableCustomDictionary 46 | boolean speechTagging = settings.getAsBoolean(SPEECH_TAGGING, false); // PorterStemming 47 | boolean offset = settings.getAsBoolean(OFFSET, false); 48 | boolean numberQuantifierRecognize = settings.getAsBoolean(NUMBER_QUANTIFIER_RECOGNIZE, false); 49 | int threads = settings.getAsInt(THREADS, 1); // if more than 1, it means use multi-threading 50 | 51 | analyzer = new HanLPAnalyzer(indexMode, nameRecognize, translatedNameRecognize, japaneseNameRecognize, 52 | placeRecognize, organizationRecognize, useCustomDictionary, speechTagging, offset, 53 | numberQuantifierRecognize, threads, null); 54 | } 55 | 56 | @Override 57 | public HanLPAnalyzer get() { 58 | return this.analyzer; 59 | } 60 | 61 | } 62 | -------------------------------------------------------------------------------- /src/main/java/org/elasticsearch/index/analysis/HanLpNoOpTokenFilterFactory.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2015 the original author or authors. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | package org.elasticsearch.index.analysis; 17 | 18 | import org.apache.lucene.analysis.TokenStream; 19 | import org.elasticsearch.common.inject.Inject; 20 | import org.elasticsearch.common.inject.assistedinject.Assisted; 21 | import org.elasticsearch.common.settings.Settings; 22 | import org.elasticsearch.index.Index; 23 | import org.elasticsearch.index.settings.IndexSettingsService; 24 | 25 | /** 26 | * @author Alexey Zhokhov 27 | */ 28 | public class HanLpNoOpTokenFilterFactory extends AbstractTokenFilterFactory { 29 | 30 | @Inject 31 | public HanLpNoOpTokenFilterFactory(Index index, IndexSettingsService indexSettingsService, @Assisted String name, @Assisted Settings settings) { 32 | super(index, indexSettingsService.getSettings(), name, settings); 33 | } 34 | 35 | @Override 36 | public TokenStream create(TokenStream tokenStream) { 37 | return tokenStream; 38 | } 39 | 40 | } 41 | -------------------------------------------------------------------------------- /src/main/java/org/elasticsearch/index/analysis/HanLpTokenizerTokenizerFactory.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2015 the original author or authors. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | package org.elasticsearch.index.analysis; 17 | 18 | import static org.elasticsearch.indices.analysis.hanlp.Settings.*; 19 | 20 | import com.hankcs.hanlp.HanLP; 21 | import com.hankcs.lucene.HanLPTokenizer; 22 | 23 | import org.apache.lucene.analysis.Tokenizer; 24 | import org.elasticsearch.common.inject.Inject; 25 | import org.elasticsearch.common.inject.assistedinject.Assisted; 26 | import org.elasticsearch.common.settings.Settings; 27 | import org.elasticsearch.index.Index; 28 | import org.elasticsearch.index.settings.IndexSettingsService; 29 | 30 | /** 31 | * @author Alexey Zhokhov 32 | */ 33 | public class HanLpTokenizerTokenizerFactory extends AbstractTokenizerFactory { 34 | 35 | private boolean indexMode = false; 36 | private boolean nameRecognize = true; 37 | private boolean translatedNameRecognize = true; 38 | private boolean japaneseNameRecognize = false; 39 | private boolean placeRecognize = false; 40 | private boolean organizationRecognize = false; 41 | private boolean useCustomDictionary = true; 42 | private boolean speechTagging = false; 43 | private boolean offset = false; 44 | private boolean numberQuantifierRecognize = false; 45 | private int threads = 1; 46 | 47 | @Inject 48 | public HanLpTokenizerTokenizerFactory(Index index, IndexSettingsService indexSettingsService, @Assisted String name, @Assisted Settings settings) { 49 | super(index, indexSettingsService.getSettings(), name, settings); 50 | 51 | indexMode = settings.getAsBoolean(INDEX_MODE, indexMode); 52 | nameRecognize = settings.getAsBoolean(NAME_RECOGNIZE, nameRecognize); 53 | translatedNameRecognize = settings.getAsBoolean(TRANSLATED_NAME_RECOGNIZE, translatedNameRecognize); 54 | japaneseNameRecognize = settings.getAsBoolean(JAPANESE_NAME_RECOGNIZE, japaneseNameRecognize); 55 | placeRecognize = settings.getAsBoolean(PLACE_RECOGNIZE, placeRecognize); 56 | organizationRecognize = settings.getAsBoolean(ORGANIZATION_RECOGNIZE, organizationRecognize); 57 | useCustomDictionary = settings.getAsBoolean(USE_CUSTOM_DICTIONARY, useCustomDictionary); // enableCustomDictionary 58 | speechTagging = settings.getAsBoolean(SPEECH_TAGGING, speechTagging); // PorterStemming 59 | offset = settings.getAsBoolean(OFFSET, offset); 60 | numberQuantifierRecognize = settings.getAsBoolean(NUMBER_QUANTIFIER_RECOGNIZE, numberQuantifierRecognize); 61 | threads = settings.getAsInt(THREADS, threads); // if more than 1, it means use multi-threading 62 | } 63 | 64 | @Override 65 | public Tokenizer create() { 66 | return new HanLPTokenizer(HanLP.newSegment() 67 | .enableIndexMode(indexMode) 68 | .enableNameRecognize(nameRecognize) 69 | .enableTranslatedNameRecognize(translatedNameRecognize) 70 | .enableJapaneseNameRecognize(japaneseNameRecognize) 71 | .enablePlaceRecognize(placeRecognize) 72 | .enableOrganizationRecognize(organizationRecognize) 73 | .enableCustomDictionary(useCustomDictionary) 74 | .enablePartOfSpeechTagging(speechTagging) 75 | .enableOffset(offset) 76 | .enableNumberQuantifierRecognize(numberQuantifierRecognize) 77 | .enableMultithreading(threads), null, speechTagging); 78 | } 79 | } 80 | -------------------------------------------------------------------------------- /src/main/java/org/elasticsearch/indices/analysis/hanlp/HanLpIndicesAnalysis.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2015 the original author or authors. 
3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package org.elasticsearch.indices.analysis.hanlp; 17 | 18 | import com.hankcs.hanlp.HanLP; 19 | import com.hankcs.lucene.HanLPAnalyzer; 20 | import com.hankcs.lucene.HanLPTokenizer; 21 | 22 | import org.apache.lucene.analysis.TokenStream; 23 | import org.apache.lucene.analysis.Tokenizer; 24 | import org.elasticsearch.common.component.AbstractComponent; 25 | import org.elasticsearch.common.inject.Inject; 26 | import org.elasticsearch.common.settings.Settings; 27 | import org.elasticsearch.index.analysis.*; 28 | import org.elasticsearch.indices.analysis.IndicesAnalysisService; 29 | 30 | import java.io.Reader; 31 | 32 | import static org.elasticsearch.indices.analysis.hanlp.Settings.*; 33 | 34 | /** 35 | * Registers indices level analysis components so, if not explicitly configured, will be shared 36 | * among all indices. 37 | * 38 | * @author Alexey Zhokhov 39 | */ 40 | public class HanLpIndicesAnalysis extends AbstractComponent { 41 | 42 | private boolean analyzerIndexMode = false; 43 | private boolean analyzerNameRecognize = true; 44 | private boolean analyzerTranslatedNameRecognize = true; 45 | private boolean analyzerJapaneseNameRecognize = false; 46 | private boolean analyzerPlaceRecognize = false; 47 | private boolean analyzerOrganizationRecognize = false; 48 | private boolean analyzerUseCustomDictionary = true; // enableCustomDictionary 49 | private boolean analyzerSpeechTagging = false; // PorterStemming 50 | private boolean analyzerOffset = false; 51 | private boolean analyzerNumberQuantifierRecognize = false; 52 | private int analyzerThreads = 1; // if more than 1, it means use multi-threading 53 | 54 | private boolean tokenizerIndexMode = false; 55 | private boolean tokenizerNameRecognize = true; 56 | private boolean tokenizerTranslatedNameRecognize = true; 57 | private boolean tokenizerJapaneseNameRecognize = false; 58 | private boolean tokenizerPlaceRecognize = false; 59 | private boolean tokenizerOrganizationRecognize = false; 60 | private boolean tokenizerUseCustomDictionary = true; // enableCustomDictionary 61 | private boolean tokenizerSpeechTagging = false; // PorterStemming 62 | private boolean tokenizerOffset = false; 63 | private boolean tokenizerNumberQuantifierRecognize = false; 64 | private int tokenizerThreads = 1; // if more than 1, it means use multi-threading 65 | 66 | @Inject 67 | public HanLpIndicesAnalysis(Settings settings, IndicesAnalysisService indicesAnalysisService) { 68 | super(settings); 69 | 70 | initSettings(settings); 71 | 72 | // Register hanlp analyzer 73 | indicesAnalysisService.analyzerProviderFactories().put("hanlp", 74 | new PreBuiltAnalyzerProviderFactory("hanlp", AnalyzerScope.INDICES, 75 | new HanLPAnalyzer(analyzerIndexMode, analyzerNameRecognize, analyzerTranslatedNameRecognize, 76 | analyzerJapaneseNameRecognize, analyzerPlaceRecognize, analyzerOrganizationRecognize, 77 | analyzerUseCustomDictionary, analyzerSpeechTagging, analyzerOffset, 78 
| analyzerNumberQuantifierRecognize, analyzerThreads, null))); 79 | 80 | // Register hanlp_tokenizer tokenizer 81 | indicesAnalysisService.tokenizerFactories().put("hanlp_tokenizer", new PreBuiltTokenizerFactoryFactory(new TokenizerFactory() { 82 | @Override 83 | public String name() { 84 | return "hanlp_tokenizer"; 85 | } 86 | 87 | @Override 88 | public Tokenizer create() { 89 | return new HanLPTokenizer(HanLP.newSegment() 90 | .enableIndexMode(tokenizerIndexMode) 91 | .enableNameRecognize(tokenizerNameRecognize) 92 | .enableTranslatedNameRecognize(tokenizerTranslatedNameRecognize) 93 | .enableJapaneseNameRecognize(tokenizerJapaneseNameRecognize) 94 | .enablePlaceRecognize(tokenizerPlaceRecognize) 95 | .enableOrganizationRecognize(tokenizerOrganizationRecognize) 96 | .enableCustomDictionary(tokenizerUseCustomDictionary) 97 | .enablePartOfSpeechTagging(tokenizerSpeechTagging) 98 | .enableOffset(tokenizerOffset) 99 | .enableNumberQuantifierRecognize(tokenizerNumberQuantifierRecognize) 100 | .enableMultithreading(tokenizerThreads), null, tokenizerSpeechTagging); 101 | } 102 | 103 | })); 104 | 105 | // Register hanlp_word token filter -- noop 106 | indicesAnalysisService.tokenFilterFactories().put("hanlp_word", new PreBuiltTokenFilterFactoryFactory(new TokenFilterFactory() { 107 | @Override 108 | public String name() { 109 | return "hanlp_word"; 110 | } 111 | 112 | @Override 113 | public TokenStream create(TokenStream tokenStream) { 114 | return tokenStream; 115 | } 116 | })); 117 | } 118 | 119 | private void initSettings(Settings settings) { 120 | // get tokenizer settings 121 | analyzerIndexMode = settings.getAsBoolean(ANALYZER_CONFIG_PREFIX + INDEX_MODE, analyzerIndexMode); 122 | analyzerNameRecognize = settings.getAsBoolean(ANALYZER_CONFIG_PREFIX + NAME_RECOGNIZE, analyzerNameRecognize); 123 | analyzerTranslatedNameRecognize = settings.getAsBoolean(ANALYZER_CONFIG_PREFIX + TRANSLATED_NAME_RECOGNIZE, analyzerTranslatedNameRecognize); 124 | analyzerJapaneseNameRecognize = settings.getAsBoolean(ANALYZER_CONFIG_PREFIX + JAPANESE_NAME_RECOGNIZE, analyzerJapaneseNameRecognize); 125 | analyzerPlaceRecognize = settings.getAsBoolean(ANALYZER_CONFIG_PREFIX + PLACE_RECOGNIZE, analyzerPlaceRecognize); 126 | analyzerOrganizationRecognize = settings.getAsBoolean(ANALYZER_CONFIG_PREFIX + ORGANIZATION_RECOGNIZE, analyzerOrganizationRecognize); 127 | analyzerUseCustomDictionary = settings.getAsBoolean(ANALYZER_CONFIG_PREFIX + USE_CUSTOM_DICTIONARY, analyzerUseCustomDictionary); 128 | analyzerSpeechTagging = settings.getAsBoolean(ANALYZER_CONFIG_PREFIX + SPEECH_TAGGING, analyzerSpeechTagging); 129 | analyzerOffset = settings.getAsBoolean(ANALYZER_CONFIG_PREFIX + OFFSET, analyzerOffset); 130 | analyzerNumberQuantifierRecognize = settings.getAsBoolean(ANALYZER_CONFIG_PREFIX + NUMBER_QUANTIFIER_RECOGNIZE, analyzerNumberQuantifierRecognize); 131 | analyzerThreads = settings.getAsInt(ANALYZER_CONFIG_PREFIX + THREADS, analyzerThreads); 132 | 133 | // get tokenizer settings 134 | tokenizerIndexMode = settings.getAsBoolean(TOKENIZER_CONFIG_PREFIX + INDEX_MODE, tokenizerIndexMode); 135 | tokenizerNameRecognize = settings.getAsBoolean(TOKENIZER_CONFIG_PREFIX + NAME_RECOGNIZE, tokenizerNameRecognize); 136 | tokenizerTranslatedNameRecognize = settings.getAsBoolean(TOKENIZER_CONFIG_PREFIX + TRANSLATED_NAME_RECOGNIZE, tokenizerTranslatedNameRecognize); 137 | tokenizerJapaneseNameRecognize = settings.getAsBoolean(TOKENIZER_CONFIG_PREFIX + JAPANESE_NAME_RECOGNIZE, tokenizerJapaneseNameRecognize); 138 | 
tokenizerPlaceRecognize = settings.getAsBoolean(TOKENIZER_CONFIG_PREFIX + PLACE_RECOGNIZE, tokenizerPlaceRecognize); 139 | tokenizerOrganizationRecognize = settings.getAsBoolean(TOKENIZER_CONFIG_PREFIX + ORGANIZATION_RECOGNIZE, tokenizerOrganizationRecognize); 140 | tokenizerUseCustomDictionary = settings.getAsBoolean(TOKENIZER_CONFIG_PREFIX + USE_CUSTOM_DICTIONARY, tokenizerUseCustomDictionary); 141 | tokenizerSpeechTagging = settings.getAsBoolean(TOKENIZER_CONFIG_PREFIX + SPEECH_TAGGING, tokenizerSpeechTagging); 142 | tokenizerOffset = settings.getAsBoolean(TOKENIZER_CONFIG_PREFIX + OFFSET, tokenizerOffset); 143 | tokenizerNumberQuantifierRecognize = settings.getAsBoolean(TOKENIZER_CONFIG_PREFIX + NUMBER_QUANTIFIER_RECOGNIZE, tokenizerNumberQuantifierRecognize); 144 | tokenizerThreads = settings.getAsInt(TOKENIZER_CONFIG_PREFIX + THREADS, tokenizerThreads); 145 | 146 | // fix threads 147 | if (analyzerThreads < 1) { 148 | analyzerThreads = 1; 149 | } 150 | if (tokenizerThreads < 1) { 151 | tokenizerThreads = 1; 152 | } 153 | 154 | // get global HanLP settings 155 | HanLP.Config.CoreDictionaryPath = 156 | settings.get(CORE_DICTIONARY_PATH, HanLP.Config.CoreDictionaryPath); 157 | HanLP.Config.CoreDictionaryTransformMatrixDictionaryPath = 158 | settings.get(CORE_DICTIONARY_TRANSFORM_MATRIX_DICTIONARY_PATH, HanLP.Config.CoreDictionaryTransformMatrixDictionaryPath); 159 | HanLP.Config.BiGramDictionaryPath = 160 | settings.get(BI_GRAM_DICTIONARY_PATH, HanLP.Config.BiGramDictionaryPath); 161 | HanLP.Config.CoreStopWordDictionaryPath = 162 | settings.get(CORE_STOP_WORD_DICTIONARY_PATH, HanLP.Config.CoreStopWordDictionaryPath); 163 | HanLP.Config.CoreSynonymDictionaryDictionaryPath = 164 | settings.get(CORE_SYNONYM_DICTIONARY_DICTIONARY_PATH, HanLP.Config.CoreSynonymDictionaryDictionaryPath); 165 | HanLP.Config.PersonDictionaryPath = 166 | settings.get(PERSON_DICTIONARY_PATH, HanLP.Config.PersonDictionaryPath); 167 | HanLP.Config.PersonDictionaryTrPath = 168 | settings.get(PERSON_DICTIONARY_TR_PATH, HanLP.Config.PersonDictionaryTrPath); 169 | HanLP.Config.CustomDictionaryPath = 170 | settings.getAsArray(CUSTOM_DICTIONARY_PATH, HanLP.Config.CustomDictionaryPath); 171 | HanLP.Config.TraditionalChineseDictionaryPath = 172 | settings.get(TRADITIONAL_CHINESE_DICTIONARY_PATH, HanLP.Config.TraditionalChineseDictionaryPath); 173 | HanLP.Config.SYTDictionaryPath = 174 | settings.get(SYT_DICTIONARY_PATH, HanLP.Config.SYTDictionaryPath); 175 | HanLP.Config.PinyinDictionaryPath = 176 | settings.get(PINYIN_DICTIONARY_PATH, HanLP.Config.PinyinDictionaryPath); 177 | HanLP.Config.TranslatedPersonDictionaryPath = 178 | settings.get(TRANSLATED_PERSON_DICTIONARY_PATH, HanLP.Config.TranslatedPersonDictionaryPath); 179 | HanLP.Config.JapanesePersonDictionaryPath = 180 | settings.get(JAPANESE_PERSON_DICTIONARY_PATH, HanLP.Config.JapanesePersonDictionaryPath); 181 | HanLP.Config.PlaceDictionaryPath = 182 | settings.get(PLACE_DICTIONARY_PATH, HanLP.Config.PlaceDictionaryPath); 183 | HanLP.Config.PlaceDictionaryTrPath = 184 | settings.get(PLACE_DICTIONARY_TR_PATH, HanLP.Config.PlaceDictionaryTrPath); 185 | HanLP.Config.OrganizationDictionaryPath = 186 | settings.get(ORGANIZATION_DICTIONARY_PATH, HanLP.Config.OrganizationDictionaryPath); 187 | HanLP.Config.OrganizationDictionaryTrPath = 188 | settings.get(ORGANIZATION_DICTIONARY_TR_PATH, HanLP.Config.OrganizationDictionaryTrPath); 189 | HanLP.Config.CharTypePath = 190 | settings.get(CHAR_TYPE_PATH, HanLP.Config.CharTypePath); 191 | HanLP.Config.CharTablePath = 192 | 
settings.get(CHAR_TABLE_PATH, HanLP.Config.CharTablePath); 193 | HanLP.Config.WordNatureModelPath = 194 | settings.get(WORD_NATURE_MODEL_PATH, HanLP.Config.WordNatureModelPath); 195 | HanLP.Config.MaxEntModelPath = 196 | settings.get(MAX_ENT_MODEL_PATH, HanLP.Config.MaxEntModelPath); 197 | HanLP.Config.CRFSegmentModelPath = 198 | settings.get(CRF_SEGMENT_MODEL_PATH, HanLP.Config.CRFSegmentModelPath); 199 | HanLP.Config.CRFDependencyModelPath = 200 | settings.get(CRF_DEPENDENCY_MODEL_PATH, HanLP.Config.CRFDependencyModelPath); 201 | HanLP.Config.HMMSegmentModelPath = 202 | settings.get(HMM_SEGMENT_MODEL_PATH, HanLP.Config.HMMSegmentModelPath); 203 | HanLP.Config.ShowTermNature = settings.getAsBoolean(SHOW_TERM_NATURE, true); 204 | HanLP.Config.Normalization = settings.getAsBoolean(NORMALIZATION, false); 205 | } 206 | 207 | } 208 | -------------------------------------------------------------------------------- /src/main/java/org/elasticsearch/indices/analysis/hanlp/HanLpIndicesAnalysisModule.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2015 the original author or authors. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package org.elasticsearch.indices.analysis.hanlp; 17 | 18 | import org.elasticsearch.common.inject.AbstractModule; 19 | 20 | /** 21 | * @author Alexey Zhokhov 22 | */ 23 | public class HanLpIndicesAnalysisModule extends AbstractModule { 24 | 25 | @Override 26 | protected void configure() { 27 | bind(HanLpIndicesAnalysis.class).asEagerSingleton(); 28 | } 29 | 30 | } 31 | -------------------------------------------------------------------------------- /src/main/java/org/elasticsearch/indices/analysis/hanlp/Settings.java: -------------------------------------------------------------------------------- 1 | package org.elasticsearch.indices.analysis.hanlp; 2 | 3 | /** 4 | * @author Alexey Zhokhov 5 | */ 6 | public final class Settings { 7 | 8 | public static final String CONFIG_PREFIX = "analysis.hanlp."; 9 | public static final String ANALYZER_CONFIG_PREFIX = "analysis.hanlp.analyzer."; 10 | public static final String TOKENIZER_CONFIG_PREFIX = "analysis.hanlp.tokenizer."; 11 | 12 | public static final String CORE_DICTIONARY_PATH = CONFIG_PREFIX + "CoreDictionaryPath"; 13 | public static final String CORE_DICTIONARY_TRANSFORM_MATRIX_DICTIONARY_PATH = CONFIG_PREFIX + "CoreDictionaryTransformMatrixDictionaryPath"; 14 | public static final String BI_GRAM_DICTIONARY_PATH = CONFIG_PREFIX + "BiGramDictionaryPath"; 15 | public static final String CORE_STOP_WORD_DICTIONARY_PATH = CONFIG_PREFIX + "CoreStopWordDictionaryPath"; 16 | public static final String CORE_SYNONYM_DICTIONARY_DICTIONARY_PATH = CONFIG_PREFIX + "CoreSynonymDictionaryDictionaryPath"; 17 | public static final String PERSON_DICTIONARY_PATH = CONFIG_PREFIX + "PersonDictionaryPath"; 18 | public static final String PERSON_DICTIONARY_TR_PATH = CONFIG_PREFIX + "PersonDictionaryTrPath"; 19 | public static final 
String CUSTOM_DICTIONARY_PATH = CONFIG_PREFIX + "CustomDictionaryPath"; 20 | public static final String TRADITIONAL_CHINESE_DICTIONARY_PATH = CONFIG_PREFIX + "TraditionalChineseDictionaryPath"; 21 | public static final String SYT_DICTIONARY_PATH = CONFIG_PREFIX + "SYTDictionaryPath"; 22 | public static final String PINYIN_DICTIONARY_PATH = CONFIG_PREFIX + "PinyinDictionaryPath"; 23 | public static final String TRANSLATED_PERSON_DICTIONARY_PATH = CONFIG_PREFIX + "TranslatedPersonDictionaryPath"; 24 | public static final String JAPANESE_PERSON_DICTIONARY_PATH = CONFIG_PREFIX + "JapanesePersonDictionaryPath"; 25 | public static final String PLACE_DICTIONARY_PATH = CONFIG_PREFIX + "PlaceDictionaryPath"; 26 | public static final String PLACE_DICTIONARY_TR_PATH = CONFIG_PREFIX + "PlaceDictionaryTrPath"; 27 | public static final String ORGANIZATION_DICTIONARY_PATH = CONFIG_PREFIX + "OrganizationDictionaryPath"; 28 | public static final String ORGANIZATION_DICTIONARY_TR_PATH = CONFIG_PREFIX + "OrganizationDictionaryTrPath"; 29 | public static final String CHAR_TYPE_PATH = CONFIG_PREFIX + "CharTypePath"; 30 | public static final String CHAR_TABLE_PATH = CONFIG_PREFIX + "CharTablePath"; 31 | public static final String WORD_NATURE_MODEL_PATH = CONFIG_PREFIX + "WordNatureModelPath"; 32 | public static final String MAX_ENT_MODEL_PATH = CONFIG_PREFIX + "MaxEntModelPath"; 33 | public static final String CRF_SEGMENT_MODEL_PATH = CONFIG_PREFIX + "CRFSegmentModelPath"; 34 | public static final String CRF_DEPENDENCY_MODEL_PATH = CONFIG_PREFIX + "CRFDependencyModelPath"; 35 | public static final String HMM_SEGMENT_MODEL_PATH = CONFIG_PREFIX + "HMMSegmentModelPath"; 36 | public static final String SHOW_TERM_NATURE = CONFIG_PREFIX + "ShowTermNature"; 37 | public static final String NORMALIZATION = CONFIG_PREFIX + "Normalization"; 38 | 39 | public static final String INDEX_MODE = "indexMode"; 40 | public static final String NAME_RECOGNIZE = "nameRecognize"; 41 | public static final String TRANSLATED_NAME_RECOGNIZE = "translatedNameRecognize"; 42 | public static final String JAPANESE_NAME_RECOGNIZE = "japaneseNameRecognize"; 43 | public static final String PLACE_RECOGNIZE = "placeRecognize"; 44 | public static final String ORGANIZATION_RECOGNIZE = "organizationRecognize"; 45 | public static final String USE_CUSTOM_DICTIONARY = "useCustomDictionary"; 46 | public static final String SPEECH_TAGGING = "speechTagging"; 47 | public static final String OFFSET = "offset"; 48 | public static final String NUMBER_QUANTIFIER_RECOGNIZE = "numberQuantifierRecognize"; 49 | public static final String THREADS = "threads"; 50 | 51 | } 52 | -------------------------------------------------------------------------------- /src/main/java/org/elasticsearch/plugin/analysis/hanlp/AnalysisHanLpPlugin.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2015 the original author or authors. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | package org.elasticsearch.plugin.analysis.hanlp; 17 | 18 | import org.elasticsearch.common.inject.Module; 19 | import org.elasticsearch.index.analysis.AnalysisModule; 20 | import org.elasticsearch.index.analysis.HanLpAnalysisBinderProcessor; 21 | import org.elasticsearch.indices.analysis.hanlp.HanLpIndicesAnalysisModule; 22 | import org.elasticsearch.plugins.Plugin; 23 | 24 | import java.util.Collection; 25 | import java.util.Collections; 26 | 27 | /** 28 | * @author Alexey Zhokhov 29 | */ 30 | public class AnalysisHanLpPlugin extends Plugin { 31 | 32 | @Override 33 | public String name() { 34 | return "analysis-hanlp"; 35 | } 36 | 37 | @Override 38 | public String description() { 39 | return "HanLP Chinese analysis support"; 40 | } 41 | 42 | @Override 43 | public Collection<Module> nodeModules() { 44 | return Collections.singletonList(new HanLpIndicesAnalysisModule()); 45 | } 46 | 47 | /** 48 | * Automatically called with the analysis module. 49 | */ 50 | public void onModule(AnalysisModule module) { 51 | module.addProcessor(new HanLpAnalysisBinderProcessor()); 52 | } 53 | 54 | } 55 | -------------------------------------------------------------------------------- /src/main/resources/es-plugin.properties: -------------------------------------------------------------------------------- 1 | plugin=org.elasticsearch.plugin.analysis.hanlp.AnalysisHanLpPlugin 2 | version=${project.version} 3 | lucene=${lucene.version} -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/index/analysis/AnalysisHanLpRestIT.java: -------------------------------------------------------------------------------- 1 | package org.elasticsearch.index.analysis; 2 | 3 | import com.carrotsearch.randomizedtesting.annotations.Name; 4 | import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; 5 | 6 | import org.elasticsearch.plugin.analysis.hanlp.AnalysisHanLpPlugin; 7 | import org.elasticsearch.plugins.Plugin; 8 | import org.elasticsearch.test.rest.ESRestTestCase; 9 | import org.elasticsearch.test.rest.RestTestCandidate; 10 | import org.elasticsearch.test.rest.parser.RestTestParseException; 11 | 12 | import java.io.IOException; 13 | import java.util.Collection; 14 | 15 | /** 16 | * @author Ranger Tsao(cao.zhifu@gmail.com) 17 | */ 18 | public class AnalysisHanLpRestIT extends ESRestTestCase { 19 | 20 | @Override 21 | protected Collection<Class<? extends Plugin>> nodePlugins() { 22 | return pluginList(AnalysisHanLpPlugin.class); 23 | } 24 | 25 | public AnalysisHanLpRestIT(@Name("yaml") RestTestCandidate testCandidate) { 26 | super(testCandidate); 27 | } 28 | 29 | @ParametersFactory 30 | public static Iterable<Object[]> parameters() throws IOException, RestTestParseException { 31 | return ESRestTestCase.createParameters(0, 1); 32 | } 33 | } 34 | 35 | -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/index/analysis/HanLpAnalysisTests.java: -------------------------------------------------------------------------------- 1 | package org.elasticsearch.index.analysis; 2 | 3 | import org.elasticsearch.Version; 4 | import org.elasticsearch.cluster.metadata.IndexMetaData; 5 | import org.elasticsearch.common.inject.Injector; 6 | import org.elasticsearch.common.inject.ModulesBuilder; 7 | import org.elasticsearch.common.settings.Settings; 8 | import org.elasticsearch.common.settings.SettingsModule; 9 | import org.elasticsearch.env.Environment; 10 | import org.elasticsearch.env.EnvironmentModule; 11 | import 
org.elasticsearch.index.Index; 12 | import org.elasticsearch.index.IndexNameModule; 13 | import org.elasticsearch.index.settings.IndexSettingsModule; 14 | import org.elasticsearch.indices.analysis.IndicesAnalysisService; 15 | import org.elasticsearch.test.ESTestCase; 16 | import org.hamcrest.MatcherAssert; 17 | import org.junit.Test; 18 | 19 | import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS; 20 | import static org.elasticsearch.common.settings.Settings.settingsBuilder; 21 | import static org.hamcrest.Matchers.instanceOf; 22 | 23 | /** 24 | * @author Ranger Tsao(cao.zhifu@gmail.com) 25 | */ 26 | public class HanLpAnalysisTests extends ESTestCase { 27 | 28 | @Test 29 | public void testDefaultsHanLpAnalysis() { 30 | Index index = new Index("test"); 31 | Settings settings = settingsBuilder() 32 | .put("path.home", createTempDir()) 33 | .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) 34 | .build(); 35 | Injector parentInjector = new ModulesBuilder().add(new SettingsModule(EMPTY_SETTINGS), new EnvironmentModule(new Environment(settings))).createInjector(); 36 | Injector injector = new ModulesBuilder().add( 37 | new IndexSettingsModule(index, settings), 38 | new IndexNameModule(index), 39 | new AnalysisModule(EMPTY_SETTINGS, parentInjector.getInstance(IndicesAnalysisService.class)).addProcessor(new HanLpAnalysisBinderProcessor())) 40 | .createChildInjector(parentInjector); 41 | 42 | AnalysisService analysisService = injector.getInstance(AnalysisService.class); 43 | 44 | TokenizerFactory tokenizerFactory = analysisService.tokenizer("hanlp_tokenizer"); 45 | MatcherAssert.assertThat(tokenizerFactory, instanceOf(HanLpTokenizerTokenizerFactory.class)); 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /src/test/resources/log4j.properties: -------------------------------------------------------------------------------- 1 | log4j.rootLogger=INFO, out 2 | 3 | log4j.appender.out=org.apache.log4j.ConsoleAppender 4 | log4j.appender.out.layout=org.apache.log4j.PatternLayout 5 | log4j.appender.out.layout.conversionPattern=[%d{ISO8601}][%-5p][%-25c] %m%n 6 | -------------------------------------------------------------------------------- /src/test/resources/rest-api-spec/test/analysis_hanlp/10_basic.yaml: -------------------------------------------------------------------------------- 1 | "Tokenizer": 2 | - do: 3 | indices.analyze: 4 | text: 我购买了道具和服装。 5 | tokenizer: hanlp_tokenizer 6 | - length: { tokens: 7 } 7 | - match: { tokens.0.token: 我 } 8 | - match: { tokens.1.token: 购买 } 9 | - match: { tokens.2.token: 了 } 10 | - match: { tokens.3.token: 道具 } 11 | - match: { tokens.4.token: 和 } 12 | - match: { tokens.5.token: 服装 } 13 | - match: { tokens.6.token: "," } 14 | --- 15 | "Analyzer": 16 | - do: 17 | indices.analyze: 18 | text: 我购买了道具和服装。 19 | analyzer: hanlp 20 | - length: { tokens: 6 } 21 | - match: { tokens.0.token: 我 } 22 | - match: { tokens.1.token: 购买 } 23 | - match: { tokens.2.token: 了 } 24 | - match: { tokens.3.token: 道具 } 25 | - match: { tokens.4.token: 和 } 26 | - match: { tokens.5.token: 服装 } -------------------------------------------------------------------------------- /src/test/resources/rest-api-spec/test/analysis_hanlp/20_search.yaml: -------------------------------------------------------------------------------- 1 | # Integration tests for HanLP analysis component 2 | # 3 | --- 4 | "Index HanLP content": 5 | - do: 6 | indices.create: 7 | index: test 8 | body: 9 | mappings: 10 | type: 11 | 
properties: 12 | text: 13 | type: string 14 | analyzer: hanlp 15 | - do: 16 | cluster.health: 17 | wait_for_status: yellow 18 | 19 | - do: 20 | index: 21 | index: test 22 | type: type 23 | id: 1 24 | body: { "text": "我购买了道具和服装" } 25 | - do: 26 | indices.refresh: {} 27 | 28 | - do: 29 | search: 30 | index: test 31 | body: 32 | query: 33 | match: 34 | text: 购买 35 | - match: { hits.total: 1 } --------------------------------------------------------------------------------
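Usage sketch (not part of the repository sources above): the sources register a `hanlp` analyzer, a `hanlp_tokenizer` tokenizer (with `hanlp_sentence` as a backwards-compatible alias), and a no-op `hanlp_word` token filter, and `HanLpTokenizerTokenizerFactory` reads per-tokenizer options such as `indexMode` and `nameRecognize` from index settings. The commands below are a minimal, hypothetical example of wiring these names into an index on an Elasticsearch 2.3.x node; the index name `hanlp_demo`, the custom component names, and the chosen option values are illustrative assumptions, not something defined in the repository.

```bash
# Minimal sketch, assuming a local Elasticsearch 2.3.x node with the plugin installed.
# The index name and the custom analyzer/tokenizer names are illustrative assumptions;
# the registered names ("hanlp_tokenizer", "hanlp") and option keys ("indexMode",
# "nameRecognize") come from the plugin sources above.
curl -XPUT 'http://localhost:9200/hanlp_demo' -d '{
  "settings": {
    "analysis": {
      "tokenizer": {
        "my_hanlp_tokenizer": {
          "type": "hanlp_tokenizer",
          "indexMode": true,
          "nameRecognize": true
        }
      },
      "analyzer": {
        "my_hanlp": {
          "type": "custom",
          "tokenizer": "my_hanlp_tokenizer"
        }
      }
    }
  }
}'

# The pre-built "hanlp" analyzer registered by the plugin can also be used directly,
# mirroring what the REST tests above exercise via indices.analyze:
curl -XPOST 'http://localhost:9200/hanlp_demo/_analyze?pretty' -d '{
  "analyzer": "hanlp",
  "text": "我购买了道具和服装"
}'
```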