├── .gitignore ├── .travis.yml ├── LICENSE.txt ├── README.rst ├── pom.xml └── src ├── main ├── assemblies │ └── plugin.xml ├── java │ └── org │ │ └── xbib │ │ └── elasticsearch │ │ ├── action │ │ ├── admin │ │ │ ├── cluster │ │ │ │ └── state │ │ │ │ │ ├── ConsistencyCheckAction.java │ │ │ │ │ ├── ConsistencyCheckRequest.java │ │ │ │ │ ├── ConsistencyCheckRequestBuilder.java │ │ │ │ │ ├── ConsistencyCheckResponse.java │ │ │ │ │ └── TransportConsistencyCheckAction.java │ │ │ └── indices │ │ │ │ └── reconstruct │ │ │ │ ├── ReconstructIndexAction.java │ │ │ │ ├── ReconstructIndexRequest.java │ │ │ │ ├── ReconstructIndexRequestBuilder.java │ │ │ │ ├── ReconstructIndexResponse.java │ │ │ │ ├── ShardReconstructIndexRequest.java │ │ │ │ ├── ShardReconstructIndexResponse.java │ │ │ │ └── TransportReconstructAction.java │ │ └── skywalker │ │ │ ├── ShardSkywalkerRequest.java │ │ │ ├── ShardSkywalkerResponse.java │ │ │ ├── SkywalkerAction.java │ │ │ ├── SkywalkerRequest.java │ │ │ ├── SkywalkerRequestBuilder.java │ │ │ ├── SkywalkerResponse.java │ │ │ ├── TransportSkywalkerAction.java │ │ │ └── support │ │ │ ├── IndexableFieldToXContent.java │ │ │ └── MetaDataToXContent.java │ │ ├── plugin │ │ └── skywalker │ │ │ └── SkywalkerPlugin.java │ │ ├── rest │ │ └── action │ │ │ └── skywalker │ │ │ ├── RestConsistencyCheckAction.java │ │ │ ├── RestReconstructIndexAction.java │ │ │ └── RestSkywalkerAction.java │ │ └── skywalker │ │ ├── FormatDetails.java │ │ ├── LuceneFormats.java │ │ ├── Skywalker.java │ │ ├── collectors │ │ ├── AccessibleHitCollector.java │ │ ├── AccessibleTopHitCollector.java │ │ ├── AllHit.java │ │ ├── AllHitsCollector.java │ │ ├── CountLimitedHitCollector.java │ │ ├── IntervalLimitedCollector.java │ │ ├── LimitedException.java │ │ ├── LimitedHitCollector.java │ │ └── NoScoringScorer.java │ │ ├── reconstruct │ │ ├── DocumentReconstructor.java │ │ ├── DocumentTerm.java │ │ └── DocumentTermComparator.java │ │ └── stats │ │ ├── DocFreqComparatorSortDescending.java │ │ 
├── FieldTermCount.java │ │ ├── TermStats.java │ │ └── TermStatsQueue.java └── resources │ └── es-plugin.properties ├── site ├── resources │ ├── cologne-skywalker.jpg │ └── skywalker.png └── site.xml └── test └── java └── org └── xbib └── elasticsearch └── plugin └── skywalker ├── AbstractNodeTest.java └── SkywalkerTests.java /.gitignore: -------------------------------------------------------------------------------- 1 | .idea 2 | *.iml 3 | .classpath 4 | .project 5 | target 6 | *.db 7 | data 8 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: java 2 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 
26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. 
For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. 
If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. 
You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. 
(Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright [yyyy] [name of copyright owner] 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 203 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | .. image:: ../../../elasticsearch-skywalker/raw/master/src/site/resources/cologne-skywalker.jpg 2 | 3 | Image `Skywalking the Cologne Cathedral by Vadim Makhorov `_ 4 | 5 | Elasticsearch Skywalker plugin 6 | ============================== 7 | 8 | The Skywalker plugin for `Elasticsearch `_ is like Luke for Lucene. 9 | 10 | Well, almost, it's not coming with a GUI right now. 
11 | 12 | Skywalking gives you 13 | 14 | - a deeper insight of how Elasticsearch stores Lucene indices and fields 15 | 16 | - lists timestamps and checksums of Elasticsearch's Lucene files 17 | 18 | - shows field names, both from mapping and Lucene indices 19 | 20 | - shows field types and attributes, both from mapping and Lucene indices 21 | 22 | - retrieves a ranked list of most frequent terms 23 | 24 | Because Elasticsearch and Elasticsearch Head already offer a lot of what Luke does with Lucene, Skywalker has rather few features, just to complement some missing parts. 25 | 26 | It is recommended to use Skywalker for learning on rather small indices. 27 | 28 | With large indices, or on production machines, unpredictable exceptions may happen, mostly out of memory exceptions. You have been warned. Use at your own risk! 29 | 30 | Installation 31 | ------------ 32 | 33 | .. image:: https://travis-ci.org/jprante/elasticsearch-skywalker.png 34 | 35 | ============= =========== ================= ============================================================ 36 | ES version Plugin Release date Command 37 | ------------- ----------- ----------------- ------------------------------------------------------------ 38 | 0.90.0 3.0.0 May 24, 2013 ./bin/plugin --install skywalker --url http://bit.ly/1eYTIHj 39 | 0.90.5 3.1.0 Nov 9, 2013 ./bin/plugin --install skywalker --url http://bit.ly/HFJos6 40 | 0.90.6 3.2.0 Nov 9, 2013 ./bin/plugin --install skywalker --url http://bit.ly/19PbcoJ 41 | 1.0.0.RC1 1.0.0.RC1.1 Jan 16, 2014 ./bin/plugin --install skywalker --url http://bit.ly/1htPlFK 42 | ============= =========== ================= ============================================================ 43 | 44 | Do not forget to restart the node after installing. 
45 | 46 | Project docs 47 | ------------ 48 | 49 | The Maven project site is available at `Github `_ 50 | 51 | Binaries 52 | -------- 53 | 54 | Binaries are available at `Bintray `_ 55 | 56 | 57 | Documentation 58 | ------------- 59 | 60 | Example request:: 61 | 62 | curl -XDELETE 'localhot:9200/test' 63 | 64 | curl -XPUT 'localhost:9200/test/docs/1' -d ' 65 | { 66 | "content" : "Hello World" 67 | } 68 | ' 69 | 70 | curl -XGET 'localhost:9200/_skywalker' 71 | 72 | curl -XGET 'localhost:9200/test/_skywalker' 73 | 74 | 75 | Example output:: 76 | 77 | { 78 | "ok" : true, 79 | "_shards" : { 80 | "total" : 5, 81 | "successful" : 5, 82 | "failed" : 0 83 | }, 84 | "result" : { 85 | "testindex" : { 86 | "3" : { 87 | "store" : [ { 88 | "name" : "_v.fdx", 89 | "length" : 1332, 90 | "lastmodified" : "2012-05-24T14:47:09.000Z", 91 | "func" : "stored fields index data", 92 | "checksum" : "ee2hoy" 93 | }, { 94 | "name" : "_z.frq", 95 | "length" : 32465, 96 | "lastmodified" : "2012-05-24T14:47:15.000Z", 97 | "func" : "term frequency postings data", 98 | "checksum" : "db6lgl" 99 | }, { 100 | "name" : "segments.gen", 101 | "length" : 20, 102 | "lastmodified" : "2012-05-24T15:16:03.000Z", 103 | "func" : "generation number - global file", 104 | "checksum" : null 105 | }, { 106 | "name" : "_z.fnm", 107 | "length" : 2368, 108 | "lastmodified" : "2012-05-24T14:47:15.000Z", 109 | "func" : "field names / infos", 110 | "checksum" : "13tfrvf" 111 | }, { 112 | "name" : "_y.fdt", 113 | "length" : 593872, 114 | "lastmodified" : "2012-05-24T14:47:14.000Z", 115 | "func" : "stored fields data", 116 | "checksum" : "rmf3zi" 117 | }, { 118 | [...] 
119 | }, { 120 | "name" : "segments_2", 121 | "length" : 1832, 122 | "lastmodified" : "2012-05-24T15:16:03.000Z", 123 | "func" : "per-commit list of segments", 124 | "checksum" : null 125 | }, { 126 | "name" : "_10.nrm", 127 | "length" : 1524, 128 | "lastmodified" : "2012-05-24T14:47:16.000Z", 129 | "func" : "norms data for all fields", 130 | "checksum" : "yv7s2l" 131 | } ], 132 | "numTerms" : 153043, 133 | "topterms" : [ { 134 | "field" : "_type", 135 | "text" : "__dc:subject.xbib:subject", 136 | "docFreq" : 6191 137 | }, { 138 | "field" : "dc:subject.xbib:subject.xbib:subjectAuthority", 139 | "text" : "RSWK", 140 | "docFreq" : 4342 141 | }, { 142 | "field" : "dc:subject.xbib:subject.xbib:subjectIDAuthority", 143 | "text" : "gnd", 144 | "docFreq" : 3217 145 | }, { 146 | "field" : "dc:subject.xbib:subject.xbib:subjectType", 147 | "text" : "topic", 148 | "docFreq" : 2451 149 | [...] 150 | } ], 151 | "maxlastmodified" : 1337872563000, 152 | "hasDeletions" : false, 153 | "directoryImpl" : "org.elasticsearch.index.store.Store$StoreDirectory", 154 | "indexFormat" : { 155 | "id" : -11, 156 | "capabilities" : "lock-less, single norms, shared doc store, checksum, del count, omitTf, user data, diagnostics, hasVectors", 157 | "genericName" : "Lucene 3.1" 158 | }, 159 | "minlastmodified" : 1337870802000, 160 | "numDocs" : 8229, 161 | "indexversion" : "1337870762887", 162 | "maxDoc" : 8229, 163 | "commits" : [ { 164 | "files" : [ "_p.fdx", "_z.frq", "_p.fdt", "_12.fdt", "_12.tii", "_z.fnm", "_12.fdx", "_y.fdt", "_10.tii", "_p.nrm", "_w.tii", "_y.fdx", "_y.nrm", "_12.tis", "_w.fnm", "_10.tis", "_x.tis", "_l.nrm", "_w.tis", "_w.fdt", "_w.frq", "_l.prx", "_11.fdx", "_w.fdx", "_11.fdt", "_x.tii", "_z.nrm", "_10.prx", "_l.fdx", "_12.fnm", "_11.prx", "_l.fdt", "_12.frq", "_x.fdt", "_z.fdt", "_x.nrm", "_11.tii", "_10.fdt", "_l.fnm", "_z.tii", "_p.fnm", "_y.tis", "_x.fdx", "_z.fdx", "_y.frq", "_11.tis", "_z.tis", "_l.frq", "_w.prx", "_p.frq", "_y.tii", "_10.fdx", "_l.tis", "_11.nrm", 
"_p.tii", "_w.nrm", "_l.tii", "_y.fnm", "_10.fnm", "_x.fnm", "_p.tis", "_z.prx", "_12.prx", "_10.frq", "_x.frq", "_11.frq", "_y.prx", "_12.nrm", "_x.prx", "_11.fnm", "segments_2", "_10.nrm", "_p.prx" ], 165 | "userdata" : { 166 | "translog_id" : "1337870762809" 167 | }, 168 | "count" : 9, 169 | "segment" : "segments_2", 170 | "deleted" : false 171 | } ], 172 | "numDeletedDocs" : 0 173 | }, 174 | "fieldInfos" : [ { 175 | "name" : "_uid", 176 | "mapper" : { 177 | "indexNameClean" : "_uid", 178 | "indexed" : true, 179 | "omitTermFreqAndPositions" : false, 180 | "analyzed" : false, 181 | "indexName" : "_uid", 182 | "boost" : 1.0, 183 | "fullName" : "_uid", 184 | "fieldDataType" : "STRING", 185 | "omitNorms" : true 186 | }, 187 | "number" : 0, 188 | "storePayloads" : true, 189 | "omitNorms" : false, 190 | "options" : "DOCS_AND_FREQS_AND_POSITIONS", 191 | "storeTermVector" : false, 192 | "isindexed" : true 193 | }, { 194 | "name" : "_type", 195 | "mapper" : { 196 | "indexNameClean" : "_type", 197 | "indexed" : true, 198 | "omitTermFreqAndPositions" : true, 199 | "analyzed" : false, 200 | "indexName" : "_type", 201 | "boost" : 1.0, 202 | "fullName" : "_type", 203 | "fieldDataType" : "STRING", 204 | "omitNorms" : true 205 | }, 206 | "number" : 1, 207 | "storePayloads" : false, 208 | "omitNorms" : false, 209 | "options" : "DOCS_ONLY", 210 | "storeTermVector" : false, 211 | "isindexed" : true 212 | }, { 213 | [...] 214 | 215 | License 216 | ======= 217 | 218 | This plugin re-uses code of the Luke project 219 | 220 | Elasticsearch Skywalker Plugin 221 | 222 | Copyright (C) 2012,2013 Jörg Prante 223 | 224 | Licensed under the Apache License, Version 2.0 (the "License"); 225 | you may not use this file except in compliance with the License. 
226 | You may obtain a copy of the License at 227 | 228 | http://www.apache.org/licenses/LICENSE-2.0 229 | 230 | Unless required by applicable law or agreed to in writing, software 231 | distributed under the License is distributed on an "AS IS" BASIS, 232 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 233 | See the License for the specific language governing permissions and 234 | limitations under the License. -------------------------------------------------------------------------------- /pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | 4.0.0 7 | 8 | org.xbib.elasticsearch.plugin 9 | elasticsearch-skywalker 10 | 1.3.0-SNAPSHOT 11 | 12 | jar 13 | 14 | elasticsearch-skywalker 15 | Skywalker for Elasticsearch is like Luke for Lucene 16 | 17 | http://github.com/jprante/elasticsearch-skywalker 18 | 19 | 2012 20 | 21 | 22 | 23 | The Apache Software License, Version 2.0 24 | http://www.apache.org/licenses/LICENSE-2.0.txt 25 | repo 26 | 27 | 28 | 29 | 30 | 31 | jprante 32 | Jörg Prante 33 | https://github.com/jprante 34 | 35 | 36 | 37 | 38 | scm:git:git@github.com:jprante/elasticsearch-skywalker.git 39 | scm:git:git@github.com:jprante/elasticsearch-skywalker.git 40 | http://github.com/jprante/elasticsearch-skywalker 41 | 42 | 43 | 44 | Github Issue Tracker 45 | https://github.com/jprante/elasticsearch-skywalker/issues 46 | 47 | 48 | 49 | 50 | bintray-jprante-elasticsearch-plugins-elasticsearch-skywalker 51 | jprante-elasticsearch-plugins-elasticsearch-skywalker 52 | https://api.bintray.com/maven/jprante/elasticsearch-plugins/elasticsearch-skywalker 53 | 54 | 55 | 56 | 57 | 58 | bintray-elasticsearch-plugins 59 | http://dl.bintray.com/jprante/elasticsearch-plugins 60 | 61 | true 62 | 63 | 64 | false 65 | 66 | 67 | 68 | 69 | 70 | github 71 | UTF-8 72 | 1.3.2 73 | 74 | 75 | 76 | 77 | org.elasticsearch 78 | elasticsearch 79 | ${elasticsearch.version} 80 | jar 81 | compile 82 | 83 | 84 | 85 
| org.testng 86 | testng 87 | 6.8.7 88 | test 89 | 90 | 91 | 92 | org.hamcrest 93 | hamcrest-all 94 | 1.3 95 | test 96 | 97 | 98 | 99 | log4j 100 | log4j 101 | 1.2.17 102 | true 103 | 104 | 105 | 106 | 107 | 108 | 109 | 110 | maven-compiler-plugin 111 | 3.1 112 | 113 | 1.7 114 | 1.7 115 | UTF-8 116 | true 117 | true 118 | true 119 | -Xlint:all,-serial,-path,-rawtypes,-unchecked 120 | 121 | 122 | 123 | maven-clean-plugin 124 | 2.4.1 125 | 126 | 127 | 128 | data 129 | 130 | 131 | 132 | 133 | 134 | maven-surefire-plugin 135 | 2.15 136 | 137 | false 138 | once 139 | 140 | UTF-8 141 | ${project.build.directory} 142 | true 143 | 144 | 145 | **/*Tests.java 146 | 147 | 148 | 149 | 150 | maven-source-plugin 151 | 2.2.1 152 | 153 | 154 | attach-sources 155 | 156 | jar 157 | 158 | 159 | 160 | 161 | 162 | maven-assembly-plugin 163 | 2.4 164 | 165 | false 166 | ${project.build.directory}/releases/ 167 | 168 | ${basedir}/src/main/assemblies/plugin.xml 169 | 170 | 171 | 172 | 173 | package 174 | 175 | single 176 | 177 | 178 | 179 | 180 | 181 | maven-resources-plugin 182 | 2.6 183 | 184 | UTF-8 185 | 186 | 187 | 188 | maven-project-info-reports-plugin 189 | 2.7 190 | 191 | 192 | maven-javadoc-plugin 193 | 2.9.1 194 | 195 | ${project.build.sourceEncoding} 196 | en 197 | true 198 | true 199 | 200 | 201 | 202 | package 203 | 204 | jar 205 | 206 | 207 | 208 | 209 | 210 | maven-site-plugin 211 | 3.3 212 | 213 | en 214 | UTF-8 215 | UTF-8 216 | 217 | 218 | 219 | com.github.github 220 | site-maven-plugin 221 | 0.9 222 | 223 | Building site for ${project.version} 224 | 225 | 226 | 227 | 228 | site 229 | 230 | site 231 | 232 | 233 | 234 | 235 | 236 | 237 | 238 | 239 | 240 | maven-javadoc-plugin 241 | 2.9.1 242 | 243 | ${project.build.sourceEncoding} 244 | en 245 | true 246 | true 247 | 248 | 249 | 250 | maven-surefire-report-plugin 251 | 2.15 252 | 253 | 254 | 255 | 256 | 257 | -------------------------------------------------------------------------------- /src/main/assemblies/plugin.xml: 
-------------------------------------------------------------------------------- 1 | 2 | 3 | plugin 4 | 5 | zip 6 | 7 | false 8 | 9 | 10 | / 11 | true 12 | false 13 | compile 14 | 15 | org.xbib.elasticsearch.plugin 16 | 17 | 18 | 19 | 20 | 21 | ${project.build.directory} 22 | / 23 | 24 | *.jar 25 | 26 | 27 | *sources*.jar 28 | 29 | 30 | 31 | 32 |
--------------------------------------------------------------------------------
/src/main/java/org/xbib/elasticsearch/action/admin/cluster/state/ConsistencyCheckAction.java:
--------------------------------------------------------------------------------

package org.xbib.elasticsearch.action.admin.cluster.state;

import org.elasticsearch.action.admin.cluster.ClusterAction;
import org.elasticsearch.client.ClusterAdminClient;

/**
 * Consistency check action: the cluster-level action definition that binds
 * the action name to its request, response and request-builder types.
 *
 * NOTE(review): generic type parameters appear to have been stripped by the
 * text extraction of this dump; the original most likely declared
 * {@code extends ClusterAction<ConsistencyCheckRequest, ConsistencyCheckResponse, ConsistencyCheckRequestBuilder>}
 * — confirm against the repository.
 */
public class ConsistencyCheckAction extends ClusterAction {

    // Singleton used both for registration and for client-side execution.
    public static final ConsistencyCheckAction INSTANCE = new ConsistencyCheckAction();
    // Unique action name under which the action is registered on the transport layer.
    public static final String NAME = "cluster/state/consistencycheck";

    // Private: only the INSTANCE singleton may exist.
    private ConsistencyCheckAction() {
        super(NAME);
    }

    /** Creates an empty response object, to be populated from the transport stream. */
    @Override
    public ConsistencyCheckResponse newResponse() {
        return new ConsistencyCheckResponse();
    }

    /** Creates a request builder bound to the given cluster admin client. */
    @Override
    public ConsistencyCheckRequestBuilder newRequestBuilder(ClusterAdminClient client) {
        return new ConsistencyCheckRequestBuilder(client);
    }
}
--------------------------------------------------------------------------------
/src/main/java/org/xbib/elasticsearch/action/admin/cluster/state/ConsistencyCheckRequest.java:
--------------------------------------------------------------------------------

package org.xbib.elasticsearch.action.admin.cluster.state;

import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.support.master.MasterNodeOperationRequest;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; 8 | 9 | import java.io.IOException; 10 | 11 | /** 12 | * Consistency check request 13 | * 14 | */ 15 | public class ConsistencyCheckRequest extends MasterNodeOperationRequest { 16 | 17 | public ConsistencyCheckRequest() { 18 | } 19 | 20 | @Override 21 | public ActionRequestValidationException validate() { 22 | return null; 23 | } 24 | 25 | @Override 26 | public void readFrom(StreamInput in) throws IOException { 27 | super.readFrom(in); 28 | } 29 | 30 | @Override 31 | public void writeTo(StreamOutput out) throws IOException { 32 | super.writeTo(out); 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /src/main/java/org/xbib/elasticsearch/action/admin/cluster/state/ConsistencyCheckRequestBuilder.java: -------------------------------------------------------------------------------- 1 | 2 | package org.xbib.elasticsearch.action.admin.cluster.state; 3 | 4 | import org.elasticsearch.action.ActionListener; 5 | import org.elasticsearch.action.ActionRequestBuilder; 6 | import org.elasticsearch.client.ClusterAdminClient; 7 | 8 | /** 9 | * Consistency check request builder 10 | * 11 | */ 12 | public class ConsistencyCheckRequestBuilder extends ActionRequestBuilder { 13 | 14 | public ConsistencyCheckRequestBuilder(ClusterAdminClient clusterClient) { 15 | super(clusterClient, new ConsistencyCheckRequest()); 16 | } 17 | 18 | @Override 19 | protected void doExecute(ActionListener listener) { 20 | client.execute(ConsistencyCheckAction.INSTANCE, request, listener); 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /src/main/java/org/xbib/elasticsearch/action/admin/cluster/state/ConsistencyCheckResponse.java: -------------------------------------------------------------------------------- 1 | 2 | package org.xbib.elasticsearch.action.admin.cluster.state; 3 | 4 | import org.elasticsearch.action.ActionResponse; 5 | import 
org.elasticsearch.cluster.ClusterName; 6 | import org.elasticsearch.cluster.ClusterState; 7 | import org.elasticsearch.common.io.stream.StreamInput; 8 | import org.elasticsearch.common.io.stream.StreamOutput; 9 | 10 | import java.io.File; 11 | import java.io.IOException; 12 | import java.util.ArrayList; 13 | import java.util.List; 14 | 15 | /** 16 | * Consistency check response 17 | */ 18 | public class ConsistencyCheckResponse extends ActionResponse { 19 | 20 | private ClusterName clusterName; 21 | 22 | private ClusterState clusterState; 23 | 24 | private List files; 25 | 26 | public ConsistencyCheckResponse() { 27 | } 28 | 29 | ConsistencyCheckResponse(ClusterName clusterName, ClusterState clusterState, List files) { 30 | this.clusterName = clusterName; 31 | this.clusterState = clusterState; 32 | this.files = files; 33 | } 34 | 35 | public ClusterState getState() { 36 | return this.clusterState; 37 | } 38 | 39 | public ClusterName getClusterName() { 40 | return this.clusterName; 41 | } 42 | 43 | public List getFiles() { 44 | return this.files; 45 | } 46 | 47 | @Override 48 | public void readFrom(StreamInput in) throws IOException { 49 | super.readFrom(in); 50 | clusterName = ClusterName.readClusterName(in); 51 | clusterState = ClusterState.Builder.readFrom(in, null); 52 | int n = in.read(); 53 | files = new ArrayList(); 54 | for (int i = 0; i < n; i++) { 55 | files.set(i, new File(in.readString())); 56 | } 57 | } 58 | 59 | @Override 60 | public void writeTo(StreamOutput out) throws IOException { 61 | super.writeTo(out); 62 | clusterName.writeTo(out); 63 | ClusterState.Builder.writeTo(clusterState, out); 64 | out.write(files.size()); 65 | for (File file : files) { 66 | out.writeString(file.getAbsolutePath()); 67 | } 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /src/main/java/org/xbib/elasticsearch/action/admin/cluster/state/TransportConsistencyCheckAction.java: 
-------------------------------------------------------------------------------- 1 | 2 | package org.xbib.elasticsearch.action.admin.cluster.state; 3 | 4 | import org.elasticsearch.ElasticsearchException; 5 | import org.elasticsearch.action.ActionListener; 6 | import org.elasticsearch.action.support.master.TransportMasterNodeOperationAction; 7 | import org.elasticsearch.cluster.ClusterName; 8 | import org.elasticsearch.cluster.ClusterService; 9 | import org.elasticsearch.cluster.ClusterState; 10 | import org.elasticsearch.common.inject.Inject; 11 | import org.elasticsearch.common.settings.Settings; 12 | import org.elasticsearch.env.NodeEnvironment; 13 | import org.elasticsearch.threadpool.ThreadPool; 14 | import org.elasticsearch.transport.TransportService; 15 | import org.xbib.elasticsearch.skywalker.Skywalker; 16 | 17 | import java.io.File; 18 | import java.util.ArrayList; 19 | import java.util.List; 20 | 21 | import static org.elasticsearch.cluster.ClusterState.builder; 22 | 23 | /** 24 | * Transport consistency check action 25 | * 26 | */ 27 | public class TransportConsistencyCheckAction extends TransportMasterNodeOperationAction { 28 | 29 | private final ClusterName clusterName; 30 | 31 | private final NodeEnvironment nodeEnv; 32 | 33 | @Inject 34 | public TransportConsistencyCheckAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, 35 | ClusterName clusterName, NodeEnvironment nodeEnvironment) { 36 | super(settings, ConsistencyCheckAction.NAME, transportService, clusterService, threadPool); 37 | this.clusterName = clusterName; 38 | this.nodeEnv = nodeEnvironment; 39 | } 40 | 41 | @Override 42 | protected String executor() { 43 | return ThreadPool.Names.GENERIC; 44 | } 45 | 46 | @Override 47 | protected ConsistencyCheckRequest newRequest() { 48 | return new ConsistencyCheckRequest(); 49 | } 50 | 51 | @Override 52 | protected ConsistencyCheckResponse newResponse() { 53 | return new 
ConsistencyCheckResponse(); 54 | } 55 | 56 | @Override 57 | protected void masterOperation(ConsistencyCheckRequest request, ClusterState state, ActionListener listener) throws ElasticsearchException { 58 | ClusterState.Builder builder = builder(state); 59 | List files = new ArrayList(); 60 | builder.metaData(Skywalker.loadState(files, nodeEnv)); 61 | listener.onResponse(new ConsistencyCheckResponse(clusterName, builder.build(), files)); 62 | } 63 | 64 | @Override 65 | protected boolean localExecute(ConsistencyCheckRequest request) { 66 | return true; 67 | } 68 | 69 | } 70 | -------------------------------------------------------------------------------- /src/main/java/org/xbib/elasticsearch/action/admin/indices/reconstruct/ReconstructIndexAction.java: -------------------------------------------------------------------------------- 1 | 2 | package org.xbib.elasticsearch.action.admin.indices.reconstruct; 3 | 4 | import org.elasticsearch.action.admin.indices.IndicesAction; 5 | import org.elasticsearch.client.IndicesAdminClient; 6 | 7 | /** 8 | * Reconstruct document action 9 | */ 10 | public class ReconstructIndexAction extends IndicesAction { 11 | 12 | public static final ReconstructIndexAction INSTANCE = new ReconstructIndexAction(); 13 | public static final String NAME = "indices/reconstruct"; 14 | 15 | private ReconstructIndexAction() { 16 | super(NAME); 17 | } 18 | 19 | @Override 20 | public ReconstructIndexResponse newResponse() { 21 | return new ReconstructIndexResponse(); 22 | } 23 | 24 | @Override 25 | public ReconstructIndexRequestBuilder newRequestBuilder(IndicesAdminClient client) { 26 | return new ReconstructIndexRequestBuilder(client); 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /src/main/java/org/xbib/elasticsearch/action/admin/indices/reconstruct/ReconstructIndexRequest.java: -------------------------------------------------------------------------------- 1 | 2 | package 
/**
 * A broadcast request to reconstruct the documents of a single index.
 */
public class ReconstructIndexRequest extends BroadcastOperationRequest {

    // name of the index to reconstruct; serialized in readFrom/writeTo
    private String index;

    ReconstructIndexRequest() {
    }

    /**
     * @param index the name of the index to reconstruct
     */
    public ReconstructIndexRequest(String index) {
        this.index = index;
    }

    /**
     * @return the index this request targets
     */
    public String index() {
        return index;
    }

    @Override
    public void readFrom(StreamInput in) throws IOException {
        super.readFrom(in);
        this.index = in.readString();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        // assumes index is always set before serialization — TODO confirm
        out.writeString(index);
    }
}
24 | * 25 | * @param listener a response listener 26 | */ 27 | @Override 28 | protected void doExecute(ActionListener listener) { 29 | client.execute(ReconstructIndexAction.INSTANCE, request, listener); 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /src/main/java/org/xbib/elasticsearch/action/admin/indices/reconstruct/ReconstructIndexResponse.java: -------------------------------------------------------------------------------- 1 | 2 | package org.xbib.elasticsearch.action.admin.indices.reconstruct; 3 | 4 | import org.elasticsearch.action.ShardOperationFailedException; 5 | import org.elasticsearch.action.support.broadcast.BroadcastOperationResponse; 6 | import org.elasticsearch.common.io.stream.StreamInput; 7 | import org.elasticsearch.common.io.stream.StreamOutput; 8 | 9 | import java.io.IOException; 10 | import java.util.List; 11 | 12 | import static org.elasticsearch.common.collect.Lists.newArrayList; 13 | 14 | /** 15 | * A response for a reconstruct action. 
16 | */ 17 | public class ReconstructIndexResponse extends BroadcastOperationResponse { 18 | 19 | protected List shards; 20 | 21 | ReconstructIndexResponse() { 22 | } 23 | 24 | ReconstructIndexResponse(List shards, int totalShards, int successfulShards, int failedShards, List shardFailures) { 25 | super(totalShards, successfulShards, failedShards, shardFailures); 26 | this.shards = shards; 27 | } 28 | 29 | public List shards() { 30 | return shards; 31 | } 32 | 33 | @Override 34 | public void readFrom(StreamInput in) throws IOException { 35 | super.readFrom(in); 36 | int n = in.readVInt(); 37 | shards = newArrayList(); 38 | for (int i = 0; i < n; i++) { 39 | ShardReconstructIndexResponse r = new ShardReconstructIndexResponse(); 40 | r.readFrom(in); 41 | shards.add(r); 42 | } 43 | } 44 | 45 | @Override 46 | public void writeTo(StreamOutput out) throws IOException { 47 | super.writeTo(out); 48 | out.writeVInt(shards.size()); 49 | for (ShardReconstructIndexResponse r : shards) { 50 | r.writeTo(out); 51 | } 52 | } 53 | } -------------------------------------------------------------------------------- /src/main/java/org/xbib/elasticsearch/action/admin/indices/reconstruct/ShardReconstructIndexRequest.java: -------------------------------------------------------------------------------- 1 | 2 | package org.xbib.elasticsearch.action.admin.indices.reconstruct; 3 | 4 | import org.elasticsearch.action.support.broadcast.BroadcastShardOperationRequest; 5 | import org.elasticsearch.common.io.stream.StreamInput; 6 | import org.elasticsearch.common.io.stream.StreamOutput; 7 | 8 | import java.io.IOException; 9 | 10 | /** 11 | * Shard Skywalker request 12 | */ 13 | public class ShardReconstructIndexRequest extends BroadcastShardOperationRequest { 14 | 15 | ShardReconstructIndexRequest() { 16 | } 17 | 18 | public ShardReconstructIndexRequest(String index, int shardId, ReconstructIndexRequest request) { 19 | super(index, shardId, request); 20 | } 21 | 22 | @Override 23 | public void 
/**
 * Per-shard response of the reconstruct action: carries the reconstructed
 * index content as an XContent structure.
 */
public class ShardReconstructIndexResponse extends BroadcastShardOperationResponse {

    // whether the shard had content to reconstruct
    private boolean exists;
    // reconstructed content; may be null, signalled by a boolean on the wire
    private XContentBuilder builder;

    ShardReconstructIndexResponse() {
    }

    ShardReconstructIndexResponse(boolean exists) {
        this.exists = exists;
    }

    ShardReconstructIndexResponse(boolean exists, XContentBuilder builder) {
        this.exists = exists;
        this.builder = builder;
    }

    /**
     * @return the reconstructed index content, or null if none was produced
     */
    public XContentBuilder getReconstructedIndex() {
        return builder;
    }

    @Override
    public void readFrom(StreamInput in) throws IOException {
        super.readFrom(in);
        exists = in.readBoolean();
        // a second boolean marks whether serialized content follows
        if (in.readBoolean()) {
            builder = jsonBuilder();
            // re-parse the transported bytes into a fresh JSON builder
            XContentParser p = XContentHelper.createParser(in.readBytesReference());
            builder.copyCurrentStructure(p);
        }
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        out.writeBoolean(exists);
        // write a presence flag first so readFrom can handle a null builder
        if (builder == null) {
            out.writeBoolean(false);
        } else {
            out.writeBoolean(true);
            out.writeBytesReference(builder.bytes());
        }
    }
}
25 | 26 | import java.io.IOException; 27 | import java.util.List; 28 | import java.util.concurrent.atomic.AtomicReferenceArray; 29 | 30 | import static org.elasticsearch.common.collect.Lists.newArrayList; 31 | 32 | /** 33 | * Transport reconstruct index action 34 | */ 35 | public class TransportReconstructAction extends TransportBroadcastOperationAction { 36 | 37 | private final IndicesService indicesService; 38 | 39 | @Inject 40 | public TransportReconstructAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, 41 | TransportService transportService, IndicesService indicesService) { 42 | super(settings, ReconstructIndexAction.NAME, threadPool, clusterService, transportService); 43 | this.indicesService = indicesService; 44 | } 45 | 46 | @Override 47 | protected String executor() { 48 | return ThreadPool.Names.GET; 49 | } 50 | 51 | @Override 52 | protected ReconstructIndexRequest newRequest() { 53 | return new ReconstructIndexRequest(); 54 | } 55 | 56 | @Override 57 | protected ReconstructIndexResponse newResponse(ReconstructIndexRequest reconstructIndexRequest, AtomicReferenceArray shardsResponses, ClusterState clusterState) { 58 | int successfulShards = 0; 59 | int failedShards = 0; 60 | List shardFailures = null; 61 | final List shards = newArrayList(); 62 | for (int i = 0; i < shardsResponses.length(); i++) { 63 | Object shardResponse = shardsResponses.get(i); 64 | if (shardResponse == null) { 65 | // a non active shard, ignore... 
66 | } else if (shardResponse instanceof BroadcastShardOperationFailedException) { 67 | failedShards++; 68 | if (shardFailures == null) { 69 | shardFailures = newArrayList(); 70 | } 71 | shardFailures.add(new DefaultShardOperationFailedException((BroadcastShardOperationFailedException) shardResponse)); 72 | } else { 73 | shards.add((ShardReconstructIndexResponse)shardResponse); 74 | successfulShards++; 75 | } 76 | } 77 | return new ReconstructIndexResponse(shards, shardsResponses.length(), successfulShards, failedShards, shardFailures); 78 | } 79 | 80 | @Override 81 | protected ShardReconstructIndexRequest newShardRequest() { 82 | return new ShardReconstructIndexRequest(); 83 | } 84 | 85 | @Override 86 | protected ShardReconstructIndexRequest newShardRequest(int numShards, ShardRouting shardRouting, ReconstructIndexRequest reconstructIndexRequest) { 87 | return new ShardReconstructIndexRequest(shardRouting.index(), shardRouting.id(), reconstructIndexRequest); 88 | } 89 | 90 | @Override 91 | protected BroadcastShardOperationResponse newShardResponse() { 92 | return new ShardReconstructIndexResponse(); 93 | } 94 | 95 | @Override 96 | protected ShardReconstructIndexResponse shardOperation(ShardReconstructIndexRequest request) throws ElasticsearchException { 97 | IndexService indexService = indicesService.indexService(request.index()); 98 | InternalIndexShard indexShard = (InternalIndexShard) indexService.shardSafe(request.shardId()); 99 | Engine.Searcher searcher = indexShard.engine().acquireSearcher("transport_reconstruct"); 100 | IndexReader reader = searcher.reader(); 101 | DocumentReconstructor dr = new DocumentReconstructor(reader); 102 | try { 103 | return new ShardReconstructIndexResponse(true, dr.reconstruct(request.shardId())); 104 | } catch (IOException e) { 105 | throw new ElasticsearchException("failed to reconstruct index", e); 106 | } 107 | } 108 | 109 | @Override 110 | protected GroupShardsIterator shards(ClusterState clusterState, 
ReconstructIndexRequest reconstructIndexRequest, String[] concreteIndices) { 111 | return clusterState.routingTable().allActiveShardsGrouped(concreteIndices, true); 112 | } 113 | 114 | @Override 115 | protected ClusterBlockException checkGlobalBlock(ClusterState state, ReconstructIndexRequest request) { 116 | return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA); 117 | } 118 | 119 | @Override 120 | protected ClusterBlockException checkRequestBlock(ClusterState state, ReconstructIndexRequest reconstructIndexRequest, String[] concreteIndices) { 121 | return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA, concreteIndices); 122 | } 123 | 124 | } 125 | -------------------------------------------------------------------------------- /src/main/java/org/xbib/elasticsearch/action/skywalker/ShardSkywalkerRequest.java: -------------------------------------------------------------------------------- 1 | 2 | package org.xbib.elasticsearch.action.skywalker; 3 | 4 | import org.elasticsearch.action.support.broadcast.BroadcastShardOperationRequest; 5 | import org.elasticsearch.common.io.stream.StreamInput; 6 | import org.elasticsearch.common.io.stream.StreamOutput; 7 | 8 | import java.io.IOException; 9 | 10 | /** 11 | * Shard Skywalker request 12 | */ 13 | public class ShardSkywalkerRequest extends BroadcastShardOperationRequest { 14 | 15 | ShardSkywalkerRequest() { 16 | } 17 | 18 | public ShardSkywalkerRequest(String index, int shardId, SkywalkerRequest request) { 19 | super(index, shardId, request); 20 | } 21 | 22 | @Override 23 | public void readFrom(StreamInput in) throws IOException { 24 | super.readFrom(in); 25 | } 26 | 27 | @Override 28 | public void writeTo(StreamOutput out) throws IOException { 29 | super.writeTo(out); 30 | } 31 | } -------------------------------------------------------------------------------- /src/main/java/org/xbib/elasticsearch/action/skywalker/ShardSkywalkerResponse.java: 
-------------------------------------------------------------------------------- 1 | 2 | package org.xbib.elasticsearch.action.skywalker; 3 | 4 | import org.elasticsearch.action.support.broadcast.BroadcastShardOperationResponse; 5 | import org.elasticsearch.common.io.stream.StreamInput; 6 | import org.elasticsearch.common.io.stream.StreamOutput; 7 | 8 | import java.io.IOException; 9 | import java.util.Map; 10 | 11 | public class ShardSkywalkerResponse extends BroadcastShardOperationResponse { 12 | 13 | private Map response; 14 | 15 | ShardSkywalkerResponse() { 16 | } 17 | 18 | public ShardSkywalkerResponse(String index, int shardId) { 19 | super(index, shardId); 20 | } 21 | 22 | public ShardSkywalkerResponse setResponse(Map response) { 23 | this.response = response; 24 | return this; 25 | } 26 | 27 | public Map getResponse() { 28 | return response; 29 | } 30 | 31 | @Override 32 | public void readFrom(StreamInput in) throws IOException { 33 | super.readFrom(in); 34 | response = in.readMap(); 35 | } 36 | 37 | @Override 38 | public void writeTo(StreamOutput out) throws IOException { 39 | super.writeTo(out); 40 | out.writeMap(response); 41 | } 42 | } -------------------------------------------------------------------------------- /src/main/java/org/xbib/elasticsearch/action/skywalker/SkywalkerAction.java: -------------------------------------------------------------------------------- 1 | 2 | package org.xbib.elasticsearch.action.skywalker; 3 | 4 | import org.elasticsearch.action.admin.cluster.ClusterAction; 5 | import org.elasticsearch.client.ClusterAdminClient; 6 | 7 | /** 8 | * Skywalker action 9 | */ 10 | public class SkywalkerAction extends ClusterAction { 11 | 12 | public static final SkywalkerAction INSTANCE = new SkywalkerAction(); 13 | 14 | public static final String NAME = "indices/skywalker"; 15 | 16 | private SkywalkerAction() { 17 | super(NAME); 18 | } 19 | 20 | @Override 21 | public SkywalkerResponse newResponse() { 22 | return new SkywalkerResponse(); 23 
| } 24 | 25 | @Override 26 | public SkywalkerRequestBuilder newRequestBuilder(ClusterAdminClient client) { 27 | return new SkywalkerRequestBuilder(client); 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /src/main/java/org/xbib/elasticsearch/action/skywalker/SkywalkerRequest.java: -------------------------------------------------------------------------------- 1 | 2 | package org.xbib.elasticsearch.action.skywalker; 3 | 4 | import org.elasticsearch.action.support.broadcast.BroadcastOperationRequest; 5 | import org.elasticsearch.common.io.stream.StreamInput; 6 | import org.elasticsearch.common.io.stream.StreamOutput; 7 | 8 | import java.io.IOException; 9 | 10 | public class SkywalkerRequest extends BroadcastOperationRequest { 11 | 12 | SkywalkerRequest() { 13 | } 14 | 15 | public SkywalkerRequest(String... indices) { 16 | super(indices); 17 | } 18 | 19 | @Override 20 | public void readFrom(StreamInput in) throws IOException { 21 | super.readFrom(in); 22 | } 23 | 24 | @Override 25 | public void writeTo(StreamOutput out) throws IOException { 26 | super.writeTo(out); 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /src/main/java/org/xbib/elasticsearch/action/skywalker/SkywalkerRequestBuilder.java: -------------------------------------------------------------------------------- 1 | 2 | package org.xbib.elasticsearch.action.skywalker; 3 | 4 | import org.elasticsearch.action.ActionListener; 5 | import org.elasticsearch.action.ActionRequestBuilder; 6 | import org.elasticsearch.client.Client; 7 | import org.elasticsearch.client.ClusterAdminClient; 8 | 9 | /** 10 | * A request to skywalk one or more indices. 
11 | */ 12 | public class SkywalkerRequestBuilder extends ActionRequestBuilder { 13 | 14 | /** 15 | * Constructor 16 | * 17 | * @param client 18 | */ 19 | public SkywalkerRequestBuilder(ClusterAdminClient client) { 20 | super(client, new SkywalkerRequest()); 21 | } 22 | 23 | /** 24 | * Execute Skywalker action. 25 | * 26 | * @param listener a response listener 27 | */ 28 | @Override 29 | protected void doExecute(ActionListener listener) { 30 | client.execute(SkywalkerAction.INSTANCE, request, listener); 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /src/main/java/org/xbib/elasticsearch/action/skywalker/SkywalkerResponse.java: -------------------------------------------------------------------------------- 1 | 2 | package org.xbib.elasticsearch.action.skywalker; 3 | 4 | import org.elasticsearch.action.ShardOperationFailedException; 5 | import org.elasticsearch.action.support.broadcast.BroadcastOperationResponse; 6 | import org.elasticsearch.common.io.stream.StreamInput; 7 | import org.elasticsearch.common.io.stream.StreamOutput; 8 | 9 | import java.io.IOException; 10 | import java.util.HashMap; 11 | import java.util.List; 12 | import java.util.Map; 13 | 14 | /** 15 | * A response for a skywalker action. 
16 | */ 17 | public class SkywalkerResponse extends BroadcastOperationResponse { 18 | 19 | private Map>> response; 20 | 21 | SkywalkerResponse() { 22 | } 23 | 24 | SkywalkerResponse(int totalShards, int successfulShards, int failedShards, List shardFailures) { 25 | super(totalShards, successfulShards, failedShards, shardFailures); 26 | } 27 | 28 | public SkywalkerResponse setResponse(Map>> response) { 29 | this.response = response; 30 | return this; 31 | } 32 | 33 | public Map>> getResponse() { 34 | return response; 35 | } 36 | 37 | @Override 38 | public void readFrom(StreamInput in) throws IOException { 39 | super.readFrom(in); 40 | int indexCount = in.readInt(); 41 | this.response = new HashMap(); 42 | for (int i = 0; i < indexCount; i++) { 43 | String index = in.readString(); 44 | Map> shards = new HashMap(); 45 | int shardCount = in.readInt(); 46 | for (int j = 0; j < shardCount; j++) { 47 | String shard = in.readString(); 48 | Map fields = in.readMap(); 49 | shards.put(shard, fields); 50 | } 51 | response.put(index, shards); 52 | } 53 | } 54 | 55 | @Override 56 | public void writeTo(StreamOutput out) throws IOException { 57 | super.writeTo(out); 58 | int indexCount = response.keySet().size(); 59 | out.writeInt(indexCount); 60 | for (String index : response.keySet()) { 61 | out.writeString(index); 62 | int shardCount = response.get(index).keySet().size(); 63 | out.writeInt(shardCount); 64 | for (String shard : response.get(index).keySet()) { 65 | out.writeString(shard); 66 | out.writeMap(response.get(index).get(shard)); 67 | } 68 | } 69 | } 70 | } -------------------------------------------------------------------------------- /src/main/java/org/xbib/elasticsearch/action/skywalker/TransportSkywalkerAction.java: -------------------------------------------------------------------------------- 1 | package org.xbib.elasticsearch.action.skywalker; 2 | 3 | import org.apache.lucene.index.FieldInfo; 4 | import org.apache.lucene.index.IndexReader; 5 | import 
org.apache.lucene.index.MultiFields; 6 | import org.apache.lucene.store.Directory; 7 | import org.elasticsearch.ElasticsearchException; 8 | import org.elasticsearch.action.ShardOperationFailedException; 9 | import org.elasticsearch.action.support.DefaultShardOperationFailedException; 10 | import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; 11 | import org.elasticsearch.action.support.broadcast.TransportBroadcastOperationAction; 12 | import org.elasticsearch.cluster.ClusterService; 13 | import org.elasticsearch.cluster.ClusterState; 14 | import org.elasticsearch.cluster.block.ClusterBlockException; 15 | import org.elasticsearch.cluster.block.ClusterBlockLevel; 16 | import org.elasticsearch.cluster.routing.GroupShardsIterator; 17 | import org.elasticsearch.cluster.routing.ShardRouting; 18 | import org.elasticsearch.common.inject.Inject; 19 | import org.elasticsearch.common.settings.Settings; 20 | import org.elasticsearch.env.NodeEnvironment; 21 | import org.elasticsearch.index.engine.Engine; 22 | import org.elasticsearch.index.engine.Segment; 23 | import org.elasticsearch.index.mapper.MapperService; 24 | import org.elasticsearch.index.service.IndexService; 25 | import org.elasticsearch.index.shard.service.InternalIndexShard; 26 | import org.elasticsearch.indices.IndicesService; 27 | import org.elasticsearch.threadpool.ThreadPool; 28 | import org.elasticsearch.transport.TransportService; 29 | import org.xbib.elasticsearch.skywalker.FormatDetails; 30 | import org.xbib.elasticsearch.skywalker.Skywalker; 31 | import org.xbib.elasticsearch.skywalker.stats.FieldTermCount; 32 | import org.xbib.elasticsearch.skywalker.stats.TermStats; 33 | 34 | import java.util.*; 35 | import java.util.concurrent.atomic.AtomicReferenceArray; 36 | 37 | import static org.elasticsearch.common.collect.Lists.newArrayList; 38 | 39 | /** 40 | * Transport action for Skywalker plugin. 
41 | */ 42 | public class TransportSkywalkerAction 43 | extends TransportBroadcastOperationAction { 44 | 45 | private final IndicesService indicesService; 46 | 47 | private final NodeEnvironment nodeEnv; 48 | 49 | private final Object mutex = new Object(); 50 | 51 | @Inject 52 | public TransportSkywalkerAction(Settings settings, ThreadPool threadPool, 53 | ClusterService clusterService, 54 | TransportService transportService, 55 | IndicesService indicesService, 56 | NodeEnvironment nodeEnv) { 57 | super(settings, SkywalkerAction.NAME, threadPool, clusterService, transportService); 58 | this.indicesService = indicesService; 59 | this.nodeEnv = nodeEnv; 60 | } 61 | 62 | @Override 63 | protected String executor() { 64 | return ThreadPool.Names.MERGE; 65 | } 66 | 67 | @Override 68 | protected SkywalkerRequest newRequest() { 69 | return new SkywalkerRequest(); 70 | } 71 | 72 | @Override 73 | protected SkywalkerResponse newResponse(SkywalkerRequest request, AtomicReferenceArray shardsResponses, ClusterState clusterState) { 74 | int successfulShards = 0; 75 | int failedShards = 0; 76 | List shardFailures = null; 77 | Map>> response = new HashMap(); 78 | for (int i = 0; i < shardsResponses.length(); i++) { 79 | Object shardResponse = shardsResponses.get(i); 80 | if (shardResponse == null) { 81 | // a non active shard, ignore... 
82 | } else if (shardResponse instanceof BroadcastShardOperationFailedException) { 83 | failedShards++; 84 | if (shardFailures == null) { 85 | shardFailures = newArrayList(); 86 | } 87 | shardFailures.add(new DefaultShardOperationFailedException((BroadcastShardOperationFailedException) shardResponse)); 88 | } else { 89 | successfulShards++; 90 | if (shardResponse instanceof ShardSkywalkerResponse) { 91 | ShardSkywalkerResponse shardResp = (ShardSkywalkerResponse) shardResponse; 92 | String index = shardResp.getIndex(); 93 | int shardId = shardResp.getShardId(); 94 | // one map per index 95 | Map indexresponse = response.get(index); 96 | if (indexresponse == null) { 97 | indexresponse = new HashMap(); 98 | } 99 | // merge index-wide fieldInfo into single field 100 | //indexresponse.put("fieldInfos", shardResp.getResponse().get("fieldInfos")); 101 | //shardResp.getResponse().remove("fieldInfos"); 102 | indexresponse.put(Integer.toString(shardId), shardResp.getResponse()); 103 | response.put(index, indexresponse); 104 | } 105 | } 106 | } 107 | return new SkywalkerResponse(shardsResponses.length(), successfulShards, failedShards, shardFailures).setResponse(response); 108 | } 109 | 110 | @Override 111 | protected ShardSkywalkerRequest newShardRequest() { 112 | return new ShardSkywalkerRequest(); 113 | } 114 | 115 | @Override 116 | protected ShardSkywalkerRequest newShardRequest(int numShards, ShardRouting shard, SkywalkerRequest request) { 117 | return new ShardSkywalkerRequest(shard.index(), shard.id(), request); 118 | } 119 | 120 | @Override 121 | protected ShardSkywalkerResponse newShardResponse() { 122 | return new ShardSkywalkerResponse(); 123 | } 124 | 125 | @Override 126 | protected ShardSkywalkerResponse shardOperation(ShardSkywalkerRequest request) throws ElasticsearchException { 127 | synchronized (mutex) { 128 | IndexService indexService = indicesService.indexServiceSafe(request.index()); 129 | InternalIndexShard indexShard = (InternalIndexShard) 
indexService.shardSafe(request.shardId()); 130 | MapperService mapperService = indexService.mapperService(); 131 | Engine.Searcher searcher = indexShard.acquireSearcher("skywalker_action"); 132 | try { 133 | IndexReader reader = searcher.reader(); 134 | 135 | Skywalker skywalker = new Skywalker(reader); 136 | 137 | Map response = new HashMap(); 138 | 139 | Directory directory = indexShard.store().directory(); 140 | List indexFiles = new ArrayList(); 141 | for (String f : skywalker.getIndexFiles(directory)) { 142 | Map indexFile = new HashMap(); 143 | indexFile.put("name", f); 144 | indexFile.put("function", skywalker.getFileFunction(f)); 145 | indexFiles.add(indexFile); 146 | } 147 | response.put("indexFiles", indexFiles); 148 | 149 | skywalker.getStoreMetadata(response, indexShard.store().getMetadata()); 150 | 151 | response.put("indexVersion", skywalker.getVersion()); 152 | response.put("directoryImpl", skywalker.getDirImpl()); 153 | response.put("numDocs", reader.numDocs()); 154 | response.put("maxDoc", reader.maxDoc()); 155 | response.put("hasDeletions", reader.hasDeletions()); 156 | response.put("numDeletedDocs", reader.numDeletedDocs()); 157 | 158 | Set ftc = skywalker.getFieldTermCounts(); 159 | response.put("numTerms", skywalker.getNumTerms()); 160 | 161 | Map indexFormatInfo = new HashMap(); 162 | FormatDetails details = skywalker.getFormatDetails(); 163 | indexFormatInfo.put("version", details.getVersion()); 164 | indexFormatInfo.put("genericName", details.getGenericName()); 165 | indexFormatInfo.put("capabilities", details.getCapabilities()); 166 | response.put("indexFormat", indexFormatInfo); 167 | 168 | List commits = new ArrayList(); 169 | Iterator it = indexShard.engine().segments().iterator(); 170 | while (it.hasNext()) { 171 | Segment segment = it.next(); 172 | Map m = new HashMap(); 173 | m.put("segment", segment.getName()); 174 | m.put("count", segment.getNumDocs()); 175 | m.put("deleted", segment.getDeletedDocs()); 176 | m.put("generation", 
segment.getGeneration()); 177 | m.put("sizeInBytes", segment.getSizeInBytes()); 178 | m.put("version", segment.getVersion()); 179 | m.put("committed", segment.committed); 180 | m.put("compound", segment.compound); 181 | m.put("size", segment.getSize().toString()); 182 | commits.add(m); 183 | } 184 | response.put("commits", commits); 185 | 186 | List fieldInfos = new ArrayList(); 187 | for (FieldInfo fi : MultiFields.getMergedFieldInfos(reader)) { 188 | fieldInfos.add(skywalker.getFieldInfo(mapperService, fi)); 189 | } 190 | response.put("fieldInfos", fieldInfos); 191 | 192 | List termList = new ArrayList(); 193 | for (TermStats ts : skywalker.getTopTerms(50)) { 194 | Map m = new HashMap(); 195 | m.put("field", ts.field()); 196 | m.put("text", ts.text()); 197 | m.put("docFreq", ts.docFreq()); 198 | termList.add(m); 199 | } 200 | response.put("topterms", termList); 201 | return new ShardSkywalkerResponse(request.index(), request.shardId()).setResponse(response); 202 | } catch (Exception ex) { 203 | throw new ElasticsearchException(ex.getMessage(), ex); 204 | } 205 | } 206 | } 207 | 208 | @Override 209 | protected GroupShardsIterator shards(ClusterState clusterState, SkywalkerRequest request, String[] concreteIndices) { 210 | return clusterState.routingTable().activePrimaryShardsGrouped(concreteIndices, true); 211 | } 212 | 213 | @Override 214 | protected ClusterBlockException checkRequestBlock(ClusterState state, SkywalkerRequest request, String[] concreteIndices) { 215 | return state.blocks().indicesBlockedException(ClusterBlockLevel.READ, concreteIndices); 216 | } 217 | 218 | @Override 219 | protected ClusterBlockException checkGlobalBlock(ClusterState state, SkywalkerRequest request) { 220 | return state.blocks().globalBlockedException(ClusterBlockLevel.READ); 221 | } 222 | 223 | } 224 | -------------------------------------------------------------------------------- /src/main/java/org/xbib/elasticsearch/action/skywalker/support/IndexableFieldToXContent.java: 
-------------------------------------------------------------------------------- 1 | 2 | package org.xbib.elasticsearch.action.skywalker.support; 3 | 4 | import org.apache.lucene.document.DoubleField; 5 | import org.apache.lucene.document.FloatField; 6 | import org.apache.lucene.document.IntField; 7 | import org.apache.lucene.document.LongField; 8 | import org.apache.lucene.index.IndexableField; 9 | import org.elasticsearch.common.xcontent.ToXContent; 10 | import org.elasticsearch.common.xcontent.XContentBuilder; 11 | 12 | import java.io.IOException; 13 | 14 | /** 15 | * Indexable field to XContent 16 | */ 17 | public class IndexableFieldToXContent implements ToXContent { 18 | 19 | private IndexableField field; 20 | 21 | public IndexableFieldToXContent field(IndexableField field) { 22 | this.field = field; 23 | return this; 24 | } 25 | 26 | @Override 27 | public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { 28 | builder.startObject(); 29 | builder.field("name", field.name()); 30 | if (field.binaryValue() != null) { 31 | builder.field("type", "binary") 32 | .field("value", field.binaryValue()); 33 | } else if (field.stringValue() != null) { 34 | builder.field("type", "string") 35 | .field("value", field.stringValue()); 36 | } else if (field.numericValue() != null) { 37 | if (field instanceof IntField) { 38 | builder.field("type", "integer") 39 | .field("value", field.numericValue().intValue()); 40 | } else if (field instanceof LongField) { 41 | builder.field("type", "long") 42 | .field("value", field.numericValue().longValue()); 43 | } else if (field instanceof FloatField) { 44 | builder.field("type", "float") 45 | .field("value", field.numericValue().floatValue()); 46 | } else if (field instanceof DoubleField) { 47 | builder.field("type", "double") 48 | .field("value", field.numericValue().doubleValue()); 49 | } 50 | } 51 | builder.field("stored", field.fieldType().stored()); 52 | builder.field("indexed", 
field.fieldType().indexed()); 53 | builder.field("omitNorms", field.fieldType().omitNorms()); 54 | builder.field("storeTermVectors", field.fieldType().storeTermVectors()); 55 | builder.field("storeTermVectorOffsets", field.fieldType().storeTermVectorOffsets()); 56 | builder.field("storeTermVectorPayloads", field.fieldType().storeTermVectorPayloads()); 57 | builder.field("storeTermVectorPositions", field.fieldType().storeTermVectorPositions()); 58 | builder.field("boost", field.boost()); 59 | builder.endObject(); 60 | return builder; 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /src/main/java/org/xbib/elasticsearch/action/skywalker/support/MetaDataToXContent.java: -------------------------------------------------------------------------------- 1 | 2 | package org.xbib.elasticsearch.action.skywalker.support; 3 | 4 | import org.elasticsearch.cluster.metadata.AliasMetaData; 5 | import org.elasticsearch.cluster.metadata.IndexMetaData; 6 | import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; 7 | import org.elasticsearch.cluster.metadata.MappingMetaData; 8 | import org.elasticsearch.cluster.metadata.MetaData; 9 | import org.elasticsearch.common.collect.ImmutableOpenMap; 10 | import org.elasticsearch.common.hppc.cursors.ObjectCursor; 11 | import org.elasticsearch.common.xcontent.ToXContent; 12 | import org.elasticsearch.common.xcontent.XContentBuilder; 13 | 14 | import java.io.IOException; 15 | import java.util.Map; 16 | 17 | /** 18 | * Metadata to XContent 19 | */ 20 | public class MetaDataToXContent implements ToXContent { 21 | 22 | MetaData metadata; 23 | 24 | public MetaDataToXContent metadata(MetaData metadata) { 25 | this.metadata = metadata; 26 | return this; 27 | } 28 | 29 | @Override 30 | public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { 31 | builder.startObject(); 32 | builder.field("version", metadata.version()); 33 | builder.field("numberOfShards", 
metadata.numberOfShards()); 34 | builder.startArray("concreteIndices"); 35 | for (String index : metadata.concreteAllIndices()) { 36 | builder.value(index); 37 | } 38 | builder.endArray(); 39 | ImmutableOpenMap> aliases = metadata.getAliases(); 40 | builder.startArray("aliases"); 41 | for (ObjectCursor alias : aliases.keys()) { 42 | builder.startObject(alias.value); 43 | builder.startArray("aliasMetadata"); 44 | for (ObjectCursor s : aliases.get(alias.value).keys()) { 45 | builder.startObject(); 46 | AliasMetaData amd = aliases.get(alias.value).get(s.value); 47 | builder.field("alias", amd.getAlias()); 48 | builder.field("filter", amd.getFilter().string()); 49 | builder.field("indexRouting", amd.getIndexRouting()); 50 | builder.field("searchRouting", amd.getSearchRouting()); 51 | builder.endObject(); 52 | } 53 | builder.endArray(); 54 | builder.endObject(); 55 | } 56 | builder.endArray(); 57 | builder.startArray("indexes"); 58 | ImmutableOpenMap indices = metadata.getIndices(); 59 | for (ObjectCursor s : indices.keys()) { 60 | IndexMetaData imd = indices.get(s.value); 61 | builder.startObject(); 62 | builder.field("index", imd.getIndex()); 63 | builder.field("state", imd.getState().name()); 64 | builder.field("numberOfReplicas", imd.getNumberOfReplicas()); 65 | builder.field("numberOfShards", imd.getNumberOfShards()); 66 | builder.field("totalNumberOfShards", imd.getTotalNumberOfShards()); 67 | builder.field("version", imd.getVersion()); 68 | builder.field("settings", imd.getSettings().getAsMap()); 69 | ImmutableOpenMap m = imd.getMappings(); 70 | // skip mappings here 71 | builder.endObject(); 72 | } 73 | builder.endArray(); 74 | builder.startArray("templates"); 75 | ImmutableOpenMap templates = metadata.getTemplates(); 76 | for (ObjectCursor s : templates.keys()) { 77 | IndexTemplateMetaData itmd = templates.get(s.value); 78 | itmd.getName(); 79 | itmd.getOrder(); 80 | itmd.getTemplate(); 81 | itmd.getSettings(); 82 | itmd.getMappings(); 83 | } 84 | 
builder.endArray(); 85 | builder.endObject(); 86 | return builder; 87 | } 88 | } 89 | -------------------------------------------------------------------------------- /src/main/java/org/xbib/elasticsearch/plugin/skywalker/SkywalkerPlugin.java: -------------------------------------------------------------------------------- 1 | 2 | package org.xbib.elasticsearch.plugin.skywalker; 3 | 4 | import org.elasticsearch.action.ActionModule; 5 | import org.elasticsearch.plugins.AbstractPlugin; 6 | import org.elasticsearch.rest.RestModule; 7 | import org.xbib.elasticsearch.action.admin.cluster.state.ConsistencyCheckAction; 8 | import org.xbib.elasticsearch.action.admin.cluster.state.TransportConsistencyCheckAction; 9 | import org.xbib.elasticsearch.action.admin.indices.reconstruct.ReconstructIndexAction; 10 | import org.xbib.elasticsearch.action.admin.indices.reconstruct.TransportReconstructAction; 11 | import org.xbib.elasticsearch.action.skywalker.SkywalkerAction; 12 | import org.xbib.elasticsearch.action.skywalker.TransportSkywalkerAction; 13 | import org.xbib.elasticsearch.rest.action.skywalker.RestConsistencyCheckAction; 14 | import org.xbib.elasticsearch.rest.action.skywalker.RestReconstructIndexAction; 15 | import org.xbib.elasticsearch.rest.action.skywalker.RestSkywalkerAction; 16 | 17 | /** 18 | * Skywalker plugin 19 | */ 20 | public class SkywalkerPlugin extends AbstractPlugin { 21 | 22 | @Override 23 | public String name() { 24 | return "skywalker"; 25 | } 26 | 27 | @Override 28 | public String description() { 29 | return "Skywalker - Luke for Elasticsearch"; 30 | } 31 | 32 | public void onModule(RestModule module) { 33 | module.addRestAction(RestSkywalkerAction.class); 34 | module.addRestAction(RestConsistencyCheckAction.class); 35 | module.addRestAction(RestReconstructIndexAction.class); 36 | } 37 | 38 | public void onModule(ActionModule module) { 39 | module.registerAction(SkywalkerAction.INSTANCE, TransportSkywalkerAction.class); 40 | 
module.registerAction(ConsistencyCheckAction.INSTANCE, TransportConsistencyCheckAction.class); 41 | module.registerAction(ReconstructIndexAction.INSTANCE, TransportReconstructAction.class); 42 | } 43 | 44 | } 45 | -------------------------------------------------------------------------------- /src/main/java/org/xbib/elasticsearch/rest/action/skywalker/RestConsistencyCheckAction.java: -------------------------------------------------------------------------------- 1 | 2 | package org.xbib.elasticsearch.rest.action.skywalker; 3 | 4 | import org.elasticsearch.client.Client; 5 | import org.elasticsearch.common.inject.Inject; 6 | import org.elasticsearch.common.joda.time.Instant; 7 | import org.elasticsearch.common.settings.Settings; 8 | import org.elasticsearch.common.unit.SizeUnit; 9 | import org.elasticsearch.common.unit.SizeValue; 10 | import org.elasticsearch.common.xcontent.ToXContent; 11 | import org.elasticsearch.common.xcontent.XContentBuilder; 12 | import org.elasticsearch.common.xcontent.XContentFactory; 13 | import org.elasticsearch.rest.*; 14 | import org.elasticsearch.rest.action.support.RestResponseListener; 15 | import org.xbib.elasticsearch.action.admin.cluster.state.ConsistencyCheckAction; 16 | import org.xbib.elasticsearch.action.admin.cluster.state.ConsistencyCheckRequest; 17 | import org.xbib.elasticsearch.action.admin.cluster.state.ConsistencyCheckResponse; 18 | 19 | import java.io.File; 20 | 21 | import static org.elasticsearch.rest.RestRequest.Method.GET; 22 | import static org.elasticsearch.rest.RestStatus.OK; 23 | 24 | /** 25 | * REST consistency check action 26 | */ 27 | public class RestConsistencyCheckAction extends BaseRestHandler { 28 | 29 | @Inject 30 | public RestConsistencyCheckAction(Settings settings, Client client, RestController controller) { 31 | super(settings, client); 32 | controller.registerHandler(GET, "/_skywalker/consistencycheck", this); 33 | } 34 | 35 | @Override 36 | public void handleRequest(final RestRequest request, 
final RestChannel channel, final Client client) throws Exception { 37 | ConsistencyCheckRequest r = new ConsistencyCheckRequest(); 38 | client.admin().cluster().execute(ConsistencyCheckAction.INSTANCE, r, new RestResponseListener(channel) { 39 | @Override 40 | public RestResponse buildResponse(ConsistencyCheckResponse response) throws Exception { 41 | XContentBuilder builder = XContentFactory.jsonBuilder(); 42 | builder.startObject(); 43 | builder.field("ok", true); 44 | builder.startObject("state"); 45 | response.getState().toXContent(builder, ToXContent.EMPTY_PARAMS); 46 | builder.startArray("files"); 47 | for (File file : response.getFiles()) { 48 | Instant instant = new Instant(file.lastModified()); 49 | builder.startObject() 50 | .field("path", file.getAbsolutePath()) 51 | .field("lastmodified", instant.toDateTime().toString()) 52 | .field("size", new SizeValue(file.length(), SizeUnit.SINGLE).toString()) 53 | .field("totalspace", new SizeValue(file.getTotalSpace(), SizeUnit.SINGLE).toString()) 54 | .field("usablespace", new SizeValue(file.getUsableSpace(), SizeUnit.SINGLE).toString()) 55 | .field("freespace", new SizeValue(file.getFreeSpace(), SizeUnit.SINGLE).toString()) 56 | .endObject(); 57 | } 58 | builder.endArray(); 59 | builder.endObject(); 60 | return new BytesRestResponse(OK, builder); 61 | } 62 | }); 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /src/main/java/org/xbib/elasticsearch/rest/action/skywalker/RestReconstructIndexAction.java: -------------------------------------------------------------------------------- 1 | 2 | package org.xbib.elasticsearch.rest.action.skywalker; 3 | 4 | import org.elasticsearch.client.Client; 5 | import org.elasticsearch.common.inject.Inject; 6 | import org.elasticsearch.common.settings.Settings; 7 | import org.elasticsearch.common.xcontent.XContentBuilder; 8 | import org.elasticsearch.common.xcontent.XContentHelper; 9 | import 
org.elasticsearch.common.xcontent.XContentParser; 10 | import org.elasticsearch.rest.*; 11 | import org.elasticsearch.rest.action.support.RestResponseListener; 12 | import org.xbib.elasticsearch.action.admin.indices.reconstruct.ReconstructIndexAction; 13 | import org.xbib.elasticsearch.action.admin.indices.reconstruct.ReconstructIndexRequest; 14 | import org.xbib.elasticsearch.action.admin.indices.reconstruct.ReconstructIndexResponse; 15 | import org.xbib.elasticsearch.action.admin.indices.reconstruct.ShardReconstructIndexResponse; 16 | 17 | import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; 18 | import static org.elasticsearch.rest.RestRequest.Method.GET; 19 | import static org.elasticsearch.rest.RestStatus.OK; 20 | 21 | /** 22 | * REST action for reconstructing an index 23 | */ 24 | public class RestReconstructIndexAction extends BaseRestHandler { 25 | 26 | @Inject 27 | public RestReconstructIndexAction(Settings settings, Client client, RestController controller) { 28 | super(settings, client); 29 | controller.registerHandler(GET, "/{index}/_skywalker/reconstruct", this); 30 | } 31 | 32 | @Override 33 | public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) { 34 | ReconstructIndexRequest r = new ReconstructIndexRequest(request.param("index")); 35 | client.admin().indices().execute(ReconstructIndexAction.INSTANCE, r, new RestResponseListener(channel) { 36 | @Override 37 | public RestResponse buildResponse(ReconstructIndexResponse response) throws Exception { 38 | XContentBuilder builder = jsonBuilder(); 39 | builder.startObject() 40 | .field("ok", true) 41 | .field("index", request.param("index")) 42 | .startArray("shards"); 43 | for (ShardReconstructIndexResponse r : response.shards()) { 44 | XContentParser p = XContentHelper.createParser(r.getReconstructedIndex().bytes()); 45 | builder.copyCurrentStructure(p); 46 | } 47 | builder.endArray(); 48 | builder.endObject(); 49 | return new 
BytesRestResponse(OK, builder); 50 | } 51 | }); 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /src/main/java/org/xbib/elasticsearch/rest/action/skywalker/RestSkywalkerAction.java: -------------------------------------------------------------------------------- 1 | 2 | package org.xbib.elasticsearch.rest.action.skywalker; 3 | 4 | import org.elasticsearch.common.Strings; 5 | import org.elasticsearch.rest.*; 6 | import org.elasticsearch.rest.action.support.RestResponseListener; 7 | import org.xbib.elasticsearch.action.skywalker.SkywalkerAction; 8 | import org.xbib.elasticsearch.action.skywalker.SkywalkerRequest; 9 | import org.xbib.elasticsearch.action.skywalker.SkywalkerResponse; 10 | import org.elasticsearch.client.Client; 11 | import org.elasticsearch.common.inject.Inject; 12 | import org.elasticsearch.common.settings.Settings; 13 | import org.elasticsearch.common.xcontent.XContentBuilder; 14 | 15 | import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; 16 | import static org.elasticsearch.rest.RestRequest.Method.GET; 17 | import static org.elasticsearch.rest.RestRequest.Method.POST; 18 | import static org.elasticsearch.rest.RestStatus.OK; 19 | import static org.elasticsearch.rest.action.support.RestActions.buildBroadcastShardsHeader; 20 | 21 | /** 22 | * REST skywalker action 23 | */ 24 | public class RestSkywalkerAction extends BaseRestHandler { 25 | 26 | @Inject 27 | public RestSkywalkerAction(Settings settings, Client client, RestController controller) { 28 | super(settings, client); 29 | controller.registerHandler(POST, "/_skywalker", this); 30 | controller.registerHandler(POST, "/{index}/_skywalker", this); 31 | controller.registerHandler(GET, "/_skywalker", this); 32 | controller.registerHandler(GET, "/{index}/_skywalker", this); 33 | } 34 | 35 | @Override 36 | public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) { 37 | SkywalkerRequest 
r = new SkywalkerRequest(Strings.splitStringByCommaToArray(request.param("index"))); 38 | client.admin().cluster().execute(SkywalkerAction.INSTANCE, r, new RestResponseListener(channel) { 39 | @Override 40 | public RestResponse buildResponse(SkywalkerResponse response) throws Exception { 41 | XContentBuilder builder = jsonBuilder(); 42 | builder.startObject(); 43 | builder.field("ok", true); 44 | buildBroadcastShardsHeader(builder, response); 45 | builder.field("result", response.getResponse()); 46 | builder.endObject(); 47 | return new BytesRestResponse(OK, builder); 48 | } 49 | }); 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /src/main/java/org/xbib/elasticsearch/skywalker/FormatDetails.java: -------------------------------------------------------------------------------- 1 | 2 | package org.xbib.elasticsearch.skywalker; 3 | 4 | /** 5 | * Format details 6 | */ 7 | public class FormatDetails { 8 | 9 | private String genericName = "N/A"; 10 | 11 | private String capabilities = "N/A"; 12 | 13 | private String version = "N/A"; 14 | 15 | public FormatDetails(String capabilities, String genericName, String version) { 16 | this.genericName = genericName; 17 | this.capabilities = capabilities; 18 | this.version = version; 19 | } 20 | 21 | public void setGenericName(String genericName) { 22 | this.genericName = genericName; 23 | } 24 | 25 | public String getGenericName() { 26 | return genericName; 27 | } 28 | 29 | public void setCapabilities(String capabilities) { 30 | this.capabilities = capabilities; 31 | } 32 | 33 | public String getCapabilities() { 34 | return capabilities; 35 | } 36 | 37 | public void setVersion(String version) { 38 | this.version = version; 39 | } 40 | 41 | public String getVersion() { 42 | return version; 43 | } 44 | 45 | } 46 | -------------------------------------------------------------------------------- /src/main/java/org/xbib/elasticsearch/skywalker/LuceneFormats.java: 
-------------------------------------------------------------------------------- 1 | 2 | package org.xbib.elasticsearch.skywalker; 3 | 4 | /** 5 | * Lucene format constants 6 | * 7 | */ 8 | public interface LuceneFormats { 9 | 10 | // old version constants 11 | int OLD_FORMAT = -1; 12 | 13 | /** 14 | * This format adds details used for lockless commits. It differs 15 | * slightly from the previous format in that file names 16 | * are never re-used (write once). Instead, each file is 17 | * written to the next generation. For example, 18 | * segments_1, segments_2, etc. This allows us to not use 19 | * a commit lock. See file 21 | * formats for details. 22 | */ 23 | int FORMAT_LOCKLESS = -2; 24 | 25 | /** 26 | * This format adds a "hasSingleNormFile" flag into each segment info. 27 | * See LUCENE-756 28 | * for details. 29 | */ 30 | int FORMAT_SINGLE_NORM_FILE = -3; 31 | 32 | /** 33 | * This format allows multiple segments to share a single 34 | * vectors and stored fields file. 35 | */ 36 | int FORMAT_SHARED_DOC_STORE = -4; 37 | 38 | /** 39 | * This format adds a checksum at the end of the file to 40 | * ensure all bytes were successfully written. 41 | */ 42 | int FORMAT_CHECKSUM = -5; 43 | 44 | /** 45 | * This format adds the deletion count for each segment. 46 | * This way IndexWriter can efficiently report numDocs(). 47 | */ 48 | int FORMAT_DEL_COUNT = -6; 49 | 50 | /** 51 | * This format adds the boolean hasProx to record if any 52 | * fields in the segment store prox information (ie, have 53 | * omitTermFreqAndPositions==false) 54 | */ 55 | int FORMAT_HAS_PROX = -7; 56 | 57 | /** 58 | * This format adds optional commit userData (String) storage. 
59 | */ 60 | int FORMAT_USER_DATA = -8; 61 | 62 | /** 63 | * This format adds optional per-segment String 64 | * diagnostics storage, and switches userData to Map 65 | */ 66 | int FORMAT_DIAGNOSTICS = -9; 67 | 68 | /** 69 | * Each segment records whether it has term vectors 70 | */ 71 | int FORMAT_HAS_VECTORS = -10; 72 | 73 | /** 74 | * Each segment records the Lucene version that created it. 75 | */ 76 | int FORMAT_3_1 = -11; 77 | 78 | /** 79 | * Some early 4.0 pre-alpha 80 | */ 81 | int FORMAT_PRE_4 = -12; 82 | } 83 | -------------------------------------------------------------------------------- /src/main/java/org/xbib/elasticsearch/skywalker/Skywalker.java: -------------------------------------------------------------------------------- 1 | 2 | package org.xbib.elasticsearch.skywalker; 3 | 4 | import org.apache.lucene.codecs.CodecUtil; 5 | import org.apache.lucene.document.FieldType; 6 | import org.apache.lucene.index.*; 7 | import org.apache.lucene.store.Directory; 8 | import org.apache.lucene.store.IOContext; 9 | import org.apache.lucene.store.IndexInput; 10 | import org.apache.lucene.util.BytesRef; 11 | import org.elasticsearch.ElasticsearchException; 12 | import org.elasticsearch.cluster.metadata.IndexMetaData; 13 | import org.elasticsearch.cluster.metadata.MetaData; 14 | import org.elasticsearch.common.Nullable; 15 | import org.elasticsearch.common.io.Streams; 16 | import org.elasticsearch.common.xcontent.XContentHelper; 17 | import org.elasticsearch.common.xcontent.XContentParser; 18 | import org.elasticsearch.env.NodeEnvironment; 19 | import org.elasticsearch.index.Index; 20 | import org.elasticsearch.index.codec.postingsformat.PostingsFormatProvider; 21 | import org.elasticsearch.index.fielddata.FieldDataType; 22 | import org.elasticsearch.index.mapper.FieldMapper; 23 | import org.elasticsearch.index.mapper.MapperService; 24 | import org.elasticsearch.index.similarity.SimilarityProvider; 25 | import org.elasticsearch.index.store.Store; 26 | import 
org.elasticsearch.index.store.StoreFileMetaData; 27 | import org.xbib.elasticsearch.skywalker.stats.FieldTermCount; 28 | import org.xbib.elasticsearch.skywalker.stats.TermStats; 29 | import org.xbib.elasticsearch.skywalker.stats.TermStatsQueue; 30 | 31 | import java.io.File; 32 | import java.io.FileInputStream; 33 | import java.io.IOException; 34 | import java.util.*; 35 | 36 | /** 37 | * 38 | * Skywalker class for examining Lucene format 39 | * 40 | */ 41 | public class Skywalker implements LuceneFormats { 42 | 43 | private final static Map knownExtensions = new HashMap(); 44 | 45 | private IndexReader reader; 46 | private int numTerms; 47 | private FormatDetails formatDetails; 48 | private TermStats[] topTerms; 49 | 50 | private String version; 51 | private String dirImpl; 52 | 53 | /** 54 | * http://lucene.apache.org/core/4_2_0/core/org/apache/lucene/codecs/lucene42/package-summary.html 55 | */ 56 | 57 | static { 58 | knownExtensions.put("cfs", "Lucene compound file with various index data"); 59 | knownExtensions.put("cfe", "Lucene compound file entries list"); 60 | knownExtensions.put("gen", "Lucene generation number - global file"); 61 | knownExtensions.put("si", "Lucene per-commit list of segments and user data"); 62 | knownExtensions.put("doc", "Lucene frequencies"); 63 | knownExtensions.put("pos", "Lucene positions"); 64 | knownExtensions.put("pay", "Lucene payloads"); 65 | knownExtensions.put("fdt", "Lucene field data"); 66 | knownExtensions.put("fdx", "Lucene field data index"); 67 | knownExtensions.put("fnm", "Lucene fields"); 68 | knownExtensions.put("del", "Lucene deleted documents"); 69 | knownExtensions.put("dvm", "Lucene per-document values"); 70 | knownExtensions.put("dvd", "Lucene per-dicument values"); 71 | knownExtensions.put("nvm", "Lucene norms"); 72 | knownExtensions.put("nvd", "Lucene norms"); 73 | knownExtensions.put("tim", "Lucene term dictionary"); 74 | knownExtensions.put("tip", "Lucene term dictionary index positions"); 75 | 
knownExtensions.put("tvx", "Lucene term vector index"); 76 | knownExtensions.put("tvd", "Lucene term vector documents"); 77 | knownExtensions.put("tvf", "Lucene term vector fields"); 78 | // Elasticsearch 79 | knownExtensions.put("blm", "Elasticsearch bloom filter"); 80 | } 81 | 82 | public Skywalker(IndexReader reader) { 83 | this.reader = reader; 84 | this.dirImpl = "N/A"; 85 | this.version = "-1"; 86 | this.formatDetails = new FormatDetails("N/A", "N/A", "N/A"); 87 | Directory dir = null; 88 | if (reader instanceof DirectoryReader) { 89 | dir = ((DirectoryReader) reader).directory(); 90 | this.dirImpl = dir.getClass().getName(); 91 | this.version = Long.toString(((DirectoryReader) reader).getVersion()); 92 | this.formatDetails = getIndexFormat(dir); 93 | } 94 | } 95 | 96 | public String getVersion() { 97 | return version; 98 | } 99 | 100 | public FormatDetails getFormatDetails() { 101 | return formatDetails; 102 | } 103 | 104 | public String getDirImpl() { 105 | return dirImpl; 106 | } 107 | 108 | public String getFileFunction(String file) { 109 | if (file == null || file.trim().length() == 0) { 110 | return file; 111 | } 112 | String res = "undefined"; 113 | file = file.trim(); 114 | int idx = file.indexOf('.'); 115 | String suffix = null; 116 | if (idx != -1) { 117 | suffix = file.substring(idx + 1); 118 | } 119 | if (suffix != null) { 120 | res = knownExtensions.get(suffix); 121 | if (res != null) { 122 | return res; 123 | } 124 | // perhaps per-field norms? 
125 | if (suffix.length() == 2) { 126 | return knownExtensions.get(suffix.substring(0, 1)); 127 | } 128 | } 129 | if (file.startsWith("segments_")) { 130 | return "Lucene segment"; 131 | } 132 | // elasticsearch checksums 133 | if (file.startsWith("_checksum")) { 134 | return "Elasticsearch checksum file"; 135 | } 136 | return res; 137 | } 138 | 139 | private FormatDetails detectOldFormats(int format) { 140 | switch (format) { 141 | case OLD_FORMAT: 142 | return new FormatDetails("old plain", "Lucene Pre-2.1", "2.0?"); 143 | case FORMAT_LOCKLESS: 144 | return new FormatDetails("lock-less", "Lucene 2.1", "2.1"); 145 | case FORMAT_SINGLE_NORM_FILE: 146 | return new FormatDetails("lock-less, single norms file", "Lucene 2.2", "2.2"); 147 | case FORMAT_SHARED_DOC_STORE: 148 | return new FormatDetails("lock-less, single norms file, shared doc store", "Lucene 2.3", "2.3"); 149 | case FORMAT_CHECKSUM: 150 | return new FormatDetails("lock-less, single norms, shared doc store, checksum", "Lucene 2.4", "2.4"); 151 | case FORMAT_DEL_COUNT: 152 | return new FormatDetails("lock-less, single norms, shared doc store, checksum, del count", "Lucene 2.4", "2.4"); 153 | case FORMAT_HAS_PROX: 154 | return new FormatDetails("lock-less, single norms, shared doc store, checksum, del count, omitTf", "Lucene 2.4", "2.4"); 155 | case FORMAT_USER_DATA: 156 | return new FormatDetails("lock-less, single norms, shared doc store, checksum, del count, omitTf, user data", "Lucene 2.9-dev", "2.9-dev"); 157 | case FORMAT_DIAGNOSTICS: 158 | return new FormatDetails("lock-less, single norms, shared doc store, checksum, del count, omitTf, user data, diagnostics", "Lucene 2.9", "2.9"); 159 | case FORMAT_HAS_VECTORS: 160 | return new FormatDetails("lock-less, single norms, shared doc store, checksum, del count, omitTf, user data, diagnostics, hasVectors", "Lucene 2.9", "2.9"); 161 | case FORMAT_3_1: 162 | return new FormatDetails("lock-less, single norms, shared doc store, checksum, del count, omitTf, 
user data, diagnostics, hasVectors", "Lucene 3.1", "3.1"); 163 | case FORMAT_PRE_4: 164 | return new FormatDetails("flexible, unreleased 4.0 pre-alpha", "Lucene 4.0-dev", "4.0-dev"); 165 | default: 166 | if (format < FORMAT_PRE_4) { 167 | return new FormatDetails("flexible, unreleased 4.0 pre-alpha", "Lucene 4.0-dev", "4.0-dev"); 168 | } else { 169 | return new FormatDetails("unknown", "Lucene 1.3 or earlier, or unreleased", "1.3?"); 170 | } 171 | } 172 | } 173 | 174 | public FormatDetails getIndexFormat(final Directory dir) { 175 | SegmentInfos.FindSegmentsFile fsf = new SegmentInfos.FindSegmentsFile(dir) { 176 | 177 | protected Object doBody(String segmentsFile) throws IOException { 178 | FormatDetails res = new FormatDetails("unknown", "unknown", "-1"); 179 | IndexInput in = dir.openInput(segmentsFile, IOContext.READ); 180 | try { 181 | int indexFormat = in.readInt(); 182 | if (indexFormat == CodecUtil.CODEC_MAGIC) { 183 | res.setCapabilities("flexible, codec-specific"); 184 | res.setGenericName("Lucene 4.x"); 185 | int actualVersion = SegmentInfos.VERSION_40; 186 | try { 187 | actualVersion = CodecUtil.checkHeaderNoMagic(in, "segments", SegmentInfos.VERSION_40, Integer.MAX_VALUE); 188 | if (actualVersion > SegmentInfos.VERSION_40) { 189 | res.setCapabilities(res.getCapabilities() + " (WARNING: newer version of Lucene than this tool)"); 190 | } 191 | } catch (Exception e) { 192 | res.setCapabilities(res.getCapabilities() + 193 | " (error reading: " + e.getMessage() + ")"); 194 | } 195 | res.setGenericName("Lucene 4." + actualVersion); 196 | res.setVersion("4." 
+ actualVersion); 197 | } else { 198 | res = detectOldFormats(indexFormat); 199 | res.setGenericName(res.getGenericName() + " (" + indexFormat + ")"); 200 | if (res.getVersion().compareTo("3") < 0) { 201 | res.setCapabilities(res.getCapabilities() + " (UNSUPPORTED)"); 202 | } 203 | } 204 | } finally { 205 | in.close(); 206 | } 207 | return res; 208 | } 209 | }; 210 | try { 211 | return (FormatDetails) fsf.run(); 212 | } catch (IOException e) { 213 | return null; 214 | } 215 | } 216 | 217 | public List getDeletableFiles(Directory dir) throws Exception { 218 | List known = getIndexFiles(dir); 219 | Set dirFiles = new HashSet(Arrays.asList(dir.listAll())); 220 | dirFiles.removeAll(known); 221 | return new ArrayList(dirFiles); 222 | } 223 | 224 | public List getIndexFiles(Directory dir) { 225 | List commits; 226 | try { 227 | commits = DirectoryReader.listCommits(dir); 228 | } catch (IndexNotFoundException e) { 229 | return Collections.emptyList(); 230 | } catch (IOException e) { 231 | return Collections.emptyList(); 232 | } 233 | Set known = new HashSet(); 234 | try { 235 | for (IndexCommit ic : commits) { 236 | known.addAll(ic.getFileNames()); 237 | } 238 | if (dir.fileExists(IndexFileNames.SEGMENTS_GEN)) { 239 | known.add(IndexFileNames.SEGMENTS_GEN); 240 | } 241 | } catch (IOException e) { 242 | // ignore 243 | } 244 | return new ArrayList(known); 245 | } 246 | 247 | public long getTotalFileSize(Directory directory) throws Exception { 248 | long totalFileSize = 0L; 249 | String[] files; 250 | files = directory.listAll(); 251 | if (files == null) { 252 | return totalFileSize; 253 | } 254 | for (String file : files) { 255 | totalFileSize += directory.fileLength(file); 256 | } 257 | return totalFileSize; 258 | } 259 | 260 | public Map getFieldInfo(MapperService mapperService, FieldInfo fi) { 261 | Map m = new HashMap(); 262 | m.put("name", fi.name); 263 | m.put("number", fi.number); 264 | m.put("isIndexed", fi.isIndexed()); 265 | m.put("hasDocValues", 
fi.hasDocValues()); 266 | m.put("hasNorms", fi.hasNorms()); 267 | m.put("hasPayloads", fi.hasPayloads()); 268 | m.put("hasVectors", fi.hasVectors()); 269 | if (fi.getDocValuesType() != null) { 270 | m.put("docValuesType", fi.getDocValuesType().name()); 271 | } 272 | if (fi.getNormType() != null) { 273 | m.put("normType", fi.getNormType().name()); 274 | } 275 | if (fi.getIndexOptions() != null) { 276 | m.put("options", fi.getIndexOptions().name()); 277 | } 278 | m.put("attributes", fi.attributes()); 279 | FieldMapper fieldMapper = mapperService.smartNameFieldMapper(fi.name); 280 | if (fieldMapper != null) { 281 | Map mapper = new HashMap(); 282 | mapper.put("fullName", fieldMapper.names().fullName()); 283 | mapper.put("indexName", fieldMapper.names().indexName()); 284 | mapper.put("indexNameClean", fieldMapper.names().indexNameClean()); 285 | 286 | mapper.put("boost", fieldMapper.boost()); 287 | 288 | if (fieldMapper.indexAnalyzer() != null) { 289 | mapper.put("indexAnalyzer", fieldMapper.indexAnalyzer().toString()); 290 | } 291 | if (fieldMapper.searchAnalyzer() != null) { 292 | mapper.put("searchAnalyzer", fieldMapper.searchAnalyzer().toString()); 293 | } 294 | if (fieldMapper.searchQuoteAnalyzer() != null) { 295 | mapper.put("searchQuoteAnalyzer", fieldMapper.searchQuoteAnalyzer().toString()); 296 | } 297 | 298 | FieldDataType dataType = fieldMapper.fieldDataType(); 299 | if (dataType != null) { 300 | mapper.put("fieldDataType", dataType.getType()); 301 | } 302 | 303 | FieldType type = fieldMapper.fieldType(); 304 | if (type != null) { 305 | mapper.put("indexed", type.indexed()); 306 | mapper.put("stored", type.stored()); 307 | mapper.put("tokenized", type.tokenized()); 308 | mapper.put("omitNorms", type.omitNorms()); 309 | mapper.put("storeTermVectors", type.storeTermVectors()); 310 | mapper.put("storeTermVectorOffsets", type.storeTermVectorOffsets()); 311 | mapper.put("storeTermVectorPayloads", type.storeTermVectorPayloads()); 312 | 
mapper.put("storeTermVectorPositions", type.storeTermVectorPositions()); 313 | if (type.numericType() != null) { 314 | mapper.put("numericType", type.numericType().name()); 315 | mapper.put("numericPrecisionStep", type.numericPrecisionStep()); 316 | } 317 | if (type.docValueType() != null) { 318 | mapper.put("docValueType", type.docValueType().name()); 319 | } 320 | } 321 | 322 | SimilarityProvider similarityProvider = fieldMapper.similarity(); 323 | if (similarityProvider != null) { 324 | mapper.put("similarityPovider", similarityProvider.name()); 325 | mapper.put("similarity", similarityProvider.get().getClass().getName() ); 326 | } 327 | 328 | PostingsFormatProvider postingsFormatProvider = fieldMapper.postingsFormatProvider(); 329 | if (postingsFormatProvider != null) { 330 | mapper.put("postingsFormatProvider", postingsFormatProvider.name()); 331 | mapper.put("postingsFormat", postingsFormatProvider.get().getName()); 332 | } 333 | 334 | m.put("mapper", mapper); 335 | } 336 | return m; 337 | } 338 | 339 | public void getStoreMetadata(Map response, Store.MetadataSnapshot metadata) { 340 | List> result = new ArrayList(); 341 | for (String name : metadata.asMap().keySet()) { 342 | StoreFileMetaData metaData = metadata.get(name); 343 | Map info = new HashMap(); 344 | info.put("name", name); 345 | info.put("length", metaData.length()); 346 | info.put("checksum", metaData.checksum() ); 347 | info.put("function", getFileFunction(name)); 348 | result.add(info); 349 | } 350 | response.put("store", result); 351 | } 352 | 353 | /** 354 | * copied from org.elasticsearch.gateway.local.state.meta.LocalGatewayMetaState 355 | * 356 | * @return the meta data from file 357 | * @throws Exception 358 | */ 359 | 360 | public static MetaData.Builder loadState(List files, NodeEnvironment nodeEnv) throws ElasticsearchException { 361 | MetaData.Builder metaDataBuilder; 362 | try { 363 | MetaData globalMetaData = loadGlobalState(files, nodeEnv); 364 | if (globalMetaData != null) { 365 | 
metaDataBuilder = MetaData.builder(globalMetaData); 366 | } else { 367 | metaDataBuilder = MetaData.builder(); 368 | } 369 | Set indices = nodeEnv.findAllIndices(); 370 | for (String index : indices) { 371 | IndexMetaData indexMetaData = loadIndex(files, index, nodeEnv); 372 | if (indexMetaData == null) { 373 | continue; 374 | } else { 375 | metaDataBuilder.put(indexMetaData, false); 376 | } 377 | } 378 | } catch (Exception e) { 379 | throw new ElasticsearchException(e.getMessage()); 380 | } 381 | return metaDataBuilder; 382 | } 383 | 384 | @Nullable 385 | private static IndexMetaData loadIndex(List files, String index, NodeEnvironment nodeEnv) { 386 | long highestVersion = -1; 387 | IndexMetaData indexMetaData = null; 388 | for (File indexLocation : nodeEnv.indexLocations(new Index(index))) { 389 | File stateDir = new File(indexLocation, "_state"); 390 | if (!stateDir.exists() || !stateDir.isDirectory()) { 391 | continue; 392 | } 393 | // now, iterate over the current versions, and find latest one 394 | File[] stateFiles = stateDir.listFiles(); 395 | if (stateFiles == null) { 396 | continue; 397 | } 398 | for (File stateFile : stateFiles) { 399 | if (!stateFile.getName().startsWith("state-")) { 400 | continue; 401 | } 402 | files.add(stateFile); 403 | try { 404 | long version = Long.parseLong(stateFile.getName().substring("state-".length())); 405 | if (version > highestVersion) { 406 | byte[] data = Streams.copyToByteArray(new FileInputStream(stateFile)); 407 | if (data.length == 0) { 408 | continue; 409 | } 410 | XContentParser parser = null; 411 | try { 412 | parser = XContentHelper.createParser(data, 0, data.length); 413 | parser.nextToken(); // move to START_OBJECT 414 | indexMetaData = IndexMetaData.Builder.fromXContent(parser); 415 | highestVersion = version; 416 | } finally { 417 | if (parser != null) { 418 | parser.close(); 419 | } 420 | } 421 | } 422 | } catch (Exception e) { 423 | continue; 424 | } 425 | } 426 | } 427 | return indexMetaData; 428 | } 429 
| 430 | private static MetaData loadGlobalState(List files, NodeEnvironment nodeEnv) { 431 | long highestVersion = -1; 432 | MetaData metaData = null; 433 | for (File dataLocation : nodeEnv.nodeDataLocations()) { 434 | File stateLocation = new File(dataLocation, "_state"); 435 | if (!stateLocation.exists()) { 436 | continue; 437 | } 438 | File[] stateFiles = stateLocation.listFiles(); 439 | if (stateFiles == null) { 440 | continue; 441 | } 442 | for (File stateFile : stateFiles) { 443 | if (!stateFile.getName().startsWith("global-")) { 444 | continue; 445 | } 446 | files.add(stateFile); 447 | try { 448 | long version = Long.parseLong(stateFile.getName().substring("global-".length())); 449 | if (version > highestVersion) { 450 | byte[] data = Streams.copyToByteArray(new FileInputStream(stateFile)); 451 | if (data.length == 0) { 452 | continue; 453 | } 454 | XContentParser parser = null; 455 | try { 456 | parser = XContentHelper.createParser(data, 0, data.length); 457 | metaData = MetaData.Builder.fromXContent(parser); 458 | highestVersion = version; 459 | } finally { 460 | if (parser != null) { 461 | parser.close(); 462 | } 463 | } 464 | } 465 | } catch (Exception e) { 466 | continue; 467 | } 468 | } 469 | } 470 | return metaData; 471 | } 472 | 473 | public Set getFieldTermCounts() throws IOException { 474 | Set termCounts = new TreeSet(); 475 | numTerms = 0; 476 | Fields fields = MultiFields.getFields(reader); 477 | if (fields != null) { 478 | Iterator fe = fields.iterator(); 479 | String fld; 480 | TermsEnum te = null; 481 | while (fe.hasNext()) { 482 | fld = fe.next(); 483 | long termCount = 0L; 484 | Terms terms = fields.terms(fld); 485 | if (terms != null) { 486 | te = terms.iterator(te); 487 | while (te.next() != null) { 488 | termCount++; 489 | numTerms++; 490 | } 491 | } 492 | termCounts.add(new FieldTermCount(fld, termCount)); 493 | } 494 | } 495 | return termCounts; 496 | } 497 | 498 | public int getNumTerms() { 499 | return numTerms; 500 | } 501 | 502 | 
public TermStats[] getTopTerms(int num) { 503 | if (topTerms == null) { 504 | topTerms = getHighFreqTerms(num, null); 505 | } 506 | return topTerms; 507 | } 508 | 509 | private static final TermStats[] EMPTY_STATS = new TermStats[0]; 510 | 511 | public TermStats[] getHighFreqTerms(int numTerms, String[] fieldNames) { 512 | TermStatsQueue tiq = new TermStatsQueue(numTerms); 513 | TermsEnum te = null; 514 | try { 515 | if (fieldNames != null) { 516 | Fields fields = MultiFields.getFields(reader); 517 | if (fields == null) { 518 | return EMPTY_STATS; 519 | } 520 | for (String field : fieldNames) { 521 | Terms terms = fields.terms(field); 522 | if (terms != null) { 523 | te = terms.iterator(te); 524 | fillQueue(te, tiq, field); 525 | } 526 | } 527 | } else { 528 | Fields fields = MultiFields.getFields(reader); 529 | if (fields == null) { 530 | return EMPTY_STATS; 531 | } 532 | for (String field : fields) { 533 | Terms terms = fields.terms(field); 534 | te = terms.iterator(te); 535 | fillQueue(te, tiq, field); 536 | } 537 | } 538 | } catch (IOException e) { 539 | // ignore 540 | } 541 | TermStats[] result = new TermStats[tiq.size()]; 542 | // we want highest first so we read the queue and populate the array 543 | // starting at the end and work backwards 544 | int count = tiq.size() - 1; 545 | while (tiq.size() != 0) { 546 | result[count] = tiq.pop(); 547 | count--; 548 | } 549 | return result; 550 | } 551 | 552 | public static String bytesToHex(BytesRef bytes, boolean wrap) { 553 | return bytesToHex(bytes.bytes, bytes.offset, bytes.length, wrap); 554 | } 555 | 556 | public static String bytesToHex(byte bytes[], int offset, int length, boolean wrap) { 557 | StringBuilder sb = new StringBuilder(); 558 | boolean newLine = false; 559 | for (int i = offset; i < offset + length; ++i) { 560 | if (i > offset && !newLine) { 561 | sb.append(" "); 562 | } 563 | sb.append(Integer.toHexString(0x0100 + (bytes[i] & 0x00FF)) 564 | .substring(1)); 565 | if (i > 0 && (i + 1) % 16 == 0 
&& wrap) { 566 | sb.append("\n"); 567 | newLine = true; 568 | } else { 569 | newLine = false; 570 | } 571 | } 572 | return sb.toString(); 573 | } 574 | 575 | private void fillQueue(TermsEnum termsEnum, TermStatsQueue tiq, String field) { 576 | while (true) { 577 | try { 578 | BytesRef term = termsEnum.next(); 579 | if (term != null) { 580 | BytesRef text = new BytesRef(); 581 | text.copyBytes(term); 582 | TermStats ts = new TermStats(); 583 | ts.field(field).text(text).docFreq(termsEnum.docFreq()); 584 | tiq.insertWithOverflow(ts); 585 | } else { 586 | break; 587 | } 588 | } catch (IOException e) { 589 | break; 590 | } 591 | } 592 | } 593 | 594 | } 595 | -------------------------------------------------------------------------------- /src/main/java/org/xbib/elasticsearch/skywalker/collectors/AccessibleHitCollector.java: -------------------------------------------------------------------------------- 1 | 2 | package org.xbib.elasticsearch.skywalker.collectors; 3 | 4 | import org.apache.lucene.search.Collector; 5 | import org.apache.lucene.search.Scorer; 6 | 7 | /** 8 | * Accessible Hit collector 9 | */ 10 | public abstract class AccessibleHitCollector extends Collector { 11 | 12 | protected Scorer scorer; 13 | protected boolean shouldScore; 14 | protected int docBase; 15 | protected boolean outOfOrder; 16 | 17 | public abstract int getTotalHits(); 18 | 19 | public abstract int getDocId(int pos); 20 | 21 | public abstract float getScore(int pos); 22 | 23 | public abstract void reset(); 24 | } 25 | -------------------------------------------------------------------------------- /src/main/java/org/xbib/elasticsearch/skywalker/collectors/AccessibleTopHitCollector.java: -------------------------------------------------------------------------------- 1 | 2 | package org.xbib.elasticsearch.skywalker.collectors; 3 | 4 | import org.apache.lucene.index.AtomicReaderContext; 5 | import org.apache.lucene.search.Scorer; 6 | import org.apache.lucene.search.TopDocs; 7 | import 
org.apache.lucene.search.TopScoreDocCollector; 8 | 9 | import java.io.IOException; 10 | 11 | /** 12 | * Accessible top hit collector 13 | */ 14 | public class AccessibleTopHitCollector extends AccessibleHitCollector { 15 | 16 | private TopScoreDocCollector tdc; 17 | private TopDocs topDocs = null; 18 | private int size; 19 | 20 | public AccessibleTopHitCollector(int size, boolean outOfOrder, boolean shouldScore) { 21 | tdc = TopScoreDocCollector.create(size, outOfOrder); 22 | this.shouldScore = shouldScore; 23 | this.outOfOrder = outOfOrder; 24 | this.size = size; 25 | } 26 | 27 | @Override 28 | public int getDocId(int pos) { 29 | if (topDocs == null) { 30 | topDocs = tdc.topDocs(); 31 | } 32 | return topDocs.scoreDocs[pos].doc; 33 | } 34 | 35 | @Override 36 | public float getScore(int pos) { 37 | if (topDocs == null) { 38 | topDocs = tdc.topDocs(); 39 | } 40 | return topDocs.scoreDocs[pos].score; 41 | } 42 | 43 | @Override 44 | public int getTotalHits() { 45 | return tdc.getTotalHits(); 46 | } 47 | 48 | @Override 49 | public boolean acceptsDocsOutOfOrder() { 50 | return tdc.acceptsDocsOutOfOrder(); 51 | } 52 | 53 | @Override 54 | public void collect(int doc) throws IOException { 55 | tdc.collect(doc); 56 | } 57 | 58 | @Override 59 | public void setNextReader(AtomicReaderContext context) throws IOException { 60 | this.docBase = context.docBase; 61 | tdc.setNextReader(context); 62 | } 63 | 64 | @Override 65 | public void setScorer(Scorer scorer) throws IOException { 66 | if (shouldScore) { 67 | tdc.setScorer(scorer); 68 | } else { 69 | tdc.setScorer(NoScoringScorer.INSTANCE); 70 | } 71 | } 72 | 73 | @Override 74 | public void reset() { 75 | tdc = TopScoreDocCollector.create(size, outOfOrder); 76 | topDocs = null; 77 | } 78 | } 79 | -------------------------------------------------------------------------------- /src/main/java/org/xbib/elasticsearch/skywalker/collectors/AllHit.java: -------------------------------------------------------------------------------- 1 | 
package org.xbib.elasticsearch.skywalker.collectors;

/**
 * All hit
 *
 * One collected hit: an index-wide document id together with its score.
 */
public class AllHit {

    private int docId;
    private float score;

    public AllHit(int docId, float score) {
        this.docId = docId;
        this.score = score;
    }

    public int docId() {
        return docId;
    }

    public float score() {
        return score;
    }
}

// -----------------------------------------------------------------------------
// /src/main/java/org/xbib/elasticsearch/skywalker/collectors/AllHitsCollector.java
// -----------------------------------------------------------------------------

package org.xbib.elasticsearch.skywalker.collectors;

import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.Scorer;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

/**
 * Collector that keeps every hit, optionally with its score.
 */
public class AllHitsCollector extends AccessibleHitCollector {

    private List<AllHit> hits = new ArrayList<AllHit>();

    public AllHitsCollector(boolean outOfOrder, boolean shouldScore) {
        this.outOfOrder = outOfOrder;
        this.shouldScore = shouldScore;
    }

    @Override
    public void collect(int doc) {
        float score = 1.0f;
        if (shouldScore) {
            try {
                score = scorer.score();
            } catch (IOException e) {
                // best effort: keep the default score of 1.0f
            }
        }
        // docBase + doc = index-wide document id
        hits.add(new AllHit(docBase + doc, score));
    }

    @Override
    public int getTotalHits() {
        return hits.size();
    }

    @Override
    public int getDocId(int i) {
        return hits.get(i).docId();
    }

    @Override
    public float getScore(int i) {
        return hits.get(i).score();
    }

    @Override
    public boolean acceptsDocsOutOfOrder() {
        return outOfOrder;
    }

    @Override
    public void setNextReader(AtomicReaderContext context) throws IOException {
        this.docBase = context.docBase;
    }

    @Override
    public void setScorer(Scorer scorer) throws IOException {
        this.scorer = scorer;
    }

    @Override
    public void reset() {
        hits.clear();
    }

}

// -----------------------------------------------------------------------------
// /src/main/java/org/xbib/elasticsearch/skywalker/collectors/CountLimitedHitCollector.java
// -----------------------------------------------------------------------------

package org.xbib.elasticsearch.skywalker.collectors;

import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopScoreDocCollector;

import java.io.IOException;

/**
 * Count limited hit collector
 *
 * Delegates to a {@link TopScoreDocCollector} but aborts the search with a
 * {@link LimitedException} once more than {@code maxSize} hits were seen.
 */
public class CountLimitedHitCollector extends LimitedHitCollector {

    private int maxSize;
    private int count;
    private int lastDoc;
    private TopScoreDocCollector tdc;
    private TopDocs topDocs = null;

    public CountLimitedHitCollector(int maxSize, boolean outOfOrder, boolean shouldScore) {
        this.maxSize = maxSize;
        this.outOfOrder = outOfOrder;
        this.shouldScore = shouldScore;
        count = 0;
        tdc = TopScoreDocCollector.create(maxSize, outOfOrder);
    }

    @Override
    public long limitSize() {
        return maxSize;
    }

    @Override
    public int limitType() {
        return TYPE_SIZE;
    }

    @Override
    public void collect(int doc) throws IOException {
        count++;
        if (count > maxSize) {
            // undo the increment so getTotalHits() reports only collected hits
            count--;
            throw new LimitedException(TYPE_SIZE, maxSize, count, lastDoc);
        }
        // index-wide id of the last successfully collected document
        lastDoc = docBase + doc;

        tdc.collect(doc);
    }

    @Override
    public int getDocId(int pos) {
        if (topDocs == null) {
            topDocs = tdc.topDocs();
        }
        return topDocs.scoreDocs[pos].doc;
    }

    @Override
    public float getScore(int pos) {
        if (topDocs == null) {
            topDocs = tdc.topDocs();
        }
        return topDocs.scoreDocs[pos].score;
    }

    @Override
    public int getTotalHits() {
        return count;
    }

    @Override
    public boolean acceptsDocsOutOfOrder() {
        return tdc.acceptsDocsOutOfOrder();
    }

    @Override
    public void setNextReader(AtomicReaderContext context) throws IOException {
        this.docBase = context.docBase;
        tdc.setNextReader(context);
    }

    @Override
    public void setScorer(Scorer scorer) throws IOException {
        if (shouldScore) {
            tdc.setScorer(scorer);
        } else {
            tdc.setScorer(NoScoringScorer.INSTANCE);
        }
    }

    @Override
    public void reset() {
        count = 0;
        lastDoc = 0;
        topDocs = null;
        tdc = TopScoreDocCollector.create(maxSize, outOfOrder);
    }
}

// -----------------------------------------------------------------------------
// /src/main/java/org/xbib/elasticsearch/skywalker/collectors/IntervalLimitedCollector.java
// -----------------------------------------------------------------------------

package org.xbib.elasticsearch.skywalker.collectors;

import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.TimeLimitingCollector;
import org.apache.lucene.search.TimeLimitingCollector.TimeExceededException;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopScoreDocCollector;

import java.io.IOException;

/**
 * Interval limited collector
 *
 * Wraps a {@link TopScoreDocCollector} (capacity 1000) in a
 * {@link TimeLimitingCollector}; translates its timeout into a
 * {@link LimitedException}.
 * NOTE(review): maxTime is measured in ticks of the global counter
 * (milliseconds by default) - confirm against the Lucene version in use.
 */
public class IntervalLimitedCollector extends LimitedHitCollector {

    private long maxTime;
    // NOTE(review): lastDoc is initialized/reset but never updated here
    private long lastDoc = 0;
    private TopScoreDocCollector tdc;
    private TopDocs topDocs = null;
    private TimeLimitingCollector thc;

    public IntervalLimitedCollector(int maxTime, boolean outOfOrder, boolean shouldScore) {
        this.maxTime = maxTime;
        this.outOfOrder = outOfOrder;
        this.shouldScore = shouldScore;
        tdc = TopScoreDocCollector.create(1000, outOfOrder);
        thc = new TimeLimitingCollector(tdc, TimeLimitingCollector.getGlobalCounter(), maxTime);
    }

    @Override
    public long limitSize() {
        return maxTime;
    }

    @Override
    public int limitType() {
        return TYPE_TIME;
    }

    @Override
    public int getDocId(int pos) {
        if (topDocs == null) {
            topDocs = tdc.topDocs();
        }
        return topDocs.scoreDocs[pos].doc;
    }

    @Override
    public float getScore(int pos) {
        if (topDocs == null) {
            topDocs = tdc.topDocs();
        }
        return topDocs.scoreDocs[pos].score;
    }

    @Override
    public int getTotalHits() {
        return tdc.getTotalHits();
    }

    @Override
    public void collect(int docNum) throws IOException {
        try {
            thc.collect(docNum);
        } catch (TimeExceededException tee) {
            // re-throw as the common limit exception of this package
            throw new LimitedException(TYPE_TIME, maxTime, tee.getTimeElapsed(), tee.getLastDocCollected());
        }
    }

    @Override
    public boolean acceptsDocsOutOfOrder() {
        return outOfOrder;
    }


    @Override
    public void setNextReader(AtomicReaderContext context) throws IOException {
        this.docBase = context.docBase;
        thc.setNextReader(context);
    }

    @Override
    public void setScorer(Scorer scorer) throws IOException {
        this.scorer = scorer;
        if (shouldScore) {
            thc.setScorer(scorer);
        } else {
            thc.setScorer(NoScoringScorer.INSTANCE);
        }
    }

    @Override
    public void reset() {
        lastDoc = 0;
        tdc = TopScoreDocCollector.create(1000, outOfOrder);
        thc = new TimeLimitingCollector(tdc, TimeLimitingCollector.getGlobalCounter(), maxTime);
    }
}

// -----------------------------------------------------------------------------
/src/main/java/org/xbib/elasticsearch/skywalker/collectors/LimitedException.java: -------------------------------------------------------------------------------- 1 | 2 | package org.xbib.elasticsearch.skywalker.collectors; 3 | 4 | /** 5 | * Limited exception 6 | */ 7 | public class LimitedException extends RuntimeException { 8 | 9 | private int limitType; 10 | private long limitSize; 11 | private int lastDoc; 12 | private long currentSize; 13 | 14 | public LimitedException(int limitType, long limitSize, long currentSize, int lastDoc) { 15 | super(); 16 | this.limitType = limitType; 17 | this.limitSize = limitSize; 18 | this.currentSize = currentSize; 19 | this.lastDoc = lastDoc; 20 | } 21 | 22 | /** 23 | * @return the limitType 24 | */ 25 | public int getLimitType() { 26 | return limitType; 27 | } 28 | 29 | /** 30 | * @param limitType the limitType to set 31 | */ 32 | public void setLimitType(int limitType) { 33 | this.limitType = limitType; 34 | } 35 | 36 | /** 37 | * @return the limitSize 38 | */ 39 | public long getLimitSize() { 40 | return limitSize; 41 | } 42 | 43 | /** 44 | * @param limitSize the limitSize to set 45 | */ 46 | public void setLimitSize(long limitSize) { 47 | this.limitSize = limitSize; 48 | } 49 | 50 | /** 51 | * @return the currentSize 52 | */ 53 | public long getCurrentSize() { 54 | return currentSize; 55 | } 56 | 57 | /** 58 | * @param currentSize the currentSize to set 59 | */ 60 | public void setCurrentSize(long currentSize) { 61 | this.currentSize = currentSize; 62 | } 63 | 64 | /** 65 | * @return the lastDoc 66 | */ 67 | public int getLastDoc() { 68 | return lastDoc; 69 | } 70 | 71 | /** 72 | * @param lastDoc the lastDoc to set 73 | */ 74 | public void setLastDoc(int lastDoc) { 75 | this.lastDoc = lastDoc; 76 | } 77 | } 78 | -------------------------------------------------------------------------------- /src/main/java/org/xbib/elasticsearch/skywalker/collectors/LimitedHitCollector.java: 
// -----------------------------------------------------------------------------

package org.xbib.elasticsearch.skywalker.collectors;

/**
 * Limited hit collector
 *
 * Base class for collectors that stop after a limit and report which kind
 * of limit applies.
 */
public abstract class LimitedHitCollector extends AccessibleHitCollector {

    /** Limit by elapsed time. */
    public static final int TYPE_TIME = 0;

    /** Limit by number of hits. */
    public static final int TYPE_SIZE = 1;

    public abstract int limitType();

    public abstract long limitSize();
}

// -----------------------------------------------------------------------------
// /src/main/java/org/xbib/elasticsearch/skywalker/collectors/NoScoringScorer.java
// -----------------------------------------------------------------------------

package org.xbib.elasticsearch.skywalker.collectors;

import org.apache.lucene.search.Scorer;

import java.io.IOException;

/**
 * No scoring scorer
 *
 * Singleton scorer with a constant score of 1.0f, used when scoring is
 * switched off. The iteration methods are stubs returning 0; collectors
 * using this instance only call {@link #score()}.
 */
public class NoScoringScorer extends Scorer {

    public static final NoScoringScorer INSTANCE = new NoScoringScorer();

    protected NoScoringScorer() {
        super(null); // no weight needed for a constant score
    }

    @Override
    public int freq() throws IOException {
        return 0;
    }

    @Override
    public float score() throws IOException {
        return 1.0f;
    }

    @Override
    public int advance(int doc) throws IOException {
        return 0;
    }

    @Override
    public long cost() {
        return 0;
    }

    @Override
    public int docID() {
        return 0;
    }

    @Override
    public int nextDoc() throws IOException {
        return 0;
    }

}

// -----------------------------------------------------------------------------
// /src/main/java/org/xbib/elasticsearch/skywalker/reconstruct/DocumentReconstructor.java
// -----------------------------------------------------------------------------

package org.xbib.elasticsearch.skywalker.reconstruct;

import org.apache.lucene.document.Document;
import
org.apache.lucene.index.AtomicReader;
import org.apache.lucene.index.CompositeReader;
import org.apache.lucene.index.DocsAndPositionsEnum;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.MultiFields;
import org.apache.lucene.index.SlowCompositeReaderWrapper;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.util.Bits;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.xbib.elasticsearch.action.skywalker.support.IndexableFieldToXContent;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;

/**
 * This class attempts to reconstruct all fields from a document existing in a
 * Lucene index. This operation may be (and usually is) lossy - e.g. unstored
 * fields are rebuilt from terms present in the index, and these terms may have
 * been changed (e.g. lowercased, stemmed), and many other input tokens may have
 * been skipped altogether by the Analyzer, when fields were originally added to
 * the index.
 */
public class DocumentReconstructor {

    // atomic (single-segment) view of the index; composite readers are wrapped
    private AtomicReader reader;

    /**
     * Prepare a document reconstructor.
     *
     * @param indexReader IndexReader to read from
     * @throws ElasticsearchIllegalArgumentException if the reader is null, of an
     *         unsupported class, or cannot be wrapped into an atomic reader
     */
    public DocumentReconstructor(IndexReader indexReader) {
        if (indexReader == null) {
            throw new ElasticsearchIllegalArgumentException("reader cannot be null");
        }
        try {
            if (indexReader instanceof CompositeReader) {
                this.reader = SlowCompositeReaderWrapper.wrap(indexReader);
            } else if (indexReader instanceof AtomicReader) {
                this.reader = (AtomicReader) indexReader;
            } else {
                throw new ElasticsearchIllegalArgumentException("unsupported IndexReader class "
                        + indexReader.getClass().getName());
            }
        } catch (IOException e) {
            // FIX: preserve the cause instead of flattening it to a message string
            throw new ElasticsearchIllegalArgumentException(e.getMessage(), e);
        }
    }

    /**
     * Reconstruct the live (non-deleted) documents of an index shard.
     *
     * @param shardId the shard id, echoed into the output
     * @return builder holding the reconstructed documents as JSON
     * @throws IOException if reading the index fails
     */
    public XContentBuilder reconstruct(int shardId) throws IOException {
        XContentBuilder builder = jsonBuilder();
        builder.startObject()
                .field("shardId", shardId)
                .field("numDeletions", reader.numDeletedDocs());
        builder.startArray("docs");
        FieldInfos fieldInfos = reader.getFieldInfos();
        Bits live = MultiFields.getLiveDocs(reader);
        for (int docNum = 0; docNum < reader.maxDoc(); docNum++) {
            // Live bits are SET for non-deleted docs; a clear bit means deleted.
            // FIX: the original check was inverted ("if (live.get(docNum)) continue")
            // so whenever the shard had deletions, only deleted docs were emitted,
            // while a shard with no deletions (live == null) emitted everything.
            if (live != null && !live.get(docNum)) {
                continue; // deleted document, skip
            }
            Document doc = reader.document(docNum);
            builder.startObject().startArray("fields");
            if (fieldInfos != null) {
                // stored fields can be emitted directly
                for (FieldInfo fi : fieldInfos) {
                    IndexableField[] fs = doc.getFields(fi.name);
                    if (fs != null && fs.length > 0) {
                        for (IndexableField f : fs) {
                            new IndexableFieldToXContent().field(f)
                                    .toXContent(builder, ToXContent.EMPTY_PARAMS);
                        }
                    }
                }
            }
            builder.endArray();
            builder.startArray("terms");
            if (fieldInfos != null) {
                TermsEnum te = null;
                DocsAndPositionsEnum dpe = null;
                for (FieldInfo fi : fieldInfos) {
                    Terms terms = MultiFields.getTerms(reader, fi.name);
                    if (terms == null) { // no terms in this field
                        continue;
                    }
                    te = terms.iterator(te);
                    while (te.next() != null) {
                        // FIX: request offsets explicitly; with flags == 0 the enum is
                        // not required to supply offsets and startOffset()/endOffset()
                        // below may report -1 even when offsets are in the index
                        DocsAndPositionsEnum newDpe =
                                te.docsAndPositions(live, dpe, DocsAndPositionsEnum.FLAG_OFFSETS);
                        if (newDpe == null) { // no position info for this field
                            break;
                        }
                        dpe = newDpe;
                        int num = dpe.advance(docNum);
                        if (num != docNum) { // either greater than docNum or NO_MORE_DOCS
                            continue; // no data for this term in this doc
                        }
                        String text = te.term().utf8ToString();
                        List<Integer> positions = new ArrayList<Integer>();
                        List<Integer> starts = new ArrayList<Integer>();
                        List<Integer> ends = new ArrayList<Integer>();
                        for (int k = 0; k < dpe.freq(); k++) {
                            positions.add(dpe.nextPosition());
                            starts.add(dpe.startOffset());
                            ends.add(dpe.endOffset());
                        }
                        builder.startObject()
                                .field("text", text)
                                .field("positions", positions)
                                .field("starts", starts)
                                .field("ends", ends)
                                .field("count", dpe.freq())
                                .endObject();
                    }
                }
            }
            builder.endArray();
            builder.endObject();
        }
        builder.endArray();
        builder.endObject();
        return builder;
    }

}

// ---------------------------------------------------------------------------
// src/main/java/org/xbib/elasticsearch/skywalker/reconstruct/DocumentTerm.java
// ---------------------------------------------------------------------------
// Licensed to ElasticSearch and Shay Banon under one or more contributor
// license agreements. See the NOTICE file distributed with this work for
// additional information regarding copyright ownership. ElasticSearch
// licenses this file to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance with the License.
/*
 * (Apache License, Version 2.0 header continued: you may obtain a copy of the
 * License at http://www.apache.org/licenses/LICENSE-2.0; software distributed
 * under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied.)
 *
 * File: src/main/java/org/xbib/elasticsearch/skywalker/reconstruct/DocumentTerm.java
 * (package org.xbib.elasticsearch.skywalker.reconstruct)
 */

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * A single term of a reconstructed document: the term text, the per-occurrence
 * positions and start/end offsets, and the occurrence count. All setters are
 * fluent and return {@code this}.
 *
 * @author Jörg Prante
 */
public class DocumentTerm {

    private String text;             // term text
    private List<Integer> positions; // token positions, one per occurrence
    private List<Integer> starts;    // start offsets, one per occurrence
    private List<Integer> ends;      // end offsets, one per occurrence
    private long count;              // number of occurrences

    public DocumentTerm text(String text) {
        this.text = text;
        return this;
    }

    public String text() {
        // FIX: was "return text();" — unconditional self-call, i.e. infinite
        // recursion ending in StackOverflowError on the first invocation
        return text;
    }

    public DocumentTerm count(long count) {
        this.count = count;
        return this;
    }

    public long count() {
        return count;
    }

    public DocumentTerm positions(int size) {
        // FIX: pre-fill with zeros; "new ArrayList(size)" only reserves capacity
        // and leaves the list empty, so the index-based setter below always
        // threw IndexOutOfBoundsException
        this.positions = new ArrayList<Integer>(Collections.nCopies(size, 0));
        return this;
    }

    public DocumentTerm positions(int pos, int position) {
        this.positions.set(pos, position);
        return this;
    }

    public List<Integer> positions() {
        return positions;
    }

    public DocumentTerm starts(int size) {
        // see positions(int): pre-fill so set(pos, v) is valid
        this.starts = new ArrayList<Integer>(Collections.nCopies(size, 0));
        return this;
    }

    public DocumentTerm starts(int pos, int start) {
        this.starts.set(pos, start);
        return this;
    }

    public List<Integer> starts() {
        // FIX: was "return starts();" — unconditional infinite recursion
        return starts;
    }

    public DocumentTerm ends(int size) {
        // see positions(int): pre-fill so set(pos, v) is valid
        this.ends = new ArrayList<Integer>(Collections.nCopies(size, 0));
        return this;
    }

    public DocumentTerm ends(int pos, int end) {
        this.ends.set(pos, end);
        return this;
    }

    public List<Integer> ends() {
        return ends;
    }

    @Override
    public String toString() {
        return count + ":'" + text + "'";
    }

}
// ---------------------------------------------------------------------------
// src/main/java/org/xbib/elasticsearch/skywalker/reconstruct/DocumentTermComparator.java
// ---------------------------------------------------------------------------

package org.xbib.elasticsearch.skywalker.reconstruct;

import java.util.Comparator;

/**
 * Comparator for {@link DocumentTerm}, ordering either by term text or by
 * occurrence count, in ascending or descending direction.
 */
public class DocumentTermComparator implements Comparator<DocumentTerm> {

    private final boolean ascending;
    private final boolean byText;

    /**
     * @param byText    true to order by term text, false to order by count
     * @param ascending true for ascending order, false for descending
     */
    public DocumentTermComparator(boolean byText, boolean ascending) {
        this.ascending = ascending;
        this.byText = byText;
    }

    public int compare(DocumentTerm h1, DocumentTerm h2) {
        if (byText) {
            return ascending ? h1.text().compareTo(h2.text()) : h2.text().compareTo(h1.text());
        }
        // FIX: the count branch returned -1 for h1 > h2 when ascending, i.e.
        // "ascending" actually produced DESCENDING count order (and vice versa),
        // inconsistent with the byText branch above.
        int c = Long.compare(h1.count(), h2.count());
        return ascending ? c : -c;
    }
}

// ---------------------------------------------------------------------------
// src/main/java/org/xbib/elasticsearch/skywalker/stats/DocFreqComparatorSortDescending.java
// ---------------------------------------------------------------------------

package org.xbib.elasticsearch.skywalker.stats;

import java.util.Comparator;

/**
 * Document frequency comparator for descending sort: the term with the
 * higher docFreq sorts first.
 */
public class DocFreqComparatorSortDescending implements Comparator<TermStats> {

    public int compare(TermStats a, TermStats b) {
        // reversed operand order yields descending docFreq
        return Integer.compare(b.docFreq(), a.docFreq());
    }
}

// ---------------------------------------------------------------------------
// src/main/java/org/xbib/elasticsearch/skywalker/stats/FieldTermCount.java
// ---------------------------------------------------------------------------

package org.xbib.elasticsearch.skywalker.stats;

/**
 * A field name paired with the number of terms seen in that field. Natural
 * order is by DESCENDING term count.
 */
public class FieldTermCount implements Comparable<FieldTermCount> {

    private final String fieldname;

    private final long termCount;

    public FieldTermCount(String fieldname, long termCount) {
        this.fieldname = fieldname;
        this.termCount = termCount;
    }

    public String getFieldname() {
        return fieldname;
    }

    public long getTermCount() {
        return termCount;
    }

    public int compareTo(FieldTermCount f2) {
        // NOTE(review): ordering is inconsistent with equals() (not overridden);
        // fine for plain sorting, but avoid use as key in sorted sets/maps
        return Long.compare(f2.termCount, termCount); // descending
    }
}

// ---------------------------------------------------------------------------
// src/main/java/org/xbib/elasticsearch/skywalker/stats/TermStats.java
// ---------------------------------------------------------------------------

package org.xbib.elasticsearch.skywalker.stats;

import org.apache.lucene.util.BytesRef;

/**
 * Term statistics: field name, term bytes, and document frequency.
 * All setters are fluent and return {@code this}.
 */
public class TermStats {

    private String field;      // field the term occurs in
    private BytesRef termtext; // raw term bytes
    private int docFreq;       // number of documents containing the term

    public TermStats text(BytesRef text) {
        this.termtext = text;
        return this;
    }

    public String text() {
        // NOTE(review): throws NullPointerException if text(BytesRef) was
        // never called — presumably callers always populate it first
        return termtext.utf8ToString();
    }

    public TermStats field(String field) {
        this.field = field;
        return this;
    }

    public String field() {
        return field;
    }

    public TermStats docFreq(int docFreq) {
        this.docFreq = docFreq;
        return this;
    }

    public int docFreq() {
        return docFreq;
    }

    @Override
    public String toString() {
        return field + ":" + termtext.utf8ToString() + ":" + docFreq;
    }
}

// ---------------------------------------------------------------------------
// src/main/java/org/xbib/elasticsearch/skywalker/stats/TermStatsQueue.java
// ---------------------------------------------------------------------------

package org.xbib.elasticsearch.skywalker.stats;

import org.apache.lucene.util.PriorityQueue;

/**
 * Bounded priority queue ranking terms by document frequency; the head of
 * the queue is the entry with the smallest docFreq.
 */
public class TermStatsQueue extends PriorityQueue<TermStats> {

    public TermStatsQueue(int size) {
        super(size);
    }

    @Override
    protected boolean lessThan(TermStats termInfoA, TermStats termInfoB) {
        return termInfoA.docFreq() < termInfoB.docFreq();
    }
}

// ---------------------------------------------------------------------------
// src/main/resources/es-plugin.properties
// ---------------------------------------------------------------------------

plugin=org.xbib.elasticsearch.plugin.skywalker.SkywalkerPlugin

// ---------------------------------------------------------------------------
// src/site/resources/cologne-skywalker.jpg:
// ---------------------------------------------------------------------------
https://raw.githubusercontent.com/jprante/elasticsearch-skywalker/dda56934dee485893f59b2df9aeea6a9b1389dea/src/site/resources/cologne-skywalker.jpg

// ---------------------------------------------------------------------------
// src/site/resources/skywalker.png:
// ---------------------------------------------------------------------------
https://raw.githubusercontent.com/jprante/elasticsearch-skywalker/dda56934dee485893f59b2df9aeea6a9b1389dea/src/site/resources/skywalker.png

// ---------------------------------------------------------------------------
// src/site/site.xml:
// (XML markup was stripped by the dump; remaining settings text preserved —
//  maven-fluido-skin 1.2.1, gitHub ribbon for jprante/elasticsearch-skywalker)
// ---------------------------------------------------------------------------
org.apache.maven.skins maven-fluido-skin 1.2.1
true true
jprante/elasticsearch-skywalker right black
xbib true true

// ---------------------------------------------------------------------------
// src/test/java/org/xbib/elasticsearch/plugin/skywalker/AbstractNodeTest.java
// ---------------------------------------------------------------------------

/*
 * Licensed to ElasticSearch and Shay Banon under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. ElasticSearch licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.xbib.elasticsearch.plugin.skywalker;

import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.network.NetworkUtils;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.indices.IndexMissingException;
import org.elasticsearch.node.Node;
import org.testng.Assert;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;

import java.net.URI;
import java.util.Map;
import java.util.Random;

import static org.elasticsearch.common.collect.Maps.newHashMap;
import static org.elasticsearch.common.settings.ImmutableSettings.Builder.EMPTY_SETTINGS;
import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;
import static org.elasticsearch.node.NodeBuilder.nodeBuilder;

/**
 * Base class for node-level integration tests: starts a single-node cluster
 * before each test method, creates a fresh test index, and tears everything
 * down afterwards.
 */
public abstract class AbstractNodeTest extends Assert {

    private final static ESLogger logger = Loggers.getLogger(AbstractNodeTest.class);

    // index/cluster names include the host name to avoid collisions on shared machines
    public final String INDEX = "test-" + NetworkUtils.getLocalAddress().getHostName().toLowerCase();

    protected final String CLUSTER = "test-cluster-" + NetworkUtils.getLocalAddress().getHostName();

    protected Settings defaultSettings = ImmutableSettings
            .settingsBuilder()
            .put("cluster.name", CLUSTER)
            .build();

    private Map<String, Node> nodes = newHashMap();
    private Map<String, Client> clients = newHashMap();
    private Map<String, InetSocketTransportAddress> addresses = newHashMap();

    /**
     * Starts node "1", records its publish address, and creates the test index.
     */
    @BeforeMethod
    public void createIndices() throws Exception {
        startNode("1");

        NodesInfoRequest nodesInfoRequest = new NodesInfoRequest().transport(true);
        NodesInfoResponse response = client("1").admin().cluster().nodesInfo(nodesInfoRequest).actionGet();
        InetSocketTransportAddress address = (InetSocketTransportAddress) response.iterator().next()
                .getTransport().getAddress().publishAddress();

        addresses.put("1", address);

        client("1").admin().indices()
                .create(new CreateIndexRequest(INDEX))
                .actionGet();
    }

    /**
     * Deletes the test index (best effort) and shuts down all nodes.
     */
    @AfterMethod
    public void deleteIndices() {
        try {
            // clear test index
            client("1").admin().indices()
                    .delete(new DeleteIndexRequest().indices(INDEX))
                    .actionGet();
        } catch (IndexMissingException e) {
            // ignore — index was never created or already removed
        }
        closeNode("1");
        closeAllNodes();
    }

    public void putDefaultSettings(Settings.Builder settings) {
        putDefaultSettings(settings.build());
    }

    public void putDefaultSettings(Settings settings) {
        defaultSettings = ImmutableSettings.settingsBuilder().put(defaultSettings).put(settings).build();
    }

    public Node startNode(String id) {
        return buildNode(id).start();
    }

    public Node startNode(String id, Settings.Builder settings) {
        return startNode(id, settings.build());
    }

    public Node startNode(String id, Settings settings) {
        return buildNode(id, settings).start();
    }

    public Node buildNode(String id) {
        return buildNode(id, EMPTY_SETTINGS);
    }

    public Node buildNode(String id, Settings.Builder settings) {
        return buildNode(id, settings.build());
    }

    /**
     * Builds (without starting) a node with per-test settings layered over a
     * per-class classpath YAML file, and registers its client.
     */
    public Node buildNode(String id, Settings settings) {
        String settingsSource = getClass().getName().replace('.', '/') + ".yml";
        Settings finalSettings = settingsBuilder()
                .loadFromClasspath(settingsSource)
                .put(defaultSettings)
                .put(settings)
                .put("name", id)
                .build();

        if (finalSettings.get("gateway.type") == null) {
            // default to non gateway
            finalSettings = settingsBuilder().put(finalSettings).put("gateway.type", "none").build();
        }
        if (finalSettings.get("cluster.routing.schedule") != null) {
            // decrease the routing schedule so new nodes will be added quickly
            finalSettings = settingsBuilder().put(finalSettings).put("cluster.routing.schedule", "50ms").build();
        }
        Node node = nodeBuilder().settings(finalSettings).build();

        Client client = node.client();

        nodes.put(id, node);
        clients.put(id, client);
        return node;
    }

    public void closeNode(String id) {
        Client client = clients.remove(id);
        if (client != null) {
            client.close();
        }
        Node node = nodes.remove(id);
        if (node != null) {
            node.close();
        }
    }

    public Node node(String id) {
        return nodes.get(id);
    }

    public Client client(String id) {
        return clients.get(id);
    }

    public InetSocketTransportAddress address(String id) {
        return addresses.get(id);
    }

    public void closeAllNodes() {
        for (Client client : clients.values()) {
            client.close();
        }
        clients.clear();
        for (Node node : nodes.values()) {
            node.close();
        }
        nodes.clear();
    }

    private static Random random = new Random();
    private static char[] numbersAndLetters = ("0123456789abcdefghijklmnopqrstuvwxyz").toCharArray();

    /**
     * Returns a random alphanumeric string of the given length.
     */
    protected String randomString(int len) {
        final char[] buf = new char[len];
        for (int i = 0; i < buf.length; i++) {
            // FIX: the bound was numbersAndLetters.length - 1, which could never
            // select the last character ('z') and biased the distribution
            buf[i] = numbersAndLetters[random.nextInt(numbersAndLetters.length)];
        }
        return new String(buf);
    }

}

// ---------------------------------------------------------------------------
// src/test/java/org/xbib/elasticsearch/plugin/skywalker/SkywalkerTests.java
// ---------------------------------------------------------------------------

package org.xbib.elasticsearch.plugin.skywalker;

import org.elasticsearch.action.ActionModule;
import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.common.inject.ModulesBuilder;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.EnvironmentModule;
import org.elasticsearch.indices.analysis.IndicesAnalysisModule;
import org.elasticsearch.rest.RestModule;

import org.testng.annotations.Test;

/**
 * Smoke test: the plugin must register its actions and REST handlers on the
 * respective modules without throwing.
 */
public class SkywalkerTests {

    @Test
    public void test() {

        Settings settings = ImmutableSettings.settingsBuilder().build();

        Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings),
                new EnvironmentModule(new Environment(settings)),
                new IndicesAnalysisModule())
                .createInjector();

        ActionModule actionModule = new ActionModule(true);
        RestModule restModule = new RestModule(settings);

        SkywalkerPlugin plugin = new SkywalkerPlugin();

        plugin.onModule(actionModule);
        plugin.onModule(restModule);

    }
}