├── .github └── workflows │ └── gradle.yml ├── .gitignore ├── .travis.yml ├── CHANGELOG.md ├── LICENSE.txt ├── README.md ├── build.gradle ├── gradle.properties ├── gradle └── wrapper │ ├── gradle-wrapper.jar │ └── gradle-wrapper.properties ├── gradlew ├── gradlew.bat ├── lib └── lire.jar └── src ├── main ├── assemblies │ └── plugin.xml ├── java │ └── org │ │ └── elasticsearch │ │ ├── ElasticsearchImageProcessException.java │ │ ├── index │ │ ├── mapper │ │ │ └── image │ │ │ │ ├── FeatureEnum.java │ │ │ │ ├── HashEnum.java │ │ │ │ └── ImageMapper.java │ │ └── query │ │ │ └── image │ │ │ ├── AbstractImageScorer.java │ │ │ ├── ImageHashQuery.java │ │ │ ├── ImageQueryBuilder.java │ │ │ ├── ImageQueryParser.java │ │ │ └── ImageScoreCache.java │ │ └── plugin │ │ └── image │ │ └── ImagePlugin.java └── resources │ ├── hash │ ├── LshBitSampling.obj │ └── lshHashFunctions.obj │ └── plugin-descriptor.properties └── test ├── java └── org │ └── elasticsearch │ └── plugin │ └── image │ └── test │ └── ImageIntegrationTests.java └── resources ├── log4j.properties └── mapping └── test-mapping.json /.github/workflows/gradle.yml: -------------------------------------------------------------------------------- 1 | name: Java CI 2 | 3 | on: [push] 4 | 5 | jobs: 6 | build: 7 | 8 | runs-on: ubuntu-latest 9 | 10 | steps: 11 | - uses: actions/checkout@v1 12 | - name: Set up JDK 1.8 13 | uses: actions/setup-java@v1 14 | with: 15 | java-version: 1.8 16 | - name: Build with Gradle 17 | run: ./gradlew build 18 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .idea/ 2 | .gradle/ 3 | *.iml 4 | work/ 5 | /data/ 6 | logs/ 7 | .DS_Store 8 | build/ 9 | target/ 10 | .local-execution-hints.log 11 | docs/html/ 12 | docs/build.log 13 | /tmp/ 14 | node_modules/ 15 | 16 | ## eclipse ignores (use 'mvn eclipse:eclipse' to build eclipse projects) 17 | ## The only configuration 
files which are not ignored are certain files in 18 | ## .settings (as listed below) since these files ensure common coding 19 | ## style across Eclipse and IDEA. 20 | ## Other files (.project, .classpath) should be generated through Maven which 21 | ## will correctly set the classpath based on the declared dependencies. 22 | .project 23 | .classpath 24 | eclipse-build 25 | */.project 26 | */.classpath 27 | */eclipse-build 28 | /.settings/ 29 | !/.settings/org.eclipse.core.resources.prefs 30 | !/.settings/org.eclipse.jdt.core.prefs 31 | !/.settings/org.eclipse.jdt.ui.prefs 32 | 33 | ## netbeans ignores 34 | nb-configuration.xml 35 | nbactions.xml 36 | 37 | /dependency-reduced-pom.xml 38 | 39 | ## test images 40 | /src/test/resources/image/ 41 | /bin/ 42 | /CHANGELOG.md 43 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: java 2 | 3 | jdk: 4 | - openjdk6 5 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kiwionly/elasticsearch-image/3e20cb769da9db55d2c2bf2fdd079b42c098b9dd/CHANGELOG.md -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 
15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 
48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. 
Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. 
You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 
123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. 
In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. 
We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright [yyyy] [name of copyright owner] 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 203 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## DEPRECATED 2 | 3 | ### Please note, this project is deprecated and no longer being maintained. This is because of my limited time to make it work on newer versions, because there is no very detailed official plug-in guideline for Elasticsearch, and because Elasticsearch code changes so fast that the code is usually not backward compatible. 4 | 5 | However, instead of implementing it as a plug-in, there is another way to do so [link](https://www.linkedin.com/pulse/hacking-elasticsearch-image-retrieval-ashwin-saval); the only downside is that you need to write more low-level code instead. 6 | 7 | ---------- 8 | 9 | 10 | Image Plugin for Elasticsearch 11 | ============================== 12 | 13 | The Image Plugin is a Content-Based Image Retrieval Plugin for Elasticsearch using [LIRE (Lucene Image Retrieval)](https://github.com/dermotte/lire). It allows users to index images and search for similar images. 14 | 15 | It adds an `image` field type and an `image` query. 
16 | 17 | To create the plugin, open cmd or shell at this directory, enter `gradlew plugin`. 18 | 19 | Once the build done, you can find the distribution in build/distribution directory, unzip this file to `%elasticsearch%/plugins/elasticsearch-image-x.x.x` where the x is the version number. 20 | 21 | 22 | 23 | | Image Plugin | elasticsearch | Release date | 24 | |---------------------------|-------------------|:------------:| 25 | | 2.4.1 | 2.4.1 | 2016-10-28 | 26 | | 2.4.0 | 2.4.0 | 2016-09-28 | 27 | | 2.3.4 | 2.3.4 | 2016-07-18 | 28 | | 2.3.3 | 2.3.3 | 2016-06-13 | 29 | | 2.3.2 | 2.3.2 | 2016-05-16 | 30 | | 2.2.0 | 2.2.0 | 2016-03-01 | 31 | | 2.1.1 | 2.1.1 | 2016-01-09 | 32 | | 1.3.0-SNAPSHOT (master) | 1.1.0 | | 33 | | 1.2.0 | 1.0.1 | 2014-03-20 | 34 | | 1.1.0 | 1.0.1 | 2014-03-13 | 35 | | 1.0.0 | 1.0.1 | 2014-03-05 | 36 | 37 | 38 | ## Developers: 39 | Kevin Wang 40 | 41 | Angelo Leto 42 | 43 | zengde 44 | 45 | kiwionly 46 | 47 | youqian 48 | 49 | 50 | ## Example 51 | #### Create Settings 52 | 53 | ```sh 54 | curl -XPUT 'localhost:9200/my_index' -d '{ 55 | "settings": { 56 | "number_of_shards": 5, 57 | "number_of_replicas": 1, 58 | "index.version.created": 1070499 59 | } 60 | }' 61 | ``` 62 | 63 | Since elasticsearch 2.2, that is a version checked, index version must set before version 2.0.0 beta 1. 64 | 65 | #### Create Mapping 66 | 67 | ```sh 68 | curl -XPUT 'localhost:9200/my_index/my_image_item/_mapping' -d '{ 69 | "my_image_item": { 70 | "properties": { 71 | "my_img": { 72 | "type": "image", 73 | "feature": { 74 | "CEDD": { 75 | "hash": ["BIT_SAMPLING"] 76 | }, 77 | "JCD": { 78 | "hash": ["BIT_SAMPLING", "LSH"] 79 | } 80 | }, 81 | "metadata": { 82 | "jpeg.image_width": { 83 | "type": "string", 84 | "store": "yes" 85 | }, 86 | "jpeg.image_height": { 87 | "type": "string", 88 | "store": "yes" 89 | } 90 | } 91 | } 92 | } 93 | } 94 | }' 95 | ``` 96 | `type` should be `image`. This is the type register by this plugin. 
**Mandatory** 97 | 98 | `feature` is a map of features for the index. You can only search what you specify, e.g. based on the example above, specifying `JCD` with `LSH` in the mapping allows searching for it, but you cannot search `CEDD` with `LSH` 99 | because the index mapping for `LSH` is not specified and created. If you do not specify a hash for a `feature`, it won't work. **Mandatory, at least one is required** 100 | 101 | `hash` can be set if you want to search on hash. **Mandatory** 102 | 103 | `metadata` is a map of metadata for the index; only those metadata will be indexed. See [Metadata](#metadata). **Optional** 104 | 105 | 106 | #### Index Image 107 | ```sh 108 | curl -XPOST 'localhost:9200/test/test' -d '{ 109 | "my_img": "... base64 encoded image ..." 110 | }' 111 | ``` 112 | 113 | #### Search Image 114 | ```sh 115 | curl -XPOST 'localhost:9200/test/test/_search' -d '{ 116 | "from": 0, 117 | "size": 3, 118 | "query": { 119 | "image": { 120 | "my_img": { 121 | "feature": "CEDD", 122 | "image": "... base64 encoded image to search ...", 123 | "hash": "BIT_SAMPLING", 124 | "boost": 2.1, 125 | "limit": 100 126 | } 127 | } 128 | } 129 | }' 130 | ``` 131 | `feature` should be one of the features in the mapping. See above. **Mandatory** 132 | 133 | `image` base64 of the image to search. **Optional if searching using an existing image** 134 | 135 | `hash` should be the same as the hash set in the mapping. See above. **Optional** 136 | 137 | `boost` score boost **Optional** 138 | 139 | 140 | #### Search Image using existing image in index 141 | ```sh 142 | curl -XPOST 'localhost:9200/test/test/_search' -d '{ 143 | "query": { 144 | "image": { 145 | "my_img": { 146 | "feature": "CEDD", 147 | "index": "test", 148 | "type": "test", 149 | "id": "image1", 150 | "hash": "BIT_SAMPLING" 151 | } 152 | } 153 | } 154 | }' 155 | ``` 156 | `index` the index to fetch the image from. Defaults to the current index. **Optional** 157 | 158 | `type` the type to fetch the image from. 
**Mandatory** 159 | 160 | `id` the id of the document to fetch the image from. **Mandatory** 161 | 162 | `field` the field specified as the path to fetch the image from. The example above is "my_img". **Optional** 163 | 164 | `routing` a custom routing value to be used when retrieving the external image doc. **Optional** 165 | 166 | ### Image query builder 167 | ```sh 168 | SearchRequestBuilder queryBuilder = searchClient.prepareSearch(INDEX) 169 | .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) 170 | .setTypes("Image") 171 | .setFrom(from) 172 | .setSize(size); 173 | 174 | ImageQueryBuilder query = new ImageQueryBuilder("img"); //image field 175 | query.feature(feature); 176 | query.hash(hash); 177 | query.lookupIndex(INDEX); 178 | query.lookupType("Image"); 179 | query.lookupId(itemId); 180 | ``` 181 | 182 | 183 | ### Metadata 184 | Metadata are extracted using [metadata-extractor](https://code.google.com/p/metadata-extractor/). See [SampleOutput](https://code.google.com/p/metadata-extractor/wiki/SampleOutput) for some examples of metadata. 185 | 186 | The field name in the index will be `directory.tag_name`, all lower case, and spaces become underscores (`_`). e.g. 
if the *Directory* is `JPEG` and *Tag Name* is `Image Height`, the field name will be `jpeg.image_height` 187 | 188 | 189 | 190 | ### Supported Image Formats 191 | Images are processed by Java ImageIO, supported formats can be found [here](http://docs.oracle.com/javase/7/docs/api/javax/imageio/package-summary.html) 192 | 193 | Additional formats can be supported by ImageIO plugins, for example [TwelveMonkeys](https://github.com/haraldk/TwelveMonkeys) 194 | 195 | 196 | ### Supported Features 197 | [`AUTO_COLOR_CORRELOGRAM`](https://code.google.com/p/lire/source/browse/trunk/src/main/java/net/semanticmetadata/lire/imageanalysis/AutoColorCorrelogram.java), [`BINARY_PATTERNS_PYRAMID`](https://code.google.com/p/lire/source/browse/trunk/src/main/java/net/semanticmetadata/lire/imageanalysis/BinaryPatternsPyramid.java), [`CEDD`](https://code.google.com/p/lire/source/browse/trunk/src/main/java/net/semanticmetadata/lire/imageanalysis/CEDD.java), [`SIMPLE_COLOR_HISTOGRAM`](https://code.google.com/p/lire/source/browse/trunk/src/main/java/net/semanticmetadata/lire/imageanalysis/SimpleColorHistogram.java), [`COLOR_LAYOUT`](https://code.google.com/p/lire/source/browse/trunk/src/main/java/net/semanticmetadata/lire/imageanalysis/ColorLayout.java), [`EDGE_HISTOGRAM`](https://code.google.com/p/lire/source/browse/trunk/src/main/java/net/semanticmetadata/lire/imageanalysis/EdgeHistogram.java), [`FCTH`](https://code.google.com/p/lire/source/browse/trunk/src/main/java/net/semanticmetadata/lire/imageanalysis/FCTH.java), [`GABOR`](https://code.google.com/p/lire/source/browse/trunk/src/main/java/net/semanticmetadata/lire/imageanalysis/Gabor.java), [`JCD`](https://code.google.com/p/lire/source/browse/trunk/src/main/java/net/semanticmetadata/lire/imageanalysis/JCD.java), [`JOINT_HISTOGRAM`](https://code.google.com/p/lire/source/browse/trunk/src/main/java/net/semanticmetadata/lire/imageanalysis/joint/JointHistogram.java), 
[`JPEG_COEFFICIENT_HISTOGRAM`](https://code.google.com/p/lire/source/browse/trunk/src/main/java/net/semanticmetadata/lire/imageanalysis/JpegCoefficientHistogram.java), [`LOCAL_BINARY_PATTERNS`](https://code.google.com/p/lire/source/browse/trunk/src/main/java/net/semanticmetadata/lire/imageanalysis/LocalBinaryPatterns.java), [`LUMINANCE_LAYOUT`](https://code.google.com/p/lire/source/browse/trunk/src/main/java/net/semanticmetadata/lire/imageanalysis/LuminanceLayout.java), [`OPPONENT_HISTOGRAM`](https://code.google.com/p/lire/source/browse/trunk/src/main/java/net/semanticmetadata/lire/imageanalysis/OpponentHistogram.java), [`PHOG`](https://code.google.com/p/lire/source/browse/trunk/src/main/java/net/semanticmetadata/lire/imageanalysis/PHOG.java), [`ROTATION_INVARIANT_LOCAL_BINARY_PATTERNS`](https://code.google.com/p/lire/source/browse/trunk/src/main/java/net/semanticmetadata/lire/imageanalysis/RotationInvariantLocalBinaryPatterns.java), [`SCALABLE_COLOR`](https://code.google.com/p/lire/source/browse/trunk/src/main/java/net/semanticmetadata/lire/imageanalysis/ScalableColor.java), [`TAMURA`](https://code.google.com/p/lire/source/browse/trunk/src/main/java/net/semanticmetadata/lire/imageanalysis/Tamura.java) 198 | 199 | 200 | ### Supported Hash Mode 201 | [`BIT_SAMPLING`](https://code.google.com/p/lire/source/browse/trunk/src/main/java/net/semanticmetadata/lire/indexing/hashing/BitSampling.java), [`LSH`](https://code.google.com/p/lire/source/browse/trunk/src/main/java/net/semanticmetadata/lire/indexing/hashing/LocalitySensitiveHashing.java) 202 | 203 | Hash will increase search speed with large data sets 204 | 205 | See [Large image data sets with LIRE ?some new numbers](http://www.semanticmetadata.net/2013/03/20/large-image-data-sets-with-lire-some-new-numbers/) 206 | 207 | 208 | ### Settings 209 | | Setting | Description | Default | 210 | |----------------------|-----------------|:-------:| 211 | | index.image.use_thread_pool | use multiple thread when multiple 
features are required | True | 212 | | index.image.ignore_metadata_error| ignore errors that happen while extracting metadata from the image | True | 213 | 214 | ## ChangeLog 215 | 216 | #### 2.4.1 (2016-10-28) 217 | - upgrade to 2.4.1 218 | - remove Maven support, because it is hard to remember to update the dependencies; if you use Maven for Eclipse, please install the Buildship Gradle plugin, it should work as expected. 219 | 220 | 221 | #### 2.4.0 (2016-09-28) 222 | - upgrade to 2.4.0 223 | 224 | #### 2.3.4 (2016-07-18) 225 | - upgrade to 2.3.4 226 | 227 | #### 2.3.3 (2016-06-13) 228 | - upgrade to 2.3.3 229 | 230 | 231 | #### 2.3.2 (2016-05-16) 232 | - fix a JCD feature bug, see [here](https://github.com/visuual/elasticsearch-image/commit/be80790ed23253faf677a8f336da6228e8e3fd82) 233 | - thanks youqian for the 2.3.2 patch. 234 | 235 | #### 2.2.0 (2016-03-01) 236 | - upgrade to lire 1.0b2. 237 | - all LIRE features supported. 238 | - index.image.use_thread_pool is optional. 239 | - index.version.created is mandatory in settings. 240 | - add gradle support. (Maven is no longer used) 241 | - simplify index and search by removing some parameters. 242 | - `limit` is no longer used; use pagination `from` and `size` from Elasticsearch instead. 243 | - remove ImageHashLimitQuery and ImageQuery; these 2 classes possibly no longer work 244 | (I could not make them work, and they are also possibly no longer valid for newer Elasticsearch versions). 245 | 246 | *Reindexing is needed if using a different version of LIRE. 
247 | 248 | #### 2.1.1 (2016-01-06) 249 | 250 | #### 1.2.0 (2014-03-20) 251 | 252 | - Use multi-thread when multiple features are required to improve index speed 253 | - Allow index metadata 254 | - Allow query by existing image in index 255 | 256 | #### 1.1.0 (2014-03-13) 257 | 258 | - Added `limit` in `image` query 259 | - Added plugin version in es-plugin.properties 260 | 261 | #### 1.0.0 (2014-03-05) 262 | 263 | - initial release -------------------------------------------------------------------------------- /build.gradle: -------------------------------------------------------------------------------- 1 | 2 | apply plugin: 'java' 3 | 4 | description = 'elasticsearch image' 5 | 6 | archivesBaseName = project.ext['project.name'] 7 | version = project.ext['project.version'] 8 | 9 | sourceCompatibility = 1.8 10 | 11 | jar.baseName = archivesBaseName 12 | 13 | task plugin(type: Zip) { 14 | 15 | from jar 16 | from 'lib' 17 | 18 | from('src/main/resources') { 19 | include '*.properties' 20 | } 21 | 22 | from (configurations.compile) { 23 | include 'metadata-extractor*.jar' 24 | include 'xmpcore*.jar' 25 | } 26 | 27 | into '/' 28 | } 29 | 30 | task wrapper(type: Wrapper) { 31 | 32 | description 'create gradle wrapper' 33 | gradleVersion = '2.14.1' 34 | } 35 | 36 | repositories { 37 | mavenCentral() 38 | } 39 | 40 | dependencies { 41 | 42 | compile fileTree(dir: 'lib', include: '*.jar') 43 | compile 'org.elasticsearch:elasticsearch:2.4.1' 44 | compile ('org.apache.lucene:lucene-core:5.5.2') { transitive = false } 45 | compile 'com.drewnoakes:metadata-extractor:2.8.1' 46 | compile 'com.adobe.xmp:xmpcore:5.1.2' 47 | 48 | testCompile 'junit:junit:4.12' 49 | testCompile 'org.hamcrest:hamcrest-core:1.3' 50 | testCompile 'org.apache.sanselan:sanselan:0.97-incubator' 51 | testCompile 'log4j:log4j:1.2.17' 52 | testCompile 'org.elasticsearch:elasticsearch:2.3.3:tests' 53 | 54 | 55 | } -------------------------------------------------------------------------------- 
/gradle.properties: -------------------------------------------------------------------------------- 1 | 2 | org.gradle.daemon = false 3 | 4 | project.name = elasticsearch-image 5 | project.version = 2.4.1 6 | -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kiwionly/elasticsearch-image/3e20cb769da9db55d2c2bf2fdd079b42c098b9dd/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | #Thu Jul 21 17:48:32 SGT 2016 2 | distributionBase=GRADLE_USER_HOME 3 | distributionPath=wrapper/dists 4 | zipStoreBase=GRADLE_USER_HOME 5 | zipStorePath=wrapper/dists 6 | distributionUrl=https\://services.gradle.org/distributions/gradle-2.14.1-bin.zip 7 | -------------------------------------------------------------------------------- /gradlew: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | ############################################################################## 4 | ## 5 | ## Gradle start up script for UN*X 6 | ## 7 | ############################################################################## 8 | 9 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 10 | DEFAULT_JVM_OPTS="" 11 | 12 | APP_NAME="Gradle" 13 | APP_BASE_NAME=`basename "$0"` 14 | 15 | # Use the maximum available, or set MAX_FD != -1 to use that value. 16 | MAX_FD="maximum" 17 | 18 | warn ( ) { 19 | echo "$*" 20 | } 21 | 22 | die ( ) { 23 | echo 24 | echo "$*" 25 | echo 26 | exit 1 27 | } 28 | 29 | # OS specific support (must be 'true' or 'false'). 
30 | cygwin=false 31 | msys=false 32 | darwin=false 33 | case "`uname`" in 34 | CYGWIN* ) 35 | cygwin=true 36 | ;; 37 | Darwin* ) 38 | darwin=true 39 | ;; 40 | MINGW* ) 41 | msys=true 42 | ;; 43 | esac 44 | 45 | # Attempt to set APP_HOME 46 | # Resolve links: $0 may be a link 47 | PRG="$0" 48 | # Need this for relative symlinks. 49 | while [ -h "$PRG" ] ; do 50 | ls=`ls -ld "$PRG"` 51 | link=`expr "$ls" : '.*-> \(.*\)$'` 52 | if expr "$link" : '/.*' > /dev/null; then 53 | PRG="$link" 54 | else 55 | PRG=`dirname "$PRG"`"/$link" 56 | fi 57 | done 58 | SAVED="`pwd`" 59 | cd "`dirname \"$PRG\"`/" >/dev/null 60 | APP_HOME="`pwd -P`" 61 | cd "$SAVED" >/dev/null 62 | 63 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar 64 | 65 | # Determine the Java command to use to start the JVM. 66 | if [ -n "$JAVA_HOME" ] ; then 67 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 68 | # IBM's JDK on AIX uses strange locations for the executables 69 | JAVACMD="$JAVA_HOME/jre/sh/java" 70 | else 71 | JAVACMD="$JAVA_HOME/bin/java" 72 | fi 73 | if [ ! -x "$JAVACMD" ] ; then 74 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME 75 | 76 | Please set the JAVA_HOME variable in your environment to match the 77 | location of your Java installation." 78 | fi 79 | else 80 | JAVACMD="java" 81 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 82 | 83 | Please set the JAVA_HOME variable in your environment to match the 84 | location of your Java installation." 85 | fi 86 | 87 | # Increase the maximum file descriptors if we can. 88 | if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then 89 | MAX_FD_LIMIT=`ulimit -H -n` 90 | if [ $? -eq 0 ] ; then 91 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then 92 | MAX_FD="$MAX_FD_LIMIT" 93 | fi 94 | ulimit -n $MAX_FD 95 | if [ $? 
-ne 0 ] ; then 96 | warn "Could not set maximum file descriptor limit: $MAX_FD" 97 | fi 98 | else 99 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" 100 | fi 101 | fi 102 | 103 | # For Darwin, add options to specify how the application appears in the dock 104 | if $darwin; then 105 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" 106 | fi 107 | 108 | # For Cygwin, switch paths to Windows format before running java 109 | if $cygwin ; then 110 | APP_HOME=`cygpath --path --mixed "$APP_HOME"` 111 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` 112 | JAVACMD=`cygpath --unix "$JAVACMD"` 113 | 114 | # We build the pattern for arguments to be converted via cygpath 115 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` 116 | SEP="" 117 | for dir in $ROOTDIRSRAW ; do 118 | ROOTDIRS="$ROOTDIRS$SEP$dir" 119 | SEP="|" 120 | done 121 | OURCYGPATTERN="(^($ROOTDIRS))" 122 | # Add a user-defined pattern to the cygpath arguments 123 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then 124 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" 125 | fi 126 | # Now convert the arguments - kludge to limit ourselves to /bin/sh 127 | i=0 128 | for arg in "$@" ; do 129 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` 130 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option 131 | 132 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition 133 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` 134 | else 135 | eval `echo args$i`="\"$arg\"" 136 | fi 137 | i=$((i+1)) 138 | done 139 | case $i in 140 | (0) set -- ;; 141 | (1) set -- "$args0" ;; 142 | (2) set -- "$args0" "$args1" ;; 143 | (3) set -- "$args0" "$args1" "$args2" ;; 144 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;; 145 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; 146 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; 147 | (7) set -- "$args0" "$args1" "$args2" "$args3" 
"$args4" "$args5" "$args6" ;; 148 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; 149 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; 150 | esac 151 | fi 152 | 153 | # Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules 154 | function splitJvmOpts() { 155 | JVM_OPTS=("$@") 156 | } 157 | eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS 158 | JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME" 159 | 160 | exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@" 161 | -------------------------------------------------------------------------------- /gradlew.bat: -------------------------------------------------------------------------------- 1 | @if "%DEBUG%" == "" @echo off 2 | @rem ########################################################################## 3 | @rem 4 | @rem Gradle startup script for Windows 5 | @rem 6 | @rem ########################################################################## 7 | 8 | @rem Set local scope for the variables with windows NT shell 9 | if "%OS%"=="Windows_NT" setlocal 10 | 11 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 12 | set DEFAULT_JVM_OPTS= 13 | 14 | set DIRNAME=%~dp0 15 | if "%DIRNAME%" == "" set DIRNAME=. 16 | set APP_BASE_NAME=%~n0 17 | set APP_HOME=%DIRNAME% 18 | 19 | @rem Find java.exe 20 | if defined JAVA_HOME goto findJavaFromJavaHome 21 | 22 | set JAVA_EXE=java.exe 23 | %JAVA_EXE% -version >NUL 2>&1 24 | if "%ERRORLEVEL%" == "0" goto init 25 | 26 | echo. 27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 28 | echo. 29 | echo Please set the JAVA_HOME variable in your environment to match the 30 | echo location of your Java installation. 
31 | 32 | goto fail 33 | 34 | :findJavaFromJavaHome 35 | set JAVA_HOME=%JAVA_HOME:"=% 36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 37 | 38 | if exist "%JAVA_EXE%" goto init 39 | 40 | echo. 41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 42 | echo. 43 | echo Please set the JAVA_HOME variable in your environment to match the 44 | echo location of your Java installation. 45 | 46 | goto fail 47 | 48 | :init 49 | @rem Get command-line arguments, handling Windows variants 50 | 51 | if not "%OS%" == "Windows_NT" goto win9xME_args 52 | if "%@eval[2+2]" == "4" goto 4NT_args 53 | 54 | :win9xME_args 55 | @rem Slurp the command line arguments. 56 | set CMD_LINE_ARGS= 57 | set _SKIP=2 58 | 59 | :win9xME_args_slurp 60 | if "x%~1" == "x" goto execute 61 | 62 | set CMD_LINE_ARGS=%* 63 | goto execute 64 | 65 | :4NT_args 66 | @rem Get arguments from the 4NT Shell from JP Software 67 | set CMD_LINE_ARGS=%$ 68 | 69 | :execute 70 | @rem Setup the command line 71 | 72 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 73 | 74 | @rem Execute Gradle 75 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% 76 | 77 | :end 78 | @rem End local scope for the variables with windows NT shell 79 | if "%ERRORLEVEL%"=="0" goto mainEnd 80 | 81 | :fail 82 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 83 | rem the _cmd.exe /c_ return code! 
84 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 85 | exit /b 1 86 | 87 | :mainEnd 88 | if "%OS%"=="Windows_NT" endlocal 89 | 90 | :omega 91 | -------------------------------------------------------------------------------- /lib/lire.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kiwionly/elasticsearch-image/3e20cb769da9db55d2c2bf2fdd079b42c098b9dd/lib/lire.jar -------------------------------------------------------------------------------- /src/main/assemblies/plugin.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | plugin 4 | 5 | zip 6 | 7 | false 8 | 9 | 10 | ${project.basedir}/src/main/resources/plugin-descriptor.properties 11 | / 12 | 13 | 14 | 15 | 16 | / 17 | true 18 | true 19 | 20 | org.elasticsearch:elasticsearch 21 | 22 | 23 | 24 | -------------------------------------------------------------------------------- /src/main/java/org/elasticsearch/ElasticsearchImageProcessException.java: -------------------------------------------------------------------------------- 1 | package org.elasticsearch; 2 | 3 | 4 | @SuppressWarnings("serial") 5 | public class ElasticsearchImageProcessException extends ElasticsearchException { 6 | 7 | public ElasticsearchImageProcessException(String msg) { 8 | super(msg); 9 | } 10 | 11 | public ElasticsearchImageProcessException(String msg, Throwable cause) { 12 | super(msg, cause); 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /src/main/java/org/elasticsearch/index/mapper/image/FeatureEnum.java: -------------------------------------------------------------------------------- 1 | package org.elasticsearch.index.mapper.image; 2 | 3 | 4 | import net.semanticmetadata.lire.imageanalysis.features.LireFeature; 5 | import net.semanticmetadata.lire.imageanalysis.features.global.*; 6 | import net.semanticmetadata.lire.imageanalysis.features.global.joint.*; 7 | import 
net.semanticmetadata.lire.imageanalysis.features.global.centrist.*; 8 | import net.semanticmetadata.lire.imageanalysis.features.global.spatialpyramid.*; 9 | 10 | /** 11 | * Features supported by LIRE 12 | * Subclass of {@link LireFeature} 13 | */ 14 | public enum FeatureEnum { 15 | 16 | AUTO_COLOR_CORRELOGRAM(AutoColorCorrelogram.class), 17 | BINARY_PATTERNS_PYRAMID(BinaryPatternsPyramid.class), 18 | CEDD(net.semanticmetadata.lire.imageanalysis.features.global.CEDD.class), 19 | EDGE_HISTOGRAM(EdgeHistogram.class), 20 | COLOR_LAYOUT(ColorLayout.class), 21 | FCTH(FCTH.class), 22 | FUZZY_COLOR_HISTOGRAM(FuzzyColorHistogram.class), 23 | FUZZY_OPPONENT_HISTOGRAM(FuzzyOpponentHistogram.class), 24 | GABOR(Gabor.class), 25 | JCD(JCD.class), 26 | JOINT_HISTOGRAM(JointHistogram.class), 27 | JPEG_COEFFICIENT_HISTOGRAM(JpegCoefficientHistogram.class), 28 | LOCAL_BINARY_PATTERNS(LocalBinaryPatterns.class), 29 | LOCAL_BINARY_PATTERNS_AND_OPPONENT(LocalBinaryPatternsAndOpponent.class), 30 | LUMINANCE_LAYOUT(LuminanceLayout.class), 31 | OPPONENT_HISTOGRAM(OpponentHistogram.class), 32 | PHOG(PHOG.class), 33 | RANK_AND_OPPONENT(RankAndOpponent.class), 34 | ROTATION_INVARIANT_LOCAL_BINARY_PATTERNS(RotationInvariantLocalBinaryPatterns.class), 35 | SCALABLE_COLOR(ScalableColor.class), 36 | SIMPLE_CENTRIST(SimpleCentrist.class), 37 | SIMPLE_COLOR_HISTOGRAM(SimpleColorHistogram.class), 38 | SPACC(SPACC.class), 39 | SPATIAL_PYRAMID_CENTRIST(SpatialPyramidCentrist.class), 40 | SPCEDD(SPCEDD.class), 41 | SPFCTH(SPFCTH.class), 42 | SPJCD(SPJCD.class), 43 | SPLBP(SPLBP.class), 44 | TAMURA(Tamura.class); 45 | 46 | private Class featureClass; 47 | 48 | FeatureEnum(Class featureClass) { 49 | this.featureClass = featureClass; 50 | } 51 | 52 | public Class getFeatureClass() { 53 | return featureClass; 54 | } 55 | 56 | public static FeatureEnum getByName(String name) { 57 | return valueOf(name.toUpperCase()); 58 | } 59 | 60 | } 61 | 
-------------------------------------------------------------------------------- /src/main/java/org/elasticsearch/index/mapper/image/HashEnum.java: -------------------------------------------------------------------------------- 1 | package org.elasticsearch.index.mapper.image; 2 | 3 | 4 | public enum HashEnum { 5 | BIT_SAMPLING, LSH; 6 | 7 | public static HashEnum getByName(String name) { 8 | return valueOf(name.toUpperCase()); 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /src/main/java/org/elasticsearch/index/mapper/image/ImageMapper.java: -------------------------------------------------------------------------------- 1 | package org.elasticsearch.index.mapper.image; 2 | 3 | import com.drew.imaging.ImageMetadataReader; 4 | import com.drew.imaging.ImageProcessingException; 5 | import com.drew.metadata.Directory; 6 | import com.drew.metadata.Metadata; 7 | import com.drew.metadata.Tag; 8 | 9 | import net.semanticmetadata.lire.imageanalysis.features.Extractor; 10 | import net.semanticmetadata.lire.imageanalysis.features.LireFeature; 11 | import net.semanticmetadata.lire.indexers.hashing.BitSampling; 12 | import net.semanticmetadata.lire.indexers.hashing.LocalitySensitiveHashing; 13 | import net.semanticmetadata.lire.utils.ImageUtils; 14 | import net.semanticmetadata.lire.utils.SerializationUtils; 15 | import org.apache.lucene.document.BinaryDocValuesField; 16 | import org.apache.lucene.document.Field; 17 | import org.apache.lucene.index.IndexOptions; 18 | import org.apache.lucene.util.BytesRef; 19 | import org.elasticsearch.ElasticsearchGenerationException; 20 | import org.elasticsearch.ElasticsearchImageProcessException; 21 | import org.elasticsearch.common.collect.ImmutableOpenMap; 22 | import com.google.common.collect.Lists; 23 | import com.google.common.collect.MapMaker; 24 | import com.google.common.collect.Maps; 25 | import com.carrotsearch.hppc.cursors.ObjectObjectCursor; 26 | import 
org.elasticsearch.common.io.stream.ByteBufferStreamInput; 27 | import org.elasticsearch.common.logging.ESLogger; 28 | import org.elasticsearch.common.logging.ESLoggerFactory; 29 | import org.elasticsearch.common.settings.Settings; 30 | import org.elasticsearch.common.xcontent.XContentBuilder; 31 | import org.elasticsearch.common.xcontent.XContentParser; 32 | import org.elasticsearch.index.mapper.*; 33 | import org.elasticsearch.index.mapper.MappedFieldType.Names; 34 | import org.elasticsearch.threadpool.ThreadPool; 35 | 36 | import javax.imageio.ImageIO; 37 | import java.awt.image.BufferedImage; 38 | import java.io.IOException; 39 | import java.nio.ByteBuffer; 40 | import java.util.List; 41 | import java.util.Map; 42 | import java.util.concurrent.CountDownLatch; 43 | import java.util.concurrent.Executor; 44 | 45 | import static org.elasticsearch.index.mapper.MapperBuilders.binaryField; 46 | import static org.elasticsearch.index.mapper.MapperBuilders.stringField; 47 | 48 | 49 | 50 | public class ImageMapper extends FieldMapper { 51 | 52 | private static ESLogger logger = ESLoggerFactory.getLogger(ImageMapper.class.getName()); 53 | 54 | public static final int MAX_IMAGE_DIMENSION = 1024; 55 | 56 | public static final String CONTENT_TYPE = "image"; 57 | 58 | public static final String HASH = "hash"; 59 | 60 | public static final String FEATURE = "feature"; 61 | public static final String METADATA = "metadata"; 62 | 63 | public static final String BIT_SAMPLING_FILE = "/hash/LshBitSampling.obj"; 64 | public static final String LSH_HASH_FILE = "/hash/lshHashFunctions.obj"; 65 | 66 | static { 67 | try { 68 | BitSampling.readHashFunctions(ImageMapper.class.getResourceAsStream(BIT_SAMPLING_FILE)); 69 | LocalitySensitiveHashing.readHashFunctions(ImageMapper.class.getResourceAsStream(LSH_HASH_FILE)); 70 | } catch (IOException e) { 71 | logger.error("Failed to initialize hash function", e); 72 | } 73 | } 74 | 75 | public static class Defaults { 76 | 77 | public static final 
ImageFieldType FIELD_TYPE = new ImageFieldType(); 78 | static { 79 | FIELD_TYPE.setIndexOptions(IndexOptions.DOCS); 80 | FIELD_TYPE.setTokenized(false); 81 | FIELD_TYPE.freeze(); 82 | } 83 | } 84 | 85 | static final class ImageFieldType extends MappedFieldType { 86 | 87 | public ImageFieldType() {} 88 | 89 | protected ImageFieldType(ImageMapper.ImageFieldType ref) { 90 | super(ref); 91 | } 92 | 93 | @Override 94 | public ImageMapper.ImageFieldType clone() { 95 | return new ImageMapper.ImageFieldType(this); 96 | } 97 | 98 | @Override 99 | public String typeName() { 100 | return CONTENT_TYPE; 101 | } 102 | 103 | public String value(Object value) { 104 | return value == null?null:value.toString(); 105 | } 106 | } 107 | 108 | public static class Builder extends FieldMapper.Builder { 109 | 110 | private ThreadPool threadPool; 111 | 112 | private Map> features = Maps.newHashMap(); 113 | 114 | private Map> metadataBuilders = Maps.newHashMap(); 115 | 116 | public Builder(String name, ThreadPool threadPool) { 117 | super(name, new ImageFieldType(), new ImageFieldType()); 118 | this.threadPool = threadPool; 119 | this.builder = this; 120 | } 121 | 122 | public Builder addFeature(FeatureEnum featureEnum, Map featureMap) { 123 | this.features.put(featureEnum, featureMap); 124 | return this; 125 | } 126 | 127 | public Builder addMetadata(String metadata, FieldMapper.Builder metadataBuilder) { 128 | this.metadataBuilders.put(metadata, metadataBuilder); 129 | return this; 130 | } 131 | 132 | @Override 133 | @SuppressWarnings("unchecked") 134 | public ImageMapper build(BuilderContext context) { 135 | 136 | Map featureMappers = Maps.newHashMap(); 137 | Map hashMappers = Maps.newHashMap(); 138 | Map metadataMappers = Maps.newHashMap(); 139 | 140 | context.path().add(name); 141 | // add feature and hash mappers 142 | for (FeatureEnum featureEnum : features.keySet()) { 143 | Map featureMap = features.get(featureEnum); 144 | String featureName = featureEnum.name(); 145 | 146 | // add 
feature mapper 147 | featureMappers.put(featureName, binaryField(featureName).store(true).includeInAll(false).index(false).build(context)); 148 | 149 | 150 | // add hash mapper if hash is required 151 | if (featureMap.containsKey(HASH)){ 152 | List hashes = (List) featureMap.get(HASH); 153 | for (String h : hashes) { 154 | String hashFieldName = featureName + "." + HASH + "." + h; 155 | hashMappers.put(hashFieldName, stringField(hashFieldName).store(true).includeInAll(false).index(true).build(context)); 156 | } 157 | } 158 | } 159 | 160 | // add metadata mappers 161 | context.path().add(METADATA); 162 | for (Map.Entry> entry : metadataBuilders.entrySet()){ 163 | String metadataName = entry.getKey(); 164 | FieldMapper.Builder metadataBuilder = entry.getValue(); 165 | metadataMappers.put(metadataName, (FieldMapper) metadataBuilder.build(context)); 166 | } 167 | 168 | context.path().remove(); // remove METADATA 169 | context.path().remove(); // remove name 170 | 171 | MappedFieldType defaultFieldType = Defaults.FIELD_TYPE.clone(); 172 | defaultFieldType.setNames(new Names(name)); 173 | 174 | fieldType.setNames(new Names(name)); 175 | 176 | return new ImageMapper(name, threadPool, context.indexSettings(), features, featureMappers, hashMappers, metadataMappers, 177 | fieldType, defaultFieldType, multiFieldsBuilder.build(this, context), copyTo); 178 | } 179 | 180 | @Override 181 | public String toString() { 182 | return "Builder [threadPool=" + threadPool + ", features=" + features + ", metadataBuilders=" 183 | + metadataBuilders + "]"; 184 | } 185 | 186 | } 187 | 188 | public static class TypeParser implements Mapper.TypeParser { 189 | 190 | private ThreadPool threadPool; 191 | 192 | public TypeParser() { } 193 | 194 | public TypeParser(ThreadPool threadPool) { 195 | this.threadPool = threadPool; 196 | } 197 | 198 | @Override 199 | @SuppressWarnings("unchecked") 200 | public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws 
MapperParsingException { 201 | 202 | ImageMapper.Builder builder = new ImageMapper.Builder(name, threadPool); 203 | Map features = Maps.newHashMap(); 204 | Map metadatas = Maps.newHashMap(); 205 | 206 | for (Map.Entry entry : node.entrySet()) { 207 | String fieldName = entry.getKey(); 208 | Object fieldNode = entry.getValue(); 209 | 210 | if (FEATURE.equals(fieldName)) { 211 | features = (Map) fieldNode; 212 | } else if (METADATA.equals(fieldName)) { 213 | metadatas = (Map) fieldNode; 214 | } 215 | } 216 | 217 | if (features == null || features.isEmpty()) { 218 | throw new ElasticsearchGenerationException("Feature not found"); 219 | } 220 | 221 | // process features 222 | for (Map.Entry entry : features.entrySet()) { 223 | String feature = entry.getKey(); 224 | Map featureMap = (Map) entry.getValue(); 225 | 226 | if(featureMap.isEmpty()) { 227 | throw new ElasticsearchImageProcessException("hash is not specified."); 228 | } 229 | 230 | // process hash for each feature; getByName() is case-insensitive, matching FeatureEnum handling below 231 | if (featureMap.containsKey(HASH)) { 232 | Object hashVal = featureMap.get(HASH); 233 | List hashes = Lists.newArrayList(); 234 | if (hashVal instanceof List) { 235 | for (String h : (List)hashVal) { 236 | hashes.add(HashEnum.getByName(h).name()); 237 | } 238 | } else if (hashVal instanceof String) { 239 | hashes.add(HashEnum.getByName((String) hashVal).name()); 240 | } else { 241 | throw new ElasticsearchGenerationException("Malformed hash value"); 242 | } 243 | featureMap.put(HASH, hashes); 244 | } 245 | 246 | FeatureEnum featureEnum = FeatureEnum.getByName(feature); 247 | builder.addFeature(featureEnum, featureMap); 248 | } 249 | 250 | 251 | // process metadata 252 | for (Map.Entry entry : metadatas.entrySet()) { 253 | String metadataName = entry.getKey(); 254 | Map metadataMap = (Map) entry.getValue(); 255 | String fieldType = (String) metadataMap.get("type"); 256 | builder.addMetadata(metadataName, (FieldMapper.Builder) parserContext.typeParser(fieldType).parse(metadataName, metadataMap,
parserContext)); 257 | } 258 | 259 | return builder; 260 | } 261 | } 262 | 263 | private final String name; 264 | 265 | private final ThreadPool threadPool; 266 | 267 | private final Settings settings; 268 | 269 | private volatile ImmutableOpenMap> features = ImmutableOpenMap.of(); 270 | 271 | private volatile ImmutableOpenMap featureMappers = ImmutableOpenMap.of(); 272 | 273 | private volatile ImmutableOpenMap hashMappers = ImmutableOpenMap.of(); 274 | 275 | private volatile ImmutableOpenMap metadataMappers = ImmutableOpenMap.of(); 276 | 277 | 278 | public ImageMapper(String name, ThreadPool threadPool, Settings indexSettings, Map> features, Map featureMappers, 279 | Map hashMappers, Map metadataMappers, 280 | MappedFieldType type, MappedFieldType defaultFieldType,MultiFields multiFields, CopyTo copyTo) { 281 | super(name, type, defaultFieldType, indexSettings, multiFields, copyTo); 282 | this.name = name; 283 | this.threadPool = threadPool; 284 | this.settings = indexSettings; 285 | if (features != null) { 286 | this.features = ImmutableOpenMap.builder(this.features).putAll(features).build(); 287 | } 288 | if (featureMappers != null) { 289 | this.featureMappers = ImmutableOpenMap.builder(this.featureMappers).putAll(featureMappers).build(); 290 | } 291 | if (hashMappers != null) { 292 | this.hashMappers = ImmutableOpenMap.builder(this.hashMappers).putAll(hashMappers).build(); 293 | } 294 | if (metadataMappers != null) { 295 | this.metadataMappers = ImmutableOpenMap.builder(this.metadataMappers).putAll(metadataMappers).build(); 296 | } 297 | } 298 | 299 | @Override 300 | public String name() { 301 | return name; 302 | } 303 | 304 | @Override 305 | @SuppressWarnings("unchecked") 306 | public Mapper parse(ParseContext context) throws IOException { 307 | byte[] content = null; 308 | 309 | XContentParser parser = context.parser(); 310 | XContentParser.Token token = parser.currentToken(); 311 | if (token == XContentParser.Token.VALUE_STRING) { 312 | content = 
parser.binaryValue(); 313 | } 314 | 315 | if (content == null) { 316 | throw new MapperParsingException("No content is provided."); 317 | } 318 | 319 | final Boolean useThreadPool = settings.getAsBoolean("index.image.use_thread_pool", true); 320 | final Boolean ignoreMetadataError = settings.getAsBoolean("index.image.ignore_metadata_error", true); 321 | 322 | BufferedImage img = ImageIO.read(new ByteBufferStreamInput(ByteBuffer.wrap(content))); 323 | if (Math.max(img.getHeight(), img.getWidth()) > MAX_IMAGE_DIMENSION) { 324 | img = ImageUtils.scaleImage(img, MAX_IMAGE_DIMENSION); 325 | } 326 | 327 | final BufferedImage finalImg = img; 328 | 329 | final Map featureExtractMap = new MapMaker().makeMap(); 330 | 331 | // have multiple features, use ThreadPool to process each feature 332 | if (useThreadPool && features.size() > 1) { 333 | final CountDownLatch latch = new CountDownLatch(features.size()); 334 | Executor executor = threadPool.generic(); 335 | 336 | for (ObjectObjectCursor> cursor : features) { 337 | final FeatureEnum featureEnum = cursor.key; 338 | executor.execute(new Runnable() { 339 | @Override 340 | public void run() { 341 | try { 342 | LireFeature lireFeature = featureEnum.getFeatureClass().newInstance(); 343 | ((Extractor)lireFeature).extract(finalImg); 344 | featureExtractMap.put(featureEnum, lireFeature); 345 | } catch (Throwable e){ 346 | logger.error("Failed to extract feature from image", e); 347 | } finally { 348 | latch.countDown(); 349 | } 350 | } 351 | }); 352 | } 353 | try { 354 | latch.await(); 355 | } catch (InterruptedException e) { 356 | logger.debug("Interrupted extract feature from image", e); 357 | Thread.currentThread().interrupt(); 358 | } 359 | } 360 | 361 | 362 | for (ObjectObjectCursor> cursor : features) { 363 | FeatureEnum featureEnum = cursor.key; 364 | Map featureMap = cursor.value; 365 | 366 | try { 367 | LireFeature lireFeature; 368 | if (featureExtractMap.containsKey(featureEnum)) { // already processed 369 | lireFeature = 
featureExtractMap.get(featureEnum); 370 | } else { 371 | lireFeature = featureEnum.getFeatureClass().newInstance(); 372 | ((Extractor)lireFeature).extract(img); 373 | } 374 | byte[] parsedContent = lireFeature.getByteArrayRepresentation(); 375 | 376 | FieldMapper featureMapper = featureMappers.get(featureEnum.name()); 377 | featureMapper.parse(context.createExternalValueContext(parsedContent)); 378 | context.doc().add(new BinaryDocValuesField(name() + "." + featureEnum.name(), new BytesRef(parsedContent))); 379 | 380 | // add hash if required 381 | if (featureMap.containsKey(HASH)) { 382 | List hashes = (List) featureMap.get(HASH); 383 | for (String h : hashes) { 384 | HashEnum hashEnum = HashEnum.valueOf(h); 385 | int[] hashVals = null; 386 | if (hashEnum.equals(HashEnum.BIT_SAMPLING)) { 387 | hashVals = BitSampling.generateHashes(lireFeature.getFeatureVector()); 388 | } else if (hashEnum.equals(HashEnum.LSH)) { 389 | hashVals = LocalitySensitiveHashing.generateHashes(lireFeature.getFeatureVector()); 390 | } 391 | 392 | String mapperName = featureEnum.name() + "." + HASH + "." + h; 393 | FieldMapper hashMapper = hashMappers.get(mapperName); 394 | 395 | hashMapper.parse(context.createExternalValueContext(SerializationUtils.arrayToString(hashVals))); 396 | } 397 | } 398 | 399 | } catch (Exception e) { 400 | throw new ElasticsearchImageProcessException("Failed to index feature " + featureEnum.name(), e); 401 | } 402 | } 403 | 404 | // process metadata if required 405 | if (!metadataMappers.isEmpty()) { 406 | try { 407 | Metadata metadata = ImageMetadataReader.readMetadata(new ByteBufferStreamInput(ByteBuffer.wrap(content))); 408 | for (Directory directory : metadata.getDirectories()) { 409 | for (Tag tag : directory.getTags()) { 410 | String metadataName = tag.getDirectoryName().toLowerCase().replaceAll("\\s+", "_") + "." 
+ 411 | tag.getTagName().toLowerCase().replaceAll("\\s+", "_"); 412 | if (metadataMappers.containsKey(metadataName)) { 413 | FieldMapper mapper = metadataMappers.get(metadataName); 414 | mapper.parse(context.createExternalValueContext(tag.getDescription())); 415 | } 416 | } 417 | } 418 | } catch (ImageProcessingException e) { 419 | logger.error("Failed to extract metadata from image", e); 420 | if (!ignoreMetadataError) { 421 | throw new ElasticsearchImageProcessException("Failed to extract metadata from image", e); 422 | } 423 | } 424 | } 425 | 426 | return null; 427 | } 428 | 429 | @Override 430 | protected void parseCreateField(ParseContext parseContext, List fields) throws IOException { 431 | 432 | } 433 | 434 | @Override 435 | protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { 436 | // ignore this for now 437 | } 438 | 439 | @Override 440 | public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { 441 | builder.startObject(name); 442 | 443 | builder.field("type", CONTENT_TYPE); 444 | 445 | builder.startObject(FEATURE); 446 | for (ObjectObjectCursor> cursor : features) { 447 | builder.field(cursor.key.name(), cursor.value); 448 | } 449 | builder.endObject(); 450 | 451 | builder.startObject(METADATA); 452 | for (ObjectObjectCursor cursor : metadataMappers) { 453 | cursor.value.toXContent(builder, params); 454 | } 455 | builder.endObject(); 456 | 457 | return builder.endObject(); 458 | } 459 | 460 | @Override 461 | protected String contentType() { 462 | return CONTENT_TYPE; 463 | } 464 | 465 | } 466 | -------------------------------------------------------------------------------- /src/main/java/org/elasticsearch/index/query/image/AbstractImageScorer.java: -------------------------------------------------------------------------------- 1 | package org.elasticsearch.index.query.image; 2 | 3 | import org.apache.lucene.index.BinaryDocValues; 4 | import org.apache.lucene.index.IndexReader; 5 | import 
org.apache.lucene.index.LeafReader; 6 | import org.apache.lucene.search.Scorer; 7 | import org.apache.lucene.search.Weight; 8 | import org.apache.lucene.util.BytesRef; 9 | import org.elasticsearch.ElasticsearchImageProcessException; 10 | 11 | import net.semanticmetadata.lire.imageanalysis.features.LireFeature; 12 | 13 | import java.io.IOException; 14 | 15 | import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS; 16 | 17 | /** 18 | * Calculate score for each image 19 | * score = (1 / distance) * boost 20 | */ 21 | public abstract class AbstractImageScorer extends Scorer { 22 | private final String luceneFieldName; 23 | private final LireFeature lireFeature; 24 | private final IndexReader reader; 25 | private final float boost; 26 | private BinaryDocValues binaryDocValues; 27 | 28 | protected AbstractImageScorer(Weight weight, String luceneFieldName, LireFeature lireFeature, IndexReader reader, 29 | float boost) { 30 | super(weight); 31 | this.luceneFieldName = luceneFieldName; 32 | this.lireFeature = lireFeature; 33 | this.reader = reader; 34 | this.boost = boost; 35 | } 36 | 37 | @Override 38 | public float score() throws IOException { 39 | assert docID() != NO_MORE_DOCS; 40 | 41 | if (binaryDocValues == null) { 42 | LeafReader atomicReader = (LeafReader) reader; 43 | binaryDocValues = atomicReader.getBinaryDocValues(luceneFieldName); 44 | } 45 | 46 | try { 47 | BytesRef bytesRef = binaryDocValues.get(docID()); 48 | LireFeature docFeature = lireFeature.getClass().newInstance(); 49 | docFeature.setByteArrayRepresentation(bytesRef.bytes, bytesRef.offset, bytesRef.length); 50 | 51 | double distance = lireFeature.getDistance(docFeature); 52 | double score; 53 | if (Double.compare(distance, 1.0f) <= 0) { // distance less than 1, consider as same image 54 | score = 2f - distance; 55 | } else { 56 | score = 1 / distance; 57 | } 58 | return (float)score * boost; 59 | } catch (Exception e) { 60 | throw new ElasticsearchImageProcessException("Failed to 
calculate score", e); 61 | } 62 | } 63 | 64 | @Override 65 | public int freq() { 66 | return 1; 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /src/main/java/org/elasticsearch/index/query/image/ImageHashQuery.java: -------------------------------------------------------------------------------- 1 | package org.elasticsearch.index.query.image; 2 | 3 | import java.io.IOException; 4 | import java.util.ArrayList; 5 | import java.util.List; 6 | import java.util.Set; 7 | 8 | import org.apache.lucene.index.*; 9 | import org.apache.lucene.search.*; 10 | import org.apache.lucene.util.ToStringUtils; 11 | import net.semanticmetadata.lire.imageanalysis.features.LireFeature; 12 | 13 | import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS; 14 | 15 | /** 16 | * Copied from {@link TermQuery}, query by hash first and only calculate score for matching docs 17 | */ 18 | public class ImageHashQuery extends Query { 19 | private final Term term; 20 | 21 | private String luceneFieldName; 22 | private LireFeature lireFeature; 23 | private ImageScoreCache imageScoreCache; 24 | 25 | public ImageHashQuery(Term t, String luceneFieldName, LireFeature lireFeature, ImageScoreCache imageScoreCache, float boost) { 26 | this.term = t; 27 | this.luceneFieldName = luceneFieldName; 28 | this.lireFeature = lireFeature; 29 | this.imageScoreCache = imageScoreCache; 30 | setBoost(boost); 31 | } 32 | 33 | @Override 34 | public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { 35 | 36 | final IndexReaderContext context = searcher.getTopReaderContext(); 37 | final TermContext termState = TermContext.build(context, term); 38 | 39 | return new ImageHashWeight(searcher, needsScores, termState); 40 | } 41 | 42 | @Override 43 | public boolean equals(Object o) { 44 | if (!(o instanceof ImageHashQuery)) 45 | return false; 46 | ImageHashQuery other = (ImageHashQuery)o; 47 | return (this.getBoost() == 
other.getBoost()) 48 | && this.term.equals(other.term) 49 | & luceneFieldName.equals(luceneFieldName) 50 | && lireFeature.equals(lireFeature); 51 | } 52 | 53 | @Override 54 | public int hashCode() { 55 | int result = super.hashCode(); 56 | result = 31 * result + term.hashCode(); 57 | result = 31 * result + luceneFieldName.hashCode(); 58 | result = 31 * result + lireFeature.hashCode(); 59 | result = Float.floatToIntBits(getBoost()) ^ result; 60 | return result; 61 | } 62 | 63 | @Override 64 | public String toString(String field) { 65 | StringBuilder buffer = new StringBuilder(); 66 | if (!term.field().equals(field)) { 67 | buffer.append(term.field()); 68 | buffer.append(":"); 69 | } 70 | buffer.append(term.text()); 71 | buffer.append(";"); 72 | buffer.append(luceneFieldName); 73 | buffer.append(","); 74 | buffer.append(lireFeature.getClass().getSimpleName()); 75 | buffer.append(ToStringUtils.boost(getBoost())); 76 | return buffer.toString(); 77 | } 78 | 79 | 80 | final class ImageHashWeight extends Weight 81 | { 82 | private final TermContext termStates; 83 | 84 | public ImageHashWeight(IndexSearcher searcher, boolean needsScores, TermContext termStates) throws IOException { 85 | super(ImageHashQuery.this); 86 | assert termStates != null : "TermContext must not be null"; 87 | this.termStates = termStates; 88 | } 89 | 90 | @Override 91 | public String toString() { return "weight(" + ImageHashQuery.this + ")"; } 92 | 93 | @Override 94 | public float getValueForNormalization() { 95 | return 1f; 96 | } 97 | 98 | @Override 99 | public void normalize(float queryNorm, float topLevelBoost) { 100 | } 101 | 102 | @Override 103 | public Scorer scorer(LeafReaderContext context) throws IOException { 104 | assert termStates.topReaderContext == ReaderUtil.getTopLevelContext(context) : "The top-reader used to create Weight (" + termStates.topReaderContext + ") is not the same as the current reader's top-reader (" + ReaderUtil.getTopLevelContext(context); 105 | final TermsEnum 
        /**
         * Returns a {@link TermsEnum} positioned exactly at this query's term for
         * the given leaf reader, or {@code null} if the term does not occur in
         * that segment.
         */
        private TermsEnum getTermsEnum(LeafReaderContext context) throws IOException {
            // Per-segment term state captured when the Weight was built
            // (TermContext.build in createWeight).
            final TermState state = termStates.get(context.ord);
            if (state == null) { // term is not present in that reader
                assert termNotInReader(context.reader(), term) : "no termstate found but term exists in reader term=" + term;
                return null;
            }
            final TermsEnum termsEnum = context.reader().terms(term.field()).iterator();
            // Seek with the cached state to avoid a second term-dictionary lookup.
            termsEnum.seekExact(term.bytes(), state);
            return termsEnum;
        }
    /**
     * Scorer over the postings of a single hash term. The actual score is the
     * image-distance based value computed by {@link AbstractImageScorer}; this
     * class only adds per-document score caching so a document matched by
     * several hash clauses of the surrounding BooleanQuery is scored once.
     */
    final class ImageHashScorer extends AbstractImageScorer
    {
        private final PostingsEnum docsEnum;
        private final IndexReader reader;

        ImageHashScorer(Weight weight, PostingsEnum td, IndexReader reader) {
            super(weight, luceneFieldName, lireFeature, reader, ImageHashQuery.this.getBoost());
            this.docsEnum = td;
            this.reader = reader;
        }

        @Override
        public int docID() {
            return docsEnum.docID();
        }

        @Override
        public float score() throws IOException {
            assert docID() != NO_MORE_DOCS;
            int docId = docID();
            // Cache key combines the reader identity with the doc id.
            // NOTE(review): reader.toString() is assumed to be unique and stable
            // per (segment) reader instance — confirm for the Lucene version used.
            String cacheKey = reader.toString() + ":" + docId;
            if (imageScoreCache.getScore(cacheKey) != null) {
                return 0f; // BooleanScorer will add all score together, return 0 for docs already processed
            }
            float score = super.score();
            imageScoreCache.setScore(cacheKey, score);
            return score;
        }

        @Override
        public DocIdSetIterator iterator() {
            // added for lucene 5.5.0
            // Plain delegation to the postings enum; no additional filtering.
            return new DocIdSetIterator() {

                @Override
                public int docID() {
                    return docsEnum.docID();
                }

                @Override
                public int nextDoc() throws IOException {
                    return docsEnum.nextDoc();
                }

                @Override
                public int advance(int target) throws IOException {
                    return docsEnum.advance(target);
                }

                @Override
                public long cost() {
                    return docsEnum.cost();
                }
            };
        }
    }
64 | return this; 65 | } 66 | 67 | public ImageQueryBuilder lookupId(String lookupId) { 68 | this.lookupId = lookupId; 69 | return this; 70 | } 71 | 72 | public ImageQueryBuilder lookupRouting(String lookupRouting) { 73 | this.lookupRouting = lookupRouting; 74 | return this; 75 | } 76 | 77 | @Override 78 | public ImageQueryBuilder boost(float boost) { 79 | this.boost = boost; 80 | return this; 81 | } 82 | 83 | @Override 84 | protected void doXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { 85 | builder.startObject(ImageQueryParser.NAME); 86 | 87 | builder.startObject(fieldName); 88 | builder.field("feature", feature); 89 | 90 | if (image != null) { 91 | builder.field("image", image); 92 | } 93 | 94 | if (lookupIndex != null) { 95 | builder.field("index", lookupIndex); 96 | } 97 | 98 | builder.field("type", lookupType); 99 | builder.field("field", lookupField); 100 | builder.field("id", lookupId); 101 | 102 | if (lookupRouting != null) { 103 | builder.field("routing", lookupRouting); 104 | } 105 | 106 | if (hash != null) { 107 | builder.field("hash", hash); 108 | } 109 | 110 | if (boost != -1) { 111 | builder.field("boost", boost); 112 | } 113 | 114 | builder.endObject(); 115 | 116 | builder.endObject(); 117 | } 118 | 119 | 120 | } 121 | -------------------------------------------------------------------------------- /src/main/java/org/elasticsearch/index/query/image/ImageQueryParser.java: -------------------------------------------------------------------------------- 1 | package org.elasticsearch.index.query.image; 2 | 3 | 4 | import net.semanticmetadata.lire.imageanalysis.features.Extractor; 5 | import net.semanticmetadata.lire.imageanalysis.features.LireFeature; 6 | import net.semanticmetadata.lire.indexers.hashing.BitSampling; 7 | import net.semanticmetadata.lire.indexers.hashing.LocalitySensitiveHashing; 8 | import net.semanticmetadata.lire.utils.ImageUtils; 9 | import org.apache.lucene.index.Term; 10 | import 
org.apache.lucene.search.BooleanClause; 11 | import org.apache.lucene.search.BooleanQuery; 12 | import org.apache.lucene.search.Query; 13 | import org.apache.lucene.util.BytesRef; 14 | import org.elasticsearch.ElasticsearchImageProcessException; 15 | import org.elasticsearch.action.get.GetRequest; 16 | import org.elasticsearch.action.get.GetResponse; 17 | import org.elasticsearch.client.Client; 18 | import org.elasticsearch.common.inject.Inject; 19 | import org.elasticsearch.common.io.stream.ByteBufferStreamInput; 20 | import org.elasticsearch.common.xcontent.XContentParser; 21 | import org.elasticsearch.index.get.GetField; 22 | import org.elasticsearch.index.mapper.image.FeatureEnum; 23 | import org.elasticsearch.index.mapper.image.HashEnum; 24 | import org.elasticsearch.index.mapper.image.ImageMapper; 25 | import org.elasticsearch.index.query.QueryParseContext; 26 | import org.elasticsearch.index.query.QueryParser; 27 | import org.elasticsearch.index.query.QueryParsingException; 28 | 29 | import javax.imageio.ImageIO; 30 | import java.awt.image.BufferedImage; 31 | import java.io.IOException; 32 | import java.nio.ByteBuffer; 33 | 34 | public class ImageQueryParser implements QueryParser { 35 | 36 | public static final String NAME = "image"; 37 | 38 | private Client client; 39 | 40 | @Inject 41 | public ImageQueryParser(Client client) { 42 | this.client = client; 43 | } 44 | 45 | @Override 46 | public String[] names() { 47 | return new String[] {NAME}; 48 | } 49 | 50 | @Override 51 | public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException { 52 | XContentParser parser = parseContext.parser(); 53 | 54 | XContentParser.Token token = parser.nextToken(); 55 | 56 | if (token != XContentParser.Token.FIELD_NAME) { 57 | throw new QueryParsingException(parseContext, "[image] query malformed, no field"); 58 | } 59 | 60 | String fieldName = parser.currentName(); 61 | FeatureEnum featureEnum = null; 62 | byte[] image = null; 63 | HashEnum 
hashEnum = null; 64 | float boost = 1.0f; 65 | 66 | String lookupIndex = parseContext.index().name(); 67 | String lookupType = null; 68 | String lookupId = null; 69 | String field = null; 70 | String lookupRouting = null; 71 | 72 | 73 | token = parser.nextToken(); 74 | if (token == XContentParser.Token.START_OBJECT) { 75 | String currentFieldName = null; 76 | while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { 77 | if (token == XContentParser.Token.FIELD_NAME) { 78 | currentFieldName = parser.currentName(); 79 | } else { 80 | if ("feature".equals(currentFieldName)) { 81 | featureEnum = FeatureEnum.getByName(parser.text()); 82 | } else if ("image".equals(currentFieldName)) { 83 | image = parser.binaryValue(); 84 | } else if ("hash".equals(currentFieldName)) { 85 | hashEnum = HashEnum.getByName(parser.text()); 86 | } else if ("boost".equals(currentFieldName)) { 87 | boost = parser.floatValue(); 88 | }else if ("index".equals(currentFieldName)) { 89 | lookupIndex = parser.text(); 90 | } else if ("type".equals(currentFieldName)) { 91 | lookupType = parser.text(); 92 | } else if ("field".equals(currentFieldName)) { 93 | field = parser.text(); 94 | } else if ("id".equals(currentFieldName)) { 95 | lookupId = parser.text(); 96 | } else if ("routing".equals(currentFieldName)) { 97 | lookupRouting = parser.textOrNull(); 98 | } else { 99 | throw new QueryParsingException(parseContext, "[image] query does not support [" + currentFieldName + "]"); 100 | } 101 | } 102 | } 103 | parser.nextToken(); 104 | } 105 | 106 | if (featureEnum == null) { 107 | throw new QueryParsingException(parseContext, "No feature specified for image query"); 108 | } 109 | 110 | if(field == null) 111 | field = fieldName; 112 | 113 | String luceneFieldName = field + "." 
+ featureEnum.name(); 114 | LireFeature lireFeature = null; 115 | 116 | if (image != null) { 117 | 118 | try { 119 | lireFeature = featureEnum.getFeatureClass().newInstance(); 120 | BufferedImage img = ImageIO.read(new ByteBufferStreamInput(ByteBuffer.wrap(image))); 121 | if (Math.max(img.getHeight(), img.getWidth()) > ImageMapper.MAX_IMAGE_DIMENSION) { 122 | img = ImageUtils.scaleImage(img, ImageMapper.MAX_IMAGE_DIMENSION); 123 | } 124 | ((Extractor)lireFeature).extract(img); 125 | } catch (Exception e) { 126 | throw new ElasticsearchImageProcessException("Failed to parse image", e); 127 | } 128 | 129 | } else if (lookupIndex != null && lookupType != null && lookupId != null) { 130 | 131 | String lookupFieldName = field + "." + featureEnum.name(); 132 | 133 | GetResponse response = client.get(new GetRequest(lookupIndex, lookupType, lookupId).preference("_local").routing(lookupRouting).fields(lookupFieldName).realtime(false)).actionGet(); 134 | 135 | if (response.isExists()) { 136 | GetField getField = response.getField(lookupFieldName); 137 | 138 | if (getField == null) { 139 | throw new ElasticsearchImageProcessException("field:" + field + " not found in index:" + lookupIndex); 140 | } 141 | 142 | BytesRef bytesReference = (BytesRef) getField.getValue(); 143 | 144 | try { 145 | lireFeature = featureEnum.getFeatureClass().newInstance(); 146 | lireFeature.setByteArrayRepresentation(bytesReference.bytes); 147 | } catch (Exception e) { 148 | throw new ElasticsearchImageProcessException("Failed to parse image", e); 149 | } 150 | } 151 | else 152 | { 153 | throw new ElasticsearchImageProcessException("Image not found from field:" + field); 154 | } 155 | } 156 | 157 | if (lireFeature == null) 158 | throw new QueryParsingException(parseContext, "No feature found for image query or missing parameters"); 159 | 160 | 161 | if (hashEnum == null) 162 | throw new QueryParsingException(parseContext, "No hash found for image query"); 163 | 164 | int[] hash = null; 165 | 166 | if 
(hashEnum.equals(HashEnum.BIT_SAMPLING)) { 167 | hash = BitSampling.generateHashes(lireFeature.getFeatureVector()); 168 | } else if (hashEnum.equals(HashEnum.LSH)) { 169 | hash = LocalitySensitiveHashing.generateHashes(lireFeature.getFeatureVector()); 170 | } 171 | 172 | String hashFieldName = luceneFieldName + "." + ImageMapper.HASH + "." + hashEnum.name(); 173 | 174 | BooleanQuery.Builder builder=new BooleanQuery.Builder().setDisableCoord(true); 175 | 176 | ImageScoreCache imageScoreCache = new ImageScoreCache(); 177 | 178 | for (int h : hash) { 179 | builder.add(new BooleanClause(new ImageHashQuery(new Term(hashFieldName, Integer.toString(h)), luceneFieldName, lireFeature, imageScoreCache, boost), BooleanClause.Occur.SHOULD)); 180 | } 181 | 182 | return builder.build(); 183 | } 184 | 185 | } 186 | -------------------------------------------------------------------------------- /src/main/java/org/elasticsearch/index/query/image/ImageScoreCache.java: -------------------------------------------------------------------------------- 1 | package org.elasticsearch.index.query.image; 2 | 3 | import com.google.common.collect.MapMaker; 4 | 5 | import java.util.Map; 6 | 7 | /** 8 | * Cache document score for {@link org.elasticsearch.index.query.image.ImageHashQuery} 9 | */ 10 | public class ImageScoreCache { 11 | private Map scoreCache = new MapMaker().makeMap(); 12 | 13 | public Float getScore(String key) { 14 | if (!scoreCache.containsKey(key)) { 15 | return null; 16 | } 17 | return scoreCache.get(key); 18 | } 19 | 20 | public void setScore(String key, Float score) { 21 | scoreCache.put(key, score); 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /src/main/java/org/elasticsearch/plugin/image/ImagePlugin.java: -------------------------------------------------------------------------------- 1 | package org.elasticsearch.plugin.image; 2 | 3 | import org.elasticsearch.index.mapper.image.ImageMapper; 4 | import 
/**
 * Entry point of the Elasticsearch image plugin: registers the {@code image}
 * field mapper and the {@code image} query parser with the indices module.
 */
public class ImagePlugin extends Plugin {

    @Override
    public String name() {
        return "image";
    }

    @Override
    public String description() {
        return "Elasticsearch Image Plugin";
    }

    public void onModule(IndicesModule indicesModule) {
        // NOTE(review): a dedicated ThreadPool is created here and handed to the
        // mapper TypeParser but never shut down explicitly — confirm its lifecycle
        // is managed elsewhere (potential thread leak on node close).
        indicesModule.registerMapper("image", new ImageMapper.TypeParser(new ThreadPool("elasticsearch-image")));
        indicesModule.registerQueryParser(ImageQueryParser.class);
    }
}
6 | # 7 | ### example site plugin for "foo": 8 | # 9 | # foo.zip <-- zip file for the plugin, with this structure: 10 | # _site/ <-- the contents that will be served 11 | # plugin-descriptor.properties <-- example contents below: 12 | # 13 | # site=true 14 | # description=My cool plugin 15 | # version=1.0 16 | # 17 | ### example jvm plugin for "foo" 18 | # 19 | # foo.zip <-- zip file for the plugin, with this structure: 20 | # .jar <-- classes, resources, dependencies 21 | # .jar <-- any number of jars 22 | # plugin-descriptor.properties <-- example contents below: 23 | # 24 | # jvm=true 25 | # classname=foo.bar.BazPlugin 26 | # description=My cool plugin 27 | # version=2.0.0-rc1 28 | # elasticsearch.version=2.0 29 | # java.version=1.7 30 | # 31 | ### mandatory elements for all plugins: 32 | # 33 | # 'description': simple summary of the plugin 34 | description=Image Plugin for ElasticSearch. 35 | # 36 | # 'version': plugin's version 37 | version=2.4.1 38 | # 39 | # 'name': the plugin name 40 | name=elasticsearch-image 41 | 42 | ### mandatory elements for site plugins: 43 | # 44 | # 'site': set to true to indicate contents of the _site/ 45 | # directory in the root of the plugin should be served. 46 | site=false 47 | # 48 | ### mandatory elements for jvm plugins : 49 | # 50 | # 'jvm': true if the 'classname' class should be loaded 51 | # from jar files in the root directory of the plugin. 52 | # Note that only jar files in the root directory are 53 | # added to the classpath for the plugin! If you need 54 | # other resources, package them into a resources jar. 55 | jvm=true 56 | # 57 | # 'classname': the name of the class to load, fully-qualified. 
58 | classname=org.elasticsearch.plugin.image.ImagePlugin 59 | # 60 | # 'java.version' version of java the code is built against 61 | # use the system property java.specification.version 62 | # version string must be a sequence of nonnegative decimal integers 63 | # separated by "."'s and may have leading zeros 64 | java.version=1.8 65 | # 66 | # 'elasticsearch.version' version of elasticsearch compiled against 67 | # You will have to release a new version of the plugin for each new 68 | # elasticsearch release. This version is checked when the plugin 69 | # is loaded so Elasticsearch will refuse to start in the presence of 70 | # plugins with the incorrect elasticsearch.version. 71 | elasticsearch.version=2.4.1 72 | # 73 | ### deprecated elements for jvm plugins : 74 | # 75 | # 'isolated': true if the plugin should have its own classloader. 76 | # passing false is deprecated, and only intended to support plugins 77 | # that have hard dependencies against each other. If this is 78 | # not specified, then the plugin is isolated by default. 
79 | isolated=true 80 | # -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/plugin/image/test/ImageIntegrationTests.java: -------------------------------------------------------------------------------- 1 | package org.elasticsearch.plugin.image.test; 2 | 3 | import org.apache.sanselan.ImageFormat; 4 | import org.apache.sanselan.ImageWriteException; 5 | import org.apache.sanselan.Sanselan; 6 | import org.elasticsearch.action.index.IndexResponse; 7 | import org.elasticsearch.action.search.SearchResponse; 8 | import com.google.common.collect.Maps; 9 | import org.elasticsearch.common.settings.Settings; 10 | import org.elasticsearch.index.mapper.image.FeatureEnum; 11 | import org.elasticsearch.index.mapper.image.HashEnum; 12 | import org.elasticsearch.index.query.BoolQueryBuilder; 13 | import org.elasticsearch.index.query.QueryBuilders; 14 | import org.elasticsearch.index.query.TermQueryBuilder; 15 | import org.elasticsearch.index.query.image.ImageQueryBuilder; 16 | import org.elasticsearch.plugin.image.ImagePlugin; 17 | import org.elasticsearch.plugins.Plugin; 18 | import org.elasticsearch.search.SearchHit; 19 | import org.elasticsearch.search.SearchHits; 20 | import org.elasticsearch.test.ESIntegTestCase; 21 | import org.junit.Before; 22 | import org.junit.Test; 23 | 24 | import java.awt.image.BufferedImage; 25 | import java.io.IOException; 26 | import java.io.InputStreamReader; 27 | import java.util.Collection; 28 | 29 | import static org.elasticsearch.client.Requests.putMappingRequest; 30 | import static org.elasticsearch.common.io.Streams.copyToString; 31 | import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; 32 | import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; 33 | import static org.hamcrest.Matcher.*; 34 | 35 | @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE,numDataNodes=1) 36 | public class ImageIntegrationTests // 
extends ESIntegTestCase 37 | { 38 | /* comment out since not work 39 | 40 | private final static String INDEX_NAME = "test"; 41 | private final static String DOC_TYPE_NAME = "test"; 42 | 43 | 44 | @Override 45 | protected Settings nodeSettings(int nodeOrdinal) { 46 | return Settings.builder() 47 | .put(super.nodeSettings(nodeOrdinal)) 48 | .build(); 49 | } 50 | 51 | @Override 52 | protected Collection> nodePlugins() { 53 | return pluginList(ImagePlugin.class); 54 | } 55 | 56 | @Before 57 | public void createEmptyIndex() throws Exception { 58 | logger.info("creating index [{}]", INDEX_NAME); 59 | createIndex(INDEX_NAME); 60 | ensureGreen(); 61 | } 62 | 63 | @Override 64 | public Settings indexSettings() { 65 | return Settings.builder() 66 | .put("index.number_of_replicas", 0) 67 | .put("index.number_of_shards", 5) 68 | .put("index.image.use_thread_pool", randomBoolean()) 69 | .build(); 70 | } 71 | 72 | @Test 73 | public void test_index_search_image() throws Exception { 74 | String mapping = copyToStringFromClasspath("/mapping/test-mapping.json"); 75 | client().admin().indices().putMapping(putMappingRequest(INDEX_NAME).type(DOC_TYPE_NAME).source(mapping)).actionGet(); 76 | 77 | int totalImages = randomIntBetween(10, 50); 78 | 79 | // generate random images and index 80 | String nameToSearch = null; 81 | byte[] imgToSearch = null; 82 | String idToSearch = null; 83 | for (int i = 0; i < totalImages; i ++) { 84 | byte[] imageByte = getRandomImage(); 85 | String name = randomAsciiOfLength(5); 86 | IndexResponse response = index(INDEX_NAME, DOC_TYPE_NAME, jsonBuilder().startObject().field("img", imageByte).field("name", name).endObject()); 87 | if (nameToSearch == null || imgToSearch == null || idToSearch == null) { 88 | nameToSearch = name; 89 | imgToSearch = imageByte; 90 | idToSearch = response.getId(); 91 | } 92 | } 93 | 94 | refresh(); 95 | 96 | // test search with hash 97 | ImageQueryBuilder imageQueryBuilder = new 
ImageQueryBuilder("img").feature(FeatureEnum.CEDD.name()).image(imgToSearch).hash(HashEnum.BIT_SAMPLING.name()); 98 | SearchResponse searchResponse = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE_NAME).setQuery(imageQueryBuilder).addFields("img.metadata.exif_ifd0.x_resolution", "name").setSize(totalImages).get(); 99 | assertNoFailures(searchResponse); 100 | SearchHits hits = searchResponse.getHits(); 101 | assertThat("Should match at least one image", hits.getTotalHits(), greaterThanOrEqualTo(1l)); // if using hash, total result maybe different than number of images 102 | SearchHit hit = hits.getHits()[0]; 103 | assertThat("First should be exact match and has score 1", hit.getScore(), equalTo(2.0f)); 104 | assertImageScore(hits, nameToSearch, 2.0f); 105 | assertThat("Should have metadata", hit.getFields().get("img.metadata.exif_ifd0.x_resolution").getValues(), hasSize(1)); 106 | 107 | // test search without hash and with boost 108 | ImageQueryBuilder imageQueryBuilder2 = new ImageQueryBuilder("img").feature(FeatureEnum.CEDD.name()).image(imgToSearch).boost(2.0f); 109 | SearchResponse searchResponse2 = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE_NAME).setQuery(imageQueryBuilder2).setSize(totalImages).get(); 110 | assertNoFailures(searchResponse2); 111 | SearchHits hits2 = searchResponse2.getHits(); 112 | assertThat("Should get all images", hits2.getTotalHits(), equalTo((long)totalImages)); // no hash used, total result should be same as number of images 113 | assertThat("First should be exact match and has score 2", searchResponse2.getHits().getMaxScore(), equalTo(4.0f)); 114 | assertImageScore(hits2, nameToSearch, 4.0f); 115 | 116 | // test search for name as well 117 | BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery(); 118 | boolQueryBuilder.must(QueryBuilders.termQuery("name", nameToSearch)); 119 | boolQueryBuilder.must(new ImageQueryBuilder("img").feature(FeatureEnum.CEDD.name()).image(imgToSearch)); 120 | SearchResponse 
searchResponse3 = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE_NAME).setQuery(boolQueryBuilder).setSize(totalImages).get(); 121 | assertNoFailures(searchResponse3); 122 | SearchHits hits3 = searchResponse3.getHits(); 123 | assertThat("Should match one document only", hits3.getTotalHits(), equalTo(1l)); // added filename to query, should have only one result 124 | SearchHit hit3 = hits3.getHits()[0]; 125 | assertThat((String)hit3.getSource().get("name"), equalTo(nameToSearch)); 126 | 127 | // test search with hash and limit 128 | ImageQueryBuilder imageQueryBuilder4 = new ImageQueryBuilder("img").feature(FeatureEnum.CEDD.name()).image(imgToSearch).hash(HashEnum.BIT_SAMPLING.name()); 129 | SearchResponse searchResponse4 = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE_NAME).setQuery(imageQueryBuilder4).setSize(totalImages).get(); 130 | assertNoFailures(searchResponse4); 131 | SearchHits hits4 = searchResponse4.getHits(); 132 | assertThat("Should match at least one image", hits4.getTotalHits(), greaterThanOrEqualTo(1l)); // if using hash, total result maybe different than number of images 133 | SearchHit hit4 = hits4.getHits()[0]; 134 | assertThat("First should be exact match and has score 1", hit4.getScore(), equalTo(2.0f)); 135 | assertImageScore(hits4, nameToSearch, 2.0f); 136 | 137 | // test search metadata 138 | TermQueryBuilder termQueryBuilder = QueryBuilders.termQuery("img.metadata.exif_ifd0.x_resolution", "72 dots per inch"); 139 | SearchResponse searchResponse5 = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE_NAME).setQuery(termQueryBuilder).setSize(totalImages).get(); 140 | assertNoFailures(searchResponse5); 141 | SearchHits hits5 = searchResponse5.getHits(); 142 | assertThat("Should match at least one record", hits5.getTotalHits(), greaterThanOrEqualTo(1l)); // if using hash, total result maybe different than number of images 143 | 144 | // test search with exist image 145 | ImageQueryBuilder imageQueryBuilder6 = new 
ImageQueryBuilder("img").feature(FeatureEnum.CEDD.name()).lookupIndex(INDEX_NAME).lookupType(DOC_TYPE_NAME).lookupId(idToSearch); 146 | SearchResponse searchResponse6 = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE_NAME).setQuery(imageQueryBuilder6).setSize(totalImages).get(); 147 | assertNoFailures(searchResponse6); 148 | SearchHits hits6 = searchResponse6.getHits(); 149 | assertThat("Should match at least one image", hits6.getTotalHits(), equalTo((long) totalImages)); 150 | SearchHit hit6 = hits6.getHits()[0]; 151 | assertThat("First should be exact match and has score 1", hit6.getScore(), equalTo(2.0f)); 152 | assertImageScore(hits6, nameToSearch, 2.0f); 153 | 154 | // test search with exist image using hash 155 | ImageQueryBuilder imageQueryBuilder7 = new ImageQueryBuilder("img").feature(FeatureEnum.CEDD.name()).lookupIndex(INDEX_NAME).lookupType(DOC_TYPE_NAME).lookupId(idToSearch).hash(HashEnum.BIT_SAMPLING.name()); 156 | SearchResponse searchResponse7 = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE_NAME).setQuery(imageQueryBuilder7).setSize(totalImages).get(); 157 | assertNoFailures(searchResponse7); 158 | SearchHits hits7 = searchResponse7.getHits(); 159 | assertThat("Should match at least one image", hits7.getTotalHits(), equalTo((long) totalImages)); 160 | SearchHit hit7 = hits7.getHits()[0]; 161 | assertThat("First should be exact match and has score 1", hit7.getScore(), equalTo(2.0f)); 162 | assertImageScore(hits7, nameToSearch, 2.0f); 163 | } 164 | 165 | private void assertImageScore(SearchHits hits, String name, float score) { 166 | for (SearchHit hit : hits) { 167 | if ((hit.getSource() != null && hit.getSource().get("name").equals(name)) 168 | || (hit.getFields() != null && !hit.getFields().isEmpty() && hit.getFields().get("name").getValue().equals(name))){ 169 | assertThat(hit.getScore(), equalTo(score)); 170 | return; 171 | } 172 | } 173 | throw new AssertionError("Image " + name + " not found"); 174 | } 175 | 176 | private byte[] 
getRandomImage() throws IOException, ImageWriteException { 177 | int width = randomIntBetween(100, 1000); 178 | int height = randomIntBetween(100, 1000); 179 | BufferedImage image = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB); 180 | for (int j = 0; j < width; j ++) { 181 | for (int k = 0; k < height; k ++) { 182 | image.setRGB(j, k, randomInt(512)); 183 | } 184 | } 185 | ImageFormat format = ImageFormat.IMAGE_FORMAT_TIFF; 186 | byte[] bytes = Sanselan.writeImageToBytes(image, format, Maps.newHashMap()); 187 | return bytes; 188 | } 189 | 190 | public String copyToStringFromClasspath(String path) throws IOException { 191 | return copyToString(new InputStreamReader(getClass().getResource(path).openStream(), "UTF-8")); 192 | } 193 | 194 | */ 195 | } 196 | -------------------------------------------------------------------------------- /src/test/resources/log4j.properties: -------------------------------------------------------------------------------- 1 | log4j.rootLogger=INFO, out 2 | 3 | log4j.appender.out=org.apache.log4j.ConsoleAppender 4 | log4j.appender.out.layout=org.apache.log4j.PatternLayout 5 | log4j.appender.out.layout.conversionPattern=[%d{ISO8601}][%-5p][%-25c] %m%n 6 | -------------------------------------------------------------------------------- /src/test/resources/mapping/test-mapping.json: -------------------------------------------------------------------------------- 1 | { 2 | "test": { 3 | "_source": { 4 | "excludes": ["img"] 5 | }, 6 | "properties": { 7 | "img": { 8 | "type": "image", 9 | "feature": { 10 | "CEDD": { 11 | "hash": "BIT_SAMPLING" 12 | }, 13 | "JCD": { 14 | "hash": ["BIT_SAMPLING", "LSH"] 15 | }, 16 | "FCTH": {} 17 | }, 18 | "metadata": { 19 | "exif_ifd0.x_resolution": { 20 | "type": "string", 21 | "index": "not_analyzed", 22 | "store": "yes" 23 | } 24 | } 25 | }, 26 | "name": { 27 | "type": "string", 28 | "index": "not_analyzed" 29 | } 30 | } 31 | } 32 | } 
--------------------------------------------------------------------------------