├── .gitignore
├── .travis.yml
├── LICENSE.txt
├── README.md
├── pom.xml
└── src
├── main
├── assemblies
│ └── plugin.xml
├── java
│ └── org
│ │ └── elasticsearch
│ │ ├── ElasticsearchImageProcessException.java
│ │ ├── index
│ │ ├── mapper
│ │ │ └── image
│ │ │ │ ├── FeatureEnum.java
│ │ │ │ ├── HashEnum.java
│ │ │ │ ├── ImageMapper.java
│ │ │ │ └── RegisterImageType.java
│ │ └── query
│ │ │ └── image
│ │ │ ├── AbstractImageScorer.java
│ │ │ ├── ImageHashLimitQuery.java
│ │ │ ├── ImageHashQuery.java
│ │ │ ├── ImageQuery.java
│ │ │ ├── ImageQueryBuilder.java
│ │ │ ├── ImageQueryParser.java
│ │ │ ├── ImageScoreCache.java
│ │ │ └── RegisterImageQueryParser.java
│ │ └── plugin
│ │ └── image
│ │ ├── ImageIndexModule.java
│ │ └── ImagePlugin.java
└── resources
│ ├── es-plugin.properties
│ └── hash
│ ├── LshBitSampling.obj
│ └── lshHashFunctions.obj
└── test
├── java
└── org
│ └── elasticsearch
│ └── plugin
│ └── image
│ └── test
│ └── ImageIntegrationTests.java
└── resources
├── log4j.properties
└── mapping
└── test-mapping.json
/.gitignore:
--------------------------------------------------------------------------------
1 | .idea/
2 | .gradle/
3 | *.iml
4 | work/
5 | /data/
6 | logs/
7 | .DS_Store
8 | build/
9 | target/
10 | .local-execution-hints.log
11 | docs/html/
12 | docs/build.log
13 | /tmp/
14 |
15 | ## eclipse ignores (use 'mvn eclipse:eclipse' to build eclipse projects)
16 | ## The only configuration files which are not ignored are certain files in
17 | ## .settings (as listed below) since these files ensure common coding
18 | ## style across Eclipse and IDEA.
19 | ## Other files (.project, .classpath) should be generated through Maven which
20 | ## will correctly set the classpath based on the declared dependencies.
21 | .project
22 | .classpath
23 | eclipse-build
24 | */.project
25 | */.classpath
26 | */eclipse-build
27 | /.settings/
28 | !/.settings/org.eclipse.core.resources.prefs
29 | !/.settings/org.eclipse.jdt.core.prefs
30 | !/.settings/org.eclipse.jdt.ui.prefs
31 |
32 | ## netbeans ignores
33 | nb-configuration.xml
34 | nbactions.xml
35 |
36 | /dependency-reduced-pom.xml
37 |
38 | ## test images
39 | /src/test/resources/image/
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | language: java
2 |
3 | jdk:
4 |   - openjdk7
5 |
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
1 |
2 | Apache License
3 | Version 2.0, January 2004
4 | http://www.apache.org/licenses/
5 |
6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
7 |
8 | 1. Definitions.
9 |
10 | "License" shall mean the terms and conditions for use, reproduction,
11 | and distribution as defined by Sections 1 through 9 of this document.
12 |
13 | "Licensor" shall mean the copyright owner or entity authorized by
14 | the copyright owner that is granting the License.
15 |
16 | "Legal Entity" shall mean the union of the acting entity and all
17 | other entities that control, are controlled by, or are under common
18 | control with that entity. For the purposes of this definition,
19 | "control" means (i) the power, direct or indirect, to cause the
20 | direction or management of such entity, whether by contract or
21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
22 | outstanding shares, or (iii) beneficial ownership of such entity.
23 |
24 | "You" (or "Your") shall mean an individual or Legal Entity
25 | exercising permissions granted by this License.
26 |
27 | "Source" form shall mean the preferred form for making modifications,
28 | including but not limited to software source code, documentation
29 | source, and configuration files.
30 |
31 | "Object" form shall mean any form resulting from mechanical
32 | transformation or translation of a Source form, including but
33 | not limited to compiled object code, generated documentation,
34 | and conversions to other media types.
35 |
36 | "Work" shall mean the work of authorship, whether in Source or
37 | Object form, made available under the License, as indicated by a
38 | copyright notice that is included in or attached to the work
39 | (an example is provided in the Appendix below).
40 |
41 | "Derivative Works" shall mean any work, whether in Source or Object
42 | form, that is based on (or derived from) the Work and for which the
43 | editorial revisions, annotations, elaborations, or other modifications
44 | represent, as a whole, an original work of authorship. For the purposes
45 | of this License, Derivative Works shall not include works that remain
46 | separable from, or merely link (or bind by name) to the interfaces of,
47 | the Work and Derivative Works thereof.
48 |
49 | "Contribution" shall mean any work of authorship, including
50 | the original version of the Work and any modifications or additions
51 | to that Work or Derivative Works thereof, that is intentionally
52 | submitted to Licensor for inclusion in the Work by the copyright owner
53 | or by an individual or Legal Entity authorized to submit on behalf of
54 | the copyright owner. For the purposes of this definition, "submitted"
55 | means any form of electronic, verbal, or written communication sent
56 | to the Licensor or its representatives, including but not limited to
57 | communication on electronic mailing lists, source code control systems,
58 | and issue tracking systems that are managed by, or on behalf of, the
59 | Licensor for the purpose of discussing and improving the Work, but
60 | excluding communication that is conspicuously marked or otherwise
61 | designated in writing by the copyright owner as "Not a Contribution."
62 |
63 | "Contributor" shall mean Licensor and any individual or Legal Entity
64 | on behalf of whom a Contribution has been received by Licensor and
65 | subsequently incorporated within the Work.
66 |
67 | 2. Grant of Copyright License. Subject to the terms and conditions of
68 | this License, each Contributor hereby grants to You a perpetual,
69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
70 | copyright license to reproduce, prepare Derivative Works of,
71 | publicly display, publicly perform, sublicense, and distribute the
72 | Work and such Derivative Works in Source or Object form.
73 |
74 | 3. Grant of Patent License. Subject to the terms and conditions of
75 | this License, each Contributor hereby grants to You a perpetual,
76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
77 | (except as stated in this section) patent license to make, have made,
78 | use, offer to sell, sell, import, and otherwise transfer the Work,
79 | where such license applies only to those patent claims licensable
80 | by such Contributor that are necessarily infringed by their
81 | Contribution(s) alone or by combination of their Contribution(s)
82 | with the Work to which such Contribution(s) was submitted. If You
83 | institute patent litigation against any entity (including a
84 | cross-claim or counterclaim in a lawsuit) alleging that the Work
85 | or a Contribution incorporated within the Work constitutes direct
86 | or contributory patent infringement, then any patent licenses
87 | granted to You under this License for that Work shall terminate
88 | as of the date such litigation is filed.
89 |
90 | 4. Redistribution. You may reproduce and distribute copies of the
91 | Work or Derivative Works thereof in any medium, with or without
92 | modifications, and in Source or Object form, provided that You
93 | meet the following conditions:
94 |
95 | (a) You must give any other recipients of the Work or
96 | Derivative Works a copy of this License; and
97 |
98 | (b) You must cause any modified files to carry prominent notices
99 | stating that You changed the files; and
100 |
101 | (c) You must retain, in the Source form of any Derivative Works
102 | that You distribute, all copyright, patent, trademark, and
103 | attribution notices from the Source form of the Work,
104 | excluding those notices that do not pertain to any part of
105 | the Derivative Works; and
106 |
107 | (d) If the Work includes a "NOTICE" text file as part of its
108 | distribution, then any Derivative Works that You distribute must
109 | include a readable copy of the attribution notices contained
110 | within such NOTICE file, excluding those notices that do not
111 | pertain to any part of the Derivative Works, in at least one
112 | of the following places: within a NOTICE text file distributed
113 | as part of the Derivative Works; within the Source form or
114 | documentation, if provided along with the Derivative Works; or,
115 | within a display generated by the Derivative Works, if and
116 | wherever such third-party notices normally appear. The contents
117 | of the NOTICE file are for informational purposes only and
118 | do not modify the License. You may add Your own attribution
119 | notices within Derivative Works that You distribute, alongside
120 | or as an addendum to the NOTICE text from the Work, provided
121 | that such additional attribution notices cannot be construed
122 | as modifying the License.
123 |
124 | You may add Your own copyright statement to Your modifications and
125 | may provide additional or different license terms and conditions
126 | for use, reproduction, or distribution of Your modifications, or
127 | for any such Derivative Works as a whole, provided Your use,
128 | reproduction, and distribution of the Work otherwise complies with
129 | the conditions stated in this License.
130 |
131 | 5. Submission of Contributions. Unless You explicitly state otherwise,
132 | any Contribution intentionally submitted for inclusion in the Work
133 | by You to the Licensor shall be under the terms and conditions of
134 | this License, without any additional terms or conditions.
135 | Notwithstanding the above, nothing herein shall supersede or modify
136 | the terms of any separate license agreement you may have executed
137 | with Licensor regarding such Contributions.
138 |
139 | 6. Trademarks. This License does not grant permission to use the trade
140 | names, trademarks, service marks, or product names of the Licensor,
141 | except as required for reasonable and customary use in describing the
142 | origin of the Work and reproducing the content of the NOTICE file.
143 |
144 | 7. Disclaimer of Warranty. Unless required by applicable law or
145 | agreed to in writing, Licensor provides the Work (and each
146 | Contributor provides its Contributions) on an "AS IS" BASIS,
147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
148 | implied, including, without limitation, any warranties or conditions
149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
150 | PARTICULAR PURPOSE. You are solely responsible for determining the
151 | appropriateness of using or redistributing the Work and assume any
152 | risks associated with Your exercise of permissions under this License.
153 |
154 | 8. Limitation of Liability. In no event and under no legal theory,
155 | whether in tort (including negligence), contract, or otherwise,
156 | unless required by applicable law (such as deliberate and grossly
157 | negligent acts) or agreed to in writing, shall any Contributor be
158 | liable to You for damages, including any direct, indirect, special,
159 | incidental, or consequential damages of any character arising as a
160 | result of this License or out of the use or inability to use the
161 | Work (including but not limited to damages for loss of goodwill,
162 | work stoppage, computer failure or malfunction, or any and all
163 | other commercial damages or losses), even if such Contributor
164 | has been advised of the possibility of such damages.
165 |
166 | 9. Accepting Warranty or Additional Liability. While redistributing
167 | the Work or Derivative Works thereof, You may choose to offer,
168 | and charge a fee for, acceptance of support, warranty, indemnity,
169 | or other liability obligations and/or rights consistent with this
170 | License. However, in accepting such obligations, You may act only
171 | on Your own behalf and on Your sole responsibility, not on behalf
172 | of any other Contributor, and only if You agree to indemnify,
173 | defend, and hold each Contributor harmless for any liability
174 | incurred by, or claims asserted against, such Contributor by reason
175 | of your accepting any such warranty or additional liability.
176 |
177 | END OF TERMS AND CONDITIONS
178 |
179 | APPENDIX: How to apply the Apache License to your work.
180 |
181 | To apply the Apache License to your work, attach the following
182 | boilerplate notice, with the fields enclosed by brackets "[]"
183 | replaced with your own identifying information. (Don't include
184 | the brackets!) The text should be enclosed in the appropriate
185 | comment syntax for the file format. We also recommend that a
186 | file or class name and description of purpose be included on the
187 | same "printed page" as the copyright notice for easier
188 | identification within third-party archives.
189 |
190 | Copyright [yyyy] [name of copyright owner]
191 |
192 | Licensed under the Apache License, Version 2.0 (the "License");
193 | you may not use this file except in compliance with the License.
194 | You may obtain a copy of the License at
195 |
196 | http://www.apache.org/licenses/LICENSE-2.0
197 |
198 | Unless required by applicable law or agreed to in writing, software
199 | distributed under the License is distributed on an "AS IS" BASIS,
200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
201 | See the License for the specific language governing permissions and
202 | limitations under the License.
203 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | Image Plugin for Elasticsearch
2 | ==================================
3 |
4 | [](https://travis-ci.org/kzwang/elasticsearch-image)
5 |
6 | The Image Plugin is a Content Based Image Retrieval Plugin for Elasticsearch using [LIRE (Lucene Image Retrieval)](https://code.google.com/p/lire/). It allows users to index images and search for similar images.
7 |
8 | It adds an `image` field type and an `image` query
9 |
10 | See [http://demo.elasticsearch-image.com](http://demo.elasticsearch-image.com) for a demo of the plugin
11 |
12 | In order to install the plugin, simply run: `bin/plugin -install com.github.kzwang/elasticsearch-image/1.2.0`.
13 |
14 | | Image Plugin | elasticsearch | Release date |
15 | |---------------------------|-------------------|:------------:|
16 | | 1.3.0-SNAPSHOT (master) | 1.1.0 | |
17 | | 1.2.0 | 1.0.1 | 2014-03-20 |
18 | | 1.1.0 | 1.0.1 | 2014-03-13 |
19 | | 1.0.0 | 1.0.1 | 2014-03-05 |
20 |
21 |
22 | ## Example
23 | #### Create Mapping
24 | ```sh
25 | curl -XPUT 'localhost:9200/test/test/_mapping' -d '{
26 | "test": {
27 | "properties": {
28 | "my_img": {
29 | "type": "image",
30 | "feature": {
31 | "CEDD": {
32 | "hash": "BIT_SAMPLING"
33 | },
34 | "JCD": {
35 | "hash": ["BIT_SAMPLING", "LSH"]
36 | },
37 | "FCTH": {}
38 | },
39 | "metadata": {
40 | "jpeg.image_width": {
41 | "type": "string",
42 | "store": "yes"
43 | },
44 | "jpeg.image_height": {
45 | "type": "string",
46 | "store": "yes"
47 | }
48 | }
49 | }
50 | }
51 | }
52 | }'
53 | ```
54 | `type` should be `image`. **Mandatory**
55 |
56 | `feature` is a map of features for index. **Mandatory, at least one is required**
57 |
58 | `hash` can be set if you want to search on hash. **Optional**
59 |
60 | `metadata` is a map of metadata for index, only those metadata will be indexed. See [Metadata](#metadata). **Optional**
61 |
62 |
63 | #### Index Image
64 | ```sh
65 | curl -XPOST 'localhost:9200/test/test' -d '{
66 | "my_img": "... base64 encoded image ..."
67 | }'
68 | ```
69 |
70 | #### Search Image
71 | ```sh
72 | curl -XPOST 'localhost:9200/test/test/_search' -d '{
73 | "query": {
74 | "image": {
75 | "my_img": {
76 | "feature": "CEDD",
77 | "image": "... base64 encoded image to search ...",
78 | "hash": "BIT_SAMPLING",
79 | "boost": 2.1,
80 | "limit": 100
81 | }
82 | }
83 | }
84 | }'
85 | ```
86 | `feature` should be one of the features in the mapping. **Mandatory**
87 |
88 | `image` base64 of image to search. **Optional if search using existing image**
89 |
90 | `hash` should be same to the hash set in mapping. **Optional**
91 |
92 | `limit` limit the number of results returned (per shard) for scoring. **Optional, only works when `hash` is specified**
93 |
94 | `boost` score boost **Optional**
95 |
96 |
97 | #### Search Image using existing image in index
98 | ```sh
99 | curl -XPOST 'localhost:9200/test/test/_search' -d '{
100 | "query": {
101 | "image": {
102 | "my_img": {
103 | "feature": "CEDD",
104 | "index": "test",
105 | "type": "test",
106 | "id": "image1",
107 | "path": "my_image",
108 | "hash": "BIT_SAMPLING"
109 | }
110 | }
111 | }
112 | }'
113 | ```
114 | `index` the index to fetch image from. Default to current index. **Optional**
115 |
116 | `type` the type to fetch image from. **Mandatory**
117 |
118 | `id` the id of the document to fetch image from. **Mandatory**
119 |
120 | `path` the field specified as path to fetch image from. **Mandatory**
121 |
122 | `routing` a custom routing value to be used when retrieving the external image doc. **Optional**
123 |
124 |
125 | ### Metadata
126 | Metadata are extracted using [metadata-extractor](https://code.google.com/p/metadata-extractor/). See [SampleOutput](https://code.google.com/p/metadata-extractor/wiki/SampleOutput) for some examples of metadata.
127 |
128 | The field name in index will be `directory.tag_name`, all lower case and space becomes underscore(`_`). e.g. if the *Directory* is `JPEG` and *Tag Name* is `Image Height`, the field name will be `jpeg.image_height`
129 |
130 |
131 |
132 | ### Supported Image Formats
133 | Images are processed by Java ImageIO, supported formats can be found [here](http://docs.oracle.com/javase/7/docs/api/javax/imageio/package-summary.html)
134 |
135 | Additional formats can be supported by ImageIO plugins, for example [TwelveMonkeys](https://github.com/haraldk/TwelveMonkeys)
136 |
137 |
138 | ### Supported Features
139 | [`AUTO_COLOR_CORRELOGRAM`](https://code.google.com/p/lire/source/browse/trunk/src/main/java/net/semanticmetadata/lire/imageanalysis/AutoColorCorrelogram.java), [`BINARY_PATTERNS_PYRAMID`](https://code.google.com/p/lire/source/browse/trunk/src/main/java/net/semanticmetadata/lire/imageanalysis/BinaryPatternsPyramid.java), [`CEDD`](https://code.google.com/p/lire/source/browse/trunk/src/main/java/net/semanticmetadata/lire/imageanalysis/CEDD.java), [`SIMPLE_COLOR_HISTOGRAM`](https://code.google.com/p/lire/source/browse/trunk/src/main/java/net/semanticmetadata/lire/imageanalysis/SimpleColorHistogram.java), [`COLOR_LAYOUT`](https://code.google.com/p/lire/source/browse/trunk/src/main/java/net/semanticmetadata/lire/imageanalysis/ColorLayout.java), [`EDGE_HISTOGRAM`](https://code.google.com/p/lire/source/browse/trunk/src/main/java/net/semanticmetadata/lire/imageanalysis/EdgeHistogram.java), [`FCTH`](https://code.google.com/p/lire/source/browse/trunk/src/main/java/net/semanticmetadata/lire/imageanalysis/FCTH.java), [`GABOR`](https://code.google.com/p/lire/source/browse/trunk/src/main/java/net/semanticmetadata/lire/imageanalysis/Gabor.java), [`JCD`](https://code.google.com/p/lire/source/browse/trunk/src/main/java/net/semanticmetadata/lire/imageanalysis/JCD.java), [`JOINT_HISTOGRAM`](https://code.google.com/p/lire/source/browse/trunk/src/main/java/net/semanticmetadata/lire/imageanalysis/joint/JointHistogram.java), [`JPEG_COEFFICIENT_HISTOGRAM`](https://code.google.com/p/lire/source/browse/trunk/src/main/java/net/semanticmetadata/lire/imageanalysis/JpegCoefficientHistogram.java), [`LOCAL_BINARY_PATTERNS`](https://code.google.com/p/lire/source/browse/trunk/src/main/java/net/semanticmetadata/lire/imageanalysis/LocalBinaryPatterns.java), [`LUMINANCE_LAYOUT`](https://code.google.com/p/lire/source/browse/trunk/src/main/java/net/semanticmetadata/lire/imageanalysis/LuminanceLayout.java), 
[`OPPONENT_HISTOGRAM`](https://code.google.com/p/lire/source/browse/trunk/src/main/java/net/semanticmetadata/lire/imageanalysis/OpponentHistogram.java), [`PHOG`](https://code.google.com/p/lire/source/browse/trunk/src/main/java/net/semanticmetadata/lire/imageanalysis/PHOG.java), [`ROTATION_INVARIANT_LOCAL_BINARY_PATTERNS`](https://code.google.com/p/lire/source/browse/trunk/src/main/java/net/semanticmetadata/lire/imageanalysis/RotationInvariantLocalBinaryPatterns.java), [`SCALABLE_COLOR`](https://code.google.com/p/lire/source/browse/trunk/src/main/java/net/semanticmetadata/lire/imageanalysis/ScalableColor.java), [`TAMURA`](https://code.google.com/p/lire/source/browse/trunk/src/main/java/net/semanticmetadata/lire/imageanalysis/Tamura.java)
140 |
141 |
142 | ### Supported Hash Mode
143 | [`BIT_SAMPLING`](https://code.google.com/p/lire/source/browse/trunk/src/main/java/net/semanticmetadata/lire/indexing/hashing/BitSampling.java), [`LSH`](https://code.google.com/p/lire/source/browse/trunk/src/main/java/net/semanticmetadata/lire/indexing/hashing/LocalitySensitiveHashing.java)
144 |
145 | Hash will increase search speed with large data sets
146 |
147 | See [Large image data sets with LIRE – some new numbers](http://www.semanticmetadata.net/2013/03/20/large-image-data-sets-with-lire-some-new-numbers/)
148 |
149 |
150 | ### Settings
151 | | Setting | Description | Default |
152 | |----------------------|-----------------|:-------:|
153 | | index.image.use_thread_pool | use multiple threads when multiple features are required | True |
154 | | index.image.ignore_metadata_error| ignore errors happened during extract metadata from image | True |
155 |
156 | ## ChangeLog
157 |
158 | #### 1.2.0 (2014-03-20)
159 |
160 | - Use multi-thread when multiple features are required to improve index speed
161 | - Allow index metadata
162 | - Allow query by existing image in index
163 |
164 | #### 1.1.0 (2014-03-13)
165 |
166 | - Added `limit` in `image` query
167 | - Added plugin version in es-plugin.properties
168 |
169 | #### 1.0.0 (2014-03-05)
170 |
171 | - initial release
--------------------------------------------------------------------------------
/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | elasticsearch-image
4 | 4.0.0
5 | com.github.kzwang
6 | elasticsearch-image
7 | 1.3.0-SNAPSHOT
8 | jar
9 | Image Plugin for ElasticSearch
10 | http://github.com/kzwang/elasticsearch-image
11 |
12 |
13 |
14 |
15 | The Apache Software License, Version 2.0
16 | http://www.apache.org/licenses/LICENSE-2.0.txt
17 | repo
18 |
19 |
20 |
21 | scm:git:git@github.com:kzwang/elasticsearch-image.git
22 | scm:git:git@github.com:kzwang/elasticsearch-image.git
23 | http://github.com/kzwang/elasticsearch-image
24 |
25 |
26 |
27 |
28 | kzwang
29 | Kevin Wang
30 | kevin807359@gmail.com
31 |
32 |
33 |
34 |
35 | org.sonatype.oss
36 | oss-parent
37 | 7
38 |
39 |
40 |
41 | 1.3.2
42 | 4.9.0
43 | 1
44 | true
45 | onerror
46 | INFO
47 |
48 |
49 |
50 |
51 | org.apache.lucene
52 | lucene-test-framework
53 | ${lucene.version}
54 | test
55 |
56 |
57 |
58 | org.elasticsearch
59 | elasticsearch
60 | ${elasticsearch.version}
61 | compile
62 |
63 |
64 |
65 | com.github.kzwang
66 | lire
67 | 0.9.4-kzwang-beta1
68 |
69 |
70 |
71 | com.twelvemonkeys.imageio
72 | imageio-jpeg
73 | 3.0-rc5
74 |
75 |
76 |
77 | com.drewnoakes
78 | metadata-extractor
79 | 2.6.2
80 |
81 |
82 |
83 | log4j
84 | log4j
85 | 1.2.17
86 | test
87 |
88 |
89 |
90 | org.elasticsearch
91 | elasticsearch
92 | ${elasticsearch.version}
93 | test-jar
94 | test
95 |
96 |
97 |
98 | org.hamcrest
99 | hamcrest-all
100 | 1.3
101 | test
102 |
103 |
104 |
105 |
106 | org.apache.sanselan
107 | sanselan
108 | 0.97-incubator
109 | test
110 |
111 |
112 |
113 | com.twelvemonkeys.imageio
114 | imageio-tiff
115 | 3.0-rc5
116 | test
117 |
118 |
119 |
120 |
121 |
122 |
123 |
124 | src/main/resources
125 | true
126 |
127 | *.properties
128 |
129 |
130 |
131 | src/main/resources
132 | false
133 |
134 | *.properties
135 |
136 |
137 |
138 |
139 |
140 | org.apache.maven.plugins
141 | maven-compiler-plugin
142 | 2.3.2
143 |
144 | 1.7
145 | 1.7
146 |
147 |
148 |
149 | com.carrotsearch.randomizedtesting
150 | junit4-maven-plugin
151 | 2.0.15
152 |
153 |
154 | tests
155 | test
156 |
157 | junit4
158 |
159 |
160 | 20
161 | pipe,warn
162 | true
163 |
164 |
165 |
166 |
167 |
168 |
169 |
170 |
171 | ${tests.jvms}
172 |
173 |
174 |
175 |
176 |
177 |
178 | **/*Tests.class
179 | **/*Test.class
180 |
181 |
182 | **/Abstract*.class
183 | **/*StressTest.class
184 |
185 |
186 | -Xmx512m
187 | -XX:MaxDirectMemorySize=512m
188 | -Des.logger.prefix=
189 |
190 | ${tests.shuffle}
191 |
192 |
193 |
194 |
195 |
196 |
197 | org.apache.maven.plugins
198 | maven-surefire-plugin
199 | 2.16
200 |
201 | true
202 |
203 |
204 |
205 |
206 | org.apache.maven.plugins
207 | maven-source-plugin
208 | 2.2.1
209 |
210 |
211 | attach-sources
212 |
213 | jar
214 |
215 |
216 |
217 |
218 |
219 |
220 | maven-assembly-plugin
221 | 2.4
222 |
223 | false
224 | ${project.build.directory}/releases/
225 |
226 | ${basedir}/src/main/assemblies/plugin.xml
227 |
228 |
229 |
230 |
231 | package
232 |
233 | single
234 |
235 |
236 |
237 |
238 |
239 |
240 | org.apache.maven.plugins
241 | maven-javadoc-plugin
242 | 2.7
243 |
244 |
245 | attach-javadocs
246 |
247 | jar
248 |
249 |
250 |
251 |
252 |
253 |
254 |
255 |
--------------------------------------------------------------------------------
/src/main/assemblies/plugin.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | plugin
4 |
5 | zip
6 |
7 | false
8 |
9 |
10 | /
11 | true
12 | true
13 |
14 | org.elasticsearch:elasticsearch
15 |
16 |
17 |
18 |
--------------------------------------------------------------------------------
/src/main/java/org/elasticsearch/ElasticsearchImageProcessException.java:
--------------------------------------------------------------------------------
1 | package org.elasticsearch;
2 |
3 |
4 | public class ElasticsearchImageProcessException extends ElasticsearchException {
5 |
6 | public ElasticsearchImageProcessException(String msg) {
7 | super(msg);
8 | }
9 |
10 | public ElasticsearchImageProcessException(String msg, Throwable cause) {
11 | super(msg, cause);
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/src/main/java/org/elasticsearch/index/mapper/image/FeatureEnum.java:
--------------------------------------------------------------------------------
1 | package org.elasticsearch.index.mapper.image;
2 |
3 |
4 | import net.semanticmetadata.lire.imageanalysis.*;
5 | import net.semanticmetadata.lire.imageanalysis.joint.JointHistogram;
6 |
7 | /**
8 | * Features supported by LIRE
9 | * Subclass of {@link LireFeature}
10 | */
11 | public enum FeatureEnum {
12 |
13 | AUTO_COLOR_CORRELOGRAM(AutoColorCorrelogram.class),
14 | BINARY_PATTERNS_PYRAMID(BinaryPatternsPyramid.class),
15 | CEDD(CEDD.class),
16 | SIMPLE_COLOR_HISTOGRAM(SimpleColorHistogram.class),
17 | COLOR_LAYOUT(ColorLayout.class),
18 | EDGE_HISTOGRAM(EdgeHistogram.class),
19 | FCTH(FCTH.class),
20 | GABOR(Gabor.class),
21 | JCD(JCD.class),
22 | JOINT_HISTOGRAM(JointHistogram.class),
23 | JPEG_COEFFICIENT_HISTOGRAM(JpegCoefficientHistogram.class),
24 | LOCAL_BINARY_PATTERNS(LocalBinaryPatterns.class),
25 | LUMINANCE_LAYOUT(LuminanceLayout.class),
26 | OPPONENT_HISTOGRAM(OpponentHistogram.class),
27 | PHOG(PHOG.class),
28 | ROTATION_INVARIANT_LOCAL_BINARY_PATTERNS(RotationInvariantLocalBinaryPatterns.class),
29 | SCALABLE_COLOR(ScalableColor.class),
30 | TAMURA(Tamura.class),
31 | ;
32 |
33 | private Class extends LireFeature> featureClass;
34 |
35 | FeatureEnum(Class extends LireFeature> featureClass) {
36 | this.featureClass = featureClass;
37 | }
38 |
39 | public Class extends LireFeature> getFeatureClass() {
40 | return featureClass;
41 | }
42 |
43 | public static FeatureEnum getByName(String name) {
44 | return valueOf(name.toUpperCase());
45 | }
46 |
47 | }
48 |
--------------------------------------------------------------------------------
/src/main/java/org/elasticsearch/index/mapper/image/HashEnum.java:
--------------------------------------------------------------------------------
1 | package org.elasticsearch.index.mapper.image;
2 |
3 |
/**
 * Hashing modes that can be applied to an indexed image feature so that
 * candidate matches can be pre-filtered through the inverted index.
 */
public enum HashEnum {

    BIT_SAMPLING,
    LSH;

    /**
     * Case-insensitive lookup of a hash mode by name.
     *
     * @throws IllegalArgumentException if the name matches no hash mode
     */
    public static HashEnum getByName(String name) {
        String canonical = name.toUpperCase();
        return HashEnum.valueOf(canonical);
    }
}
11 |
--------------------------------------------------------------------------------
/src/main/java/org/elasticsearch/index/mapper/image/ImageMapper.java:
--------------------------------------------------------------------------------
1 | package org.elasticsearch.index.mapper.image;
2 |
3 | import com.drew.imaging.ImageMetadataReader;
4 | import com.drew.imaging.ImageProcessingException;
5 | import com.drew.metadata.Directory;
6 | import com.drew.metadata.Metadata;
7 | import com.drew.metadata.Tag;
8 | import net.semanticmetadata.lire.imageanalysis.LireFeature;
9 | import net.semanticmetadata.lire.indexing.hashing.BitSampling;
10 | import net.semanticmetadata.lire.indexing.hashing.LocalitySensitiveHashing;
11 | import net.semanticmetadata.lire.utils.ImageUtils;
12 | import net.semanticmetadata.lire.utils.SerializationUtils;
13 | import org.apache.lucene.document.BinaryDocValuesField;
14 | import org.apache.lucene.util.BytesRef;
15 | import org.elasticsearch.ElasticsearchIllegalArgumentException;
16 | import org.elasticsearch.ElasticsearchImageProcessException;
17 | import org.elasticsearch.common.collect.ImmutableOpenMap;
18 | import org.elasticsearch.common.collect.Lists;
19 | import org.elasticsearch.common.collect.MapMaker;
20 | import org.elasticsearch.common.collect.Maps;
21 | import org.elasticsearch.common.hppc.cursors.ObjectObjectCursor;
22 | import org.elasticsearch.common.io.stream.BytesStreamInput;
23 | import org.elasticsearch.common.logging.ESLogger;
24 | import org.elasticsearch.common.logging.ESLoggerFactory;
25 | import org.elasticsearch.common.settings.Settings;
26 | import org.elasticsearch.common.xcontent.XContentBuilder;
27 | import org.elasticsearch.common.xcontent.XContentParser;
28 | import org.elasticsearch.index.mapper.*;
29 | import org.elasticsearch.threadpool.ThreadPool;
30 |
31 | import javax.imageio.ImageIO;
32 | import java.awt.image.BufferedImage;
33 | import java.io.BufferedInputStream;
34 | import java.io.IOException;
35 | import java.util.List;
36 | import java.util.Map;
37 | import java.util.concurrent.CountDownLatch;
38 | import java.util.concurrent.Executor;
39 |
40 | import static org.elasticsearch.index.mapper.MapperBuilders.binaryField;
41 | import static org.elasticsearch.index.mapper.MapperBuilders.stringField;
42 |
43 |
44 |
45 | public class ImageMapper implements Mapper {
46 |
47 | private static ESLogger logger = ESLoggerFactory.getLogger(ImageMapper.class.getName());
48 |
49 | public static final int MAX_IMAGE_DIMENSION = 1024;
50 |
51 | public static final String CONTENT_TYPE = "image";
52 |
53 | public static final String HASH = "hash";
54 |
55 | public static final String FEATURE = "feature";
56 | public static final String METADATA = "metadata";
57 |
58 | public static final String BIT_SAMPLING_FILE = "/hash/LshBitSampling.obj";
59 | public static final String LSH_HASH_FILE = "/hash/lshHashFunctions.obj";
60 |
61 | static {
62 | try {
63 | BitSampling.readHashFunctions(ImageMapper.class.getResourceAsStream(BIT_SAMPLING_FILE));
64 | LocalitySensitiveHashing.readHashFunctions(ImageMapper.class.getResourceAsStream(LSH_HASH_FILE));
65 | } catch (IOException e) {
66 | logger.error("Failed to initialize hash function", e);
67 | }
68 | }
69 |
70 |
71 | public static class Builder extends Mapper.Builder {
72 |
73 | private ThreadPool threadPool;
74 |
75 | private Map> features = Maps.newHashMap();
76 |
77 | private Map metadataBuilders = Maps.newHashMap();
78 |
79 | public Builder(String name, ThreadPool threadPool) {
80 | super(name);
81 | this.threadPool = threadPool;
82 | this.builder = this;
83 | }
84 |
85 | public Builder addFeature(FeatureEnum featureEnum, Map featureMap) {
86 | this.features.put(featureEnum, featureMap);
87 | return this;
88 | }
89 |
90 | public Builder addMetadata(String metadata, Mapper.Builder metadataBuilder) {
91 | this.metadataBuilders.put(metadata, metadataBuilder);
92 | return this;
93 | }
94 |
95 | @Override
96 | public ImageMapper build(BuilderContext context) {
97 | Map featureMappers = Maps.newHashMap();
98 | Map hashMappers = Maps.newHashMap();
99 | Map metadataMappers = Maps.newHashMap();
100 |
101 | context.path().add(name);
102 | // add feature and hash mappers
103 | for (FeatureEnum featureEnum : features.keySet()) {
104 | Map featureMap = features.get(featureEnum);
105 | String featureName = featureEnum.name();
106 |
107 | // add feature mapper
108 | featureMappers.put(featureName, binaryField(featureName).store(true).includeInAll(false).index(false).build(context));
109 |
110 |
111 | // add hash mapper if hash is required
112 | if (featureMap.containsKey(HASH)){
113 | List hashes = (List) featureMap.get(HASH);
114 | for (String h : hashes) {
115 | String hashFieldName = featureName + "." + HASH + "." + h;
116 | hashMappers.put(hashFieldName, stringField(hashFieldName).store(true).includeInAll(false).index(true).build(context));
117 | }
118 | }
119 | }
120 |
121 | // add metadata mappers
122 | context.path().add(METADATA);
123 | for (Map.Entry entry : metadataBuilders.entrySet()){
124 | String metadataName = entry.getKey();
125 | Mapper.Builder metadataBuilder = entry.getValue();
126 | metadataMappers.put(metadataName, metadataBuilder.build(context));
127 | }
128 | context.path().remove(); // remove METADATA
129 | context.path().remove(); // remove name
130 |
131 | return new ImageMapper(name, threadPool, context.indexSettings(), features, featureMappers, hashMappers, metadataMappers);
132 | }
133 | }
134 |
135 | public static class TypeParser implements Mapper.TypeParser {
136 | private ThreadPool threadPool;
137 |
138 | public TypeParser(ThreadPool threadPool) {
139 | this.threadPool = threadPool;
140 | }
141 |
142 | @SuppressWarnings({"unchecked"})
143 | @Override
144 | public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException {
145 | ImageMapper.Builder builder = new ImageMapper.Builder(name, threadPool);
146 | Map features = Maps.newHashMap();
147 | Map metadatas = Maps.newHashMap();
148 |
149 | for (Map.Entry entry : node.entrySet()) {
150 | String fieldName = entry.getKey();
151 | Object fieldNode = entry.getValue();
152 |
153 | if (FEATURE.equals(fieldName)) {
154 | features = (Map) fieldNode;
155 | } else if (METADATA.equals(fieldName)) {
156 | metadatas = (Map) fieldNode;
157 | }
158 | }
159 |
160 | if (features == null || features.isEmpty()) {
161 | throw new ElasticsearchIllegalArgumentException("Feature not found");
162 | }
163 |
164 | // process features
165 | for (Map.Entry entry : features.entrySet()) {
166 | String feature = entry.getKey();
167 | Map featureMap = (Map) entry.getValue();
168 |
169 | // process hash for each feature
170 | if (featureMap.containsKey(HASH)) {
171 | Object hashVal = featureMap.get(HASH);
172 | List hashes = Lists.newArrayList();
173 | if (hashVal instanceof List) {
174 | for (String h : (List)hashVal) {
175 | hashes.add(HashEnum.valueOf(h).name());
176 | }
177 | } else if (hashVal instanceof String) {
178 | hashes.add(HashEnum.valueOf((String) hashVal).name());
179 | } else {
180 | throw new ElasticsearchIllegalArgumentException("Malformed hash value");
181 | }
182 | featureMap.put(HASH, hashes);
183 | }
184 |
185 | FeatureEnum featureEnum = FeatureEnum.getByName(feature);
186 | builder.addFeature(featureEnum, featureMap);
187 | }
188 |
189 |
190 | // process metadata
191 | for (Map.Entry entry : metadatas.entrySet()) {
192 | String metadataName = entry.getKey();
193 | Map metadataMap = (Map) entry.getValue();
194 | String fieldType = (String) metadataMap.get("type");
195 | builder.addMetadata(metadataName, parserContext.typeParser(fieldType).parse(metadataName, metadataMap, parserContext));
196 | }
197 |
198 | return builder;
199 | }
200 | }
201 |
202 | private final String name;
203 |
204 | private final ThreadPool threadPool;
205 |
206 | private final Settings settings;
207 |
208 | private volatile ImmutableOpenMap> features = ImmutableOpenMap.of();
209 |
210 | private volatile ImmutableOpenMap featureMappers = ImmutableOpenMap.of();
211 |
212 | private volatile ImmutableOpenMap hashMappers = ImmutableOpenMap.of();
213 |
214 | private volatile ImmutableOpenMap metadataMappers = ImmutableOpenMap.of();
215 |
216 |
217 | public ImageMapper(String name, ThreadPool threadPool, Settings settings, Map> features, Map featureMappers,
218 | Map hashMappers, Map metadataMappers) {
219 | this.name = name;
220 | this.threadPool = threadPool;
221 | this.settings = settings;
222 | if (features != null) {
223 | this.features = ImmutableOpenMap.builder(this.features).putAll(features).build();
224 | }
225 | if (featureMappers != null) {
226 | this.featureMappers = ImmutableOpenMap.builder(this.featureMappers).putAll(featureMappers).build();
227 | }
228 | if (hashMappers != null) {
229 | this.hashMappers = ImmutableOpenMap.builder(this.hashMappers).putAll(hashMappers).build();
230 | }
231 | if (metadataMappers != null) {
232 | this.metadataMappers = ImmutableOpenMap.builder(this.metadataMappers).putAll(metadataMappers).build();
233 | }
234 | }
235 |
236 | @Override
237 | public String name() {
238 | return name;
239 | }
240 |
241 | @Override
242 | public void parse(ParseContext context) throws IOException {
243 | byte[] content = null;
244 |
245 | XContentParser parser = context.parser();
246 | XContentParser.Token token = parser.currentToken();
247 | if (token == XContentParser.Token.VALUE_STRING) {
248 | content = parser.binaryValue();
249 | }
250 |
251 | if (content == null) {
252 | throw new MapperParsingException("No content is provided.");
253 | }
254 |
255 | final Boolean useThreadPool = settings.getAsBoolean("index.image.use_thread_pool", true);
256 | final Boolean ignoreMetadataError = settings.getAsBoolean("index.image.ignore_metadata_error", true);
257 |
258 | BufferedImage img = ImageIO.read(new BytesStreamInput(content, false));
259 | if (Math.max(img.getHeight(), img.getWidth()) > MAX_IMAGE_DIMENSION) {
260 | img = ImageUtils.scaleImage(img, MAX_IMAGE_DIMENSION);
261 | }
262 | final BufferedImage finalImg = img;
263 |
264 |
265 |
266 | final Map featureExtractMap = new MapMaker().makeMap();
267 |
268 | // have multiple features, use ThreadPool to process each feature
269 | if (useThreadPool && features.size() > 1) {
270 | final CountDownLatch latch = new CountDownLatch(features.size());
271 | Executor executor = threadPool.generic();
272 |
273 | for (ObjectObjectCursor> cursor : features) {
274 | final FeatureEnum featureEnum = cursor.key;
275 | executor.execute(new Runnable() {
276 | @Override
277 | public void run() {
278 | try {
279 | LireFeature lireFeature = featureEnum.getFeatureClass().newInstance();
280 | lireFeature.extract(finalImg);
281 | featureExtractMap.put(featureEnum, lireFeature);
282 | } catch (Throwable e){
283 | logger.error("Failed to extract feature from image", e);
284 | } finally {
285 | latch.countDown();
286 | }
287 | }
288 | });
289 | }
290 | try {
291 | latch.await();
292 | } catch (InterruptedException e) {
293 | logger.debug("Interrupted extract feature from image", e);
294 | Thread.currentThread().interrupt();
295 | }
296 | }
297 |
298 |
299 | for (ObjectObjectCursor> cursor : features) {
300 | FeatureEnum featureEnum = cursor.key;
301 | Map featureMap = cursor.value;
302 |
303 | try {
304 | LireFeature lireFeature;
305 | if (featureExtractMap.containsKey(featureEnum)) { // already processed
306 | lireFeature = featureExtractMap.get(featureEnum);
307 | } else {
308 | lireFeature = featureEnum.getFeatureClass().newInstance();
309 | lireFeature.extract(img);
310 | }
311 | byte[] parsedContent = lireFeature.getByteArrayRepresentation();
312 |
313 | Mapper featureMapper = featureMappers.get(featureEnum.name());
314 | context.externalValue(parsedContent);
315 | featureMapper.parse(context);
316 | context.doc().add(new BinaryDocValuesField(name() + "." + featureEnum.name(), new BytesRef(parsedContent)));
317 |
318 | // add hash if required
319 | if (featureMap.containsKey(HASH)) {
320 | List hashes = (List) featureMap.get(HASH);
321 | for (String h : hashes) {
322 | HashEnum hashEnum = HashEnum.valueOf(h);
323 | int[] hashVals = null;
324 | if (hashEnum.equals(HashEnum.BIT_SAMPLING)) {
325 | hashVals = BitSampling.generateHashes(lireFeature.getDoubleHistogram());
326 | } else if (hashEnum.equals(HashEnum.LSH)) {
327 | hashVals = LocalitySensitiveHashing.generateHashes(lireFeature.getDoubleHistogram());
328 | }
329 |
330 | String mapperName = featureEnum.name() + "." + HASH + "." + h;
331 | Mapper hashMapper = hashMappers.get(mapperName);
332 | context.externalValue(SerializationUtils.arrayToString(hashVals));
333 | hashMapper.parse(context);
334 | }
335 | }
336 | } catch (Exception e) {
337 | throw new ElasticsearchImageProcessException("Failed to index feature " + featureEnum.name(), e);
338 | }
339 | }
340 |
341 | // process metadata if required
342 | if (!metadataMappers.isEmpty()) {
343 | try {
344 | Metadata metadata = ImageMetadataReader.readMetadata(new BufferedInputStream(new BytesStreamInput(content, false)), false);
345 | for (Directory directory : metadata.getDirectories()) {
346 | for (Tag tag : directory.getTags()) {
347 | String metadataName = tag.getDirectoryName().toLowerCase().replaceAll("\\s+", "_") + "." +
348 | tag.getTagName().toLowerCase().replaceAll("\\s+", "_");
349 | if (metadataMappers.containsKey(metadataName)) {
350 | Mapper mapper = metadataMappers.get(metadataName);
351 | context.externalValue(tag.getDescription());
352 | mapper.parse(context);
353 | }
354 | }
355 | }
356 | } catch (ImageProcessingException e) {
357 | logger.error("Failed to extract metadata from image", e);
358 | if (!ignoreMetadataError) {
359 | throw new ElasticsearchImageProcessException("Failed to extract metadata from image", e);
360 | }
361 | }
362 | }
363 |
364 |
365 | }
366 |
367 | @Override
368 | public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
369 | }
370 |
371 | @Override
372 | public void traverse(FieldMapperListener fieldMapperListener) {
373 | for (ObjectObjectCursor cursor : featureMappers) {
374 | cursor.value.traverse(fieldMapperListener);
375 | }
376 | for (ObjectObjectCursor cursor : hashMappers) {
377 | cursor.value.traverse(fieldMapperListener);
378 | }
379 | for (ObjectObjectCursor cursor : metadataMappers) {
380 | cursor.value.traverse(fieldMapperListener);
381 | }
382 | }
383 |
384 | @Override
385 | public void traverse(ObjectMapperListener objectMapperListener) {
386 | }
387 |
388 |
389 | @Override
390 | public void close() {
391 | }
392 |
393 | @Override
394 | public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
395 | builder.startObject(name);
396 |
397 | builder.field("type", CONTENT_TYPE);
398 |
399 | builder.startObject(FEATURE);
400 | for (ObjectObjectCursor> cursor : features) {
401 | builder.field(cursor.key.name(), cursor.value);
402 | }
403 | builder.endObject();
404 |
405 | builder.startObject(METADATA);
406 | for (ObjectObjectCursor cursor : metadataMappers) {
407 | cursor.value.toXContent(builder, params);
408 | }
409 | builder.endObject();
410 |
411 | builder.endObject();
412 | return builder;
413 | }
414 | }
415 |
--------------------------------------------------------------------------------
/src/main/java/org/elasticsearch/index/mapper/image/RegisterImageType.java:
--------------------------------------------------------------------------------
1 | package org.elasticsearch.index.mapper.image;
2 |
3 | import org.elasticsearch.common.inject.Inject;
4 | import org.elasticsearch.common.settings.Settings;
5 | import org.elasticsearch.index.AbstractIndexComponent;
6 | import org.elasticsearch.index.Index;
7 | import org.elasticsearch.index.mapper.MapperService;
8 | import org.elasticsearch.index.settings.IndexSettings;
9 | import org.elasticsearch.threadpool.ThreadPool;
10 |
11 | public class RegisterImageType extends AbstractIndexComponent {
12 |
13 | @Inject
14 | public RegisterImageType(Index index, @IndexSettings Settings indexSettings, MapperService mapperService, ThreadPool threadPool) {
15 | super(index, indexSettings);
16 | mapperService.documentMapperParser().putTypeParser("image", new ImageMapper.TypeParser(threadPool));
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/src/main/java/org/elasticsearch/index/query/image/AbstractImageScorer.java:
--------------------------------------------------------------------------------
1 | package org.elasticsearch.index.query.image;
2 |
3 | import net.semanticmetadata.lire.imageanalysis.LireFeature;
4 | import org.apache.lucene.index.AtomicReader;
5 | import org.apache.lucene.index.BinaryDocValues;
6 | import org.apache.lucene.index.IndexReader;
7 | import org.apache.lucene.search.Scorer;
8 | import org.apache.lucene.search.Weight;
9 | import org.apache.lucene.util.BytesRef;
10 | import org.elasticsearch.ElasticsearchImageProcessException;
11 |
12 | import java.io.IOException;
13 |
14 | /**
15 | * Calculate score for each image
16 | * score = (1 / distance) * boost
17 | */
18 | public abstract class AbstractImageScorer extends Scorer {
19 | private final String luceneFieldName;
20 | private final LireFeature lireFeature;
21 | private final IndexReader reader;
22 | private final float boost;
23 | private BinaryDocValues binaryDocValues;
24 |
25 | protected AbstractImageScorer(Weight weight, String luceneFieldName, LireFeature lireFeature, IndexReader reader,
26 | float boost) {
27 | super(weight);
28 | this.luceneFieldName = luceneFieldName;
29 | this.lireFeature = lireFeature;
30 | this.reader = reader;
31 | this.boost = boost;
32 | }
33 |
34 | @Override
35 | public float score() throws IOException {
36 | assert docID() != NO_MORE_DOCS;
37 |
38 | if (binaryDocValues == null) {
39 | AtomicReader atomicReader = (AtomicReader) reader;
40 | binaryDocValues = atomicReader.getBinaryDocValues(luceneFieldName);
41 | }
42 |
43 | try {
44 | BytesRef bytesRef = binaryDocValues.get(docID());
45 | LireFeature docFeature = lireFeature.getClass().newInstance();
46 | docFeature.setByteArrayRepresentation(bytesRef.bytes);
47 |
48 | float distance = lireFeature.getDistance(docFeature);
49 | float score;
50 | if (Float.compare(distance, 1.0f) <= 0) { // distance less than 1, consider as same image
51 | score = 2f - distance;
52 | } else {
53 | score = 1 / distance;
54 | }
55 | return score * boost;
56 | } catch (Exception e) {
57 | throw new ElasticsearchImageProcessException("Failed to calculate score", e);
58 | }
59 | }
60 |
61 | @Override
62 | public int freq() {
63 | return 1;
64 | }
65 | }
66 |
--------------------------------------------------------------------------------
/src/main/java/org/elasticsearch/index/query/image/ImageHashLimitQuery.java:
--------------------------------------------------------------------------------
1 | package org.elasticsearch.index.query.image;
2 |
3 | import net.semanticmetadata.lire.imageanalysis.LireFeature;
4 | import org.apache.lucene.index.*;
5 | import org.apache.lucene.search.*;
6 | import org.apache.lucene.search.similarities.DefaultSimilarity;
7 | import org.apache.lucene.util.Bits;
8 | import org.apache.lucene.util.ToStringUtils;
9 | import org.elasticsearch.common.lucene.search.Queries;
10 |
11 | import java.io.IOException;
12 | import java.util.Arrays;
13 | import java.util.BitSet;
14 | import java.util.Set;
15 |
16 | /**
17 | * Query by hash first and only calculate score for top n matches
18 | */
19 | public class ImageHashLimitQuery extends Query {
20 |
21 | private String hashFieldName;
22 | private int[] hashes;
23 | private int maxResult;
24 | private String luceneFieldName;
25 | private LireFeature lireFeature;
26 |
27 |
28 | public ImageHashLimitQuery(String hashFieldName, int[] hashes, int maxResult, String luceneFieldName, LireFeature lireFeature, float boost) {
29 | this.hashFieldName = hashFieldName;
30 | this.hashes = hashes;
31 | this.maxResult = maxResult;
32 | this.luceneFieldName = luceneFieldName;
33 | this.lireFeature = lireFeature;
34 | setBoost(boost);
35 | }
36 |
37 |
38 | final class ImageHashScorer extends AbstractImageScorer {
39 | private int doc = -1;
40 | private final int maxDoc;
41 | private final int docBase;
42 | private final BitSet bitSet;
43 | private final Bits liveDocs;
44 |
45 | ImageHashScorer(Weight weight, BitSet bitSet, AtomicReaderContext context, Bits liveDocs) {
46 | super(weight, luceneFieldName, lireFeature, context.reader(), ImageHashLimitQuery.this.getBoost());
47 | this.bitSet = bitSet;
48 | this.liveDocs = liveDocs;
49 | maxDoc = context.reader().maxDoc();
50 | docBase = context.docBase;
51 | }
52 |
53 | @Override
54 | public int docID() {
55 | return doc;
56 | }
57 |
58 | @Override
59 | public int nextDoc() throws IOException {
60 | int d;
61 | do {
62 | d = bitSet.nextSetBit(docBase + doc + 1);
63 | if (d == -1 || d >= maxDoc + docBase) {
64 | doc = NO_MORE_DOCS;
65 | } else {
66 | doc = d - docBase;
67 | }
68 | } while (doc != NO_MORE_DOCS && d < maxDoc + docBase && liveDocs != null && !liveDocs.get(doc));
69 | return doc;
70 | }
71 |
72 | @Override
73 | public int advance(int target) throws IOException {
74 | doc = target-1;
75 | return nextDoc();
76 | }
77 |
78 | @Override
79 | public long cost() {
80 | return maxDoc;
81 | }
82 | }
83 |
84 | final class ImageHashLimitWeight extends Weight {
85 | private final BitSet bitSet;
86 | private final IndexSearcher searcher;
87 |
88 | public ImageHashLimitWeight(IndexSearcher searcher, BitSet bitSet)
89 | throws IOException {
90 | this.bitSet = bitSet;
91 | this.searcher = searcher;
92 | }
93 |
94 | @Override
95 | public String toString() { return "weight(" + ImageHashLimitQuery.this + ")"; }
96 |
97 | @Override
98 | public Query getQuery() { return ImageHashLimitQuery.this; }
99 |
100 | @Override
101 | public float getValueForNormalization() {
102 | return 1f;
103 | }
104 |
105 | @Override
106 | public void normalize(float queryNorm, float topLevelBoost) {
107 | }
108 |
109 | @Override
110 | public Scorer scorer(AtomicReaderContext context, Bits acceptDocs) throws IOException {
111 | return new ImageHashScorer(this, bitSet, context, acceptDocs);
112 | }
113 |
114 | @Override
115 | public Explanation explain(AtomicReaderContext context, int doc) throws IOException {
116 | Scorer scorer = scorer(context, context.reader().getLiveDocs());
117 | if (scorer != null) {
118 | int newDoc = scorer.advance(doc);
119 | if (newDoc == doc) {
120 | float score = scorer.score();
121 | ComplexExplanation result = new ComplexExplanation();
122 | result.setDescription("ImageHashLimitQuery, product of:");
123 | result.setValue(score);
124 | if (getBoost() != 1.0f) {
125 | result.addDetail(new Explanation(getBoost(),"boost"));
126 | score = score / getBoost();
127 | }
128 | result.addDetail(new Explanation(score ,"image score (1/distance)"));
129 | result.setMatch(true);
130 | return result;
131 | }
132 | }
133 |
134 | return new ComplexExplanation(false, 0.0f, "no matching term");
135 | }
136 | }
137 |
138 |
139 | @Override
140 | public Weight createWeight(IndexSearcher searcher) throws IOException {
141 | IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
142 | indexSearcher.setSimilarity(new SimpleSimilarity());
143 |
144 | BooleanQuery booleanQuery = new BooleanQuery();
145 | for (int h : hashes) {
146 | booleanQuery.add(new BooleanClause(new TermQuery(new Term(hashFieldName, Integer.toString(h))), BooleanClause.Occur.SHOULD));
147 | }
148 | TopDocs topDocs = indexSearcher.search(booleanQuery, maxResult);
149 |
150 | if (topDocs.scoreDocs.length == 0) { // no result find
151 | return Queries.newMatchNoDocsQuery().createWeight(searcher);
152 | }
153 |
154 | BitSet bitSet = new BitSet(topDocs.scoreDocs.length);
155 | for (ScoreDoc scoreDoc : topDocs.scoreDocs) {
156 | bitSet.set(scoreDoc.doc);
157 | }
158 |
159 | return new ImageHashLimitWeight(searcher, bitSet);
160 | }
161 |
162 | @Override
163 | public void extractTerms(Set terms) {
164 | }
165 |
166 | @Override
167 | public String toString(String field) {
168 | StringBuilder buffer = new StringBuilder();
169 | buffer.append(hashFieldName);
170 | buffer.append(",");
171 | buffer.append(Arrays.toString(hashes));
172 | buffer.append(",");
173 | buffer.append(maxResult);
174 | buffer.append(",");
175 | buffer.append(luceneFieldName);
176 | buffer.append(",");
177 | buffer.append(lireFeature.getClass().getSimpleName());
178 | buffer.append(ToStringUtils.boost(getBoost()));
179 | return buffer.toString();
180 | }
181 |
182 |
183 | @Override
184 | public boolean equals(Object o) {
185 | if (!(o instanceof ImageHashLimitQuery))
186 | return false;
187 |
188 | ImageHashLimitQuery that = (ImageHashLimitQuery) o;
189 |
190 | if (maxResult != that.maxResult) return false;
191 | if (!hashFieldName.equals(that.hashFieldName)) return false;
192 | if (!Arrays.equals(hashes, that.hashes)) return false;
193 | if (!lireFeature.equals(that.lireFeature)) return false;
194 | if (!luceneFieldName.equals(that.luceneFieldName)) return false;
195 |
196 | return true;
197 | }
198 |
199 | @Override
200 | public int hashCode() {
201 | int result = super.hashCode();
202 | result = 31 * result + hashFieldName.hashCode();
203 | result = 31 * result + Arrays.hashCode(hashes);
204 | result = 31 * result + maxResult;
205 | result = 31 * result + luceneFieldName.hashCode();
206 | result = 31 * result + lireFeature.hashCode();
207 | return result;
208 | }
209 |
210 |
211 | final class SimpleSimilarity extends DefaultSimilarity{
212 | @Override
213 | public float tf(float freq) {
214 | return 1;
215 | }
216 |
217 | @Override
218 | public float idf(long docFreq, long numDocs) {
219 | return 1;
220 | }
221 |
222 | @Override
223 | public float coord(int overlap, int maxOverlap) {
224 | return 1;
225 | }
226 |
227 | @Override
228 | public float queryNorm(float sumOfSquaredWeights) {
229 | return 1;
230 | }
231 |
232 | @Override
233 | public float lengthNorm(FieldInvertState state) {
234 | return 1;
235 | }
236 |
237 | @Override
238 | public float sloppyFreq(int distance) {
239 | return 1;
240 | }
241 | }
242 | }
243 |
--------------------------------------------------------------------------------
/src/main/java/org/elasticsearch/index/query/image/ImageHashQuery.java:
--------------------------------------------------------------------------------
1 | package org.elasticsearch.index.query.image;
2 |
3 | import java.io.IOException;
4 | import java.util.Set;
5 |
6 | import net.semanticmetadata.lire.imageanalysis.LireFeature;
7 | import org.apache.lucene.index.*;
8 | import org.apache.lucene.search.*;
9 | import org.apache.lucene.util.Bits;
10 | import org.apache.lucene.util.ToStringUtils;
11 |
12 | /**
13 | * Copied from {@link TermQuery}, query by hash first and only calculate score for matching docs
14 | */
15 | public class ImageHashQuery extends Query {
16 | private final Term term;
17 |
18 | private String luceneFieldName;
19 | private LireFeature lireFeature;
20 | private ImageScoreCache imageScoreCache;
21 |
22 | final class ImageHashScorer extends AbstractImageScorer {
23 | private final DocsEnum docsEnum;
24 | private final IndexReader reader;
25 |
26 | ImageHashScorer(Weight weight, DocsEnum td, IndexReader reader) {
27 | super(weight, luceneFieldName, lireFeature, reader, ImageHashQuery.this.getBoost());
28 | this.docsEnum = td;
29 | this.reader = reader;
30 | }
31 |
32 | @Override
33 | public int docID() {
34 | return docsEnum.docID();
35 | }
36 |
37 |
38 | @Override
39 | public int nextDoc() throws IOException {
40 | return docsEnum.nextDoc();
41 | }
42 |
43 | @Override
44 | public float score() throws IOException {
45 | assert docID() != NO_MORE_DOCS;
46 | int docId = docID();
47 | String cacheKey = reader.toString() + ":" + docId;
48 | if (imageScoreCache.getScore(cacheKey) != null) {
49 | return 0f; // BooleanScorer will add all score together, return 0 for docs already processed
50 | }
51 | float score = super.score();
52 | imageScoreCache.setScore(cacheKey, score);
53 | return score;
54 | }
55 |
56 | @Override
57 | public int advance(int target) throws IOException {
58 | return docsEnum.advance(target);
59 | }
60 |
61 | @Override
62 | public long cost() {
63 | return docsEnum.cost();
64 | }
65 | }
66 |
67 | final class ImageHashWeight extends Weight {
68 | private final TermContext termStates;
69 |
70 | public ImageHashWeight(IndexSearcher searcher, TermContext termStates)
71 | throws IOException {
72 | assert termStates != null : "TermContext must not be null";
73 | this.termStates = termStates;
74 | }
75 |
76 | @Override
77 | public String toString() { return "weight(" + ImageHashQuery.this + ")"; }
78 |
79 | @Override
80 | public Query getQuery() { return ImageHashQuery.this; }
81 |
82 | @Override
83 | public float getValueForNormalization() {
84 | return 1f;
85 | }
86 |
87 | @Override
88 | public void normalize(float queryNorm, float topLevelBoost) {
89 | }
90 |
91 | @Override
92 | public Scorer scorer(AtomicReaderContext context, Bits acceptDocs) throws IOException {
93 | assert termStates.topReaderContext == ReaderUtil.getTopLevelContext(context) : "The top-reader used to create Weight (" + termStates.topReaderContext + ") is not the same as the current reader's top-reader (" + ReaderUtil.getTopLevelContext(context);
94 | final TermsEnum termsEnum = getTermsEnum(context);
95 | if (termsEnum == null) {
96 | return null;
97 | }
98 | DocsEnum docs = termsEnum.docs(acceptDocs, null);
99 | assert docs != null;
100 | return new ImageHashScorer(this, docs, context.reader());
101 | }
102 |
103 | private TermsEnum getTermsEnum(AtomicReaderContext context) throws IOException {
104 | final TermState state = termStates.get(context.ord);
105 | if (state == null) { // term is not present in that reader
106 | assert termNotInReader(context.reader(), term) : "no termstate found but term exists in reader term=" + term;
107 | return null;
108 | }
109 | final TermsEnum termsEnum = context.reader().terms(term.field()).iterator(null);
110 | termsEnum.seekExact(term.bytes(), state);
111 | return termsEnum;
112 | }
113 |
114 | private boolean termNotInReader(AtomicReader reader, Term term) throws IOException {
115 | return reader.docFreq(term) == 0;
116 | }
117 |
118 | @Override
119 | public Explanation explain(AtomicReaderContext context, int doc) throws IOException {
120 | Scorer scorer = scorer(context, context.reader().getLiveDocs());
121 | if (scorer != null) {
122 | int newDoc = scorer.advance(doc);
123 | if (newDoc == doc) {
124 | float score = scorer.score();
125 | ComplexExplanation result = new ComplexExplanation();
126 | result.setDescription("ImageHashQuery, product of:");
127 | result.setValue(score);
128 | if (getBoost() != 1.0f) {
129 | result.addDetail(new Explanation(getBoost(),"boost"));
130 | score = score / getBoost();
131 | }
132 | result.addDetail(new Explanation(score ,"image score (1/distance)"));
133 | result.setMatch(true);
134 | return result;
135 | }
136 | }
137 |
138 | return new ComplexExplanation(false, 0.0f, "no matching term");
139 | }
140 | }
141 |
142 | public ImageHashQuery(Term t, String luceneFieldName, LireFeature lireFeature, ImageScoreCache imageScoreCache, float boost) {
143 | this.term = t;
144 | this.luceneFieldName = luceneFieldName;
145 | this.lireFeature = lireFeature;
146 | this.imageScoreCache = imageScoreCache;
147 | setBoost(boost);
148 | }
149 |
150 | public Term getTerm() {
151 | return term;
152 | }
153 |
154 | @Override
155 | public Weight createWeight(IndexSearcher searcher) throws IOException {
156 | final IndexReaderContext context = searcher.getTopReaderContext();
157 | final TermContext termState = TermContext.build(context, term);
158 | return new ImageHashWeight(searcher, termState);
159 | }
160 |
161 | @Override
162 | public void extractTerms(Set terms) {
163 | terms.add(getTerm());
164 | }
165 |
166 | @Override
167 | public String toString(String field) {
168 | StringBuilder buffer = new StringBuilder();
169 | if (!term.field().equals(field)) {
170 | buffer.append(term.field());
171 | buffer.append(":");
172 | }
173 | buffer.append(term.text());
174 | buffer.append(";");
175 | buffer.append(luceneFieldName);
176 | buffer.append(",");
177 | buffer.append(lireFeature.getClass().getSimpleName());
178 | buffer.append(ToStringUtils.boost(getBoost()));
179 | return buffer.toString();
180 | }
181 |
182 | @Override
183 | public boolean equals(Object o) {
184 | if (!(o instanceof ImageHashQuery))
185 | return false;
186 | ImageHashQuery other = (ImageHashQuery)o;
187 | return (this.getBoost() == other.getBoost())
188 | && this.term.equals(other.term)
189 | & luceneFieldName.equals(luceneFieldName)
190 | && lireFeature.equals(lireFeature);
191 | }
192 |
193 | @Override
194 | public int hashCode() {
195 | int result = super.hashCode();
196 | result = 31 * result + term.hashCode();
197 | result = 31 * result + luceneFieldName.hashCode();
198 | result = 31 * result + lireFeature.hashCode();
199 | result = Float.floatToIntBits(getBoost()) ^ result;
200 | return result;
201 | }
202 | }
203 |
--------------------------------------------------------------------------------
/src/main/java/org/elasticsearch/index/query/image/ImageQuery.java:
--------------------------------------------------------------------------------
1 | package org.elasticsearch.index.query.image;
2 |
3 | import net.semanticmetadata.lire.imageanalysis.LireFeature;
4 | import org.apache.lucene.index.AtomicReaderContext;
5 | import org.apache.lucene.index.IndexReader;
6 | import org.apache.lucene.index.Term;
7 | import org.apache.lucene.search.*;
8 | import org.apache.lucene.util.Bits;
9 | import org.apache.lucene.util.ToStringUtils;
10 |
11 | import java.io.IOException;
12 | import java.util.Set;
13 |
14 |
15 | /**
16 | * Copied from {@link MatchAllDocsQuery}, calculate score for all docs
17 | */
18 | public class ImageQuery extends Query {
19 |
    private String luceneFieldName;   // doc-values field holding the indexed feature bytes
    private LireFeature lireFeature;  // feature extracted from the query image

    /**
     * @param luceneFieldName doc-values field to read per-doc feature bytes from
     * @param lireFeature     query image feature used for distance scoring
     * @param boost           score multiplier applied by the scorer
     */
    public ImageQuery(String luceneFieldName, LireFeature lireFeature, float boost) {
        this.luceneFieldName = luceneFieldName;
        this.lireFeature = lireFeature;
        setBoost(boost);
    }
28 |
29 | private class ImageScorer extends AbstractImageScorer {
30 | private int doc = -1;
31 | private final int maxDoc;
32 | private final Bits liveDocs;
33 |
34 | ImageScorer(IndexReader reader, Bits liveDocs, Weight w) {
35 | super(w, luceneFieldName, lireFeature, reader, ImageQuery.this.getBoost());
36 | this.liveDocs = liveDocs;
37 | maxDoc = reader.maxDoc();
38 | }
39 |
40 | @Override
41 | public int docID() {
42 | return doc;
43 | }
44 |
45 | @Override
46 | public int nextDoc() throws IOException {
47 | doc++;
48 | while(liveDocs != null && doc < maxDoc && !liveDocs.get(doc)) {
49 | doc++;
50 | }
51 | if (doc == maxDoc) {
52 | doc = NO_MORE_DOCS;
53 | }
54 | return doc;
55 | }
56 |
57 |
58 | @Override
59 | public int advance(int target) throws IOException {
60 | doc = target-1;
61 | return nextDoc();
62 | }
63 |
64 | @Override
65 | public long cost() {
66 | return maxDoc;
67 | }
68 | }
69 |
70 | private class ImageWeight extends Weight {
71 | public ImageWeight(IndexSearcher searcher) {
72 | }
73 |
74 | @Override
75 | public String toString() {
76 | return "weight(" + ImageQuery.this + ")";
77 | }
78 |
79 | @Override
80 | public Query getQuery() {
81 | return ImageQuery.this;
82 | }
83 |
84 | @Override
85 | public float getValueForNormalization() {
86 | return 1f;
87 | }
88 |
89 | @Override
90 | public void normalize(float queryNorm, float topLevelBoost) {
91 | }
92 |
93 | @Override
94 | public Scorer scorer(AtomicReaderContext context, Bits acceptDocs) throws IOException {
95 | return new ImageScorer(context.reader(), acceptDocs, this);
96 | }
97 |
98 | @Override
99 | public Explanation explain(AtomicReaderContext context, int doc) throws IOException {
100 | Scorer scorer = scorer(context, context.reader().getLiveDocs());
101 | if (scorer != null) {
102 | int newDoc = scorer.advance(doc);
103 | if (newDoc == doc) {
104 | float score = scorer.score();
105 | ComplexExplanation result = new ComplexExplanation();
106 | result.setDescription("ImageQuery, product of:");
107 | result.setValue(score);
108 | if (getBoost() != 1.0f) {
109 | result.addDetail(new Explanation(getBoost(),"boost"));
110 | score = score / getBoost();
111 | }
112 | result.addDetail(new Explanation(score ,"image score (1/distance)"));
113 | result.setMatch(true);
114 | return result;
115 | }
116 | }
117 |
118 | return new ComplexExplanation(false, 0.0f, "no matching term");
119 | }
120 | }
121 |
122 | @Override
123 | public Weight createWeight(IndexSearcher searcher) {
124 | return new ImageWeight(searcher);
125 | }
126 |
127 | @Override
128 | public void extractTerms(Set terms) {
129 | }
130 |
131 | @Override
132 | public String toString(String field) {
133 | StringBuilder buffer = new StringBuilder();
134 | buffer.append(luceneFieldName);
135 | buffer.append(",");
136 | buffer.append(lireFeature.getClass().getSimpleName());
137 | buffer.append(ToStringUtils.boost(getBoost()));
138 | return buffer.toString();
139 | }
140 |
141 |
142 | @Override
143 | public boolean equals(Object o) {
144 | if (!(o instanceof ImageQuery))
145 | return false;
146 | ImageQuery other = (ImageQuery) o;
147 | return (this.getBoost() == other.getBoost())
148 | && luceneFieldName.equals(luceneFieldName)
149 | && lireFeature.equals(lireFeature);
150 | }
151 |
152 | @Override
153 | public int hashCode() {
154 | int result = super.hashCode();
155 | result = 31 * result + luceneFieldName.hashCode();
156 | result = 31 * result + lireFeature.hashCode();
157 | result = Float.floatToIntBits(getBoost()) ^ result;
158 | return result;
159 | }
160 |
161 |
162 | }
163 |
--------------------------------------------------------------------------------
/src/main/java/org/elasticsearch/index/query/image/ImageQueryBuilder.java:
--------------------------------------------------------------------------------
1 | package org.elasticsearch.index.query.image;
2 |
3 |
4 | import org.elasticsearch.common.xcontent.XContentBuilder;
5 | import org.elasticsearch.index.query.BaseQueryBuilder;
6 | import org.elasticsearch.index.query.BoostableQueryBuilder;
7 |
8 | import java.io.IOException;
9 |
10 | public class ImageQueryBuilder extends BaseQueryBuilder implements BoostableQueryBuilder {
11 |
12 | private final String fieldName;
13 |
14 | private String feature;
15 |
16 | private byte[] image;
17 |
18 | private String hash;
19 |
20 | private float boost = -1;
21 |
22 | private int limit = -1;
23 |
24 | private String lookupIndex;
25 |
26 | private String lookupType;
27 |
28 | private String lookupId;
29 |
30 | private String lookupRouting;
31 |
32 | private String lookupPath;
33 |
34 | public ImageQueryBuilder(String fieldName) {
35 | this.fieldName = fieldName;
36 | }
37 |
38 | public ImageQueryBuilder feature(String feature) {
39 | this.feature = feature;
40 | return this;
41 | }
42 |
43 | public ImageQueryBuilder image(byte[] image) {
44 | this.image = image;
45 | return this;
46 | }
47 |
48 | public ImageQueryBuilder hash(String hash) {
49 | this.hash = hash;
50 | return this;
51 | }
52 |
53 | public ImageQueryBuilder limit(int limit) {
54 | this.limit = limit;
55 | return this;
56 | }
57 |
58 | public ImageQueryBuilder lookupIndex(String lookupIndex) {
59 | this.lookupIndex = lookupIndex;
60 | return this;
61 | }
62 |
63 | public ImageQueryBuilder lookupType(String lookupType) {
64 | this.lookupType = lookupType;
65 | return this;
66 | }
67 |
68 | public ImageQueryBuilder lookupId(String lookupId) {
69 | this.lookupId = lookupId;
70 | return this;
71 | }
72 |
73 | public ImageQueryBuilder lookupPath(String lookupPath) {
74 | this.lookupPath = lookupPath;
75 | return this;
76 | }
77 |
78 | public ImageQueryBuilder lookupRouting(String lookupRouting) {
79 | this.lookupRouting = lookupRouting;
80 | return this;
81 | }
82 |
83 | @Override
84 | public ImageQueryBuilder boost(float boost) {
85 | this.boost = boost;
86 | return this;
87 | }
88 |
89 | @Override
90 | protected void doXContent(XContentBuilder builder, Params params) throws IOException {
91 | builder.startObject(ImageQueryParser.NAME);
92 |
93 | builder.startObject(fieldName);
94 | builder.field("feature", feature);
95 |
96 | if (image != null) {
97 | builder.field("image", image);
98 | }
99 |
100 |
101 | if (lookupIndex != null) {
102 | builder.field("index", lookupIndex);
103 | }
104 | builder.field("type", lookupType);
105 | builder.field("id", lookupId);
106 | if (lookupRouting != null) {
107 | builder.field("routing", lookupRouting);
108 | }
109 | builder.field("path", lookupPath);
110 |
111 | if (hash != null) {
112 | builder.field("hash", hash);
113 | }
114 |
115 | if (boost != -1) {
116 | builder.field("boost", boost);
117 | }
118 |
119 | if (limit != -1) {
120 | builder.field("limit", limit);
121 | }
122 |
123 | builder.endObject();
124 |
125 | builder.endObject();
126 | }
127 |
128 |
129 | }
130 |
--------------------------------------------------------------------------------
/src/main/java/org/elasticsearch/index/query/image/ImageQueryParser.java:
--------------------------------------------------------------------------------
1 | package org.elasticsearch.index.query.image;
2 |
3 |
4 | import net.semanticmetadata.lire.imageanalysis.LireFeature;
5 | import net.semanticmetadata.lire.indexing.hashing.BitSampling;
6 | import net.semanticmetadata.lire.indexing.hashing.LocalitySensitiveHashing;
7 | import net.semanticmetadata.lire.utils.ImageUtils;
8 | import org.apache.lucene.index.Term;
9 | import org.apache.lucene.search.BooleanClause;
10 | import org.apache.lucene.search.BooleanQuery;
11 | import org.apache.lucene.search.Query;
12 | import org.elasticsearch.ElasticsearchImageProcessException;
13 | import org.elasticsearch.action.get.GetRequest;
14 | import org.elasticsearch.action.get.GetResponse;
15 | import org.elasticsearch.client.Client;
16 | import org.elasticsearch.common.bytes.BytesReference;
17 | import org.elasticsearch.common.inject.Inject;
18 | import org.elasticsearch.common.io.stream.BytesStreamInput;
19 | import org.elasticsearch.common.xcontent.XContentParser;
20 | import org.elasticsearch.index.get.GetField;
21 | import org.elasticsearch.index.mapper.image.FeatureEnum;
22 | import org.elasticsearch.index.mapper.image.HashEnum;
23 | import org.elasticsearch.index.mapper.image.ImageMapper;
24 | import org.elasticsearch.index.query.QueryParseContext;
25 | import org.elasticsearch.index.query.QueryParser;
26 | import org.elasticsearch.index.query.QueryParsingException;
27 |
28 | import javax.imageio.ImageIO;
29 | import java.awt.image.BufferedImage;
30 | import java.io.IOException;
31 |
/**
 * Parses the {@code image} query DSL into a Lucene query. The query image can be
 * given inline (base64 {@code image} field) or fetched from an existing document
 * via index/type/id/path (and optional routing). Depending on the options, parse()
 * returns an {@link ImageQuery} (full scan), an {@link ImageHashLimitQuery}
 * (hash pre-filter with a result limit), or a BooleanQuery of {@link ImageHashQuery}
 * clauses (hash pre-filter, no limit).
 */
public class ImageQueryParser implements QueryParser {

    public static final String NAME = "image";

    // Used to fetch the stored feature bytes for lookup-style queries.
    private Client client;

    @Inject
    public ImageQueryParser(Client client) {
        this.client = client;
    }

    @Override
    public String[] names() {
        return new String[] {NAME};
    }

    /**
     * Parses {@code {"image": {"<field>": {feature, image|index/type/id/path, hash, boost, limit}}}}.
     *
     * @throws QueryParsingException if the field name is missing, an unknown option
     *         is given, no feature is specified, or no image could be resolved
     */
    @Override
    public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
        XContentParser parser = parseContext.parser();

        // The first token inside the "image" object must be the target field name.
        XContentParser.Token token = parser.nextToken();
        if (token != XContentParser.Token.FIELD_NAME) {
            throw new QueryParsingException(parseContext.index(), "[image] query malformed, no field");
        }


        String fieldName = parser.currentName();
        FeatureEnum featureEnum = null;
        byte[] image = null;
        HashEnum hashEnum = null;
        float boost = 1.0f;
        int limit = -1;

        // Lookup defaults to the current index; type/id/path must be given explicitly.
        String lookupIndex = parseContext.index().name();
        String lookupType = null;
        String lookupId = null;
        String lookupPath = null;
        String lookupRouting = null;


        // Walk the per-field options object: remember the last field name seen,
        // then dispatch on it when the corresponding value token arrives.
        token = parser.nextToken();
        if (token == XContentParser.Token.START_OBJECT) {
            String currentFieldName = null;
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    currentFieldName = parser.currentName();
                } else {
                    if ("feature".equals(currentFieldName)) {
                        featureEnum = FeatureEnum.getByName(parser.text());
                    } else if ("image".equals(currentFieldName)) {
                        image = parser.binaryValue();
                    } else if ("hash".equals(currentFieldName)) {
                        hashEnum = HashEnum.getByName(parser.text());
                    } else if ("boost".equals(currentFieldName)) {
                        boost = parser.floatValue();
                    } else if ("limit".equals(currentFieldName)) {
                        limit = parser.intValue();
                    }else if ("index".equals(currentFieldName)) {
                        lookupIndex = parser.text();
                    } else if ("type".equals(currentFieldName)) {
                        lookupType = parser.text();
                    } else if ("id".equals(currentFieldName)) {
                        lookupId = parser.text();
                    } else if ("path".equals(currentFieldName)) {
                        lookupPath = parser.text();
                    } else if ("routing".equals(currentFieldName)) {
                        lookupRouting = parser.textOrNull();
                    } else {
                        throw new QueryParsingException(parseContext.index(), "[image] query does not support [" + currentFieldName + "]");
                    }
                }
            }
            // Consume the token closing the field-name object.
            parser.nextToken();
        }

        if (featureEnum == null) {
            throw new QueryParsingException(parseContext.index(), "No feature specified for image query");
        }

        // Features are indexed under "<field>.<FEATURE>"; see ImageMapper.
        String luceneFieldName = fieldName + "." + featureEnum.name();
        LireFeature feature = null;

        if (image != null) {
            // Inline image: decode, downscale if needed, and extract the feature.
            try {
                feature = featureEnum.getFeatureClass().newInstance();
                BufferedImage img = ImageIO.read(new BytesStreamInput(image, false));
                if (Math.max(img.getHeight(), img.getWidth()) > ImageMapper.MAX_IMAGE_DIMENSION) {
                    img = ImageUtils.scaleImage(img, ImageMapper.MAX_IMAGE_DIMENSION);
                }
                feature.extract(img);
            } catch (Exception e) {
                throw new ElasticsearchImageProcessException("Failed to parse image", e);
            }
        } else if (lookupIndex != null && lookupType != null && lookupId != null && lookupPath != null) {
            // Lookup: read the already-extracted feature bytes from an indexed document,
            // avoiding re-extraction. realtime(false) allows serving from a local replica.
            String lookupFieldName = lookupPath + "." + featureEnum.name();
            GetResponse getResponse = client.get(new GetRequest(lookupIndex, lookupType, lookupId).preference("_local").routing(lookupRouting).fields(lookupFieldName).realtime(false)).actionGet();
            if (getResponse.isExists()) {
                GetField getField = getResponse.getField(lookupFieldName);
                if (getField != null) {
                    BytesReference bytesReference = (BytesReference) getField.getValue();
                    try {
                        feature = featureEnum.getFeatureClass().newInstance();
                        feature.setByteArrayRepresentation(bytesReference.array(), bytesReference.arrayOffset(), bytesReference.length());
                    } catch (Exception e) {
                        throw new ElasticsearchImageProcessException("Failed to parse image", e);
                    }
                }
            }
        }
        if (feature == null) {
            throw new QueryParsingException(parseContext.index(), "No image specified for image query");
        }


        if (hashEnum == null) { // no hash, need to scan all documents
            return new ImageQuery(luceneFieldName, feature, boost);
        } else { // query by hash first
            int[] hash = null;
            if (hashEnum.equals(HashEnum.BIT_SAMPLING)) {
                hash = BitSampling.generateHashes(feature.getDoubleHistogram());
            } else if (hashEnum.equals(HashEnum.LSH)) {
                hash = LocalitySensitiveHashing.generateHashes(feature.getDoubleHistogram());
            }
            String hashFieldName = luceneFieldName + "." + ImageMapper.HASH + "." + hashEnum.name();

            if (limit > 0) { // has max result limit, use ImageHashLimitQuery
                return new ImageHashLimitQuery(hashFieldName, hash, limit, luceneFieldName, feature, boost);
            } else { // no max result limit, use ImageHashQuery
                // Disjunction of one clause per hash value; the shared score cache
                // avoids re-scoring a document matched by several clauses.
                BooleanQuery query = new BooleanQuery(true);
                ImageScoreCache imageScoreCache = new ImageScoreCache();

                for (int h : hash) {
                    query.add(new BooleanClause(new ImageHashQuery(new Term(hashFieldName, Integer.toString(h)), luceneFieldName, feature, imageScoreCache, boost), BooleanClause.Occur.SHOULD));
                }
                return query;
            }

        }
    }
}
172 |
--------------------------------------------------------------------------------
/src/main/java/org/elasticsearch/index/query/image/ImageScoreCache.java:
--------------------------------------------------------------------------------
1 | package org.elasticsearch.index.query.image;
2 |
import org.elasticsearch.common.collect.MapMaker;

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
6 |
7 | /**
8 | * Cache document score for {@link org.elasticsearch.index.query.image.ImageHashQuery}
9 | */
10 | public class ImageScoreCache {
11 | private Map scoreCache = new MapMaker().makeMap();
12 |
13 | public Float getScore(String key) {
14 | if (!scoreCache.containsKey(key)) {
15 | return null;
16 | }
17 | return scoreCache.get(key);
18 | }
19 |
20 | public void setScore(String key, Float score) {
21 | scoreCache.put(key, score);
22 | }
23 | }
24 |
--------------------------------------------------------------------------------
/src/main/java/org/elasticsearch/index/query/image/RegisterImageQueryParser.java:
--------------------------------------------------------------------------------
1 | package org.elasticsearch.index.query.image;
2 |
3 | import org.elasticsearch.common.inject.Inject;
4 | import org.elasticsearch.common.settings.Settings;
5 | import org.elasticsearch.index.AbstractIndexComponent;
6 | import org.elasticsearch.index.Index;
7 | import org.elasticsearch.index.settings.IndexSettings;
8 | import org.elasticsearch.indices.query.IndicesQueriesRegistry;
9 |
10 |
/**
 * Per-index component that registers {@link ImageQueryParser} with the
 * {@link IndicesQueriesRegistry}, making the "image" query available in the
 * query DSL. Instantiated eagerly by ImageIndexModule; registration is its
 * only job, done as a constructor side effect.
 */
public class RegisterImageQueryParser extends AbstractIndexComponent {

    @Inject
    protected RegisterImageQueryParser(Index index, @IndexSettings Settings indexSettings,
                                      IndicesQueriesRegistry indicesQueriesRegistry,
                                      ImageQueryParser parser) {
        super(index, indexSettings);
        indicesQueriesRegistry.addQueryParser(parser);
    }
}
21 |
--------------------------------------------------------------------------------
/src/main/java/org/elasticsearch/plugin/image/ImageIndexModule.java:
--------------------------------------------------------------------------------
1 | package org.elasticsearch.plugin.image;
2 |
3 | import org.elasticsearch.common.inject.AbstractModule;
4 | import org.elasticsearch.index.mapper.image.RegisterImageType;
5 | import org.elasticsearch.index.query.image.RegisterImageQueryParser;
6 |
7 |
/**
 * Per-index Guice module for the image plugin: binds the components that
 * register the "image" mapping type and the "image" query parser.
 */
public class ImageIndexModule extends AbstractModule {

    @Override
    protected void configure() {
        // Eager singletons: both classes register themselves in their
        // constructors, so they must be instantiated at index startup.
        bind(RegisterImageType.class).asEagerSingleton();
        bind(RegisterImageQueryParser.class).asEagerSingleton();
    }
}
16 |
--------------------------------------------------------------------------------
/src/main/java/org/elasticsearch/plugin/image/ImagePlugin.java:
--------------------------------------------------------------------------------
1 | package org.elasticsearch.plugin.image;
2 |
3 | import org.elasticsearch.common.inject.Module;
4 | import org.elasticsearch.plugins.AbstractPlugin;
5 |
6 | import java.util.Collection;
7 |
8 | import static org.elasticsearch.common.collect.Lists.newArrayList;
9 |
10 |
11 | public class ImagePlugin extends AbstractPlugin {
12 |
13 | @Override
14 | public String name() {
15 | return "image";
16 | }
17 |
18 | @Override
19 | public String description() {
20 | return "Elasticsearch Image Plugin";
21 | }
22 |
23 | @Override
24 | public Collection> indexModules() {
25 | Collection> modules = newArrayList();
26 | modules.add(ImageIndexModule.class);
27 | return modules;
28 | }
29 | }
30 |
--------------------------------------------------------------------------------
/src/main/resources/es-plugin.properties:
--------------------------------------------------------------------------------
1 | plugin=org.elasticsearch.plugin.image.ImagePlugin
2 | version=${project.version}
--------------------------------------------------------------------------------
/src/main/resources/hash/LshBitSampling.obj:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kzwang/elasticsearch-image/89bce3146c2b0e10838afeb78ab7bb30f3cc8490/src/main/resources/hash/LshBitSampling.obj
--------------------------------------------------------------------------------
/src/main/resources/hash/lshHashFunctions.obj:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kzwang/elasticsearch-image/89bce3146c2b0e10838afeb78ab7bb30f3cc8490/src/main/resources/hash/lshHashFunctions.obj
--------------------------------------------------------------------------------
/src/test/java/org/elasticsearch/plugin/image/test/ImageIntegrationTests.java:
--------------------------------------------------------------------------------
1 | package org.elasticsearch.plugin.image.test;
2 |
3 | import org.apache.sanselan.ImageFormat;
4 | import org.apache.sanselan.ImageWriteException;
5 | import org.apache.sanselan.Sanselan;
6 | import org.elasticsearch.action.index.IndexResponse;
7 | import org.elasticsearch.action.search.SearchResponse;
8 | import org.elasticsearch.common.collect.Maps;
9 | import org.elasticsearch.common.settings.ImmutableSettings;
10 | import org.elasticsearch.common.settings.Settings;
11 | import org.elasticsearch.index.mapper.image.FeatureEnum;
12 | import org.elasticsearch.index.mapper.image.HashEnum;
13 | import org.elasticsearch.index.query.BoolQueryBuilder;
14 | import org.elasticsearch.index.query.QueryBuilders;
15 | import org.elasticsearch.index.query.TermQueryBuilder;
16 | import org.elasticsearch.index.query.image.ImageQueryBuilder;
17 | import org.elasticsearch.plugins.PluginsService;
18 | import org.elasticsearch.search.SearchHit;
19 | import org.elasticsearch.search.SearchHits;
20 | import org.elasticsearch.test.ElasticsearchIntegrationTest;
21 | import org.junit.Before;
22 | import org.junit.Test;
23 |
24 | import java.awt.image.BufferedImage;
25 | import java.io.IOException;
26 |
27 | import static org.elasticsearch.client.Requests.putMappingRequest;
28 | import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath;
29 | import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;
30 | import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
31 | import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
32 | import static org.hamcrest.Matchers.*;
33 |
34 | @ElasticsearchIntegrationTest.ClusterScope(scope = ElasticsearchIntegrationTest.Scope.SUITE)
35 | public class ImageIntegrationTests extends ElasticsearchIntegrationTest {
36 |
37 | private final static String INDEX_NAME = "test";
38 | private final static String DOC_TYPE_NAME = "test";
39 |
40 |
41 | @Override
42 | protected Settings nodeSettings(int nodeOrdinal) {
43 | return ImmutableSettings.builder()
44 | .put(super.nodeSettings(nodeOrdinal))
45 | .put("plugins." + PluginsService.LOAD_PLUGIN_FROM_CLASSPATH, true)
46 | .build();
47 | }
48 |
49 | @Before
50 | public void createEmptyIndex() throws Exception {
51 | logger.info("creating index [{}]", INDEX_NAME);
52 | createIndex(INDEX_NAME);
53 | ensureGreen();
54 | }
55 |
56 | @Override
57 | public Settings indexSettings() {
58 | return settingsBuilder()
59 | .put("index.number_of_replicas", 0)
60 | .put("index.number_of_shards", 5)
61 | .put("index.image.use_thread_pool", randomBoolean())
62 | .build();
63 | }
64 |
65 | @Test
66 | public void test_index_search_image() throws Exception {
67 | String mapping = copyToStringFromClasspath("/mapping/test-mapping.json");
68 | client().admin().indices().putMapping(putMappingRequest(INDEX_NAME).type(DOC_TYPE_NAME).source(mapping)).actionGet();
69 |
70 | int totalImages = randomIntBetween(10, 50);
71 |
72 | // generate random images and index
73 | String nameToSearch = null;
74 | byte[] imgToSearch = null;
75 | String idToSearch = null;
76 | for (int i = 0; i < totalImages; i ++) {
77 | byte[] imageByte = getRandomImage();
78 | String name = randomAsciiOfLength(5);
79 | IndexResponse response = index(INDEX_NAME, DOC_TYPE_NAME, jsonBuilder().startObject().field("img", imageByte).field("name", name).endObject());
80 | if (nameToSearch == null || imgToSearch == null || idToSearch == null) {
81 | nameToSearch = name;
82 | imgToSearch = imageByte;
83 | idToSearch = response.getId();
84 | }
85 | }
86 |
87 | refresh();
88 |
89 | // test search with hash
90 | ImageQueryBuilder imageQueryBuilder = new ImageQueryBuilder("img").feature(FeatureEnum.CEDD.name()).image(imgToSearch).hash(HashEnum.BIT_SAMPLING.name());
91 | SearchResponse searchResponse = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE_NAME).setQuery(imageQueryBuilder).addFields("img.metadata.exif_ifd0.x_resolution", "name").setSize(totalImages).get();
92 | assertNoFailures(searchResponse);
93 | SearchHits hits = searchResponse.getHits();
94 | assertThat("Should match at least one image", hits.getTotalHits(), greaterThanOrEqualTo(1l)); // if using hash, total result maybe different than number of images
95 | SearchHit hit = hits.getHits()[0];
96 | assertThat("First should be exact match and has score 1", hit.getScore(), equalTo(2.0f));
97 | assertImageScore(hits, nameToSearch, 2.0f);
98 | assertThat("Should have metadata", hit.getFields().get("img.metadata.exif_ifd0.x_resolution").getValues(), hasSize(1));
99 |
100 | // test search without hash and with boost
101 | ImageQueryBuilder imageQueryBuilder2 = new ImageQueryBuilder("img").feature(FeatureEnum.CEDD.name()).image(imgToSearch).boost(2.0f);
102 | SearchResponse searchResponse2 = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE_NAME).setQuery(imageQueryBuilder2).setSize(totalImages).get();
103 | assertNoFailures(searchResponse2);
104 | SearchHits hits2 = searchResponse2.getHits();
105 | assertThat("Should get all images", hits2.getTotalHits(), equalTo((long)totalImages)); // no hash used, total result should be same as number of images
106 | assertThat("First should be exact match and has score 2", searchResponse2.getHits().getMaxScore(), equalTo(4.0f));
107 | assertImageScore(hits2, nameToSearch, 4.0f);
108 |
109 | // test search for name as well
110 | BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery();
111 | boolQueryBuilder.must(QueryBuilders.termQuery("name", nameToSearch));
112 | boolQueryBuilder.must(new ImageQueryBuilder("img").feature(FeatureEnum.CEDD.name()).image(imgToSearch));
113 | SearchResponse searchResponse3 = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE_NAME).setQuery(boolQueryBuilder).setSize(totalImages).get();
114 | assertNoFailures(searchResponse3);
115 | SearchHits hits3 = searchResponse3.getHits();
116 | assertThat("Should match one document only", hits3.getTotalHits(), equalTo(1l)); // added filename to query, should have only one result
117 | SearchHit hit3 = hits3.getHits()[0];
118 | assertThat((String)hit3.getSource().get("name"), equalTo(nameToSearch));
119 |
120 | // test search with hash and limit
121 | ImageQueryBuilder imageQueryBuilder4 = new ImageQueryBuilder("img").feature(FeatureEnum.CEDD.name()).image(imgToSearch).hash(HashEnum.BIT_SAMPLING.name()).limit(10);
122 | SearchResponse searchResponse4 = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE_NAME).setQuery(imageQueryBuilder4).setSize(totalImages).get();
123 | assertNoFailures(searchResponse4);
124 | SearchHits hits4 = searchResponse4.getHits();
125 | assertThat("Should match at least one image", hits4.getTotalHits(), greaterThanOrEqualTo(1l)); // if using hash, total result maybe different than number of images
126 | SearchHit hit4 = hits4.getHits()[0];
127 | assertThat("First should be exact match and has score 1", hit4.getScore(), equalTo(2.0f));
128 | assertImageScore(hits4, nameToSearch, 2.0f);
129 |
130 | // test search metadata
131 | TermQueryBuilder termQueryBuilder = QueryBuilders.termQuery("img.metadata.exif_ifd0.x_resolution", "72 dots per inch");
132 | SearchResponse searchResponse5 = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE_NAME).setQuery(termQueryBuilder).setSize(totalImages).get();
133 | assertNoFailures(searchResponse5);
134 | SearchHits hits5 = searchResponse5.getHits();
135 | assertThat("Should match at least one record", hits5.getTotalHits(), greaterThanOrEqualTo(1l)); // if using hash, total result maybe different than number of images
136 |
137 | // test search with exist image
138 | ImageQueryBuilder imageQueryBuilder6 = new ImageQueryBuilder("img").feature(FeatureEnum.CEDD.name()).lookupIndex(INDEX_NAME).lookupType(DOC_TYPE_NAME).lookupId(idToSearch).lookupPath("img");
139 | SearchResponse searchResponse6 = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE_NAME).setQuery(imageQueryBuilder6).setSize(totalImages).get();
140 | assertNoFailures(searchResponse6);
141 | SearchHits hits6 = searchResponse6.getHits();
142 | assertThat("Should match at least one image", hits6.getTotalHits(), equalTo((long) totalImages));
143 | SearchHit hit6 = hits6.getHits()[0];
144 | assertThat("First should be exact match and has score 1", hit6.getScore(), equalTo(2.0f));
145 | assertImageScore(hits6, nameToSearch, 2.0f);
146 |
147 | // test search with exist image using hash
148 | ImageQueryBuilder imageQueryBuilder7 = new ImageQueryBuilder("img").feature(FeatureEnum.CEDD.name()).lookupIndex(INDEX_NAME).lookupType(DOC_TYPE_NAME).lookupId(idToSearch).lookupPath("img").hash(HashEnum.BIT_SAMPLING.name());
149 | SearchResponse searchResponse7 = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE_NAME).setQuery(imageQueryBuilder7).setSize(totalImages).get();
150 | assertNoFailures(searchResponse7);
151 | SearchHits hits7 = searchResponse7.getHits();
152 | assertThat("Should match at least one image", hits7.getTotalHits(), equalTo((long) totalImages));
153 | SearchHit hit7 = hits7.getHits()[0];
154 | assertThat("First should be exact match and has score 1", hit7.getScore(), equalTo(2.0f));
155 | assertImageScore(hits7, nameToSearch, 2.0f);
156 | }
157 |
158 | private void assertImageScore(SearchHits hits, String name, float score) {
159 | for (SearchHit hit : hits) {
160 | if ((hit.getSource() != null && hit.getSource().get("name").equals(name))
161 | || (hit.getFields() != null && !hit.getFields().isEmpty() && hit.getFields().get("name").getValue().equals(name))){
162 | assertThat(hit.getScore(), equalTo(score));
163 | return;
164 | }
165 | }
166 | throw new AssertionError("Image " + name + " not found");
167 | }
168 |
169 | private byte[] getRandomImage() throws IOException, ImageWriteException {
170 | int width = randomIntBetween(100, 1000);
171 | int height = randomIntBetween(100, 1000);
172 | BufferedImage image = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
173 | for (int j = 0; j < width; j ++) {
174 | for (int k = 0; k < height; k ++) {
175 | image.setRGB(j, k, randomInt(512));
176 | }
177 | }
178 | ImageFormat format = ImageFormat.IMAGE_FORMAT_TIFF;
179 | byte[] bytes = Sanselan.writeImageToBytes(image, format, Maps.newHashMap());
180 | return bytes;
181 | }
182 | }
183 |
--------------------------------------------------------------------------------
/src/test/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | log4j.rootLogger=INFO, out
2 |
3 | log4j.appender.out=org.apache.log4j.ConsoleAppender
4 | log4j.appender.out.layout=org.apache.log4j.PatternLayout
5 | log4j.appender.out.layout.conversionPattern=[%d{ISO8601}][%-5p][%-25c] %m%n
6 |
--------------------------------------------------------------------------------
/src/test/resources/mapping/test-mapping.json:
--------------------------------------------------------------------------------
1 | {
2 | "test": {
3 | "_source": {
4 | "excludes": ["img"]
5 | },
6 | "properties": {
7 | "img": {
8 | "type": "image",
9 | "feature": {
10 | "CEDD": {
11 | "hash": "BIT_SAMPLING"
12 | },
13 | "JCD": {
14 | "hash": ["BIT_SAMPLING", "LSH"]
15 | },
16 | "FCTH": {}
17 | },
18 | "metadata": {
19 | "exif_ifd0.x_resolution": {
20 | "type": "string",
21 | "index": "not_analyzed",
22 | "store": "yes"
23 | }
24 | }
25 | },
26 | "name": {
27 | "type": "string",
28 | "index": "not_analyzed"
29 | }
30 | }
31 | }
32 | }
--------------------------------------------------------------------------------