├── .gitignore ├── src ├── main │ ├── resources │ │ └── es-plugin.properties │ ├── java │ │ └── nl │ │ │ └── trifork │ │ │ └── elasticsearch │ │ │ └── facet │ │ │ └── geohash │ │ │ ├── GeohashFacet.java │ │ │ ├── ClusterReducer.java │ │ │ ├── GeohashFacetPlugin.java │ │ │ ├── GeoPoints.java │ │ │ ├── TypeAndId.java │ │ │ ├── ClusterBuilder.java │ │ │ ├── GeoFacetBuilder.java │ │ │ ├── GeohashFacetExecutor.java │ │ │ ├── BoundingBox.java │ │ │ ├── BinaryGeoHashUtils.java │ │ │ ├── GeohashFacetParser.java │ │ │ ├── Cluster.java │ │ │ └── InternalGeohashFacet.java │ └── assemblies │ │ └── plugin.xml └── test │ └── java │ └── nl │ └── trifork │ └── elasticsearch │ ├── clustering │ └── grid │ │ ├── test │ │ ├── Places.java │ │ └── GeoPointMatchers.java │ │ ├── GeoPointsTests.java │ │ ├── BoundingBoxTests.java │ │ ├── ClusterTests.java │ │ └── ClusterReducerTests.java │ └── facet │ └── geohash │ ├── InternalGeohashFacetTests.java │ └── BinaryGeoHashUtilsTest.java ├── examples └── clustered-icons │ └── www │ ├── index.html │ └── js │ └── index.js ├── pom.xml └── README.md /.gitignore: -------------------------------------------------------------------------------- 1 | /.settings 2 | /.project 3 | /.classpath 4 | /target 5 | /test-output 6 | -------------------------------------------------------------------------------- /src/main/resources/es-plugin.properties: -------------------------------------------------------------------------------- 1 | plugin=nl.trifork.elasticsearch.facet.geohash.GeohashFacetPlugin 2 | -------------------------------------------------------------------------------- /src/main/java/nl/trifork/elasticsearch/facet/geohash/GeohashFacet.java: -------------------------------------------------------------------------------- 1 | package nl.trifork.elasticsearch.facet.geohash; 2 | 3 | import java.util.List; 4 | 5 | import org.elasticsearch.search.facet.Facet; 6 | 7 | public interface GeohashFacet extends Facet, Iterable { 8 | 9 | /** 10 | * The type of 
the filter facet. 11 | */ 12 | public String TYPE = "geohash"; 13 | 14 | /** 15 | * A list of geo clusters. 16 | */ 17 | List getEntries(); 18 | } 19 | -------------------------------------------------------------------------------- /src/test/java/nl/trifork/elasticsearch/clustering/grid/test/Places.java: -------------------------------------------------------------------------------- 1 | package nl.trifork.elasticsearch.clustering.grid.test; 2 | 3 | import nl.trifork.elasticsearch.facet.geohash.BoundingBox; 4 | import org.elasticsearch.common.geo.GeoPoint; 5 | 6 | public interface Places { 7 | 8 | BoundingBox COLORADO = new BoundingBox(new GeoPoint(41.00, -109.05), new GeoPoint(37.00, -102.04)); 9 | 10 | GeoPoint DENVER = new GeoPoint(39.75, -104.87); 11 | GeoPoint LAS_VEGAS = new GeoPoint(36.08, -115.17); 12 | GeoPoint SAN_DIEGO = new GeoPoint(32.82, -117.13); 13 | } 14 | -------------------------------------------------------------------------------- /src/main/assemblies/plugin.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | plugin 4 | 5 | zip 6 | 7 | false 8 | 9 | 10 | / 11 | true 12 | true 13 | 14 | org.elasticsearch:elasticsearch 15 | 16 | 17 | 18 | 19 | -------------------------------------------------------------------------------- /src/main/java/nl/trifork/elasticsearch/facet/geohash/ClusterReducer.java: -------------------------------------------------------------------------------- 1 | package nl.trifork.elasticsearch.facet.geohash; 2 | 3 | import org.elasticsearch.common.collect.Lists; 4 | import org.elasticsearch.common.collect.Maps; 5 | 6 | import java.util.List; 7 | import java.util.Map; 8 | 9 | public class ClusterReducer { 10 | 11 | public List reduce(Iterable clusters) { 12 | Map map = Maps.newHashMap(); 13 | for (Cluster cluster : clusters) { 14 | long clusterGeohash = cluster.clusterGeohash(); 15 | if (map.containsKey(clusterGeohash)) { 16 | map.put(clusterGeohash, 
map.get(clusterGeohash).merge(cluster)); 17 | } 18 | else { 19 | map.put(clusterGeohash, cluster); 20 | } 21 | } 22 | return Lists.newArrayList(map.values()); 23 | } 24 | 25 | } 26 | -------------------------------------------------------------------------------- /src/main/java/nl/trifork/elasticsearch/facet/geohash/GeohashFacetPlugin.java: -------------------------------------------------------------------------------- 1 | package nl.trifork.elasticsearch.facet.geohash; 2 | 3 | import org.elasticsearch.common.inject.Module; 4 | import org.elasticsearch.plugins.AbstractPlugin; 5 | import org.elasticsearch.search.facet.FacetModule; 6 | import org.elasticsearch.search.facet.TransportFacetModule; 7 | 8 | public class GeohashFacetPlugin extends AbstractPlugin { 9 | 10 | @Override 11 | public String name() { 12 | return "geohash-facet"; 13 | } 14 | 15 | @Override 16 | public String description() { 17 | return "Facet for clustering geo points based on their geohash"; 18 | } 19 | 20 | @Override 21 | public void processModule(Module module) { 22 | if (module instanceof FacetModule) { 23 | ((FacetModule) module).addFacetProcessor(GeohashFacetParser.class); 24 | InternalGeohashFacet.registerStreams(); 25 | } 26 | if (module instanceof TransportFacetModule) { 27 | InternalGeohashFacet.registerStreams(); 28 | } 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /src/test/java/nl/trifork/elasticsearch/clustering/grid/test/GeoPointMatchers.java: -------------------------------------------------------------------------------- 1 | package nl.trifork.elasticsearch.clustering.grid.test; 2 | 3 | import org.elasticsearch.common.geo.GeoPoint; 4 | import org.hamcrest.CustomTypeSafeMatcher; 5 | import org.hamcrest.Description; 6 | import org.hamcrest.Matchers; 7 | import org.hamcrest.TypeSafeMatcher; 8 | 9 | import nl.trifork.elasticsearch.facet.geohash.GeoPoints; 10 | 11 | public class GeoPointMatchers { 12 | 13 | private 
GeoPointMatchers() { 14 | 15 | } 16 | 17 | public static TypeSafeMatcher closeTo(final GeoPoint expected) { 18 | return closeTo(expected, 0.001); 19 | } 20 | 21 | public static TypeSafeMatcher closeTo(final GeoPoint expected, final double error) { 22 | return new CustomTypeSafeMatcher("close to \"" + GeoPoints.toString(expected) + "\"") { 23 | @Override 24 | protected void describeMismatchSafely(GeoPoint item, Description description) { 25 | description.appendText("was ").appendValue(GeoPoints.toString(item)); 26 | } 27 | @Override 28 | protected boolean matchesSafely(GeoPoint point) { 29 | return Matchers.closeTo(point.getLat(), error).matches(expected.getLat()) && 30 | Matchers.closeTo(point.getLon(), error).matches(expected.getLon()); 31 | } 32 | }; 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /src/test/java/nl/trifork/elasticsearch/clustering/grid/GeoPointsTests.java: -------------------------------------------------------------------------------- 1 | package nl.trifork.elasticsearch.clustering.grid; 2 | 3 | import static nl.trifork.elasticsearch.clustering.grid.test.Places.*; 4 | import static org.hamcrest.MatcherAssert.assertThat; 5 | import static org.hamcrest.Matchers.*; 6 | 7 | import java.io.IOException; 8 | 9 | import nl.trifork.elasticsearch.facet.geohash.GeoPoints; 10 | import org.elasticsearch.common.geo.GeoPoint; 11 | import org.elasticsearch.common.io.stream.BytesStreamInput; 12 | import org.elasticsearch.common.io.stream.BytesStreamOutput; 13 | import org.elasticsearch.common.unit.DistanceUnit; 14 | import org.testng.annotations.Test; 15 | 16 | public class GeoPointsTests { 17 | 18 | @Test 19 | public void testReadFromWriteTo() throws IOException { 20 | BytesStreamOutput out = new BytesStreamOutput(); 21 | GeoPoints.writeTo(LAS_VEGAS, out); 22 | BytesStreamInput in = new BytesStreamInput(out.bytes()); 23 | GeoPoint point = GeoPoints.readFrom(in); 24 | assertThat("Latitude", point.lat(), 
equalTo(LAS_VEGAS.lat())); 25 | assertThat("Longitude", point.lon(), equalTo(LAS_VEGAS.lon())); 26 | } 27 | 28 | @Test 29 | public void testDistance() throws IOException { 30 | assertThat("Distance (mi)", GeoPoints.distance(LAS_VEGAS, SAN_DIEGO, DistanceUnit.MILES), closeTo(250.0, 5.0)); 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /examples/clustered-icons/www/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 16 | 17 | 18 | 19 | 20 |
21 |
22 | 23 | 24 | 25 | 26 | 27 | 28 | 46 | 47 | 48 | -------------------------------------------------------------------------------- /src/test/java/nl/trifork/elasticsearch/clustering/grid/BoundingBoxTests.java: -------------------------------------------------------------------------------- 1 | package nl.trifork.elasticsearch.clustering.grid; 2 | 3 | import static nl.trifork.elasticsearch.clustering.grid.test.Places.*; 4 | import static org.hamcrest.MatcherAssert.assertThat; 5 | import static org.hamcrest.Matchers.*; 6 | 7 | import nl.trifork.elasticsearch.facet.geohash.BoundingBox; 8 | import org.elasticsearch.common.unit.DistanceUnit; 9 | import org.testng.annotations.Test; 10 | 11 | public class BoundingBoxTests { 12 | 13 | @Test 14 | public void testSize() { 15 | assertThat("Size of Denver", new BoundingBox(DENVER, DENVER).size(DistanceUnit.KILOMETERS), equalTo(0.0)); 16 | assertThat("Size of Colorado", COLORADO.size(DistanceUnit.KILOMETERS), greaterThan(750.0)); 17 | } 18 | 19 | @Test 20 | public void testContains() { 21 | assertThat("Top left corner is in bounds", COLORADO.contains(COLORADO.topLeft()), is(true)); 22 | assertThat("Bottom right corner is in bounds", COLORADO.contains(COLORADO.bottomRight()), is(true)); 23 | assertThat("Denver is in Colorado", COLORADO.contains(DENVER), is(true)); 24 | assertThat("Las Vegas is in Colorado", COLORADO.contains(LAS_VEGAS), is(false)); 25 | } 26 | 27 | @Test 28 | public void testExtend() { 29 | BoundingBox southwest = COLORADO.extend(SAN_DIEGO); 30 | assertThat("Denver is in the SW", southwest.contains(DENVER), is(true)); 31 | assertThat("Las Vegas is in the SW", southwest.contains(LAS_VEGAS), is(true)); 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /src/main/java/nl/trifork/elasticsearch/facet/geohash/GeoPoints.java: -------------------------------------------------------------------------------- 1 | package nl.trifork.elasticsearch.facet.geohash; 2 | 3 | import 
java.io.IOException; 4 | 5 | import org.elasticsearch.common.geo.GeoDistance; 6 | import org.elasticsearch.common.geo.GeoPoint; 7 | import org.elasticsearch.common.io.stream.StreamInput; 8 | import org.elasticsearch.common.io.stream.StreamOutput; 9 | import org.elasticsearch.common.unit.DistanceUnit; 10 | 11 | /** 12 | * Modified from https://github.com/zenobase/geocluster-facet/blob/master/src/main/java/com/zenobase/search/facet/geocluster/GeoPoints.java 13 | */ 14 | public class GeoPoints { 15 | 16 | private GeoPoints() { 17 | 18 | } 19 | 20 | public static double distance(GeoPoint from, GeoPoint to, DistanceUnit unit) { 21 | return GeoDistance.ARC.calculate(from.getLat(), from.getLon(), 22 | to.getLat(), to.getLon(), unit); 23 | } 24 | 25 | public static GeoPoint readFrom(StreamInput in) throws IOException { 26 | return new GeoPoint(in.readDouble(), in.readDouble()); 27 | } 28 | 29 | public static void writeTo(GeoPoint point, StreamOutput out) throws IOException { 30 | out.writeDouble(point.getLat()); 31 | out.writeDouble(point.getLon()); 32 | } 33 | 34 | public static GeoPoint copy(GeoPoint point) { 35 | return new GeoPoint(point.lat(), point.lon()); 36 | } 37 | 38 | public static boolean equals(GeoPoint left, GeoPoint right) { 39 | return toString(left).equals(toString(right)); 40 | } 41 | 42 | public static String toString(GeoPoint point) { 43 | return String.format("%.4f,%.4f", point.getLat(), point.getLon()); 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /src/main/java/nl/trifork/elasticsearch/facet/geohash/TypeAndId.java: -------------------------------------------------------------------------------- 1 | package nl.trifork.elasticsearch.facet.geohash; 2 | 3 | import org.elasticsearch.common.io.stream.StreamInput; 4 | import org.elasticsearch.common.io.stream.StreamOutput; 5 | 6 | import java.io.IOException; 7 | 8 | /** 9 | * Unique identifier of a document within an index 10 | */ 11 | public class 
TypeAndId { 12 | private final String type; 13 | private final String id; 14 | 15 | public TypeAndId(String type, String id) { 16 | this.type = type; 17 | this.id = id; 18 | } 19 | 20 | public String type() { 21 | return type; 22 | } 23 | 24 | public String id() { 25 | return id; 26 | } 27 | 28 | public void writeTo(StreamOutput out) throws IOException { 29 | 30 | out.writeString(type); 31 | out.writeString(id); 32 | } 33 | 34 | public static TypeAndId readFrom(StreamInput in) throws IOException { 35 | 36 | return new TypeAndId(in.readString(), in.readString()); 37 | } 38 | 39 | @Override 40 | public boolean equals(Object o) { 41 | if (this == o) return true; 42 | if (o == null || getClass() != o.getClass()) return false; 43 | 44 | TypeAndId typeAndId = (TypeAndId) o; 45 | 46 | if (!id.equals(typeAndId.id)) return false; 47 | if (!type.equals(typeAndId.type)) return false; 48 | 49 | return true; 50 | } 51 | 52 | @Override 53 | public int hashCode() { 54 | int result = type.hashCode(); 55 | result = 31 * result + id.hashCode(); 56 | return result; 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /src/main/java/nl/trifork/elasticsearch/facet/geohash/ClusterBuilder.java: -------------------------------------------------------------------------------- 1 | package nl.trifork.elasticsearch.facet.geohash; 2 | 3 | import java.util.Map; 4 | 5 | import org.apache.lucene.util.BytesRef; 6 | import org.elasticsearch.common.collect.ImmutableList; 7 | import org.elasticsearch.common.collect.Maps; 8 | import org.elasticsearch.common.geo.GeoPoint; 9 | 10 | /** 11 | * Modified from the original on https://github.com/zenobase/geocluster-facet/blob/master/src/main/java/com/zenobase/search/facet/geocluster/GeoClusterBuilder.java 12 | */ 13 | public class ClusterBuilder { 14 | 15 | private final int geohashBits; 16 | private final Map clusters = Maps.newHashMap(); 17 | 18 | public ClusterBuilder(double factor) { 19 | this.geohashBits = 
BinaryGeoHashUtils.MAX_PREFIX_LENGTH - (int) Math.round(factor * BinaryGeoHashUtils.MAX_PREFIX_LENGTH); 20 | } 21 | 22 | public ClusterBuilder add(TypeAndId typeAndId, GeoPoint point) { 23 | long geohash = BinaryGeoHashUtils.encodeAsLong(point, geohashBits); 24 | if (clusters.containsKey(geohash)) { 25 | clusters.get(geohash).add(point); 26 | } 27 | else { 28 | if (typeAndId == null) { 29 | 30 | clusters.put(geohash, new Cluster(point, geohash, geohashBits)); 31 | } else { 32 | 33 | clusters.put(geohash, new Cluster(point, geohash, geohashBits, typeAndId)); 34 | } 35 | } 36 | return this; 37 | } 38 | 39 | public ClusterBuilder add(GeoPoint point) { 40 | return add(null, point); 41 | } 42 | 43 | public ImmutableList build() { 44 | return ImmutableList.copyOf(clusters.values()); 45 | } 46 | 47 | } 48 | -------------------------------------------------------------------------------- /src/test/java/nl/trifork/elasticsearch/facet/geohash/InternalGeohashFacetTests.java: -------------------------------------------------------------------------------- 1 | package nl.trifork.elasticsearch.facet.geohash; 2 | 3 | import nl.trifork.elasticsearch.clustering.grid.test.Places; 4 | import org.elasticsearch.common.io.stream.BytesStreamInput; 5 | import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; 6 | import org.testng.annotations.Test; 7 | 8 | import java.io.ByteArrayOutputStream; 9 | import java.util.Arrays; 10 | import static org.hamcrest.MatcherAssert.assertThat; 11 | import static org.hamcrest.Matchers.*; 12 | 13 | 14 | public class InternalGeohashFacetTests { 15 | 16 | @Test 17 | public void testSerializationRoundtrip() throws Exception { 18 | 19 | InternalGeohashFacet facet = new InternalGeohashFacet( 20 | "name", 21 | 0.5, 22 | true, 23 | false, 24 | Arrays.asList(new Cluster(Places.DENVER, 25 | BinaryGeoHashUtils.encodeAsLong(Places.DENVER, BinaryGeoHashUtils.MAX_PREFIX_LENGTH), 26 | BinaryGeoHashUtils.MAX_PREFIX_LENGTH)) 27 | ); 28 | 29 | 
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); 30 | OutputStreamStreamOutput output = new OutputStreamStreamOutput(byteArrayOutputStream); 31 | 32 | facet.writeTo(output); 33 | 34 | InternalGeohashFacet deserialized = new InternalGeohashFacet(); 35 | deserialized.readFrom(new BytesStreamInput(byteArrayOutputStream.toByteArray(), false)); 36 | 37 | assertThat(deserialized.factor(), is(facet.factor())); 38 | assertThat(deserialized.showGeohashCell(), is(facet.showGeohashCell())); 39 | assertThat(deserialized.getEntries().size(), is(1)); 40 | assertThat(deserialized.getEntries().get(0).center(), is(Places.DENVER)); 41 | assertThat(deserialized.getEntries().get(0).clusterGeohash(), is(BinaryGeoHashUtils.encodeAsLong(Places.DENVER, BinaryGeoHashUtils.MAX_PREFIX_LENGTH))); 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /src/test/java/nl/trifork/elasticsearch/clustering/grid/ClusterTests.java: -------------------------------------------------------------------------------- 1 | package nl.trifork.elasticsearch.clustering.grid; 2 | 3 | import static nl.trifork.elasticsearch.clustering.grid.test.GeoPointMatchers.closeTo; 4 | import static nl.trifork.elasticsearch.clustering.grid.test.Places.*; 5 | import static org.hamcrest.MatcherAssert.assertThat; 6 | import static org.hamcrest.Matchers.equalTo; 7 | 8 | import java.io.IOException; 9 | import java.util.Random; 10 | 11 | import nl.trifork.elasticsearch.facet.geohash.BinaryGeoHashUtils; 12 | import nl.trifork.elasticsearch.facet.geohash.BoundingBox; 13 | import nl.trifork.elasticsearch.facet.geohash.Cluster; 14 | import nl.trifork.elasticsearch.facet.geohash.GeoPoints; 15 | import org.elasticsearch.common.geo.GeoPoint; 16 | import org.elasticsearch.common.io.stream.BytesStreamInput; 17 | import org.elasticsearch.common.io.stream.BytesStreamOutput; 18 | import org.testng.annotations.Test; 19 | 20 | public class ClusterTests { 21 | 22 | private 
Random random = new Random(System.currentTimeMillis()); 23 | 24 | @Test 25 | public void serializationRoundtrip() throws IOException { 26 | 27 | for (int i = 0; i < 100; i++) { 28 | 29 | GeoPoint point = new GeoPoint(random.nextDouble() * 180 - 90, random.nextDouble() * 360 - 180); 30 | 31 | for (int geohashBits = 0; geohashBits <= BinaryGeoHashUtils.MAX_PREFIX_LENGTH; geohashBits++) { 32 | 33 | System.out.printf("Testing with point %s and %d geohash bit(s)...\n", point.toString(), geohashBits); 34 | 35 | Cluster cluster = new Cluster(point, BinaryGeoHashUtils.encodeAsLong(point, geohashBits), geohashBits); 36 | assertThat(cluster, equalTo(roundtrip(cluster))); 37 | } 38 | } 39 | 40 | } 41 | 42 | private Cluster roundtrip(Cluster cluster) throws IOException { 43 | 44 | BytesStreamOutput out = new BytesStreamOutput(); 45 | cluster.writeTo(out); 46 | 47 | return Cluster.readFrom(new BytesStreamInput(out.bytes())); 48 | } 49 | 50 | 51 | } 52 | -------------------------------------------------------------------------------- /src/main/java/nl/trifork/elasticsearch/facet/geohash/GeoFacetBuilder.java: -------------------------------------------------------------------------------- 1 | package nl.trifork.elasticsearch.facet.geohash; 2 | 3 | import org.elasticsearch.action.search.SearchResponse; 4 | import org.elasticsearch.action.search.SearchType; 5 | import org.elasticsearch.client.Client; 6 | import org.elasticsearch.client.transport.TransportClient; 7 | import org.elasticsearch.common.settings.ImmutableSettings; 8 | import org.elasticsearch.common.settings.Settings; 9 | import org.elasticsearch.common.transport.InetSocketTransportAddress; 10 | import org.elasticsearch.common.xcontent.XContentBuilder; 11 | import org.elasticsearch.search.builder.SearchSourceBuilderException; 12 | import org.elasticsearch.search.facet.FacetBuilder; 13 | import org.elasticsearch.search.facet.terms.TermsFacet; 14 | 15 | import java.io.IOException; 16 | import java.util.Map; 17 | 18 | 
public class GeoFacetBuilder extends FacetBuilder { 19 | private String fieldName; 20 | private double factor; 21 | private boolean showGeohashCell; 22 | private boolean showDocId; 23 | 24 | /** 25 | * Construct a new term facet with the provided facet name. 26 | * 27 | * @param name The facet name. 28 | */ 29 | public GeoFacetBuilder(String name) { 30 | super(name); 31 | } 32 | 33 | /** 34 | * The field the terms will be collected from. 35 | */ 36 | public GeoFacetBuilder field(String field) { 37 | this.fieldName = field; 38 | return this; 39 | } 40 | 41 | public GeoFacetBuilder showGeohashCell(boolean showGeohashCell) { 42 | this.showGeohashCell = showGeohashCell; 43 | return this; 44 | } 45 | 46 | public GeoFacetBuilder showDocId(boolean showDocId) { 47 | this.showDocId = showDocId; 48 | return this; 49 | } 50 | 51 | public GeoFacetBuilder factor(double factor) { 52 | this.factor = factor; 53 | return this; 54 | } 55 | 56 | @Override 57 | public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { 58 | if (fieldName == null) { 59 | throw new SearchSourceBuilderException("field must be set facet for facet [" + name + "]"); 60 | } 61 | builder.startObject(name); 62 | 63 | builder.startObject("geohash"); 64 | builder.field("field", fieldName); 65 | builder.field("factor", factor); 66 | builder.field("show_geohash_cell", showGeohashCell); 67 | builder.field("show_doc_id", showDocId); 68 | 69 | builder.endObject(); 70 | addFilterFacetAndGlobal(builder, params); 71 | builder.endObject(); 72 | return builder; 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /src/test/java/nl/trifork/elasticsearch/clustering/grid/ClusterReducerTests.java: -------------------------------------------------------------------------------- 1 | package nl.trifork.elasticsearch.clustering.grid; 2 | 3 | import nl.trifork.elasticsearch.facet.geohash.BinaryGeoHashUtils; 4 | import 
package nl.trifork.elasticsearch.clustering.grid;

import nl.trifork.elasticsearch.facet.geohash.BinaryGeoHashUtils;
import nl.trifork.elasticsearch.facet.geohash.Cluster;
import nl.trifork.elasticsearch.facet.geohash.ClusterReducer;
import org.elasticsearch.common.geo.GeoPoint;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;

import java.util.Arrays;
import java.util.List;

import static nl.trifork.elasticsearch.clustering.grid.test.Places.DENVER;
import static nl.trifork.elasticsearch.clustering.grid.test.Places.LAS_VEGAS;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;

/**
 * Tests for {@link ClusterReducer}: clusters sharing a geohash prefix are
 * merged, clusters with distinct prefixes are kept apart.
 */
public class ClusterReducerTests {

    private ClusterReducer clusterReducer;

    @BeforeMethod
    public void setUp() throws Exception {
        clusterReducer = new ClusterReducer();
    }

    /** Builds a single-point cluster at the given precision. */
    private static Cluster singlePointCluster(GeoPoint location, int geohashBits) {
        return new Cluster(location, BinaryGeoHashUtils.encodeAsLong(location, geohashBits), geohashBits);
    }

    @Test
    public void testOneCluster() throws Exception {

        Iterable<Cluster> clusters =
                Arrays.asList(singlePointCluster(DENVER, BinaryGeoHashUtils.MAX_PREFIX_LENGTH));

        List<Cluster> reduced = clusterReducer.reduce(clusters);

        assertThat(reduced.size(), is(1));
    }

    @Test
    public void testTwoClustersWithDifferentGeohashes() throws Exception {

        // At maximum precision Denver and Las Vegas fall into different cells.
        Iterable<Cluster> clusters = Arrays.asList(
                singlePointCluster(DENVER, BinaryGeoHashUtils.MAX_PREFIX_LENGTH),
                singlePointCluster(LAS_VEGAS, BinaryGeoHashUtils.MAX_PREFIX_LENGTH));

        List<Cluster> reduced = clusterReducer.reduce(clusters);

        assertThat(reduced.size(), is(2));
    }

    @Test
    public void testTwoClustersWithSameGeohash() throws Exception {

        // At 4 bits of precision both cities share a cell and must be merged.
        Iterable<Cluster> clusters = Arrays.asList(
                singlePointCluster(DENVER, 4),
                singlePointCluster(LAS_VEGAS, 4));

        List<Cluster> reduced = clusterReducer.reduce(clusters);

        assertThat(reduced.size(), is(1));
        assertThat(reduced.get(0).size(), is(2));
    }
}
67 | 68 | List reduced = clusterReducer.reduce(clusters); 69 | 70 | assertThat(reduced.size(), is(1)); 71 | assertThat(reduced.get(0).size(), is(2)); 72 | } 73 | } 74 | -------------------------------------------------------------------------------- /pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4.0.0 4 | nl.trifork.geohash-facet 5 | geohash-facet 6 | 0.0.20-SNAPSHOT 7 | jar 8 | 9 | 10 | 11 | org.elasticsearch 12 | elasticsearch 13 | 1.4.1 14 | 15 | 16 | org.hamcrest 17 | hamcrest-all 18 | 1.3 19 | test 20 | 21 | 22 | org.testng 23 | testng 24 | 6.8 25 | test 26 | 27 | 28 | 29 | 30 | 31 | 32 | org.apache.maven.plugins 33 | maven-compiler-plugin 34 | 2.3.2 35 | 36 | 1.6 37 | 1.6 38 | 39 | 40 | 41 | org.apache.maven.plugins 42 | maven-surefire-plugin 43 | 2.12.3 44 | 45 | 46 | **/*Tests.java 47 | 48 | 49 | 50 | 51 | org.apache.maven.plugins 52 | maven-source-plugin 53 | 2.1.2 54 | 55 | 56 | attach-sources 57 | 58 | jar 59 | 60 | 61 | 62 | 63 | 64 | maven-assembly-plugin 65 | 2.3 66 | 67 | ${project.build.directory}/releases/ 68 | false 69 | 70 | ${basedir}/src/main/assemblies/plugin.xml 71 | 72 | 73 | 74 | 75 | package 76 | 77 | single 78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | scm:git:https://github.com/triforkams/geohash-facet.git 87 | scm:git:https://github.com/triforkams/geohash-facet.git 88 | HEAD 89 | 90 | 91 | 92 | 93 | 94 | -------------------------------------------------------------------------------- /src/main/java/nl/trifork/elasticsearch/facet/geohash/GeohashFacetExecutor.java: -------------------------------------------------------------------------------- 1 | package nl.trifork.elasticsearch.facet.geohash; 2 | 3 | import java.io.IOException; 4 | 5 | import org.apache.lucene.index.AtomicReaderContext; 6 | import org.apache.lucene.util.BytesRef; 7 | import org.elasticsearch.index.fielddata.*; 8 | import org.elasticsearch.search.facet.FacetExecutor; 9 | import 
package nl.trifork.elasticsearch.facet.geohash;

import java.io.IOException;

import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.index.fielddata.*;
import org.elasticsearch.search.facet.FacetExecutor;
import org.elasticsearch.search.facet.InternalFacet;

import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.index.mapper.Uid;

/**
 * Facet executor that feeds every collected document's geo points (and,
 * optionally, its _uid) into a {@link ClusterBuilder}, then packages the
 * resulting clusters as an {@link InternalGeohashFacet}.
 */
public class GeohashFacetExecutor extends FacetExecutor {

    private final IndexGeoPointFieldData indexFieldData;
    private final IndexFieldData idIndexFieldData;
    private final double factor;
    private final boolean showGeohashCell;
    private final boolean showDocumentId;
    private final ClusterBuilder builder;

    /**
     * @param indexFieldData   field data for the geo_point field being clustered
     * @param idIndexFieldData field data for the _uid field (used when showDocumentId is set)
     * @param factor           clustering factor in [0.0, 1.0]
     * @param showGeohashCell  include geohash cell bounds in the response
     * @param showDocumentId   attach type/id to single-document clusters
     */
    public GeohashFacetExecutor(IndexGeoPointFieldData indexFieldData, IndexFieldData idIndexFieldData,
                                double factor, boolean showGeohashCell, boolean showDocumentId) {
        this.indexFieldData = indexFieldData;
        this.idIndexFieldData = idIndexFieldData;
        this.factor = factor;
        this.showGeohashCell = showGeohashCell;
        this.showDocumentId = showDocumentId;
        this.builder = new ClusterBuilder(factor);
    }

    @Override
    public FacetExecutor.Collector collector() {
        return new Collector();
    }

    @Override
    public InternalFacet buildFacet(String facetName) {
        return new InternalGeohashFacet(facetName, factor, showGeohashCell, showDocumentId, builder.build());
    }

    /** Per-segment collector that forwards doc values to the shared builder. */
    private class Collector extends FacetExecutor.Collector {

        private SortedBinaryDocValues ids;
        private MultiGeoPointValues values;

        @Override
        public void setNextReader(AtomicReaderContext context) throws IOException {
            ids = idIndexFieldData.load(context).getBytesValues();
            values = indexFieldData.load(context).getGeoPointValues();
        }

        @Override
        public void collect(int docId) throws IOException {
            ids.setDocument(docId);
            values.setDocument(docId);

            // Split the first _uid value ("type#id") into its two parts, if present.
            TypeAndId typeAndId = null;
            if (ids.count() > 0) {
                BytesRef[] bytesRefs = Uid.splitUidIntoTypeAndId(ids.valueAt(0));
                typeAndId = new TypeAndId(bytesRefs[0].utf8ToString(), bytesRefs[1].utf8ToString());
            }

            // A document may carry multiple points; each one is clustered separately.
            for (int i = 0; i < values.count(); i++) {
                // Copy: the field data may reuse the returned GeoPoint instance.
                GeoPoint gp = GeoPoints.copy(values.valueAt(i));

                if (showDocumentId) {
                    builder.add(typeAndId, gp);
                } else {
                    builder.add(gp);
                }
            }
        }

        @Override
        public void postCollection() {
            // Nothing to flush; the builder accumulates state incrementally.
        }
    }
}
GeoPoint(0, 90), 1), 1); 41 | 42 | assertThat(bbox[2], closeTo(0, 0.1)); 43 | assertThat(bbox[3], closeTo(180, 0.1)); 44 | } 45 | 46 | @Test 47 | public void decodeCell_point_in_left_half() throws Exception { 48 | 49 | double[] bbox = decodeCell(encodeAsLong(new GeoPoint(0, -90), 1), 1); 50 | 51 | assertThat(bbox[2], closeTo(-180, 0.1)); 52 | assertThat(bbox[3], closeTo(0, 0.1)); 53 | } 54 | 55 | @Test 56 | public void decodeCell_point_in_upper_half() throws Exception { 57 | 58 | double[] bbox = decodeCell(encodeAsLong(new GeoPoint(45, 0), 2), 2); 59 | 60 | assertThat(bbox[0], closeTo(0, 0.1)); 61 | assertThat(bbox[1], closeTo(90, 0.1)); 62 | } 63 | 64 | @Test 65 | public void decodeCell_point_in_lower_half() throws Exception { 66 | 67 | double[] bbox = decodeCell(encodeAsLong(new GeoPoint(-45, 0), 2), 2); 68 | 69 | assertThat(bbox[0], closeTo(-90, 0.1)); 70 | assertThat(bbox[1], closeTo(0, 0.1)); 71 | } 72 | 73 | @Test 74 | public void decodeCell_denver_is_inside_in_its_own_cell() throws Exception { 75 | 76 | double[] bbox = decodeCell(encodeAsLong(Places.DENVER, MAX_PREFIX_LENGTH), MAX_PREFIX_LENGTH); 77 | 78 | assertThat(bbox[0], lessThan(Places.DENVER.lat())); 79 | assertThat(bbox[1], greaterThan(Places.DENVER.lat())); 80 | assertThat(bbox[2], lessThan(Places.DENVER.lon())); 81 | assertThat(bbox[3], greaterThan(Places.DENVER.lon())); 82 | } 83 | } 84 | -------------------------------------------------------------------------------- /src/main/java/nl/trifork/elasticsearch/facet/geohash/BoundingBox.java: -------------------------------------------------------------------------------- 1 | package nl.trifork.elasticsearch.facet.geohash; 2 | 3 | import java.io.IOException; 4 | import java.util.Arrays; 5 | 6 | import org.elasticsearch.common.Preconditions; 7 | import org.elasticsearch.common.geo.GeoPoint; 8 | import org.elasticsearch.common.io.stream.StreamInput; 9 | import org.elasticsearch.common.io.stream.StreamOutput; 10 | import 
package nl.trifork.elasticsearch.facet.geohash;

import java.io.IOException;
import java.util.Arrays;

import org.elasticsearch.common.Preconditions;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.unit.DistanceUnit;

/**
 * An immutable lat/lon rectangle defined by its top-left and bottom-right
 * corners.
 *
 * Modified from the original on https://github.com/zenobase/geocluster-facet/blob/master/src/main/java/com/zenobase/search/facet/geocluster/GeoBoundingBox.java
 */
public class BoundingBox {

    private final GeoPoint topLeft, bottomRight;

    /** A degenerate box covering exactly one point. */
    public BoundingBox(GeoPoint point) {
        this(point, point);
    }

    public BoundingBox(GeoPoint topLeft, GeoPoint bottomRight) {
        // Corners must be ordered: top-left is north-west of bottom-right.
        Preconditions.checkArgument(topLeft.getLat() >= bottomRight.getLat());
        Preconditions.checkArgument(topLeft.getLon() <= bottomRight.getLon());
        this.topLeft = topLeft;
        this.bottomRight = bottomRight;
    }

    public GeoPoint topLeft() {
        return topLeft;
    }

    public GeoPoint bottomRight() {
        return bottomRight;
    }

    /** True when the point lies inside this box (inclusive on all edges). */
    public boolean contains(GeoPoint point) {
        boolean latInside = point.getLat() <= topLeft.getLat() && point.getLat() >= bottomRight.getLat();
        boolean lonInside = point.getLon() >= topLeft.getLon() && point.getLon() <= bottomRight.getLon();
        return latInside && lonInside;
    }

    /** Returns the smallest box containing both this box and the point. */
    public BoundingBox extend(GeoPoint point) {
        return extend(point, point);
    }

    /** Returns the smallest box containing both this box and the other box. */
    public BoundingBox extend(BoundingBox bounds) {
        return extend(bounds.topLeft(), bounds.bottomRight());
    }

    private BoundingBox extend(GeoPoint newTopLeft, GeoPoint newBottomRight) {
        if (contains(newTopLeft) && contains(newBottomRight)) {
            return this;
        }
        GeoPoint mergedTopLeft = new GeoPoint(
                Math.max(topLeft.getLat(), newTopLeft.getLat()),
                Math.min(topLeft.getLon(), newTopLeft.getLon()));
        GeoPoint mergedBottomRight = new GeoPoint(
                Math.min(bottomRight.getLat(), newBottomRight.getLat()),
                Math.max(bottomRight.getLon(), newBottomRight.getLon()));
        return new BoundingBox(mergedTopLeft, mergedBottomRight);
    }

    /** Diagonal length of the box in the given unit. */
    public double size(DistanceUnit unit) {
        return GeoPoints.distance(topLeft, bottomRight, unit);
    }

    public static BoundingBox readFrom(StreamInput in) throws IOException {
        return new BoundingBox(GeoPoints.readFrom(in), GeoPoints.readFrom(in));
    }

    public void writeTo(StreamOutput out) throws IOException {
        GeoPoints.writeTo(topLeft, out);
        GeoPoints.writeTo(bottomRight, out);
    }

    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof BoundingBox)) {
            return false;
        }
        BoundingBox other = (BoundingBox) obj;
        return GeoPoints.equals(topLeft, other.topLeft()) &&
                GeoPoints.equals(bottomRight, other.bottomRight());
    }

    @Override
    public int hashCode() {
        // Hash on the string forms to stay consistent with GeoPoints.equals.
        return Arrays.hashCode(new Object[] { topLeft.toString(), bottomRight.toString() });
    }

    @Override
    public String toString() {
        return GeoPoints.toString(topLeft) + ".." + GeoPoints.toString(bottomRight);
    }
}
35 | * 36 | * @param geohashBits - values: from 0 to {@link nl.trifork.elasticsearch.facet.geohash.BinaryGeoHashUtils#MAX_PREFIX_LENGTH} 37 | */ 38 | public static double[] decodeCell(long geohash, int geohashBits) { 39 | double[] interval = {-90.0, 90.0, -180.0, 180.0}; 40 | 41 | if (geohashBits == 0) { 42 | return interval; 43 | } 44 | boolean isEven = true; 45 | 46 | geohash <<= 60 - geohashBits; 47 | int[] cds = new int[12]; 48 | for (int i = 11; i >= 0 ; i--) { 49 | cds[i] = (int) (geohash & 31); 50 | geohash >>= 5; 51 | } 52 | 53 | int bitCount = 0; 54 | for (int i = 0; i < cds.length ; i++) { 55 | final int cd = cds[i]; 56 | for (int mask : BITS) { 57 | if (isEven) { 58 | if ((cd & mask) != 0) { 59 | interval[2] = (interval[2] + interval[3]) / 2D; 60 | } else { 61 | interval[3] = (interval[2] + interval[3]) / 2D; 62 | } 63 | } else { 64 | if ((cd & mask) != 0) { 65 | interval[0] = (interval[0] + interval[1]) / 2D; 66 | } else { 67 | interval[1] = (interval[0] + interval[1]) / 2D; 68 | } 69 | } 70 | isEven = !isEven; 71 | bitCount++; 72 | if (bitCount == geohashBits) { 73 | return interval; 74 | } 75 | } 76 | } 77 | return interval; 78 | } 79 | 80 | public static void printBbox(double[] bbox) { 81 | System.out.printf("%f, %f, %f %f\n", bbox[0], bbox[1], bbox[2], bbox[3]); 82 | } 83 | 84 | } 85 | -------------------------------------------------------------------------------- /src/main/java/nl/trifork/elasticsearch/facet/geohash/GeohashFacetParser.java: -------------------------------------------------------------------------------- 1 | package nl.trifork.elasticsearch.facet.geohash; 2 | 3 | import java.io.IOException; 4 | 5 | import org.elasticsearch.common.component.AbstractComponent; 6 | import org.elasticsearch.common.inject.Inject; 7 | import org.elasticsearch.common.settings.Settings; 8 | import org.elasticsearch.common.xcontent.XContentParser; 9 | import org.elasticsearch.index.fielddata.IndexGeoPointFieldData; 10 | import 
package nl.trifork.elasticsearch.facet.geohash;

import java.io.IOException;

import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.search.facet.FacetExecutor;
import org.elasticsearch.search.facet.FacetExecutor.Mode;
import org.elasticsearch.search.facet.FacetParser;
import org.elasticsearch.search.facet.FacetPhaseExecutionException;
import org.elasticsearch.search.internal.SearchContext;

/**
 * Parses the "geohash" facet request body (field, factor, show_geohash_cell,
 * show_doc_id) and creates the matching {@link GeohashFacetExecutor}.
 *
 * Modified from the original on https://github.com/zenobase/geocluster-facet/blob/master/src/main/java/com/zenobase/search/facet/geocluster/GeoClusterFacetParser.java
 */
public class GeohashFacetParser extends AbstractComponent implements FacetParser {

    @Inject
    public GeohashFacetParser(Settings settings) {
        super(settings);
        InternalGeohashFacet.registerStreams();
    }

    @Override
    public String[] types() {
        return new String[] {
                GeohashFacet.TYPE
        };
    }

    @Override
    public Mode defaultMainMode() {
        return FacetExecutor.Mode.COLLECTOR;
    }

    @Override
    public Mode defaultGlobalMode() {
        return FacetExecutor.Mode.COLLECTOR;
    }

    @Override
    public FacetExecutor parse(String facetName, XContentParser parser, SearchContext context) throws IOException {

        // Defaults for optional parameters.
        String fieldName = null;
        double factor = 0.1;
        boolean showGeohashCell = false;
        boolean showDocumentId = false;

        String currentName = parser.currentName();
        XContentParser.Token token;
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentName = parser.currentName();
            } else if (token.isValue()) {
                if ("field".equals(currentName)) {
                    fieldName = parser.text();
                } else if ("factor".equals(currentName)) {
                    factor = parser.doubleValue();
                } else if ("show_geohash_cell".equals(currentName)) {
                    showGeohashCell = parser.booleanValue();
                } else if ("show_doc_id".equals(currentName)) {
                    showDocumentId = parser.booleanValue();
                }
            }
        }

        if (factor < 0.0 || factor > 1.0) {
            throw new FacetPhaseExecutionException(facetName, "value [" + factor + "] is not in range [0.0, 1.0]");
        }
        FieldMapper fieldMapper = context.smartNameFieldMapper(fieldName);
        if (fieldMapper == null) {
            throw new FacetPhaseExecutionException(facetName, "failed to find mapping for [" + fieldName + "]");
        }
        IndexGeoPointFieldData indexFieldData = context.fieldData().getForField(fieldMapper);

        FieldMapper idFieldMapper = context.smartNameFieldMapper("_uid");
        if (idFieldMapper == null) {
            // BUG FIX: message previously named [_id] although the lookup is for [_uid].
            throw new FacetPhaseExecutionException(facetName, "failed to find mapping for [_uid]");
        }
        IndexFieldData idIndexFieldData = context.fieldData().getForField(idFieldMapper);
        return new GeohashFacetExecutor(indexFieldData, idIndexFieldData, factor, showGeohashCell, showDocumentId);
    }
}
// Map state shared by all handlers below.
var map;
var markers = [];
var geohashCells = [];
var factor = 1.0;

// Removes all cluster markers from the map.
function clearMarkers() {
    while (markers.length) {
        markers.pop().setMap(null);
    }
}

// Adds a single cluster marker.
function addMarker(lat, lon, title, icon) {
    markers.push(new google.maps.Marker({
        position: new google.maps.LatLng(lat, lon),
        map: map,
        title: title,
        icon: icon,
        shadow: null
    }));
}

// Removes all geohash cell rectangles from the map.
function clearGeohashCells() {
    while (geohashCells.length) {
        geohashCells.pop().setMap(null);
    }
}

// Draws one geohash cell as a red rectangle.
function addGeohashCell(geohashCell) {
    geohashCells.push(new google.maps.Rectangle({
        strokeColor: '#FF0000',
        strokeOpacity: 0.8,
        strokeWeight: 2,
        fillColor: '#FF0000',
        fillOpacity: 0.35,
        map: map,
        bounds: new google.maps.LatLngBounds(
            new google.maps.LatLng(geohashCell.top_left.lat, geohashCell.top_left.lon),
            new google.maps.LatLng(geohashCell.bottom_right.lat, geohashCell.bottom_right.lon))
    }));
}

// Queries the geohash facet for the current viewport and redraws markers/cells.
function fetchFacets() {
    var ne = map.getBounds().getNorthEast();
    var sw = map.getBounds().getSouthWest();
    console.log("querying with factor " + factor);
    $.ajax({
        url: "http://" + window.location.hostname + ":9200/idx/objects/_search?search_type=count",
        contentType: "text/json",
        type: "POST",
        data: JSON.stringify({
            query: {
                filtered: {
                    query: {
                        match_all: {}
                    },
                    filter: {
                        geo_bounding_box: {
                            location: {
                                top_left: {
                                    "lat": ne.lat(),
                                    "lon": sw.lng()
                                },
                                bottom_right: {
                                    "lat": sw.lat(),
                                    "lon": ne.lng()
                                }
                            }
                        }
                    }
                }
            },
            facets: {
                places: {
                    geohash: {
                        field: "location",
                        factor: factor,
                        show_geohash_cell: true
                    }
                }
            }
        }),
        dataType: "json"}
    )
    .done(function(data){
        clearMarkers();
        clearGeohashCells();

        var clusters = data.facets.places.clusters;
        console.log('received ' + clusters.length + ' clusters');
        for (var i = 0; i < clusters.length; i++) {

            addMarker(
                clusters[i].center.lat,
                clusters[i].center.lon,
                clusters[i].total == 1?
                    "single item @" + clusters[i].center.lat + ", " + clusters[i].center.lon:
                    "cluster (" + clusters[i].total + ") @" + clusters[i].center.lat + ", " + clusters[i].center.lon,
                groupIcon(clusters[i].total)
            );
            addGeohashCell(clusters[i].geohash_cell);
        }
    })
    .fail(function(jqXHR, textStatus, errorThrown){
        // Previously a failed request was silently ignored, leaving stale markers
        // on the map with no hint of what went wrong.
        console.error("facet request failed: " + textStatus, errorThrown);
    });
}

// Chart-API pin icon; large orange pin with a count for real clusters,
// small plain pin for single items.
function groupIcon(groupSize) {
    return groupSize > 1?
        'https://chart.googleapis.com/chart?chst=d_map_spin&chld=1.0|0|FF8429|16|b|' + groupSize:
        'https://chart.googleapis.com/chart?chst=d_map_spin&chld=0.5|0|FF8429|16|b|';
}


function initialize(divId){
    initMap(divId);
}

// Creates the map and wires viewport-change events to fetchFacets().
function initMap(divId){
    var mapOptions = {
        zoom: 8,
        center: new google.maps.LatLng(52.37267, 4.89295),
        mapTypeId: google.maps.MapTypeId.ROADMAP
    };

    map = new google.maps.Map(document.getElementById(divId), mapOptions);

    google.maps.event.addDomListener(window, 'resize', function(){ fetchFacets(); } );
    google.maps.event.addListener(map, 'dragend', function(){ fetchFacets(); } );
    google.maps.event.addListener(map, 'zoom_changed', function(){ fetchFacets(); } );
}
package nl.trifork.elasticsearch.facet.geohash;

import java.io.IOException;
import java.util.Arrays;

import org.elasticsearch.common.Preconditions;
import org.elasticsearch.common.geo.GeoHashUtils;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;

/**
 * A group of points sharing one binary geohash prefix, tracking the running
 * mean center, the bounding box, and (for single-document clusters) an
 * optional type/id.
 *
 * Modified from the original on https://github.com/zenobase/geocluster-facet/blob/master/src/main/java/com/zenobase/search/facet/geocluster/GeoCluster.java
 */
public class Cluster {

    private int geohashBits;

    private int size;
    private GeoPoint center;
    private long clusterGeohash;
    private TypeAndId typeAndId;
    private BoundingBox bounds;

    /**
     * @param clusterGeohash - geohash of the cluster, obtained from {@link nl.trifork.elasticsearch.facet.geohash.BinaryGeoHashUtils#encodeAsLong(org.elasticsearch.common.geo.GeoPoint, int)}
     * @param geohashBits - number of meaningful bits of the geohash. Values: 0 to {@link nl.trifork.elasticsearch.facet.geohash.BinaryGeoHashUtils#MAX_PREFIX_LENGTH}
     */
    public Cluster(GeoPoint point, long clusterGeohash, int geohashBits) {
        this(1, point, clusterGeohash, geohashBits, new BoundingBox(point));
    }

    public Cluster(GeoPoint point, long clusterGeohash, int geohashBits, TypeAndId typeAndId) {
        this(1, point, clusterGeohash, geohashBits, typeAndId, new BoundingBox(point));
    }

    /**
     * @param clusterGeohash - geohash of the cluster, obtained from {@link nl.trifork.elasticsearch.facet.geohash.BinaryGeoHashUtils#encodeAsLong(org.elasticsearch.common.geo.GeoPoint, int)}
     * @param geohashBits - number of meaningful bits of the geohash. Values: 0 to {@link nl.trifork.elasticsearch.facet.geohash.BinaryGeoHashUtils#MAX_PREFIX_LENGTH}
     */
    public Cluster(int size, GeoPoint center, long clusterGeohash, int geohashBits, TypeAndId typeAndId, BoundingBox bounds) {
        // Sanity check: the supplied hash must match the center at this precision.
        Preconditions.checkArgument(clusterGeohash == BinaryGeoHashUtils.encodeAsLong(center, geohashBits));

        this.size = size;
        this.center = center;
        this.clusterGeohash = clusterGeohash;
        this.geohashBits = geohashBits;
        this.typeAndId = typeAndId;
        this.bounds = bounds;
    }

    /**
     * @param clusterGeohash - geohash of the cluster, obtained from {@link nl.trifork.elasticsearch.facet.geohash.BinaryGeoHashUtils#encodeAsLong(org.elasticsearch.common.geo.GeoPoint, int)}
     * @param geohashBits - number of meaningful bits of the geohash. Values: 0 to {@link nl.trifork.elasticsearch.facet.geohash.BinaryGeoHashUtils#MAX_PREFIX_LENGTH}
     */
    public Cluster(int size, GeoPoint center, long clusterGeohash, int geohashBits, BoundingBox bounds) {
        this(size, center, clusterGeohash, geohashBits, null, bounds);
    }

    /** Folds one more point into the cluster, updating center and bounds. */
    public void add(GeoPoint point) {
        Preconditions.checkArgument(clusterGeohash == BinaryGeoHashUtils.encodeAsLong(point, geohashBits));

        ++size;
        center = mean(center, size - 1, point, 1);
        bounds = bounds.extend(point);
    }

    /**
     * Combines two clusters with the same geohash prefix. The merged cluster
     * carries no typeAndId (it necessarily holds more than one document).
     */
    public Cluster merge(Cluster that) {
        Preconditions.checkArgument(clusterGeohash == that.clusterGeohash &&
                geohashBits == that.geohashBits);

        GeoPoint mergedCenter = mean(this.center, this.size(), that.center(), that.size());
        return new Cluster(this.size + that.size(),
                mergedCenter, this.clusterGeohash, this.geohashBits, this.bounds.extend(that.bounds()));
    }

    /** Weighted average of two points. */
    private static GeoPoint mean(GeoPoint left, int leftWeight, GeoPoint right, int rightWeight) {
        int totalWeight = leftWeight + rightWeight;
        double lat = (left.getLat() * leftWeight + right.getLat() * rightWeight) / totalWeight;
        double lon = (left.getLon() * leftWeight + right.getLon() * rightWeight) / totalWeight;
        return new GeoPoint(lat, lon);
    }

    public int size() {
        return size;
    }

    public GeoPoint center() {
        return center;
    }

    public BoundingBox bounds() {
        return bounds;
    }

    public long clusterGeohash() {
        return clusterGeohash;
    }

    public int clusterGeohashBits() {
        return geohashBits;
    }

    public TypeAndId typeAndId() {
        return typeAndId;
    }

    /**
     * Mirror of {@link #writeTo(StreamOutput)}: multi-point clusters carry a
     * bounding box; single-point clusters carry an optional type/id instead
     * (their box is just the center point).
     */
    public static Cluster readFrom(StreamInput in) throws IOException {
        int size = in.readVInt();
        GeoPoint center = GeoPoints.readFrom(in);
        long clusterGeohash = in.readLong();
        int geohashBits = in.readVInt();

        if (size > 1) {
            BoundingBox bounds = BoundingBox.readFrom(in);
            return new Cluster(size, center, clusterGeohash, geohashBits, bounds);
        }

        BoundingBox pointBounds = new BoundingBox(center, center);
        boolean hasDocId = in.readBoolean();
        if (hasDocId) {
            TypeAndId storedTypeAndId = TypeAndId.readFrom(in);
            return new Cluster(size, center, clusterGeohash, geohashBits, storedTypeAndId, pointBounds);
        }
        return new Cluster(size, center, clusterGeohash, geohashBits, pointBounds);
    }

    public void writeTo(StreamOutput out) throws IOException {
        out.writeVInt(size);
        GeoPoints.writeTo(center, out);
        out.writeLong(clusterGeohash);
        out.writeVInt(geohashBits);
        if (size > 1) {
            bounds.writeTo(out);
        } else if (typeAndId == null) {
            out.writeBoolean(false);
        } else {
            out.writeBoolean(true);
            typeAndId.writeTo(out);
        }
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        Cluster other = (Cluster) o;

        if (clusterGeohash != other.clusterGeohash) return false;
        if (geohashBits != other.geohashBits) return false;
        if (size != other.size) return false;
        if (!bounds.equals(other.bounds)) return false;
        if (!center.equals(other.center)) return false;
        if (typeAndId != null ? !typeAndId.equals(other.typeAndId) : other.typeAndId != null) return false;

        return true;
    }

    @Override
    public int hashCode() {
        int result = geohashBits;
        result = 31 * result + size;
        result = 31 * result + center.hashCode();
        result = 31 * result + (int) (clusterGeohash ^ (clusterGeohash >>> 32));
        result = 31 * result + (typeAndId != null ? typeAndId.hashCode() : 0);
        result = 31 * result + bounds.hashCode();
        return result;
    }

    @Override
    public String toString() {
        return String.format("%s %s (%d)", GeoPoints.toString(center), clusterGeohash, size);
    }
}
List entries; 40 | 41 | InternalGeohashFacet() { 42 | 43 | } 44 | 45 | public InternalGeohashFacet(String name, double factor, boolean showGeohashCell, boolean showDocuments, List entries) { 46 | super(name); 47 | this.factor = factor; 48 | this.showGeohashCell = showGeohashCell; 49 | this.showDocuments = showDocuments; 50 | this.entries = entries; 51 | } 52 | 53 | @Override 54 | public String getType() { 55 | return TYPE; 56 | } 57 | 58 | @Override 59 | public BytesReference streamType() { 60 | return STREAM_TYPE; 61 | } 62 | 63 | @Override 64 | public List getEntries() { 65 | return ImmutableList.copyOf(entries); 66 | } 67 | 68 | @Override 69 | public Iterator iterator() { 70 | return getEntries().iterator(); 71 | } 72 | 73 | @Override 74 | public Facet reduce(ReduceContext context) { 75 | ClusterReducer reducer = new ClusterReducer(); 76 | List reduced = reducer.reduce(flatMap(context.facets())); 77 | return new InternalGeohashFacet(getName(), factor, showGeohashCell, showDocuments, reduced); 78 | } 79 | 80 | private List flatMap(Iterable facets) { 81 | List entries = Lists.newArrayList(); 82 | for (Facet facet : facets) { 83 | entries.addAll(((GeohashFacet) facet).getEntries()); 84 | } 85 | return entries; 86 | } 87 | 88 | public static InternalGeohashFacet readGeoClusterFacet(StreamInput in) throws IOException { 89 | InternalGeohashFacet facet = new InternalGeohashFacet(); 90 | facet.readFrom(in); 91 | return facet; 92 | } 93 | 94 | @Override 95 | public void readFrom(StreamInput in) throws IOException { 96 | super.readFrom(in); 97 | factor = in.readDouble(); 98 | showGeohashCell = in .readBoolean(); 99 | showDocuments = in.readBoolean(); 100 | int entriesCount = in.readVInt(); 101 | entries = Lists.newArrayList(); 102 | for (int i = 0, max = entriesCount; i < max; ++i) { 103 | entries.add(Cluster.readFrom(in)); 104 | } 105 | } 106 | 107 | @Override 108 | public void writeTo(StreamOutput out) throws IOException { 109 | super.writeTo(out); 110 | 
out.writeDouble(factor); 111 | out.writeBoolean(showGeohashCell); 112 | out.writeBoolean(showDocuments); 113 | out.writeVInt(entries.size()); 114 | for (Cluster entry : entries) { 115 | entry.writeTo(out); 116 | } 117 | } 118 | 119 | 120 | @Override 121 | public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { 122 | builder.startObject(getName()); 123 | builder.field(Fields._TYPE, TYPE); 124 | builder.field(Fields.FACTOR, factor); 125 | builder.startArray(Fields.CLUSTERS); 126 | for (Cluster entry : entries) { 127 | toXContent(entry, builder); 128 | } 129 | builder.endArray(); 130 | builder.endObject(); 131 | return builder; 132 | } 133 | 134 | public double factor() { 135 | return factor; 136 | } 137 | 138 | public boolean showGeohashCell() { 139 | return showGeohashCell; 140 | } 141 | 142 | public boolean showDocuments() { 143 | return showDocuments; 144 | } 145 | 146 | private interface Fields { 147 | 148 | final XContentBuilderString _TYPE = new XContentBuilderString("_type"); 149 | final XContentBuilderString FACTOR = new XContentBuilderString("factor"); 150 | final XContentBuilderString CLUSTERS = new XContentBuilderString("clusters"); 151 | final XContentBuilderString TOTAL = new XContentBuilderString("total"); 152 | final XContentBuilderString CENTER = new XContentBuilderString("center"); 153 | final XContentBuilderString TOP_LEFT = new XContentBuilderString("top_left"); 154 | final XContentBuilderString BOTTOM_RIGHT = new XContentBuilderString("bottom_right"); 155 | final XContentBuilderString LAT = new XContentBuilderString("lat"); 156 | final XContentBuilderString LON = new XContentBuilderString("lon"); 157 | final XContentBuilderString GEOHASH_CELL = new XContentBuilderString("geohash_cell"); 158 | final XContentBuilderString DOC_ID = new XContentBuilderString("doc_id"); 159 | final XContentBuilderString DOC_TYPE = new XContentBuilderString("doc_type"); 160 | } 161 | 162 | private void toXContent(Cluster cluster, 
XContentBuilder builder) throws IOException { 163 | builder.startObject(); 164 | builder.field(Fields.TOTAL, cluster.size()); 165 | toXContent(cluster.center(), Fields.CENTER, builder); 166 | if (cluster.size() > 1) { 167 | toXContent(cluster.bounds().topLeft(), Fields.TOP_LEFT, builder); 168 | toXContent(cluster.bounds().bottomRight(), Fields.BOTTOM_RIGHT, builder); 169 | } else if (showDocuments) { 170 | builder.field(Fields.DOC_TYPE, cluster.typeAndId().type()); 171 | builder.field(Fields.DOC_ID, cluster.typeAndId().id()); 172 | } 173 | if (showGeohashCell) { 174 | addGeohashCell(cluster, builder); 175 | } 176 | builder.endObject(); 177 | } 178 | 179 | private void addGeohashCell(Cluster cluster, XContentBuilder builder) throws IOException { 180 | builder.startObject(Fields.GEOHASH_CELL); 181 | GeoPoint geohashCellTopLeft = new GeoPoint(); 182 | GeoPoint geohashCellBottomRight = new GeoPoint(); 183 | BinaryGeoHashUtils.decodeCell(cluster.clusterGeohash(), cluster.clusterGeohashBits(), geohashCellTopLeft, geohashCellBottomRight); 184 | toXContent(geohashCellTopLeft, Fields.TOP_LEFT, builder); 185 | toXContent(geohashCellBottomRight, Fields.BOTTOM_RIGHT, builder); 186 | builder.endObject(); 187 | } 188 | 189 | private static void toXContent(GeoPoint point, XContentBuilderString field, XContentBuilder builder) throws IOException { 190 | builder.startObject(field); 191 | builder.field(Fields.LAT, point.getLat()); 192 | builder.field(Fields.LON, point.getLon()); 193 | builder.endObject(); 194 | } 195 | } 196 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Geohash Facet Plugin for elasticsearch 2 | ========================================== 3 | 4 | Original project: https://github.com/zenobase/geocluster-facet 5 | 6 | Installation (latest version): run 7 | 8 | ``` 9 | bin/plugin --url 
https://github.com/triforkams/geohash-facet/releases/download/geohash-facet-0.0.19/geohash-facet-0.0.19.jar --install geohash-facet 10 | ``` 11 | 12 | 13 | For usage see [this blog post](http://blog.trifork.com/2013/08/01/server-side-clustering-of-geo-points-on-a-map-using-elasticsearch/). 14 | 15 | Versions 16 | -------- 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 |
geohash-facetelasticsearch compatibilitynotes
0.0.191.4.1upgraded to ES 1.4.1
0.0.181.3.6upgraded to ES 1.3.6
0.0.171.2.1upgraded to ES 1.2.1
0.0.161.0.0fix for https://github.com/triforkams/geohash-facet/issues/9
0.0.151.0.0merged https://github.com/triforkams/geohash-facet/pull/6
0.0.141.0.0implemented https://github.com/triforkams/geohash-facet/issues/7
0.0.131.0.0bug fixing, added a facet builder for use on the client side
0.0.120.90.6+, 1.0.0+implemented https://github.com/triforkams/geohash-facet/issues/4
0.0.110.90.6+, 1.0.0+fixed https://github.com/triforkams/geohash-facet/issues/3
0.0.100.90.6+, 1.0.0+updated to stay compatible with latest ES
0.0.90.90.5updated to stay compatible with latest ES
0.0.80.90.3updated to stay compatible with latest ES
0.0.70.90.2
94 | 95 | 96 | Parameters 97 | ---------- 98 | 99 | 100 | 101 | 102 | 103 | 104 | 105 | 106 | 107 | 112 | 113 | 114 | 115 | 118 | 119 | 120 | 121 | 123 | 124 | 125 | 126 |
fieldThe name of a field of type `geo_point`.
factorControls the amount of clustering, from 0.0 (don't cluster any points) to 1.0 (create a single cluster containing all points). 108 | Defaults to 0.1. The value determines the size of the cells used to cluster together points. 109 | Starting from version 0.0.14, the clustering is computed using a bit-string geohash 110 | instead of the traditional alphanumeric geohash. This gives you more fine grained selection 111 | of the level of clustering.
show_geohash_cellBoolean. If true, for each cluster included in the reply the coordinates 116 | of the corresponding geohash cell are provided (top left and bottom right corners). 117 | Defaults to false.
show_doc_idBoolean. If true, for each cluster composed of a single document the document type and ID are returned. 122 | Defaults to false.
127 | 128 | 129 | Index configuration 130 | ------------------- 131 | 132 | In the mapping, you need to declare the field containing the location as a type `geo_point`. 133 | 134 | ```javascript 135 | { 136 | "venues" : { 137 | "properties" : { 138 | "location" : { 139 | "type" : "geo_point" 140 | } 141 | } 142 | } 143 | } 144 | ``` 145 | 146 | Querying (HTTP) 147 | --------------- 148 | 149 | Example document: 150 | 151 | ```javascript 152 | { 153 | "took" : 42, 154 | "timed_out" : false, 155 | "_shards" : { 156 | "total" : 5, 157 | "successful" : 5, 158 | "failed" : 0 159 | }, 160 | "hits" : { 161 | "total" : 1, 162 | "max_score" : 1.0, 163 | "hits" : [ { 164 | "_index" : "myindex", 165 | "_type" : "venues", 166 | "_id" : "abc", 167 | "_score" : 1.0, 168 | "_source" : { 169 | "location":{ "lat":"52.01010835419531","lon":"4.722006599999986" } 170 | } 171 | }] 172 | } 173 | } 174 | ``` 175 | 176 | Query: 177 | 178 | ```javascript 179 | { 180 | "query" : { ... }, 181 | "facets" : { 182 | "places" : { 183 | "geohash" : { 184 | "field" : "location", 185 | "factor" : 0.9 186 | } 187 | } 188 | } 189 | } 190 | ``` 191 | 192 | Result: 193 | 194 | ```javascript 195 | { 196 | "took" : 67, 197 | "timed_out" : false, 198 | "_shards" : { 199 | "total" : 5, 200 | "successful" : 5, 201 | "failed" : 0 202 | }, 203 | "hits" : { 204 | "total" : 1372947, 205 | "max_score" : 0.0, 206 | "hits" : [ ] 207 | }, 208 | "facets" : { 209 | "places" : { 210 | "_type" : "geohash", 211 | "factor" : 0.9, 212 | "clusters" : [ { 213 | "total" : 8, 214 | "center" : { 215 | "lat" : 16.95292075, 216 | "lon" : 122.036081375 217 | }, 218 | "top_left" : { 219 | "lat" : 33.356026, 220 | "lon" : 121.00589 221 | }, 222 | "bottom_right" : { 223 | "lat" : 14.60962, 224 | "lon" : 129.247421 225 | } 226 | }, { 227 | "total" : 191793, 228 | "center" : { 229 | "lat" : 52.02785559813162, 230 | "lon" : 4.921446953767902 231 | }, 232 | "top_left" : { 233 | "lat" : 64.928595, 234 | "lon" : 3.36244 235 | }, 236 | 
"bottom_right" : { 237 | "lat" : 45.468945, 238 | "lon" : 26.067386 239 | } 240 | } ] 241 | } 242 | } 243 | } 244 | ``` 245 | 246 | Query with show_geohash_cell enabled: 247 | 248 | ```javascript 249 | { 250 | "query" : { ... }, 251 | "facets" : { 252 | "places" : { 253 | "geohash" : { 254 | "field" : "location", 255 | "factor" : 0.9, 256 | "show_geohash_cell" : true 257 | } 258 | } 259 | } 260 | } 261 | ``` 262 | 263 | Result: 264 | 265 | ```javascript 266 | { 267 | "took" : 61, 268 | "timed_out" : false, 269 | "_shards" : { 270 | "total" : 5, 271 | "successful" : 5, 272 | "failed" : 0 273 | }, 274 | "hits" : { 275 | "total" : 1372947, 276 | "max_score" : 0.0, 277 | "hits" : [ ] 278 | }, 279 | "facets" : { 280 | "places" : { 281 | "_type" : "geohash", 282 | "factor" : 0.9, 283 | "clusters" : [ { 284 | "total" : 8, 285 | "center" : { 286 | "lat" : 16.95292075, 287 | "lon" : 122.036081375 288 | }, 289 | "top_left" : { 290 | "lat" : 33.356026, 291 | "lon" : 121.00589 292 | }, 293 | "bottom_right" : { 294 | "lat" : 14.60962, 295 | "lon" : 129.247421 296 | }, 297 | "geohash_cell" : { 298 | "top_left" : { 299 | "lat" : 45.0, 300 | "lon" : 90.0 301 | }, 302 | "bottom_right" : { 303 | "lat" : 0.0, 304 | "lon" : 135.0 305 | } 306 | } 307 | }, { 308 | "total" : 191793, 309 | "center" : { 310 | "lat" : 52.02785559813162, 311 | "lon" : 4.921446953767902 312 | }, 313 | "top_left" : { 314 | "lat" : 64.928595, 315 | "lon" : 3.36244 316 | }, 317 | "bottom_right" : { 318 | "lat" : 45.468945, 319 | "lon" : 26.067386 320 | }, 321 | "geohash_cell" : { 322 | "top_left" : { 323 | "lat" : 90.0, 324 | "lon" : 0.0 325 | }, 326 | "bottom_right" : { 327 | "lat" : 45.0, 328 | "lon" : 45.0 329 | } 330 | } 331 | } ] 332 | } 333 | } 334 | } 335 | ``` 336 | 337 | Querying (Java) 338 | --------------- 339 | 340 | You can also do facet requests using the `GeoFacetBuilder` class included in the library: 341 | ```java 342 | public class Example { 343 | 344 | public static void main(String[] args) { 
345 | 346 | GeoFacetBuilder facetBuilder = new GeoFacetBuilder("monuments"). 347 | field("location"). 348 | factor(0.9) 349 | .showGeohashCell(false) 350 | .showDocId(true); 351 | 352 | Client client = ... // instantiate 353 | 354 | SearchResponse response = client.prepareSearch("poi") 355 | .setSearchType(SearchType.COUNT) 356 | .addFacet(facetBuilder) 357 | .execute() 358 | .actionGet(); 359 | 360 | GeohashFacet geohashFacet = (GeohashFacet) response.getFacets().facetsAsMap().get("monuments"); 361 | 362 | for (Cluster cluster: geohashFacet.getEntries()) { 363 | 364 | // do something 365 | } 366 | } 367 | 368 | } 369 | 370 | ``` 371 | 372 | Size of the cells 373 | ----------------- 374 | 375 | The table below shows the size of the cells defined by various values of the `factor` parameter. These data can be useful if you want to find the factor value which returns at most _n_ clusters given a bounding box to search on. 376 | 377 | 378 | 379 | 380 | 381 | 382 | 383 | 384 | 385 | 386 | 387 | 388 | 389 | 390 | 391 | 392 | 393 | 394 | 395 | 396 | 397 | 398 | 399 | 400 | 401 | 402 | 403 | 404 | 405 | 406 | 407 | 408 | 409 | 410 | 411 | 412 | 413 | 414 | 415 | 416 | 417 | 418 | 419 | 420 | 421 | 422 | 423 | 424 | 425 | 426 | 427 | 428 | 429 | 430 | 431 | 432 | 433 | 434 | 435 | 436 | 437 | 438 | 439 | 440 | 441 | 442 | 443 | 444 | 445 | 446 | 447 | 448 |
FactorLatitude delta (degrees)Longitude delta (degrees)
1180360
0.98180180
0.9790180
0.959090
0.934590
0.924545
0.922.545
0.8822.522.5
0.8711.2522.5
0.8511.2511.25
0.835.62511.25
0.825.6255.625
0.82.81255.625
0.782.81252.8125
0.771.406252.8125
0.751.406251.40625
0.730.7031251.40625
0.720.7031250.703125
0.70.35156250.703125
0.680.35156250.3515625
0.670.175781250.3515625
0.650.175781250.17578125
0.630.0878906250.17578125
0.620.0878906250.087890625
0.60.04394531250.087890625
0.580.04394531250.0439453125
0.570.021972656250.0439453125
0.550.021972656250.02197265625
0.530.010986328130.02197265625
0.520.010986328130.01098632813
0.50.0054931640630.01098632813
0.480.0054931640630.005493164063
0.470.0027465820310.005493164063
0.450.0027465820310.002746582031
0.430.0013732910160.002746582031
0.420.0013732910160.001373291016
0.40.00068664550780.001373291016
0.380.00068664550780.0006866455078
0.370.00034332275390.0006866455078
0.350.00034332275390.0003433227539
0.330.0001716613770.0003433227539
0.320.0001716613770.000171661377
0.30.000085830688480.000171661377
0.280.000085830688480.00008583068848
0.270.000042915344240.00008583068848
0.250.000042915344240.00004291534424
0.230.000021457672120.00004291534424
0.220.000021457672120.00002145767212
0.20.000010728836060.00002145767212
0.180.000010728836060.00001072883606
0.170.000005364418030.00001072883606
0.150.000005364418030.00000536441803
0.130.0000026822090150.00000536441803
0.120.0000026822090150.000002682209015
0.10.0000013411045070.000002682209015
0.080.0000013411045070.000001341104507
0.070.00000067055225370.000001341104507
0.050.00000067055225370.0000006705522537
0.030.00000033527612690.0000006705522537
0.020.00000033527612690.0000003352761269
00.00000016763806340.0000003352761269
449 | 450 | License 451 | ------- 452 | 453 | ``` 454 | 455 | This software is licensed under the Apache 2 license, quoted below. 456 | 457 | Copyright 2012-2013 Trifork Amsterdam BV 458 | 459 | Licensed under the Apache License, Version 2.0 (the "License"); you may not 460 | use this file except in compliance with the License. You may obtain a copy of 461 | the License at 462 | 463 | http://www.apache.org/licenses/LICENSE-2.0 464 | 465 | Unless required by applicable law or agreed to in writing, software 466 | distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 467 | WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 468 | License for the specific language governing permissions and limitations under 469 | the License. 470 | ``` 471 | --------------------------------------------------------------------------------