├── .gitignore ├── .travis.yml ├── CONTRIBUTING.md ├── README.md ├── build.xml ├── hive ├── build.xml ├── function-ddl.sql ├── pom.xml ├── src │ ├── main │ │ └── java │ │ │ └── com │ │ │ └── esri │ │ │ └── hadoop │ │ │ ├── hive │ │ │ ├── BinUtils.java │ │ │ ├── GeometryUtils.java │ │ │ ├── HiveGeometry.java │ │ │ ├── HiveGeometryOIHelper.java │ │ │ ├── LogUtils.java │ │ │ ├── ST_Aggr_ConvexHull.java │ │ │ ├── ST_Aggr_Intersection.java │ │ │ ├── ST_Aggr_Union.java │ │ │ ├── ST_Area.java │ │ │ ├── ST_AsBinary.java │ │ │ ├── ST_AsGeoJson.java │ │ │ ├── ST_AsJson.java │ │ │ ├── ST_AsShape.java │ │ │ ├── ST_AsText.java │ │ │ ├── ST_Bin.java │ │ │ ├── ST_BinEnvelope.java │ │ │ ├── ST_Boundary.java │ │ │ ├── ST_Buffer.java │ │ │ ├── ST_Centroid.java │ │ │ ├── ST_Contains.java │ │ │ ├── ST_ConvexHull.java │ │ │ ├── ST_CoordDim.java │ │ │ ├── ST_Crosses.java │ │ │ ├── ST_Difference.java │ │ │ ├── ST_Dimension.java │ │ │ ├── ST_Disjoint.java │ │ │ ├── ST_Distance.java │ │ │ ├── ST_EndPoint.java │ │ │ ├── ST_EnvIntersects.java │ │ │ ├── ST_Envelope.java │ │ │ ├── ST_Equals.java │ │ │ ├── ST_ExteriorRing.java │ │ │ ├── ST_GeodesicLengthWGS84.java │ │ │ ├── ST_GeomCollection.java │ │ │ ├── ST_GeomFromGeoJson.java │ │ │ ├── ST_GeomFromJson.java │ │ │ ├── ST_GeomFromShape.java │ │ │ ├── ST_GeomFromText.java │ │ │ ├── ST_GeomFromWKB.java │ │ │ ├── ST_Geometry.java │ │ │ ├── ST_GeometryAccessor.java │ │ │ ├── ST_GeometryN.java │ │ │ ├── ST_GeometryProcessing.java │ │ │ ├── ST_GeometryRelational.java │ │ │ ├── ST_GeometryType.java │ │ │ ├── ST_InteriorRingN.java │ │ │ ├── ST_Intersection.java │ │ │ ├── ST_Intersects.java │ │ │ ├── ST_Is3D.java │ │ │ ├── ST_IsClosed.java │ │ │ ├── ST_IsEmpty.java │ │ │ ├── ST_IsMeasured.java │ │ │ ├── ST_IsRing.java │ │ │ ├── ST_IsSimple.java │ │ │ ├── ST_Length.java │ │ │ ├── ST_LineFromWKB.java │ │ │ ├── ST_LineString.java │ │ │ ├── ST_M.java │ │ │ ├── ST_MLineFromWKB.java │ │ │ ├── ST_MPointFromWKB.java │ │ │ ├── ST_MPolyFromWKB.java │ │ │ ├── 
ST_MaxM.java │ │ │ ├── ST_MaxX.java │ │ │ ├── ST_MaxY.java │ │ │ ├── ST_MaxZ.java │ │ │ ├── ST_MinM.java │ │ │ ├── ST_MinX.java │ │ │ ├── ST_MinY.java │ │ │ ├── ST_MinZ.java │ │ │ ├── ST_MultiLineString.java │ │ │ ├── ST_MultiPoint.java │ │ │ ├── ST_MultiPolygon.java │ │ │ ├── ST_NumGeometries.java │ │ │ ├── ST_NumInteriorRing.java │ │ │ ├── ST_NumPoints.java │ │ │ ├── ST_Overlaps.java │ │ │ ├── ST_Point.java │ │ │ ├── ST_PointFromWKB.java │ │ │ ├── ST_PointN.java │ │ │ ├── ST_PointZ.java │ │ │ ├── ST_PolyFromWKB.java │ │ │ ├── ST_Polygon.java │ │ │ ├── ST_Relate.java │ │ │ ├── ST_SRID.java │ │ │ ├── ST_SetSRID.java │ │ │ ├── ST_StartPoint.java │ │ │ ├── ST_SymmetricDiff.java │ │ │ ├── ST_Touches.java │ │ │ ├── ST_Union.java │ │ │ ├── ST_Within.java │ │ │ ├── ST_X.java │ │ │ ├── ST_Y.java │ │ │ ├── ST_Z.java │ │ │ └── serde │ │ │ │ ├── BaseJsonSerDe.java │ │ │ │ ├── EsriJsonSerDe.java │ │ │ │ ├── GeoJsonSerDe.java │ │ │ │ └── JsonSerde.java │ │ │ └── shims │ │ │ └── HiveShims.java │ └── test │ │ └── java │ │ └── com │ │ └── esri │ │ └── hadoop │ │ └── hive │ │ ├── TestStAsShape.java │ │ ├── TestStCentroid.java │ │ ├── TestStGeomFromShape.java │ │ ├── TestStGeometryType.java │ │ ├── TestStLineString.java │ │ ├── TestStMinX.java │ │ ├── TestStMinY.java │ │ ├── TestStMultiPoint.java │ │ ├── TestStMultiPolygon.java │ │ ├── TestStPoint.java │ │ ├── TestStX.java │ │ ├── TestStY.java │ │ └── serde │ │ ├── JsonSerDeTestingBase.java │ │ ├── TestEsriJsonSerDe.java │ │ └── TestGeoJsonSerDe.java └── test │ ├── README.md │ ├── st-geom-aggr.ref │ ├── st-geom-aggr.sql │ ├── st-geom-aggr.tsv │ ├── st-geom-aggr.txt │ ├── st-geom-bins.ref │ ├── st-geom-bins.sql │ ├── st-geom-desc.ref │ ├── st-geom-desc.sql │ ├── st-geom-exact.ref │ ├── st-geom-exact.sql │ ├── st-geom-multi-call.ref │ ├── st-geom-multi-call.sql │ ├── st-geom-onerow.txt │ ├── st-geom-text.ref │ └── st-geom-text.sql ├── json ├── build.xml ├── pom.xml └── src │ ├── main │ └── java │ │ └── com │ │ └── esri │ │ └── json │ 
│ ├── EsriFeature.java │ │ ├── EsriFeatureClass.java │ │ ├── EsriField.java │ │ ├── EsriFieldType.java │ │ ├── EsriJsonFactory.java │ │ ├── deserializer │ │ ├── GeometryJsonDeserializer.java │ │ ├── GeometryTypeJsonDeserializer.java │ │ └── SpatialReferenceJsonDeserializer.java │ │ ├── hadoop │ │ ├── EnclosedBaseJsonRecordReader.java │ │ ├── EnclosedEsriJsonInputFormat.java │ │ ├── EnclosedEsriJsonRecordReader.java │ │ ├── EnclosedGeoJsonInputFormat.java │ │ ├── EnclosedGeoJsonRecordReader.java │ │ ├── EnclosedJsonInputFormat.java │ │ ├── EnclosedJsonRecordReader.java │ │ ├── UnenclosedBaseJsonRecordReader.java │ │ ├── UnenclosedEsriJsonInputFormat.java │ │ ├── UnenclosedEsriJsonRecordReader.java │ │ ├── UnenclosedGeoJsonInputFormat.java │ │ ├── UnenclosedGeoJsonRecordReader.java │ │ ├── UnenclosedJsonInputFormat.java │ │ └── UnenclosedJsonRecordReader.java │ │ └── serializer │ │ ├── GeometryJsonSerializer.java │ │ ├── GeometryTypeJsonSerializer.java │ │ └── SpatialReferenceJsonSerializer.java │ └── test │ ├── java │ └── com │ │ └── esri │ │ └── json │ │ └── hadoop │ │ ├── TestEnclosedEsriJsonRecordReader.java │ │ ├── TestEnclosedGeoJsonRecordReader.java │ │ ├── TestUnenclosedEsriJsonRecordMrv1.java │ │ ├── TestUnenclosedEsriJsonRecordReader.java │ │ └── TestUnenclosedGeoJsonRecordReader.java │ └── resources │ └── com │ └── esri │ └── json │ └── hadoop │ ├── sample-geojs-area.json │ ├── sample-study-area.json │ ├── unenclosed-geojs-escape.json │ ├── unenclosed-geojs-simple.json │ ├── unenclosed-geojs-type.json │ ├── unenclosed-json-attrs.json │ ├── unenclosed-json-chars.json │ ├── unenclosed-json-comma.json │ ├── unenclosed-json-esc-points.json │ ├── unenclosed-json-esc1.json │ ├── unenclosed-json-esc2.json │ ├── unenclosed-json-esc3.json │ ├── unenclosed-json-esc4.json │ ├── unenclosed-json-esc5.json │ ├── unenclosed-json-escape.json │ ├── unenclosed-json-geom-first.json │ ├── unenclosed-json-return.json │ └── unenclosed-json-simple.json ├── license.txt └── 
pom.xml /.gitignore: -------------------------------------------------------------------------------- 1 | build/ 2 | hive/build/ 3 | json/javadoc/ 4 | spatial-sdk-hadoop.jar 5 | hive/spatial-sdk-hive.jar 6 | json/spatial-sdk-json.jar 7 | hive/target 8 | json/target 9 | .idea 10 | *.iml 11 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: java 2 | jdk: 3 | - openjdk8 4 | - openjdk11 5 | - oraclejdk11 6 | notifications: 7 | email: false 8 | sudo: false 9 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | Esri welcomes contributions from anyone and everyone. Please see our [guidelines for contributing](https://github.com/esri/contributing). 2 | -------------------------------------------------------------------------------- /build.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | -------------------------------------------------------------------------------- /hive/build.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | -------------------------------------------------------------------------------- /hive/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 4.0.0 3 | 4 | 5 | com.esri.hadoop 6 | spatial-sdk-hadoop 7 | 2.2.1-SNAPSHOT 8 | ../ 9 | 10 | 11 | spatial-sdk-hive 12 | Spatial Framework for Hive and SparkSQL 13 | 14 | 15 | 16 | org.apache.hadoop 
17 | hadoop-client 18 | 19 | 20 | 21 | com.fasterxml.jackson.core 22 | jackson-core 23 | 24 | 25 | 26 | com.fasterxml.jackson.core 27 | jackson-databind 28 | 29 | 30 | 31 | com.esri.geometry 32 | esri-geometry-api 33 | 34 | 35 | 36 | org.apache.hive 37 | hive-exec 38 | 39 | 40 | 41 | org.apache.hive 42 | hive-serde 43 | 44 | 45 | 46 | junit 47 | junit 48 | test 49 | 50 | 51 | 52 | com.esri.hadoop 53 | spatial-sdk-json 54 | ${project.version} 55 | test 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | org.apache.maven.plugins 64 | maven-compiler-plugin 65 | 66 | 67 | org.apache.maven.plugins 68 | maven-surefire-plugin 69 | 70 | 71 | true 72 | org.apache.maven.plugins 73 | maven-javadoc-plugin 74 | 75 | 76 | 77 | 78 | 79 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/BinUtils.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import com.esri.core.geometry.Envelope; 4 | 5 | public class BinUtils { 6 | final long numCols; 7 | final double extentMin; 8 | final double extentMax; 9 | final double binSize; 10 | 11 | public BinUtils(double binSize) { 12 | this.binSize = binSize; 13 | 14 | // absolute max number of rows/columns we can have 15 | long maxBinsPerAxis = (long) Math.sqrt(Long.MAX_VALUE); 16 | 17 | // a smaller binSize gives us a smaller extent width and height that 18 | // can be addressed by a single 64 bit long 19 | double size = (binSize < 1) ? maxBinsPerAxis * binSize : maxBinsPerAxis; 20 | 21 | extentMax = size/2; 22 | extentMin = extentMax - size; 23 | numCols = (long)(Math.ceil(size / binSize)); 24 | } 25 | 26 | /** 27 | * Gets bin ID from a point. 
28 | * 29 | * @param x 30 | * @param y 31 | * @return 32 | */ 33 | public long getId(double x, double y) { 34 | double down = (extentMax - y) / binSize; 35 | double over = (x - extentMin) / binSize; 36 | 37 | return ((long)down * numCols) + (long)over; 38 | } 39 | 40 | /** 41 | * Gets the envelope for the bin ID. 42 | * 43 | * @param binId 44 | * @param envelope 45 | */ 46 | public void queryEnvelope(long binId, Envelope envelope) { 47 | long down = binId / numCols; 48 | long over = binId % numCols; 49 | 50 | double xmin = extentMin + (over * binSize); 51 | double xmax = xmin + binSize; 52 | double ymax = extentMax - (down * binSize); 53 | double ymin = ymax - binSize; 54 | 55 | envelope.setCoords(xmin, ymin, xmax, ymax); 56 | } 57 | 58 | /** 59 | * Gets the envelope for the bin that contains the x,y coords. 60 | * 61 | * @param x 62 | * @param y 63 | * @param envelope 64 | */ 65 | public void queryEnvelope(double x, double y, Envelope envelope) { 66 | double down = (extentMax - y) / binSize; 67 | double over = (x - extentMin) / binSize; 68 | 69 | double xmin = extentMin + (over * binSize); 70 | double xmax = xmin + binSize; 71 | double ymax = extentMax - (down * binSize); 72 | double ymin = ymax - binSize; 73 | 74 | envelope.setCoords(xmin, ymin, xmax, ymax); 75 | } 76 | } 77 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/HiveGeometry.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | public class HiveGeometry { 4 | } 5 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_Area.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import 
org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.io.BytesWritable; 7 | // DoubleWritable - must use hive-serde2; the other one produces struct {value:d.d} 8 | import org.apache.hadoop.hive.serde2.io.DoubleWritable; 9 | 10 | 11 | import com.esri.core.geometry.ogc.OGCGeometry; 12 | 13 | @Description(name = "ST_Area", 14 | value = "_FUNC_(ST_Polygon) - returns the area of polygon or multipolygon", 15 | extended = "Example:\n" 16 | + " SELECT _FUNC_(ST_Polygon(1,1, 1,4, 4,4, 4,1)) FROM src LIMIT 1; -- 9.0" 17 | ) 18 | //@HivePdkUnitTests( 19 | // cases = { 20 | // @HivePdkUnitTest( 21 | // query = "select ST_Area(ST_Polygon(1,1, 1,4, 4,4, 4,1)) from onerow", 22 | // result = "9.0" 23 | // ), 24 | // @HivePdkUnitTest( 25 | // query = "select ST_Area(ST_Polygon('polygon ((0 0, 8 0, 0 8, 0 0), (1 1, 1 5, 5 1, 1 1))')) from onerow", 26 | // result = "24.0" 27 | // ), 28 | // @HivePdkUnitTest( 29 | // query = "select ST_Area(ST_MultiPolygon(array(1,1, 1,2, 2,2, 2,1), array(3,3, 3,4, 4,4, 4,3))) from onerow", 30 | // result = "2.0" 31 | // ), 32 | // @HivePdkUnitTest( 33 | // query = "select ST_Area(null) from onerow", 34 | // result = "null" 35 | // ) 36 | // } 37 | //) 38 | 39 | public class ST_Area extends ST_GeometryAccessor { 40 | final DoubleWritable resultDouble = new DoubleWritable(); 41 | static final Log LOG = LogFactory.getLog(ST_Area.class.getName()); 42 | 43 | public DoubleWritable evaluate(BytesWritable geomref) { 44 | if (geomref == null || geomref.getLength() == 0) { 45 | LogUtils.Log_ArgumentsNull(LOG); 46 | return null; 47 | } 48 | 49 | OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref); 50 | if (ogcGeometry == null){ 51 | LogUtils.Log_ArgumentsNull(LOG); 52 | return null; 53 | } 54 | 55 | resultDouble.set(ogcGeometry.getEsriGeometry().calculateArea2D()); 56 | return resultDouble; 57 | } 58 | } 59 | -------------------------------------------------------------------------------- 
package com.esri.hadoop.hive;

import java.nio.ByteBuffer;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.io.BytesWritable;

import com.esri.core.geometry.ogc.OGCGeometry;

@Description(
    name = "ST_AsBinary",
    value = "_FUNC_(ST_Geometry) - return Well-Known Binary (WKB) representation of geometry\n",
    extended = "Example:\n" +
    " SELECT _FUNC_(ST_Point(1, 2)) FROM onerow; -- WKB representation of POINT (1 2)\n"
    )
public class ST_AsBinary extends ST_Geometry {

    static final Log LOG = LogFactory.getLog(ST_AsBinary.class.getName());

    /**
     * Serializes a geometry reference to its OGC Well-Known Binary form.
     * Returns null (after logging) on null/empty input, an undecodable
     * shape, or a serialization failure.
     */
    public BytesWritable evaluate(BytesWritable geomref) {
        // reject null/empty geometry references up front
        if (geomref == null || geomref.getLength() == 0) {
            LogUtils.Log_ArgumentsNull(LOG);
            return null;
        }

        OGCGeometry geom = GeometryUtils.geometryFromEsriShape(geomref);
        if (geom == null) {
            LogUtils.Log_ArgumentsNull(LOG);
            return null;
        }

        try {
            ByteBuffer wkbBuffer = geom.asBinary();
            return new BytesWritable(wkbBuffer.array());
        } catch (Exception e) {
            LOG.error(e.getMessage());
            return null;
        }
    }
}
com.esri.hadoop.hive; 2 | 3 | import org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.io.BytesWritable; 7 | import org.apache.hadoop.io.Text; 8 | 9 | 10 | import com.esri.core.geometry.ogc.OGCGeometry; 11 | 12 | @Description( 13 | name = "ST_AsGeoJSON", 14 | value = "_FUNC_(geometry) - return GeoJSON representation of geometry\n", 15 | extended = "Example:\n" + 16 | " SELECT _FUNC_(ST_Point(1.0, 2.0)) from onerow; -- {\"type\":\"Point\", \"coordinates\":[1.0, 2.0]}\n" + 17 | "Note : \n" + 18 | " ST_AsGeoJSON outputs the _geometry_ contents but not _crs_.\n" + 19 | " ST_AsGeoJSON requires geometry-api-java version 1.1 or later.\n" 20 | ) 21 | //@HivePdkUnitTests( 22 | // cases = { 23 | // @HivePdkUnitTest( 24 | // query = "select ST_AsGeoJSON(ST_point(1, 2))) from onerow", 25 | // result = "{\"type\":\"Point\", \"coordinates\":[1.0, 2.0]}" 26 | // ), 27 | // @HivePdkUnitTest( 28 | // query = "SELECT ST_AsGeoJSON(ST_MultiLineString(array(1, 1, 2, 2, 3, 3), array(7,7, 8,8, 9,9))) from onerow", 29 | // result = "{\"type\":\"MultiLineString\",\"coordinates\":[[[1.0,1.0],[2.0,2.0],[3.0,3.0]],[[7.0,7.0],[8.0,8.0],[9.0,9.0]]]}" 30 | // ), 31 | // @HivePdkUnitTest( 32 | // query = "SELECT ST_AsGeoJSON(ST_Polygon(1, 1, 1, 4, 4, 4, 4, 1)) from onerow", 33 | // result = "{\"type\":\"Polygon\",\"coordinates\":[[[1.0,1.0],[1.0,4.0],[4.0,4.0],[4.0,1.0],[1.0,1.0]]]}" 34 | // ) 35 | // } 36 | // ) 37 | 38 | public class ST_AsGeoJson extends ST_Geometry { 39 | final Text resultText = new Text(); 40 | static final Log LOG = LogFactory.getLog(ST_AsGeoJson.class.getName()); 41 | 42 | public Text evaluate(BytesWritable geomref) { 43 | if (geomref == null || geomref.getLength() == 0) { 44 | LogUtils.Log_ArgumentsNull(LOG); 45 | return null; 46 | } 47 | 48 | OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref); 49 | if (ogcGeometry == null) { 50 | 
LogUtils.Log_ArgumentsNull(LOG); 51 | return null; 52 | } 53 | 54 | try { 55 | String outJson = ogcGeometry.asGeoJson(); 56 | resultText.set(outJson); 57 | return resultText; 58 | } catch (Exception e) { 59 | LogUtils.Log_InternalError(LOG, "ST_AsGeoJSON: " + e); 60 | return null; 61 | } 62 | } 63 | 64 | } 65 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_AsJson.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.io.BytesWritable; 7 | import org.apache.hadoop.io.Text; 8 | 9 | 10 | import com.esri.core.geometry.Geometry; 11 | import com.esri.core.geometry.GeometryEngine; 12 | 13 | import com.esri.core.geometry.ogc.OGCGeometry; 14 | 15 | 16 | @Description( 17 | name = "ST_AsJSON", 18 | value = "_FUNC_(ST_Geometry) - return JSON representation of ST_Geometry\n", 19 | extended = "Example:\n" + 20 | " SELECT _FUNC_(ST_Point(1.0, 2.0)) from onerow; -- {\"x\":1.0,\"y\":2.0}\n" + 21 | " SELECT _FUNC_(ST_SetSRID(ST_Point(1, 1), 4326)) from onerow; -- {\"x\":1.0,\"y\":1.0,\"spatialReference\":{\"wkid\":4326}}") 22 | //@HivePdkUnitTests( 23 | // cases = { 24 | // @HivePdkUnitTest( 25 | // query = "select ST_AsJSON(ST_Point(1, 2)), ST_AsJSON(ST_SetSRID(ST_Point(1, 1), 4326)) from onerow", 26 | // result = "{\"x\":1.0,\"y\":2.0} {\"x\":1.0,\"y\":1.0,\"spatialReference\":{\"wkid\":4326}}" 27 | // ), 28 | // @HivePdkUnitTest( 29 | // query = "SELECT ST_AsJSON(ST_MultiLineString(array(1, 1, 2, 2, 3, 3), array(10, 10, 11, 11, 12, 12))) from onerow", 30 | // result = "{\"paths\":[[[1.0,1.0],[2.0,2.0],[3.0,3.0]],[[10.0,10.0],[11.0,11.0],[12.0,12.0]]]}" 31 | // ), 32 | // @HivePdkUnitTest( 33 | // query = "SELECT ST_AsJSON(ST_Polygon(1, 1, 1, 4, 4, 4, 4, 1)), 
ST_AsJSON(ST_Polygon(1, 1)) from onerow", 34 | // result = "{\"rings\":[[[1.0,1.0],[1.0,4.0],[4.0,4.0],[4.0,1.0],[1.0,1.0]]]} NULL" 35 | // ) 36 | // } 37 | // ) 38 | public class ST_AsJson extends ST_Geometry { 39 | static final Log LOG = LogFactory.getLog(ST_AsJson.class.getName()); 40 | 41 | public Text evaluate(BytesWritable geomref){ 42 | if (geomref == null || geomref.getLength() == 0){ 43 | LogUtils.Log_ArgumentsNull(LOG); 44 | return null; 45 | } 46 | 47 | OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref); 48 | if (ogcGeometry == null){ 49 | LogUtils.Log_ArgumentsNull(LOG); 50 | return null; 51 | } 52 | 53 | Geometry esriGeom = ogcGeometry.getEsriGeometry(); 54 | int wkid = GeometryUtils.getWKID(geomref); 55 | return new Text(GeometryEngine.geometryToJson(wkid, esriGeom)); 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_AsShape.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.io.BytesWritable; 7 | 8 | import com.esri.core.geometry.Geometry; 9 | import com.esri.core.geometry.GeometryEngine; 10 | import com.esri.core.geometry.ogc.OGCGeometry; 11 | 12 | @Description( 13 | name = "ST_AsShape", 14 | value = "_FUNC_(ST_Geometry) - return Esri shape representation of geometry\n", 15 | extended = "Example:\n" + 16 | " SELECT _FUNC_(ST_Point(1, 2)) FROM onerow; -- Esri shape representation of POINT (1 2)\n" 17 | ) 18 | public class ST_AsShape extends ST_Geometry { 19 | 20 | static final Log LOG = LogFactory.getLog(ST_AsShape.class.getName()); 21 | 22 | public BytesWritable evaluate(BytesWritable geomref) { 23 | if (geomref == null || geomref.getLength() == 0){ 24 | LogUtils.Log_ArgumentsNull(LOG); 25 | 
package com.esri.hadoop.hive;

import java.util.EnumSet;

import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;

import com.esri.core.geometry.ogc.OGCPoint;

@Description(
    name = "ST_Bin",
    value = "_FUNC_(binsize, point) - return bin ID for given point\n")
public class ST_Bin extends GenericUDF {

    // primitive categories accepted for the bin-size argument
    private static final EnumSet<PrimitiveCategory> NUMERIC_CATEGORIES = EnumSet.of(
            PrimitiveCategory.DECIMAL, PrimitiveCategory.DOUBLE, PrimitiveCategory.INT,
            PrimitiveCategory.LONG, PrimitiveCategory.SHORT, PrimitiveCategory.FLOAT);

    private transient HiveGeometryOIHelper geomHelper;
    private transient boolean binSizeIsConstant;
    private transient PrimitiveObjectInspector oiBinSize;
    private transient BinUtils bins; // lazily (re)built per row in evaluate

    /**
     * Validates the two arguments (numeric bin size, geometry) and declares
     * a long return type.
     */
    @Override
    public ObjectInspector initialize(ObjectInspector[] OIs)
            throws UDFArgumentException {

        if (OIs.length != 2) {
            throw new UDFArgumentException("Function takes exactly 2 arguments");
        }

        if (OIs[0].getCategory() != Category.PRIMITIVE) {
            throw new UDFArgumentException("Argument 0 must be a number - got: " + OIs[0].getCategory());
        }

        oiBinSize = (PrimitiveObjectInspector) OIs[0];
        if (!NUMERIC_CATEGORIES.contains(oiBinSize.getPrimitiveCategory())) {
            throw new UDFArgumentException("Argument 0 must be a number - got: " + oiBinSize.getPrimitiveCategory());
        }

        geomHelper = HiveGeometryOIHelper.create(OIs[1], 1);
        // when the bin size is a constant, one BinUtils can be reused across rows
        binSizeIsConstant = ObjectInspectorUtils.isConstantObjectInspector(OIs[0]);

        return PrimitiveObjectInspectorFactory.javaLongObjectInspector;
    }

    /**
     * Returns the bin ID for the row's point, or null when no point can be
     * extracted from the second argument.
     */
    @Override
    public Object evaluate(DeferredObject[] args) throws HiveException {
        double binSize = PrimitiveObjectInspectorUtils.getDouble(args[0].get(), oiBinSize);

        // rebuild the bin grid only when the size may vary per row
        if (!binSizeIsConstant || bins == null) {
            bins = new BinUtils(binSize);
        }

        OGCPoint point = geomHelper.getPoint(args);
        return (point == null) ? null : bins.getId(point.X(), point.Y());
    }

    @Override
    public String getDisplayString(String[] args) {
        assert (args.length == 2);
        return String.format("st_bin(%s,%s)", args[0], args[1]);
    }

}
com.esri.hadoop.hive; 2 | 3 | import org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.io.BytesWritable; 7 | 8 | 9 | import com.esri.core.geometry.ogc.OGCGeometry; 10 | import com.esri.core.geometry.ogc.OGCMultiLineString; 11 | 12 | @Description( 13 | name = "ST_Boundary", 14 | value = "_FUNC_(ST_Geometry) - boundary of the input ST_Geometry", 15 | extended = "Example:\n" 16 | + " SELECT _FUNC_(ST_LineString(0,1, 1,0))) FROM src LIMIT 1; -- MULTIPOINT((1 0),(0 1))\n" 17 | + " SELECT _FUNC_(ST_Polygon(1,1, 4,1, 1,4)) FROM src LIMIT 1; -- LINESTRING(1 1, 4 1, 1 4, 1 1)\n" 18 | ) 19 | //@HivePdkUnitTests( 20 | // cases = { 21 | // @HivePdkUnitTest( 22 | // query = "select ST_GeometryType(ST_Boundary(ST_Linestring('linestring (10 10, 20 20)'))) from onerow", 23 | // result = "ST_MULTIPOINT" 24 | // ), 25 | // @HivePdkUnitTest( 26 | // query = "select ST_Equals(ST_Boundary(ST_Linestring('linestring (10 10, 20 20)')), ST_GeomFromText('multipoint ((10 10), (20 20))')) from onerow", 27 | // result = "true" 28 | // ) 29 | // } 30 | // ) 31 | 32 | // The boundary of a point (or multipoint) is the empty set OGC 4.18, 6.1.5 33 | // The boundary of a closed curve is empty; non-closed curve, its 2 end points OGC 6.1.6.1 34 | // The boundary of a surface is the set of closed curves that form its limits OGC 4.21 35 | 36 | public class ST_Boundary extends ST_GeometryProcessing { 37 | static final Log LOG = LogFactory.getLog(ST_Boundary.class.getName()); 38 | 39 | public BytesWritable evaluate(BytesWritable geomref) { 40 | if (geomref == null || geomref.getLength() == 0) { 41 | LogUtils.Log_ArgumentsNull(LOG); 42 | return null; 43 | } 44 | 45 | OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref); 46 | if (ogcGeometry == null){ 47 | LogUtils.Log_ArgumentsNull(LOG); 48 | return null; 49 | } 50 | try { 51 | OGCGeometry boundGeom = 
ogcGeometry.boundary(); 52 | if (boundGeom.geometryType().equals("MultiLineString") && ((OGCMultiLineString)boundGeom).numGeometries() == 1) 53 | boundGeom = ((OGCMultiLineString)boundGeom).geometryN(0); // match ST_Boundary/SQL-RDBMS 54 | return GeometryUtils.geometryToEsriShapeBytesWritable(boundGeom); 55 | } catch (Exception e) { 56 | LogUtils.Log_InternalError(LOG, "ST_Boundary: " + e); 57 | return null; 58 | } 59 | } 60 | 61 | } 62 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_Buffer.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.hive.serde2.io.DoubleWritable; 7 | import org.apache.hadoop.io.BytesWritable; 8 | 9 | import com.esri.core.geometry.ogc.OGCGeometry; 10 | 11 | @Description( 12 | name = "ST_Buffer", 13 | value = "_FUNC_(ST_Geometry, distance) - ST_Geometry buffered by distance", 14 | extended = "Example:\n" 15 | + " SELECT _FUNC_(ST_Point(0, 0), 1) FROM src LIMIT 1; -- polygon approximating a unit circle\n" 16 | ) 17 | public class ST_Buffer extends ST_GeometryProcessing { 18 | 19 | static final Log LOG = LogFactory.getLog(ST_Buffer.class.getName()); 20 | 21 | public BytesWritable evaluate(BytesWritable geometryref1, DoubleWritable distance) 22 | { 23 | if (geometryref1 == null || geometryref1.getLength() == 0 || distance == null) { 24 | return null; 25 | } 26 | 27 | OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geometryref1); 28 | if (ogcGeometry == null){ 29 | LogUtils.Log_ArgumentsNull(LOG); 30 | return null; 31 | } 32 | 33 | OGCGeometry bufferedGeometry = ogcGeometry.buffer(distance.get()); 34 | // TODO persist type information (polygon vs multipolygon) 35 | return 
GeometryUtils.geometryToEsriShapeBytesWritable(bufferedGeometry); 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_Centroid.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.io.BytesWritable; 7 | 8 | import com.esri.core.geometry.Envelope; 9 | import com.esri.core.geometry.Point; 10 | import com.esri.core.geometry.SpatialReference; 11 | import com.esri.core.geometry.ogc.OGCGeometry; 12 | 13 | @Description( 14 | name = "ST_Centroid", 15 | value = "_FUNC_(geometry) - returns the centroid of the geometry", 16 | extended = "Example:\n" 17 | + " > SELECT _FUNC_(ST_GeomFromText('point (2 3)')); -- POINT(2 3)\n" 18 | + " > SELECT _FUNC_(ST_GeomFromText('multipoint ((0 0), (1 1), (1 -1), (6 0))')); -- POINT(2 0)\n" 19 | + " > SELECT _FUNC_(ST_GeomFromText('linestring ((0 0, 6 0))')); -- POINT(3 0)\n" 20 | + " > SELECT _FUNC_(ST_GeomFromText('linestring ((0 0, 2 4, 6 8))')); -- POINT(3 4)\n" 21 | + " > SELECT _FUNC_(ST_GeomFromText('polygon ((0 0, 0 8, 8 8, 8 0, 0 0))')); -- POINT(4 4)\n" 22 | + " > SELECT _FUNC_(ST_GeomFromText('polygon ((1 1, 5 1, 3 4))')); -- POINT(3 2)\n" 23 | ) 24 | 25 | public class ST_Centroid extends ST_GeometryAccessor { 26 | static final Log LOG = LogFactory.getLog(ST_PointN.class.getName()); 27 | 28 | public BytesWritable evaluate(BytesWritable geomref) { 29 | if (geomref == null || geomref.getLength() == 0) { 30 | LogUtils.Log_ArgumentsNull(LOG); 31 | return null; 32 | } 33 | 34 | OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref); 35 | if (ogcGeometry == null){ 36 | LogUtils.Log_ArgumentsNull(LOG); 37 | return null; 38 | } 39 | 40 | return 
GeometryUtils.geometryToEsriShapeBytesWritable(ogcGeometry.centroid()); 41 | } 42 | 43 | } 44 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_Contains.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.hadoop.hive.ql.exec.Description; 4 | import org.apache.hadoop.hive.ql.udf.UDFType; 5 | 6 | import com.esri.core.geometry.OperatorContains; 7 | import com.esri.core.geometry.OperatorSimpleRelation; 8 | 9 | @UDFType(deterministic = true) 10 | @Description( 11 | name = "ST_Contains", 12 | value = "_FUNC_(geometry1, geometry2) - return true if geometry1 contains geometry2", 13 | extended = "Example:\n" + 14 | "SELECT _FUNC_(st_polygon(1,1, 1,4, 4,4, 4,1), st_point(2, 3) from src LIMIT 1; -- return true\n" + 15 | "SELECT _FUNC_(st_polygon(1,1, 1,4, 4,4, 4,1), st_point(8, 8) from src LIMIT 1; -- return false" 16 | ) 17 | public class ST_Contains extends ST_GeometryRelational { 18 | 19 | @Override 20 | protected OperatorSimpleRelation getRelationOperator() { 21 | return OperatorContains.local(); 22 | } 23 | 24 | @Override 25 | public String getDisplayString(String[] args) { 26 | return String.format("returns true if %s contains %s", args[0], args[1]); 27 | } 28 | } 29 | 30 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_CoordDim.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.io.BytesWritable; 7 | import org.apache.hadoop.io.IntWritable; 8 | 9 | 10 | import com.esri.core.geometry.ogc.OGCGeometry; 11 | 12 | @Description( 13 | name = "ST_CoordDim", 14 | value = 
"_FUNC_(geometry) - return count of coordinate components", 15 | extended = "Example:\n" 16 | + " > SELECT _FUNC_(ST_Point(1.5, 2.5)) FROM src LIMIT 1; -- 2\n" 17 | + " > SELECT _FUNC_(ST_PointZ(1.5,2.5, 3) FROM src LIMIT 1; -- 3\n" 18 | + " > SELECT _FUNC_(ST_Point(1.5, 2.5, 3., 4.)) FROM src LIMIT 1; -- 4\n" 19 | ) 20 | //@HivePdkUnitTests( 21 | // cases = { 22 | // @HivePdkUnitTest( 23 | // query = "select ST_CoordDim(ST_Point(0., 3.)) from onerow", 24 | // result = "2" 25 | // ), 26 | // @HivePdkUnitTest( 27 | // query = "select ST_CoordDim(ST_PointZ(0., 3., 1)) from onerow", 28 | // result = "3" 29 | // ), 30 | // @HivePdkUnitTest( 31 | // query = "select ST_CoordDim(ST_Point(0., 3., 1., 2.)) from onerow", 32 | // result = "4" 33 | // ), 34 | // @HivePdkUnitTest( 35 | // query = "select ST_CoordDim(null) from onerow", 36 | // result = "null" 37 | // ) 38 | // } 39 | //) 40 | 41 | public class ST_CoordDim extends ST_GeometryAccessor { 42 | final IntWritable resultInt = new IntWritable(); 43 | static final Log LOG = LogFactory.getLog(ST_Is3D.class.getName()); 44 | 45 | public IntWritable evaluate(BytesWritable geomref) { 46 | if (geomref == null || geomref.getLength() == 0) { 47 | LogUtils.Log_ArgumentsNull(LOG); 48 | return null; 49 | } 50 | 51 | OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref); 52 | if (ogcGeometry == null){ 53 | return null; 54 | } 55 | 56 | resultInt.set(ogcGeometry.coordinateDimension()); 57 | return resultInt; 58 | } 59 | 60 | } 61 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_Crosses.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.hadoop.hive.ql.exec.Description; 4 | 5 | import com.esri.core.geometry.OperatorCrosses; 6 | import com.esri.core.geometry.OperatorSimpleRelation; 7 | 8 | @Description( 9 | name = "ST_Crosses", 10 | value = 
"_FUNC_(geometry1, geometry2) - return true if geometry1 crosses geometry2", 11 | extended = "Example:\n" + 12 | "SELECT _FUNC_(st_linestring(0,0, 1,1), st_linestring(1,0, 0,1)) from src LIMIT 1; -- return true\n" + 13 | "SELECT _FUNC_(st_linestring(2,0, 2,3), st_polygon(1,1, 1,4, 4,4, 4,1)) from src LIMIT 1; -- return true\n" + 14 | "SELECT _FUNC_(st_linestring(0,2, 0,1), ST_linestring(2,0, 1,0)) from src LIMIT 1; -- return false" 15 | ) 16 | public class ST_Crosses extends ST_GeometryRelational { 17 | 18 | @Override 19 | protected OperatorSimpleRelation getRelationOperator() { 20 | return OperatorCrosses.local(); 21 | } 22 | 23 | @Override 24 | public String getDisplayString(String[] args) { 25 | return String.format("returns true if %s crosses %s", args[0], args[1]); 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_Difference.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.io.BytesWritable; 7 | 8 | 9 | import com.esri.core.geometry.ogc.OGCGeometry; 10 | 11 | @Description( 12 | name = "ST_Difference", 13 | value = "_FUNC_(ST_Geometry1, ST_Geometry2) - return the difference of ST_Geometry1 - ST_Geometry2", 14 | extended = "Examples:\n" 15 | + " > SELECT ST_AsText(ST_Difference(ST_MultiPoint(1, 1, 1.5, 1.5, 2, 2), ST_Point(1.5, 1.5))) FROM onerow; \n" 16 | + " MULTIPOINT (1 1, 2 2)\n" 17 | + " > SELECT ST_AsText(ST_Difference(ST_Polygon(0, 0, 0, 10, 10, 10, 10, 0), ST_Polygon(0, 0, 0, 5, 5, 5, 5, 0))) from onerow;\n" 18 | + " MULTIPOLYGON (((10 0, 10 10, 0 10, 0 5, 5 5, 5 0, 10 0)))\n\n" 19 | ) 20 | //@HivePdkUnitTests( 21 | // cases = { 22 | // @HivePdkUnitTest( 23 | // query = "SELECT 
ST_AsText(ST_Difference(ST_MultiPoint(1, 1, 1.5, 1.5, 2, 2), ST_Point(1.5, 1.5))) FROM onerow", 24 | // result = "MULTIPOINT (1 1, 2 2)" 25 | // ), 26 | // @HivePdkUnitTest( 27 | // query = "SELECT ST_AsText(ST_Difference(ST_Polygon(0, 0, 0, 10, 10, 10, 10, 0), ST_Polygon(0, 0, 0, 5, 5, 5, 5, 0))) from onerow", 28 | // result = "MULTIPOLYGON (((10 0, 10 10, 0 10, 0 5, 5 5, 5 0, 10 0)))" 29 | // ) 30 | // } 31 | // ) 32 | public class ST_Difference extends ST_GeometryProcessing { 33 | 34 | static final Log LOG = LogFactory.getLog(ST_Difference.class.getName()); 35 | 36 | public BytesWritable evaluate(BytesWritable geometryref1, BytesWritable geometryref2) 37 | { 38 | if (geometryref1 == null || geometryref2 == null || 39 | geometryref1.getLength() == 0 || geometryref2.getLength() == 0) { 40 | LogUtils.Log_ArgumentsNull(LOG); 41 | return null; 42 | } 43 | 44 | if (!GeometryUtils.compareSpatialReferences(geometryref1, geometryref2)) { 45 | LogUtils.Log_SRIDMismatch(LOG, geometryref1, geometryref2); 46 | return null; 47 | } 48 | 49 | OGCGeometry ogcGeom1 = GeometryUtils.geometryFromEsriShape(geometryref1); 50 | OGCGeometry ogcGeom2 = GeometryUtils.geometryFromEsriShape(geometryref2); 51 | if (ogcGeom1 == null || ogcGeom2 == null){ 52 | LogUtils.Log_ArgumentsNull(LOG); 53 | return null; 54 | } 55 | 56 | OGCGeometry diffGeometry = ogcGeom1.difference(ogcGeom2); 57 | 58 | // we have to infer the type of the differenced geometry because we don't know 59 | // if it's going to end up as a single or multi-part geometry 60 | // OGCType inferredType = GeometryUtils.getInferredOGCType(diffGeometry.getEsriGeometry()); 61 | 62 | return GeometryUtils.geometryToEsriShapeBytesWritable(diffGeometry); 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_Dimension.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import 
org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.io.BytesWritable; 7 | import org.apache.hadoop.io.IntWritable; 8 | 9 | 10 | import com.esri.core.geometry.ogc.OGCGeometry; 11 | 12 | @Description( 13 | name = "ST_Dimension", 14 | value = "_FUNC_(geometry) - return spatial dimension of geometry", 15 | extended = "Example:\n" 16 | + " > SELECT _FUNC_(ST_Point(1.5, 2.5)) FROM src LIMIT 1; -- 0\n" 17 | + " > SELECT _FUNC_(ST_LineString(1.5,2.5, 3.0,2.2)) FROM src LIMIT 1; -- 1\n" 18 | + " > SELECT _FUNC_(ST_Polygon(2,0, 2,3, 3,0)) FROM src LIMIT 1; -- 2\n" 19 | ) 20 | //@HivePdkUnitTests( 21 | // cases = { 22 | // @HivePdkUnitTest( 23 | // query = "select ST_Dimension(ST_Point(0,0)) from onerow", 24 | // result = "0" 25 | // ), 26 | // @HivePdkUnitTest( 27 | // query = "select ST_Dimension(ST_LineString(1.5,2.5, 3.0,2.2)) from onerow", 28 | // result = "1" 29 | // ), 30 | // @HivePdkUnitTest( 31 | // query = "select ST_Dimension(ST_Polygon(1.5,2.5, 3.0,2.2, 2.2,1.1)) from onerow", 32 | // result = "2" 33 | // ), 34 | // @HivePdkUnitTest( 35 | // query = "select ST_Dimension(ST_MultiPoint(0,0, 2,2)) from onerow", 36 | // result = "0" 37 | // ), 38 | // @HivePdkUnitTest( 39 | // query = "select ST_Dimension(ST_MultiLineString(array(1, 1, 2, 2), array(10, 10, 20, 20))) from onerow", 40 | // result = "1" 41 | // ), 42 | // @HivePdkUnitTest( 43 | // query = "select ST_Dimension(ST_MultiPolygon(array(1,1, 1,2, 2,2, 2,1), array(3,3, 3,4, 4,4, 4,3))) from onerow", 44 | // result = "2" 45 | // ), 46 | // @HivePdkUnitTest( 47 | // query = "select ST_Dimension(null) from onerow", 48 | // result = "null" 49 | // ) 50 | // } 51 | //) 52 | 53 | public class ST_Dimension extends ST_GeometryAccessor { 54 | final IntWritable resultInt = new IntWritable(); 55 | static final Log LOG = LogFactory.getLog(ST_Dimension.class.getName()); 56 | 57 | public IntWritable 
evaluate(BytesWritable geomref) {
		if (geomref == null || geomref.getLength() == 0) {
			LogUtils.Log_ArgumentsNull(LOG);
			return null;
		}

		OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
		if (ogcGeometry == null) {
			LogUtils.Log_ArgumentsNull(LOG);
			return null;
		}

		resultInt.set(ogcGeometry.dimension());
		return resultInt;
	}

}
-------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_Disjoint.java: --------------------------------------------------------------------------------
package com.esri.hadoop.hive;

import org.apache.hadoop.hive.ql.exec.Description;

import com.esri.core.geometry.OperatorDisjoint;
import com.esri.core.geometry.OperatorSimpleRelation;

@Description(
	name = "ST_Disjoint",
	// Fixed description: this UDF tests disjointness (OperatorDisjoint; see
	// the examples), not intersection.
	value = "_FUNC_(ST_Geometry1, ST_Geometry2) - return true if ST_Geometry1 and ST_Geometry2 are disjoint",
	extended = "Example:\n" +
	"SELECT _FUNC_(ST_LineString(0,0, 0,1), ST_LineString(1,1, 1,0)) from src LIMIT 1;  -- return true\n" +
	"SELECT _FUNC_(ST_LineString(0,0, 1,1), ST_LineString(1,0, 0,1)) from src LIMIT 1;  -- return false\n"
	)

public class ST_Disjoint extends ST_GeometryRelational {

	@Override
	protected OperatorSimpleRelation getRelationOperator() {
		return OperatorDisjoint.local();
	}

	@Override
	public String getDisplayString(String[] args) {
		return String.format("returns true if %s and %s are disjoint", args[0], args[1]);
	}
}
-------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_Distance.java: --------------------------------------------------------------------------------
package com.esri.hadoop.hive;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import
org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.io.BytesWritable; 7 | // DoubleWritable - must use hive-serde2; the other one produces struct {value:d.d} 8 | import org.apache.hadoop.hive.serde2.io.DoubleWritable; 9 | 10 | 11 | import com.esri.core.geometry.ogc.OGCGeometry; 12 | 13 | @Description(name = "ST_Distance", 14 | value = "_FUNC_(ST_Geometry1, ST_Geometry2) - returns the distance between 2 ST_Geometry objects", 15 | extended = "Example:\n" 16 | + " SELECT _FUNC_(ST_Point(0.0,0.0), ST_Point(3.0,4.0)) FROM src LIMIT 1; -- 5.0" 17 | ) 18 | //@HivePdkUnitTests( 19 | // cases = { 20 | // @HivePdkUnitTest( 21 | // query = "select ST_Distance(ST_Point(0.0,0.0), ST_Point(3.0,4.0)) from onerow", 22 | // result = "5.0" 23 | // ), 24 | // @HivePdkUnitTest( 25 | // query = "select ST_Distance(ST_LineString(0,0, 1,1), ST_LineString(2,1, 3,0)) from onerow", 26 | // result = "11" 27 | // ), 28 | // @HivePdkUnitTest( 29 | // query = "select ST_Distance(null) from onerow", 30 | // result = "null" 31 | // ) 32 | // } 33 | //) 34 | 35 | public class ST_Distance extends ST_GeometryAccessor { 36 | final DoubleWritable resultDouble = new DoubleWritable(); 37 | static final Log LOG = LogFactory.getLog(ST_Distance.class.getName()); 38 | 39 | public DoubleWritable evaluate(BytesWritable geometryref1, BytesWritable geometryref2) { 40 | if (geometryref1 == null || geometryref2 == null || 41 | geometryref1.getLength() == 0 || geometryref2.getLength() == 0) { 42 | LogUtils.Log_ArgumentsNull(LOG); 43 | return null; 44 | } 45 | if (!GeometryUtils.compareSpatialReferences(geometryref1, geometryref2)) { 46 | LogUtils.Log_SRIDMismatch(LOG, geometryref1, geometryref2); 47 | return null; 48 | } 49 | 50 | OGCGeometry ogcGeom1 = GeometryUtils.geometryFromEsriShape(geometryref1); 51 | OGCGeometry ogcGeom2 = GeometryUtils.geometryFromEsriShape(geometryref2); 52 | if (ogcGeom1 == null || ogcGeom2 == null){ 53 | LogUtils.Log_ArgumentsNull(LOG); 54 | return null; 55 | } 56 
| 57 | try { 58 | resultDouble.set(ogcGeom1.distance(ogcGeom2)); 59 | return resultDouble; 60 | } catch (Exception e) { 61 | LogUtils.Log_InternalError(LOG, "ST_Distance: " + e); 62 | return null; 63 | } 64 | 65 | } 66 | } 67 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_EndPoint.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.io.BytesWritable; 7 | 8 | import com.esri.core.geometry.MultiPath; 9 | import com.esri.core.geometry.SpatialReference; 10 | 11 | import com.esri.core.geometry.ogc.OGCGeometry; 12 | 13 | @Description( 14 | name = "ST_EndPoint", 15 | value = "_FUNC_(geometry) - returns the last point of an ST_Linestring", 16 | extended = "Example:\n" 17 | + " > SELECT _FUNC_(ST_LineString(1.5,2.5, 3.0,2.2)) FROM src LIMIT 1; -- POINT(3.0 2.0)\n" 18 | ) 19 | 20 | public class ST_EndPoint extends ST_GeometryAccessor { 21 | static final Log LOG = LogFactory.getLog(ST_EndPoint.class.getName()); 22 | 23 | /** 24 | * Return the last point of the ST_Linestring. 
25 | * @param geomref hive geometry bytes 26 | * @return byte-reference of the last ST_Point 27 | */ 28 | public BytesWritable evaluate(BytesWritable geomref) { 29 | if (geomref == null || geomref.getLength() == 0){ 30 | LogUtils.Log_ArgumentsNull(LOG); 31 | return null; 32 | } 33 | 34 | OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref); 35 | if (ogcGeometry == null){ 36 | LogUtils.Log_ArgumentsNull(LOG); 37 | return null; 38 | } 39 | 40 | if (GeometryUtils.getType(geomref) == GeometryUtils.OGCType.ST_LINESTRING) { 41 | MultiPath lines = (MultiPath)(ogcGeometry.getEsriGeometry()); 42 | int wkid = GeometryUtils.getWKID(geomref); 43 | SpatialReference spatialReference = null; 44 | if (wkid != GeometryUtils.WKID_UNKNOWN) { 45 | spatialReference = SpatialReference.create(wkid); 46 | } 47 | return GeometryUtils.geometryToEsriShapeBytesWritable(OGCGeometry.createFromEsriGeometry(lines.getPoint(lines.getPointCount()-1), 48 | spatialReference)); 49 | } else { 50 | LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_LINESTRING, GeometryUtils.getType(geomref)); 51 | return null; 52 | } 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_EnvIntersects.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.io.BooleanWritable; 7 | import org.apache.hadoop.io.BytesWritable; 8 | 9 | 10 | import com.esri.core.geometry.Envelope; 11 | import com.esri.core.geometry.Geometry; 12 | 13 | import com.esri.core.geometry.ogc.OGCGeometry; 14 | 15 | @Description( 16 | name = "ST_EnvIntersects", 17 | value = "_FUNC_(ST_Geometry1, ST_Geometry2) - return true if the envelopes of ST_Geometry1 and ST_Geometry2 intersect", 18 | extended = 
"Example:\n" + 19 | "SELECT _FUNC_(ST_LineString(0,0, 1,1), ST_LineString(1,3, 2,2)) from src LIMIT 1; -- return false\n" + 20 | "SELECT _FUNC_(ST_LineString(0,0, 2,2), ST_LineString(1,0, 3,2)) from src LIMIT 1; -- return true\n" 21 | ) 22 | //@HivePdkUnitTests( 23 | // cases = { 24 | // @HivePdkUnitTest( 25 | // query = "select ST_EnvIntersects(ST_LineString(0,0, 1,1), ST_LineString(1,3, 2,2)) from onerow", 26 | // result = "false" 27 | // ), 28 | // @HivePdkUnitTest( 29 | // query = "select ST_EnvIntersects(ST_LineString(0,0, 2,2), ST_LineString(1,0, 3,2)) from onerow", 30 | // result = "true" 31 | // ), 32 | // @HivePdkUnitTest( 33 | // query = "select ST_EnvIntersects(null, ST_LineString(0,0, 1,1)) from onerow", 34 | // result = "null" 35 | // ) 36 | // } 37 | //) 38 | 39 | public class ST_EnvIntersects extends ST_Geometry { 40 | 41 | final BooleanWritable resultBoolean = new BooleanWritable(); 42 | static final Log LOG = LogFactory.getLog(ST_EnvIntersects.class.getName()); 43 | 44 | public BooleanWritable evaluate(BytesWritable geometryref1, BytesWritable geometryref2) 45 | { 46 | if (geometryref1 == null || geometryref2 == null || 47 | geometryref1.getLength() == 0 || geometryref2.getLength() == 0) { 48 | LogUtils.Log_ArgumentsNull(LOG); 49 | return null; 50 | } 51 | if (!GeometryUtils.compareSpatialReferences(geometryref1, geometryref2)) { 52 | LogUtils.Log_SRIDMismatch(LOG, geometryref1, geometryref2); 53 | return null; 54 | } 55 | 56 | OGCGeometry ogcGeom1 = GeometryUtils.geometryFromEsriShape(geometryref1); 57 | OGCGeometry ogcGeom2 = GeometryUtils.geometryFromEsriShape(geometryref2); 58 | if (ogcGeom1 == null || ogcGeom2 == null){ 59 | LogUtils.Log_ArgumentsNull(LOG); 60 | return null; 61 | } 62 | 63 | Geometry geometry1 = ogcGeom1.getEsriGeometry(); 64 | Geometry geometry2 = ogcGeom2.getEsriGeometry(); 65 | Envelope env1 = new Envelope(), env2 = new Envelope(); 66 | geometry1.queryEnvelope(env1); 67 | geometry2.queryEnvelope(env2); 68 | 69 | 
resultBoolean.set(env1.isIntersecting(env2)); 70 | return resultBoolean; 71 | } 72 | 73 | } 74 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_Envelope.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.io.BytesWritable; 7 | 8 | 9 | import com.esri.core.geometry.Envelope; 10 | import com.esri.core.geometry.SpatialReference; 11 | import com.esri.core.geometry.ogc.OGCGeometry; 12 | 13 | @Description( 14 | name = "ST_Envelope", 15 | value = "_FUNC_(ST_Geometry) - the envelope of the ST_Geometry", 16 | extended = "Example:\n" + 17 | "SELECT _FUNC_(ST_LineString(0,0, 2,2)) from src LIMIT 1; -- POLYGON ((0 0, 2 0, 2 2, 0 2, 0 0))\n" + 18 | "SELECT _FUNC_(ST_Polygon(2,0, 2,3, 3,0)) from src LIMIT 1; -- POLYGON ((2 0, 3 0, 3 3, 2 3, 2 0))\n" + 19 | "OGC Compliance Notes : \n" + 20 | " In the case of a point or a vertical or horizontal line," + 21 | " ST_Envelope may either apply a tolerance or return an empty envelope." 
22 | ) 23 | //@HivePdkUnitTests( 24 | // cases = { 25 | // @HivePdkUnitTest( 26 | // query = "select ST_EnvIntersects(ST_LineString(0,0, 1,1), ST_LineString(1,3, 2,2)) from onerow", 27 | // result = "false" 28 | // ), 29 | // @HivePdkUnitTest( 30 | // query = "select ST_EnvIntersects(ST_LineString(0,0, 2,2), ST_LineString(1,0, 3,2)) from onerow", 31 | // result = "true" 32 | // ), 33 | // @HivePdkUnitTest( 34 | // query = "select ST_EnvIntersects(null, ST_LineString(0,0, 1,1)) from onerow", 35 | // result = "null" 36 | // ) 37 | // } 38 | //) 39 | 40 | public class ST_Envelope extends ST_GeometryProcessing { 41 | static final Log LOG = LogFactory.getLog(ST_Envelope.class.getName()); 42 | 43 | public BytesWritable evaluate(BytesWritable geometryref) 44 | { 45 | if (geometryref == null || geometryref.getLength() == 0) { 46 | LogUtils.Log_ArgumentsNull(LOG); 47 | return null; 48 | } 49 | 50 | OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geometryref); 51 | if (ogcGeometry == null){ 52 | LogUtils.Log_ArgumentsNull(LOG); 53 | return null; 54 | } 55 | 56 | int wkid = GeometryUtils.getWKID(geometryref); 57 | SpatialReference spatialReference = null; 58 | if (wkid != GeometryUtils.WKID_UNKNOWN) { 59 | spatialReference = SpatialReference.create(wkid); 60 | } 61 | Envelope envBound = new Envelope(); 62 | ogcGeometry.getEsriGeometry().queryEnvelope(envBound); 63 | return GeometryUtils.geometryToEsriShapeBytesWritable(OGCGeometry.createFromEsriGeometry(envBound, 64 | spatialReference)); 65 | } 66 | 67 | } 68 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_Equals.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.hadoop.hive.ql.exec.Description; 4 | 5 | import com.esri.core.geometry.OperatorEquals; 6 | import com.esri.core.geometry.OperatorSimpleRelation; 7 | 8 | @Description( 9 | name = 
"ST_Equals", 10 | value = "_FUNC_(geometry1, geometry2) - return true if geometry1 equals geometry2", 11 | extended = "Example:\n" + 12 | "SELECT _FUNC_(st_linestring(0,0, 1,1), st_linestring(1,1, 0,0)) from src LIMIT 1; -- return true\n" + 13 | "SELECT _FUNC_(st_linestring(0,0, 1,1), st_linestring(1,0, 0,1)) from src LIMIT 1; -- return false\n" 14 | ) 15 | public class ST_Equals extends ST_GeometryRelational { 16 | 17 | @Override 18 | protected OperatorSimpleRelation getRelationOperator() { 19 | return OperatorEquals.local(); 20 | } 21 | 22 | @Override 23 | public String getDisplayString(String[] args) { 24 | return String.format("returns true if %s equals %s", args[0], args[1]); 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_GeomCollection.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | import org.apache.commons.logging.Log; 3 | import org.apache.commons.logging.LogFactory; 4 | import org.apache.hadoop.hive.ql.exec.Description; 5 | import org.apache.hadoop.hive.ql.exec.UDFArgumentException; 6 | import org.apache.hadoop.io.BytesWritable; 7 | import org.apache.hadoop.io.Text; 8 | 9 | 10 | import com.esri.core.geometry.Geometry; 11 | import com.esri.core.geometry.GeometryEngine; 12 | import com.esri.core.geometry.SpatialReference; 13 | import com.esri.core.geometry.ogc.OGCGeometry; 14 | 15 | @Description( 16 | name = "ST_GeomCollection", 17 | value = "_FUNC_(wkt) - construct a multi-part ST_Geometry from OGC well-known text", 18 | extended = "Example:\n" + 19 | " > SELECT _FUNC_('multipoint ((1 0), (2 3))') FROM src LIMIT 1; -- constructs ST_MultiPoint\n" + 20 | "OGC Compliance Notes : \n" + 21 | " ST_GeomCollection on Hive does not support collections - only multi-part geometries.\n" + 22 | "ST_GeomCollection('POINT(1 1), LINESTRING(2 0,3 0)') -- not supported\n" 23 | ) 24 | 25 | 
//@HivePdkUnitTests( 26 | // cases = { 27 | // @HivePdkUnitTest( 28 | // query = "select ST_Equals(ST_GeomCollection('MULTIPOINT ((10 40), (40 30))'), ST_GeomFromText('MULTIPOINT ((10 40), (40 30))')) from onerow", 29 | // result = "true" 30 | // ), 31 | // @HivePdkUnitTest( 32 | // query = "select ST_Equals(ST_GeomCollection('multilinestring ((2 4, 10 10), (20 20, 7 8))'), ST_GeomFromText('multilinestring ((2 4, 10 10), (20 20, 7 8))')) from onerow", 33 | // result = "true" 34 | // ), 35 | // @HivePdkUnitTest( 36 | // query = "select ST_Equals(ST_GeomCollection('multipolygon (((3 3, 4 6, 5 3, 3 3)),((8 24, 9 25, 1 28, 8 24)))'), ST_GeomFromText('multipolygon (((3 3, 4 6, 5 3, 3 3)),((8 24, 9 25, 1 28, 8 24)))')) from onerow", 37 | // result = "true" 38 | // ) 39 | // } 40 | // ) 41 | 42 | public class ST_GeomCollection extends ST_Geometry { 43 | 44 | static final Log LOG = LogFactory.getLog(ST_GeomCollection.class.getName()); 45 | 46 | public BytesWritable evaluate(Text wkt) throws UDFArgumentException { 47 | return evaluate(wkt, 0); 48 | } 49 | 50 | public BytesWritable evaluate(Text wkwrap, int wkid) throws UDFArgumentException { 51 | 52 | String wkt = wkwrap.toString(); 53 | 54 | try { 55 | Geometry geomObj = GeometryEngine.geometryFromWkt(wkt, 56 | 0, 57 | Geometry.Type.Unknown); 58 | SpatialReference spatialReference = null; // Idea: OGCGeometry.setSpatialReference after .fromText 59 | if (wkid != GeometryUtils.WKID_UNKNOWN) { 60 | spatialReference = SpatialReference.create(wkid); 61 | } 62 | OGCGeometry ogcObj = OGCGeometry.createFromEsriGeometry(geomObj, spatialReference); 63 | return GeometryUtils.geometryToEsriShapeBytesWritable(ogcObj); 64 | } catch (Exception e) { // IllegalArgumentException, GeometryException 65 | LogUtils.Log_InvalidText(LOG, wkt); 66 | return null; 67 | } 68 | } 69 | 70 | } 71 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_GeomFromShape.java: 
-------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.hive.ql.exec.UDFArgumentException; 7 | import org.apache.hadoop.io.BytesWritable; 8 | 9 | import com.esri.core.geometry.Geometry; 10 | import com.esri.core.geometry.GeometryEngine; 11 | import com.esri.core.geometry.SpatialReference; 12 | import com.esri.hadoop.hive.GeometryUtils.OGCType; 13 | 14 | @Description( 15 | name = "ST_GeomFromShape", 16 | value = "_FUNC_(shape) - construct ST_Geometry from Esri shape representation of geometry\n", 17 | extended = "Example:\n" + 18 | " SELECT _FUNC_(ST_AsShape(ST_Point(1, 2))); -- constructs ST_Point\n" 19 | ) 20 | public class ST_GeomFromShape extends ST_Geometry { 21 | 22 | static final Log LOG = LogFactory.getLog(ST_GeomFromShape.class.getName()); 23 | 24 | public BytesWritable evaluate(BytesWritable shape) throws UDFArgumentException { 25 | return evaluate(shape, 0); 26 | } 27 | 28 | public BytesWritable evaluate(BytesWritable shape, int wkid) throws UDFArgumentException { 29 | try { 30 | Geometry geometry = GeometryEngine.geometryFromEsriShape(shape.getBytes(), Geometry.Type.Unknown); 31 | switch (geometry.getType()) 32 | { 33 | case Point: 34 | return GeometryUtils.geometryToEsriShapeBytesWritable(geometry, wkid, OGCType.ST_POINT); 35 | 36 | case MultiPoint: 37 | return GeometryUtils.geometryToEsriShapeBytesWritable(geometry, wkid, OGCType.ST_MULTIPOINT); 38 | 39 | case Line: 40 | return GeometryUtils.geometryToEsriShapeBytesWritable(geometry, wkid, OGCType.ST_LINESTRING); 41 | 42 | case Polyline: 43 | return GeometryUtils.geometryToEsriShapeBytesWritable(geometry, wkid, OGCType.ST_MULTILINESTRING); 44 | 45 | case Envelope: 46 | return GeometryUtils.geometryToEsriShapeBytesWritable(geometry, wkid, OGCType.ST_POLYGON); 47 | 48 
| case Polygon: 49 | return GeometryUtils.geometryToEsriShapeBytesWritable(geometry, wkid, OGCType.ST_MULTIPOLYGON); 50 | 51 | default: 52 | return GeometryUtils.geometryToEsriShapeBytesWritable(geometry, wkid, OGCType.UNKNOWN); 53 | } 54 | } catch (Exception e) { 55 | LogUtils.Log_ExceptionThrown(LOG, "geom-from-shape", e); 56 | return null; 57 | } 58 | } 59 | 60 | } 61 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_GeomFromText.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.hive.ql.exec.UDFArgumentException; 7 | import org.apache.hadoop.io.BytesWritable; 8 | import org.apache.hadoop.io.Text; 9 | 10 | 11 | import com.esri.core.geometry.SpatialReference; 12 | import com.esri.core.geometry.ogc.OGCGeometry; 13 | 14 | @Description( 15 | name = "ST_GeomFromText", 16 | value = "_FUNC_(wkt) - construct an ST_Geometry from OGC well-known text", 17 | extended = "Example:\n" 18 | + " SELECT _FUNC_('linestring (1 0, 2 3)') FROM src LIMIT 1; -- constructs ST_Linestring\n" 19 | + " SELECT _FUNC_('multipoint ((1 0), (2 3))') FROM src LIMIT 1; -- constructs ST_MultiPoint\n" 20 | ) 21 | //@HivePdkUnitTests( 22 | // cases = { 23 | // @HivePdkUnitTest( 24 | // query = "select ST_AsText(ST_GeomFromText('point (10.02 20.01)')) from onerow", 25 | // result = "POINT (10.02 20.01)" 26 | // ), 27 | // @HivePdkUnitTest( 28 | // query = "select ST_AsText(ST_GeomFromText('linestring (10 10, 20 20)')) from onerow", 29 | // result = "LINESTRING (10 10, 20 20)" 30 | // ), 31 | // @HivePdkUnitTest( 32 | // query = "select ST_AsText(ST_GeomFromText('polygon ((0 0, 0 10, 10 10, 0 0))')) from onerow", 33 | // result = "POLYGON ((0 0, 0 10, 10 10, 0 0))" 34 | // ), 35 | 
// @HivePdkUnitTest( 36 | // query = "select ST_AsText(ST_GeomFromText('MULTIPOINT ((10 40), (40 30), (20 20), (30 10))')) from onerow", 37 | // result = "MULTIPOINT (10 40, 40 30, 20 20, 30 10)" 38 | // ), 39 | // @HivePdkUnitTest( 40 | // query = "select ST_AsText(ST_GeomFromText('multilinestring ((2 4, 10 10), (20 20, 7 8))')) from onerow", 41 | // result = "MULTILINESTRING ((2 4, 10 10), (20 20, 7 8))" 42 | // ), 43 | // @HivePdkUnitTest( 44 | // query = "select ST_AsText(ST_GeomFromText('multipolygon (((0 0, 0 1, 1 0, 0 0)), ((2 2, 2 3, 3 2, 2 2)))')) from onerow", 45 | // result = "MULTIPOLYGON (((0 0, 0 1, 1 0, 0 0)), ((2 2, 2 3, 3 2, 2 2)))" 46 | // ) 47 | // } 48 | // ) 49 | 50 | public class ST_GeomFromText extends ST_Geometry { 51 | 52 | static final Log LOG = LogFactory.getLog(ST_GeomFromText.class.getName()); 53 | 54 | public BytesWritable evaluate(Text wkt) throws UDFArgumentException { 55 | return evaluate(wkt, 0); 56 | } 57 | 58 | public BytesWritable evaluate(Text wkwrap, int wkid) throws UDFArgumentException { 59 | 60 | String wkt = wkwrap.toString(); 61 | try { 62 | SpatialReference spatialReference = null; 63 | if (wkid != GeometryUtils.WKID_UNKNOWN) { 64 | spatialReference = SpatialReference.create(wkid); 65 | } 66 | OGCGeometry ogcObj = OGCGeometry.fromText(wkt); 67 | ogcObj.setSpatialReference(spatialReference); 68 | return GeometryUtils.geometryToEsriShapeBytesWritable(ogcObj); 69 | } catch (Exception e) { // IllegalArgumentException, GeometryException 70 | LogUtils.Log_InvalidText(LOG, wkt); 71 | return null; 72 | } 73 | } 74 | 75 | } 76 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_Geometry.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.hadoop.hive.ql.exec.UDF; 4 | 5 | public abstract class ST_Geometry extends UDF { 6 | 7 | } 8 | 
-------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_GeometryAccessor.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | /** 4 | * Abstract base class for all accessors (ST_X/Y, IsBoolTests, ...) 5 | * 6 | */ 7 | 8 | public abstract class ST_GeometryAccessor extends ST_Geometry { 9 | } 10 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_GeometryN.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.io.BytesWritable; 7 | import org.apache.hadoop.io.IntWritable; 8 | 9 | import com.esri.core.geometry.ogc.OGCGeometry; 10 | import com.esri.core.geometry.ogc.OGCMultiPoint; 11 | import com.esri.core.geometry.ogc.OGCMultiLineString; 12 | import com.esri.core.geometry.ogc.OGCMultiPolygon; 13 | 14 | @Description( 15 | name = "ST_GeometryN", 16 | value = "_FUNC_(ST_GeometryCollection, n) - return the nth ST_Geometry in the collection (1-based index)", 17 | extended = "Example:\n" 18 | + " SELECT _FUNC_(ST_GeomFromText('multipoint ((10 40), (40 30), (20 20), (30 10))'), 3) FROM src LIMIT 1; -- ST_Point(20 20)\n" 19 | + " SELECT _FUNC_(ST_GeomFromText('multilinestring ((2 4, 10 10), (20 20, 7 8))'), 2) FROM src LIMIT 1; -- ST_Linestring(20 20, 7 8)\n" 20 | ) 21 | 22 | public class ST_GeometryN extends ST_GeometryAccessor { 23 | static final Log LOG = LogFactory.getLog(ST_GeometryN.class.getName()); 24 | 25 | public BytesWritable evaluate(BytesWritable geomref, IntWritable index) { 26 | if (geomref == null || geomref.getLength() == 0 || index == null) { 27 | LogUtils.Log_ArgumentsNull(LOG); 28 | return null; 29 
| } 30 | 31 | OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref); 32 | if (ogcGeometry == null){ 33 | LogUtils.Log_ArgumentsNull(LOG); 34 | return null; 35 | } 36 | 37 | int idx = index.get() - 1; // 1-based UI, 0-based engine 38 | try { 39 | GeometryUtils.OGCType ogcType = GeometryUtils.getType(geomref); 40 | OGCGeometry ogcGeom = null; 41 | switch(ogcType) { 42 | case ST_POINT: 43 | LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_MULTIPOINT, ogcType); 44 | return null; 45 | case ST_LINESTRING: 46 | LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_MULTILINESTRING, ogcType); 47 | return null; 48 | case ST_POLYGON: 49 | LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_MULTIPOLYGON, ogcType); 50 | return null; 51 | case ST_MULTIPOINT: 52 | ogcGeom = ((OGCMultiPoint)ogcGeometry).geometryN(idx); 53 | break; 54 | case ST_MULTILINESTRING: 55 | ogcGeom = ((OGCMultiLineString)ogcGeometry).geometryN(idx); 56 | break; 57 | case ST_MULTIPOLYGON: 58 | ogcGeom = ((OGCMultiPolygon)ogcGeometry).geometryN(idx); 59 | break; 60 | } 61 | return GeometryUtils.geometryToEsriShapeBytesWritable(ogcGeom); 62 | } catch (Exception e) { 63 | LogUtils.Log_InternalError(LOG, "ST_GeometryN: " + e); 64 | return null; 65 | } 66 | } 67 | } 68 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_GeometryProcessing.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | public class ST_GeometryProcessing extends ST_Geometry { 4 | 5 | } 6 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_GeometryType.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import 
org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.io.BytesWritable; 7 | import org.apache.hadoop.io.Text; 8 | 9 | 10 | @Description( 11 | name = "ST_GeometryType", 12 | value = "_FUNC_(geometry) - return type of geometry", 13 | extended = "Example:\n" 14 | + " > SELECT _FUNC_(ST_Point(1.5, 2.5)) FROM src LIMIT 1; -- ST_Point\n" 15 | + " > SELECT _FUNC_(ST_LineString(1.5,2.5, 3.0,2.2)) FROM src LIMIT 1; -- ST_LineString\n" 16 | + " > SELECT _FUNC_(ST_Polygon(2,0, 2,3, 3,0)) FROM src LIMIT 1; -- ST_Polygon\n" 17 | ) 18 | //@HivePdkUnitTests( 19 | // cases = { 20 | // @HivePdkUnitTest( 21 | // query = "select ST_GeometryType(ST_GeomFromText('point (10.02 20.01)')) from onerow", 22 | // result = "ST_POINT" 23 | // ), 24 | // @HivePdkUnitTest( 25 | // query = "selectST_GeometryType(ST_GeomFromText('linestring (10 10, 20 20)')) from onerow", 26 | // result = "ST_LINESTRING" 27 | // ), 28 | // @HivePdkUnitTest( 29 | // query = "select ST_GeometryType(ST_GeomFromText('polygon ((0 0, 0 10, 10 10, 0 0))')) from onerow", 30 | // result = "ST_POLYGON" 31 | // ), 32 | // @HivePdkUnitTest( 33 | // query = "select ST_GeometryType(ST_GeomFromText('MULTIPOINT ((10 40), (40 30), (20 20), (30 10))')) from onerow", 34 | // result = "ST_MULTIPOINT" 35 | // ), 36 | // @HivePdkUnitTest( 37 | // query = "select ST_GeometryType(ST_GeomFromText('multilinestring ((2 4, 10 10), (20 20, 7 8))')) from onerow", 38 | // result = "ST_MULTILINESTRING" 39 | // ), 40 | // @HivePdkUnitTest( 41 | // query = "select ST_GeometryType(ST_GeomFromText('multipolygon (((0 0, 0 1, 1 0, 0 0)), ((2 2, 2 3, 3 2, 2 2)))')) from onerow", 42 | // result = "ST_MULTIPOLYGON" 43 | // ) 44 | // } 45 | // ) 46 | 47 | public class ST_GeometryType extends ST_Geometry { 48 | static final Log LOG = LogFactory.getLog(ST_GeometryType.class.getName()); 49 | 50 | public Text evaluate(BytesWritable ref) { 51 | if (ref == null || ref.getLength() == 0) { 52 | LogUtils.Log_ArgumentsNull(LOG); 53 | return null; 
54 | } 55 | return new Text(GeometryUtils.getType(ref).toString()); 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_InteriorRingN.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.io.BytesWritable; 7 | import org.apache.hadoop.io.IntWritable; 8 | 9 | 10 | import com.esri.core.geometry.ogc.OGCGeometry; 11 | import com.esri.core.geometry.ogc.OGCLineString; 12 | import com.esri.core.geometry.ogc.OGCPolygon; 13 | 14 | @Description( 15 | name = "ST_InteriorRingN", 16 | value = "_FUNC_(ST_Polygon, n) - return ST_LineString which is the nth interior ring of the ST_Polygon (1-based index)", 17 | extended = "Example:\n" 18 | + " SELECT _FUNC_(ST_Polygon('polygon ((0 0, 8 0, 0 8, 0 0), (1 1, 1 5, 5 1, 1 1))'), 1) FROM src LIMIT 1; -- LINESTRING (1 1, 5 1, 1 5, 1 1)\n" 19 | ) 20 | //@HivePdkUnitTests( 21 | // cases = { 22 | // @HivePdkUnitTest( 23 | // query = "select ST_Equals(ST_InteriorRingN(ST_Polygon('polygon ((0 0, 8 0, 0 8, 0 0), (1 1, 1 5, 5 1, 1 1))'), 1), ST_LineString('linestring(1 1, 5 1, 1 5, 1 1)')) from onerow", 24 | // result = "true" 25 | // ), 26 | // @HivePdkUnitTest( 27 | // query = "select ST_InteriorRingN(null, 1) from onerow", 28 | // result = "null" 29 | // ) 30 | // } 31 | //) 32 | 33 | public class ST_InteriorRingN extends ST_GeometryProcessing { 34 | static final Log LOG = LogFactory.getLog(ST_InteriorRingN.class.getName()); 35 | 36 | public BytesWritable evaluate(BytesWritable geomref, IntWritable index) { 37 | if (geomref == null || geomref.getLength() == 0 || index == null) { 38 | LogUtils.Log_ArgumentsNull(LOG); 39 | return null; 40 | } 41 | 42 | OGCGeometry ogcGeometry = 
GeometryUtils.geometryFromEsriShape(geomref); 43 | if (ogcGeometry == null){ 44 | LogUtils.Log_ArgumentsNull(LOG); 45 | return null; 46 | } 47 | 48 | int idx = index.get() - 1; // 1-based UI, 0-based engine 49 | if (GeometryUtils.getType(geomref) == GeometryUtils.OGCType.ST_POLYGON) { 50 | try { 51 | OGCLineString hole = ((OGCPolygon)(ogcGeometry)).interiorRingN(idx); 52 | return GeometryUtils.geometryToEsriShapeBytesWritable(hole); 53 | } catch (Exception e) { 54 | LogUtils.Log_InternalError(LOG, "ST_InteriorRingN: " + e); 55 | return null; 56 | } 57 | } else { 58 | LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_POLYGON, GeometryUtils.getType(geomref)); 59 | return null; 60 | } 61 | } 62 | 63 | } 64 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_Intersection.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.io.BytesWritable; 7 | 8 | 9 | import com.esri.core.geometry.Geometry; 10 | import com.esri.core.geometry.GeometryEngine; 11 | import com.esri.core.geometry.ogc.OGCGeometry; 12 | 13 | @Description( 14 | name = "ST_Intersection", 15 | value = "_FUNC_(ST_Geometry1, ST_Geometry2) - intersection of ST_Geometry1 & ST_Geometry2", 16 | extended = "Example:\n" + 17 | " SELECT ST_AsText(_FUNC_(ST_Point(1,1), ST_Point(1,1))) FROM onerow; -- POINT (1 1)\n" + 18 | " SELECT ST_AsText(_FUNC_(ST_GeomFromText('linestring(0 2, 0 0, 2 0)'), ST_GeomFromText('linestring(0 3, 0 1, 1 0, 3 0)'))) FROM onerow; -- MULTILINESTRING ((1 0, 2 0), (0 2, 0 1))\n" + 19 | " SELECT ST_AsText(_FUNC_(ST_LineString(0,2, 2,3), ST_Polygon(1,1, 4,1, 4,4, 1,4))) FROM onerow; -- MULTILINESTRING ((1 2.5, 2 3))\n" + 20 | " SELECT ST_AsText(_FUNC_(ST_Polygon(2,0, 2,3, 
3,0), ST_Polygon(1,1, 4,1, 4,4, 1,4))) FROM onerow; -- MULTIPOLYGON (((2.67 1, 2 3, 2 1, 2.67 1)))\n" + 21 | "OGC Compliance Notes : \n" + 22 | " In the case where the two geometries intersect in a lower dimension," + 23 | " ST_Intersection may drop the lower-dimension intersections, or output a closed linestring.\n" + 24 | "SELECT ST_AsText(_FUNC_(ST_Polygon(2,0, 3,1, 2,1), ST_Polygon(1,1, 4,1, 4,4, 1,4))) FROM onerow; -- MULTIPOLYGON EMPTY or LINESTRING (2 1, 3 1, 2 1)\n" 25 | ) 26 | 27 | public class ST_Intersection extends ST_GeometryProcessing { 28 | static final Log LOG = LogFactory.getLog(ST_Intersection.class.getName()); 29 | 30 | public BytesWritable evaluate(BytesWritable geometryref1, BytesWritable geometryref2) 31 | { 32 | if (geometryref1 == null || geometryref2 == null || 33 | geometryref1.getLength() == 0 || geometryref2.getLength() == 0) { 34 | LogUtils.Log_ArgumentsNull(LOG); 35 | return null; 36 | } 37 | if (!GeometryUtils.compareSpatialReferences(geometryref1, geometryref2)) { 38 | LogUtils.Log_SRIDMismatch(LOG, geometryref1, geometryref2); 39 | return null; 40 | } 41 | 42 | OGCGeometry ogcGeom1 = GeometryUtils.geometryFromEsriShape(geometryref1); 43 | OGCGeometry ogcGeom2 = GeometryUtils.geometryFromEsriShape(geometryref2); 44 | if (ogcGeom1 == null || ogcGeom2 == null){ 45 | LogUtils.Log_ArgumentsNull(LOG); 46 | return null; 47 | } 48 | 49 | OGCGeometry commonGeom; 50 | try { 51 | commonGeom = ogcGeom1.intersection(ogcGeom2); 52 | return GeometryUtils.geometryToEsriShapeBytesWritable(commonGeom); 53 | } catch (Exception e) { 54 | LogUtils.Log_InternalError(LOG, "ST_Intersection: " + e); 55 | return null; 56 | } 57 | } 58 | 59 | } 60 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_Intersects.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.hadoop.hive.ql.exec.Description; 
4 | 5 | import com.esri.core.geometry.OperatorIntersects; 6 | import com.esri.core.geometry.OperatorSimpleRelation; 7 | 8 | @Description( 9 | name = "ST_Intersects", 10 | value = "_FUNC_(geometry1, geometry2) - return true if geometry1 intersects geometry2", 11 | extended = "Example:\n" + 12 | "SELECT _FUNC_(ST_LineString(2,0, 2,3), ST_Polygon(1,1, 4,1, 4,4, 1,4))) from src LIMIT 1; -- return true\n" + 13 | "SELECT _FUNC_(ST_LineString(8,7, 7,8), ST_Polygon(1,1, 4,1, 4,4, 1,4)) from src LIMIT 1; -- return false\n" 14 | ) 15 | 16 | public class ST_Intersects extends ST_GeometryRelational { 17 | 18 | @Override 19 | protected OperatorSimpleRelation getRelationOperator() { 20 | return OperatorIntersects.local(); 21 | } 22 | 23 | @Override 24 | public String getDisplayString(String[] args) { 25 | return String.format("returns true if %s intersects %s", args[0], args[1]); 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_Is3D.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.io.BooleanWritable; 7 | import org.apache.hadoop.io.BytesWritable; 8 | 9 | 10 | import com.esri.core.geometry.ogc.OGCGeometry; 11 | 12 | @Description( 13 | name = "ST_Is3D", 14 | value = "_FUNC_(geometry) - return true if the geometry object is three-dimensional", 15 | extended = "Example:\n" 16 | + " > SELECT _FUNC_(ST_Polygon(1,1, 1,4, 4,4, 4,1)) FROM src LIMIT 1; -- false\n" 17 | + " > SELECT _FUNC_(ST_LineString(0.,0., 3.,4., 0.,4., 0.,0.)) FROM src LIMIT 1; -- false\n" 18 | + " > SELECT _FUNC_(ST_Point(3., 4.)) FROM src LIMIT 1; -- false\n" 19 | + " > SELECT _FUNC_(ST_PointZ(3., 4., 2)) FROM src LIMIT 1; -- true\n" 20 | ) 21 | //@HivePdkUnitTests( 22 | // 
cases = { 23 | // @HivePdkUnitTest( 24 | // query = "select ST_Is3D(ST_Point(0., 3.)) from onerow", 25 | // result = "false" 26 | // ), 27 | // @HivePdkUnitTest( 28 | // query = "select ST_Is3D(ST_PointZ(0., 3., 1)) from onerow", 29 | // result = "true" 30 | // ), 31 | // @HivePdkUnitTest( 32 | // query = "select ST_Is3D(ST_Point('pointzm (0. 3. 1. 2.)')) from onerow", 33 | // result = "true" 34 | // ), 35 | // @HivePdkUnitTest( 36 | // query = "select ST_Is3D(null) from onerow", 37 | // result = "null" 38 | // ) 39 | // } 40 | //) 41 | 42 | public class ST_Is3D extends ST_GeometryAccessor { 43 | final BooleanWritable resultBoolean = new BooleanWritable(); 44 | static final Log LOG = LogFactory.getLog(ST_Is3D.class.getName()); 45 | 46 | public BooleanWritable evaluate(BytesWritable geomref) { 47 | if (geomref == null || geomref.getLength() == 0) { 48 | LogUtils.Log_ArgumentsNull(LOG); 49 | return null; 50 | } 51 | 52 | OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref); 53 | if (ogcGeometry == null) { 54 | LogUtils.Log_ArgumentsNull(LOG); 55 | return null; 56 | } 57 | 58 | resultBoolean.set(ogcGeometry.is3D()); 59 | return resultBoolean; 60 | } 61 | 62 | } 63 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_IsEmpty.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.io.BooleanWritable; 7 | import org.apache.hadoop.io.BytesWritable; 8 | 9 | 10 | import com.esri.core.geometry.ogc.OGCGeometry; 11 | 12 | @Description( 13 | name = "ST_IsEmpty", 14 | value = "_FUNC_(geometry) - return true if the geometry object is empty of geometric information", 15 | extended = "Example:\n" 16 | + " > SELECT _FUNC_(ST_Point(1.5, 2.5)) FROM src 
LIMIT 1; -- false\n" 17 | + " > SELECT _FUNC_(ST_GeomFromText('point empty')) FROM src LIMIT 1; -- true\n" 18 | ) 19 | //@HivePdkUnitTests( 20 | // cases = { 21 | // @HivePdkUnitTest( 22 | // query = "select ST_IsEmpty(ST_GeomFromText('point empty')) from onerow", 23 | // result = "true" 24 | // ), 25 | // @HivePdkUnitTest( 26 | // query = "select ST_IsEmpty(ST_Intersection(st_point(2,0), ST_Point(1,1))) from onerow", 27 | // result = "true" 28 | // ), 29 | // @HivePdkUnitTest( 30 | // query = "select ST_IsEmpty(ST_GeomFromText('point (10.02 20.01)')) from onerow", 31 | // result = "false" 32 | // ), 33 | // @HivePdkUnitTest( 34 | // query = "select ST_IsEmpty(null) from onerow", 35 | // result = "null" 36 | // ) 37 | // } 38 | // ) 39 | 40 | public class ST_IsEmpty extends ST_GeometryAccessor { 41 | final BooleanWritable resultBoolean = new BooleanWritable(); 42 | static final Log LOG = LogFactory.getLog(ST_IsEmpty.class.getName()); 43 | 44 | public BooleanWritable evaluate(BytesWritable geomref) { 45 | if (geomref == null || geomref.getLength() == 0) { 46 | LogUtils.Log_ArgumentsNull(LOG); 47 | return null; 48 | } 49 | 50 | OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref); 51 | if (ogcGeometry == null){ 52 | LogUtils.Log_ArgumentsNull(LOG); 53 | return null; 54 | } 55 | 56 | try { 57 | resultBoolean.set(ogcGeometry.isEmpty()); 58 | } catch (Exception e) { 59 | LogUtils.Log_InternalError(LOG, "ST_IsEmpty" + e); 60 | return null; 61 | } 62 | return resultBoolean; 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_IsMeasured.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.io.BooleanWritable; 7 | import 
org.apache.hadoop.io.BytesWritable; 8 | 9 | 10 | import com.esri.core.geometry.ogc.OGCGeometry; 11 | 12 | @Description( 13 | name = "ST_IsMeasured", 14 | value = "_FUNC_(geometry) - return true if the geometry object is three-dimensional", 15 | extended = "Example:\n" 16 | + " > SELECT _FUNC_(ST_Polygon(1,1, 1,4, 4,4, 4,1)) FROM src LIMIT 1; -- false\n" 17 | + " > SELECT _FUNC_(ST_LineString(0.,0., 3.,4., 0.,4., 0.,0.)) FROM src LIMIT 1; -- false\n" 18 | + " > SELECT _FUNC_(ST_Point(3., 4.)) FROM src LIMIT 1; -- false\n" 19 | + " > SELECT _FUNC_(ST_PointM(3., 4., 2)) FROM src LIMIT 1; -- true\n" 20 | ) 21 | //@HivePdkUnitTests( 22 | // cases = { 23 | // @HivePdkUnitTest( 24 | // query = "select ST_IsMeasured(ST_Point(0., 3.)) from onerow", 25 | // result = "false" 26 | // ), 27 | // @HivePdkUnitTest( 28 | // query = "select ST_IsMeasured(ST_Point('point m(0. 3. 1)')) from onerow", 29 | // result = "true" 30 | // ), 31 | // @HivePdkUnitTest( 32 | // query = "select ST_IsMeasured(ST_Point('pointzm (0. 3. 1. 
2.)')) from onerow", 33 | // result = "true" 34 | // ), 35 | // @HivePdkUnitTest( 36 | // query = "select ST_IsMeasured(null) from onerow", 37 | // result = "null" 38 | // ) 39 | // } 40 | //) 41 | 42 | public class ST_IsMeasured extends ST_GeometryAccessor { 43 | final BooleanWritable resultBoolean = new BooleanWritable(); 44 | static final Log LOG = LogFactory.getLog(ST_IsMeasured.class.getName()); 45 | 46 | public BooleanWritable evaluate(BytesWritable geomref) { 47 | if (geomref == null || geomref.getLength() == 0) { 48 | LogUtils.Log_ArgumentsNull(LOG); 49 | return null; 50 | } 51 | 52 | OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref); 53 | if (ogcGeometry == null) { 54 | LogUtils.Log_ArgumentsNull(LOG); 55 | return null; 56 | } 57 | 58 | resultBoolean.set(ogcGeometry.isMeasured()); 59 | return resultBoolean; 60 | } 61 | 62 | } 63 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_IsRing.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.io.BooleanWritable; 7 | import org.apache.hadoop.io.BytesWritable; 8 | 9 | 10 | import com.esri.core.geometry.ogc.OGCGeometry; 11 | import com.esri.core.geometry.ogc.OGCLineString; 12 | 13 | @Description( 14 | name = "ST_IsRing", 15 | value = "_FUNC_(ST_LineString) - return true if the linestring is closed & simple", 16 | extended = "Example:\n" 17 | + " SELECT _FUNC_(ST_LineString(0.,0., 3.,4., 0.,4., 0.,0.)) FROM src LIMIT 1; -- true\n" 18 | + " SELECT _FUNC_(ST_LineString(0.,0., 1.,1., 1.,2., 2.,1., 1.,1., 0.,0.)) FROM src LIMIT 1; -- false\n" 19 | + " SELECT _FUNC_(ST_LineString(0.,0., 3.,4.)) FROM src LIMIT 1; -- false\n" 20 | ) 21 | //@HivePdkUnitTests( 22 | // cases = { 23 | 
// @HivePdkUnitTest( 24 | // query = "select ST_IsRing(ST_LineString(0.,0., 3.,4., 0.,4., 0.,0.)) from onerow", 25 | // result = "true" 26 | // ), 27 | // @HivePdkUnitTest( 28 | // query = "select ST_IsRing(ST_LineString(0.,0., 3.,4.)) from onerow", 29 | // result = "false" 30 | // ), 31 | // @HivePdkUnitTest( 32 | // query = "select ST_IsRing(ST_LineString(0.,0., 1.,1., 1.,2., 2.,1., 1.,1., 0.,0.)) from onerow", 33 | // result = "false" 34 | // ), 35 | // @HivePdkUnitTest( 36 | // query = "select ST_IsRing(null) from onerow", 37 | // result = "null" 38 | // ) 39 | // } 40 | // ) 41 | 42 | public class ST_IsRing extends ST_GeometryAccessor { 43 | final BooleanWritable resultBoolean = new BooleanWritable(); 44 | static final Log LOG = LogFactory.getLog(ST_IsRing.class.getName()); 45 | 46 | public BooleanWritable evaluate(BytesWritable geomref) { 47 | if (geomref == null || geomref.getLength() == 0) { 48 | LogUtils.Log_ArgumentsNull(LOG); 49 | return null; 50 | } 51 | 52 | OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref); 53 | if (ogcGeometry == null){ 54 | LogUtils.Log_ArgumentsNull(LOG); 55 | return null; 56 | } 57 | 58 | try { 59 | 60 | switch(GeometryUtils.getType(geomref)) { 61 | case ST_LINESTRING: 62 | OGCLineString lns = (OGCLineString)ogcGeometry; 63 | resultBoolean.set(lns.isClosed() && lns.isSimple()); 64 | return resultBoolean; 65 | default: // ST_IsRing gives ERROR on Point, Polygon, or MultiLineString - on Postgres 66 | LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_LINESTRING, GeometryUtils.getType(geomref)); 67 | return null; 68 | } 69 | 70 | } catch (Exception e) { 71 | LogUtils.Log_InternalError(LOG, "ST_IsRing" + e); 72 | return null; 73 | } 74 | } 75 | 76 | } 77 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_IsSimple.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 
3 | import org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.io.BooleanWritable; 7 | import org.apache.hadoop.io.BytesWritable; 8 | 9 | 10 | import com.esri.core.geometry.ogc.OGCGeometry; 11 | 12 | @Description( 13 | name = "ST_IsSimple", 14 | value = "_FUNC_(geometry) - return true if geometry is simple", 15 | extended = "Example:\n" 16 | + " > SELECT _FUNC_(ST_Point(1.5, 2.5)) FROM src LIMIT 1; -- true\n" 17 | + " > SELECT _FUNC_(ST_LineString(0.,0., 1.,1., 0.,1., 1.,0.)) FROM src LIMIT 1; -- false\n" 18 | ) 19 | //@HivePdkUnitTests( 20 | // cases = { 21 | // @HivePdkUnitTest( 22 | // query = "select ST_IsSimple(ST_Point(0,0)) from onerow", 23 | // result = "true" 24 | // ), 25 | // @HivePdkUnitTest( 26 | // query = "select ST_IsSimple(ST_MultiPoint(0,0, 2,2)) from onerow", 27 | // result = "true" 28 | // ), 29 | // @HivePdkUnitTest( 30 | // query = "select ST_IsSimple(ST_LineString(0.,0., 1.,1., 0.,1., 1.,0.)) from onerow", 31 | // result = "false" 32 | // ), 33 | // @HivePdkUnitTest( 34 | // query = "select ST_IsSimple(ST_LineString(0,0, 1,0, 1,1, 0,2, 2,2, 1,1, 2,0)) from onerow", 35 | // result = "false" 36 | // ), 37 | // @HivePdkUnitTest( 38 | // query = "select ST_IsSimple(null) from onerow", 39 | // result = "null" 40 | // ) 41 | // } 42 | // ) 43 | 44 | public class ST_IsSimple extends ST_GeometryAccessor { 45 | final BooleanWritable resultBoolean = new BooleanWritable(); 46 | static final Log LOG = LogFactory.getLog(ST_IsSimple.class.getName()); 47 | 48 | public BooleanWritable evaluate(BytesWritable geomref) { 49 | if (geomref == null || geomref.getLength() == 0) { 50 | LogUtils.Log_ArgumentsNull(LOG); 51 | return null; 52 | } 53 | 54 | OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref); 55 | 56 | if (ogcGeometry == null){ 57 | LogUtils.Log_ArgumentsNull(LOG); 58 | return null; 59 | } 60 | 61 | try { 62 | 
resultBoolean.set(ogcGeometry.isSimple()); 63 | } catch (Exception e) { 64 | LogUtils.Log_InternalError(LOG, "ST_IsSimple" + e); 65 | return null; 66 | } 67 | return resultBoolean; 68 | } 69 | 70 | } 71 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_Length.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.io.BytesWritable; 7 | // DoubleWritable - must use hive-serde2; the other one produces struct {value:d.d} 8 | import org.apache.hadoop.hive.serde2.io.DoubleWritable; 9 | 10 | 11 | import com.esri.core.geometry.ogc.OGCGeometry; 12 | 13 | @Description(name = "ST_Length", 14 | value = "_FUNC_(line) - returns the length of line", 15 | extended = "Example:\n" 16 | + " SELECT _FUNC_(ST_Line(0.0,0.0, 3.0,4.0)) FROM src LIMIT 1; -- 5.0" 17 | ) 18 | //@HivePdkUnitTests( 19 | // cases = { 20 | // @HivePdkUnitTest( 21 | // query = "select ST_Length(ST_SetSRID(ST_LineString(0.0,0.0, 3.0,4.0), 0)) from onerow", 22 | // result = "5.0" 23 | // ), 24 | // @HivePdkUnitTest( 25 | // query = "select ST_Length(ST_SetSRID(ST_MultiLineString(array(1,1, 1,2), array(10,10, 20,10)), 0)) from onerow", 26 | // result = "11" 27 | // ), 28 | // @HivePdkUnitTest( 29 | // query = "select ST_Length(null) from onerow", 30 | // result = "null" 31 | // ) 32 | // } 33 | //) 34 | 35 | public class ST_Length extends ST_GeometryAccessor { 36 | final DoubleWritable resultDouble = new DoubleWritable(); 37 | static final Log LOG = LogFactory.getLog(ST_Length.class.getName()); 38 | 39 | public DoubleWritable evaluate(BytesWritable geomref) { 40 | if (geomref == null || geomref.getLength() == 0) { 41 | LogUtils.Log_ArgumentsNull(LOG); 42 | return null; 43 | } 44 | 45 | OGCGeometry 
package com.esri.hadoop.hive;

import java.nio.ByteBuffer;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.io.BytesWritable;

import com.esri.core.geometry.SpatialReference;
import com.esri.core.geometry.ogc.OGCGeometry;

@Description(
	name = "ST_LineFromWKB",
	value = "_FUNC_(wkb) - construct an ST_LineString from OGC well-known binary",
	extended = "Example:\n"
	+ "  SELECT _FUNC_(ST_AsBinary(ST_GeomFromText('linestring (1 0, 2 3)'))) FROM src LIMIT 1;  -- constructs ST_Linestring\n"
	)
public class ST_LineFromWKB extends ST_Geometry {

	static final Log LOG = LogFactory.getLog(ST_LineFromWKB.class.getName());

	/**
	 * Construct a linestring from well-known binary, with unknown spatial reference.
	 *
	 * @param wkb well-known-binary representation of a linestring
	 * @return serialized Esri-shape geometry, or null on invalid input
	 */
	public BytesWritable evaluate(BytesWritable wkb) throws UDFArgumentException {
		return evaluate(wkb, 0);
	}

	/**
	 * Construct a linestring from well-known binary in the given spatial reference.
	 *
	 * @param wkb  well-known-binary representation of a linestring
	 * @param wkid well-known id of the spatial reference (0 for unknown)
	 * @return serialized Esri-shape geometry, or null on bad input or non-linestring WKB
	 */
	public BytesWritable evaluate(BytesWritable wkb, int wkid) throws UDFArgumentException {

		try {
			SpatialReference spatialReference = null;
			if (wkid != GeometryUtils.WKID_UNKNOWN) {
				spatialReference = SpatialReference.create(wkid);
			}
			// BytesWritable.getBytes() returns the backing array, which may be
			// padded beyond getLength(); copy only the valid prefix so stale
			// trailing bytes are never handed to the WKB parser.
			int len = wkb.getLength();
			ByteBuffer byteBuf = ByteBuffer.allocate(len);
			byteBuf.put(wkb.getBytes(), 0, len);
			OGCGeometry ogcObj = OGCGeometry.fromBinary(byteBuf);
			ogcObj.setSpatialReference(spatialReference);
			if (ogcObj.geometryType().equals("LineString")) {
				return GeometryUtils.geometryToEsriShapeBytesWritable(ogcObj);
			} else {
				LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_LINESTRING, GeometryUtils.OGCType.UNKNOWN);
				return null;
			}
		} catch (Exception e) {  // IllegalArgumentException, GeometryException
			// Use the shared logging helper for consistency with the other constructors.
			LogUtils.Log_ExceptionThrown(LOG, "ST_LineFromWKB", e);
			return null;
		}
	}

}
package com.esri.hadoop.hive;

import java.nio.ByteBuffer;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.io.BytesWritable;

import com.esri.core.geometry.SpatialReference;
import com.esri.core.geometry.ogc.OGCGeometry;

@Description(
	name = "ST_MLineFromWKB",
	value = "_FUNC_(wkb) - construct an ST_MultiLineString from OGC well-known binary",
	extended = "Example:\n"
	+ "  SELECT _FUNC_(ST_AsBinary(ST_GeomFromText('multilinestring ((1 0, 2 3), (5 7, 7 5))'))) FROM src LIMIT 1;  -- constructs ST_MultiLineString\n"
	)
//@HivePdkUnitTests(
//	cases = {
//		@HivePdkUnitTest(
//			query = "select ST_GeometryType(ST_MLineFromWKB(ST_AsBinary(ST_GeomFromText('multilinestring ((1 2, 2 1),(10 10, 20 20))')))) from onerow",
//			result = "ST_MULTILINESTRING"
//			),
//		@HivePdkUnitTest(
//			query = "select ST_Equals(ST_MLineFromWKB(ST_AsBinary(ST_GeomFromText('multilinestring ((1 2, 2 1),(10 10, 20 20))'))), ST_GeomFromText('multilinestring ((1 2, 2 1),(10 10, 20 20))')) from onerow",
//			result = "true"
//			)
//	}
//	)

public class ST_MLineFromWKB extends ST_Geometry {

	static final Log LOG = LogFactory.getLog(ST_MLineFromWKB.class.getName());

	/**
	 * Construct an ST_MultiLineString from OGC well-known binary, with no
	 * spatial reference attached.
	 */
	public BytesWritable evaluate(BytesWritable wkb) throws UDFArgumentException {
		// Use the named sentinel rather than a bare 0 so the "no SRID" intent
		// stays in sync with the WKID_UNKNOWN comparison below.
		return evaluate(wkb, GeometryUtils.WKID_UNKNOWN);
	}

	/**
	 * Construct an ST_MultiLineString from OGC well-known binary.
	 *
	 * @param wkb  serialized well-known-binary geometry
	 * @param wkid well-known spatial-reference ID, or GeometryUtils.WKID_UNKNOWN for none
	 * @return serialized Esri shape; null if the WKB is neither MultiLineString
	 *         nor LineString, or fails to parse
	 */
	public BytesWritable evaluate(BytesWritable wkb, int wkid) throws UDFArgumentException {

		try {
			SpatialReference spatialReference = null;
			if (wkid != GeometryUtils.WKID_UNKNOWN) {
				spatialReference = SpatialReference.create(wkid);
			}
			// BytesWritable.getBytes() exposes the backing array, which Hadoop may
			// reuse and which can be longer than the valid data (getLength()).
			// Wrap only the valid region so stale trailing bytes are never parsed,
			// and avoid the needless allocate+copy of the previous version.
			ByteBuffer byteBuf = ByteBuffer.wrap(wkb.getBytes(), 0, wkb.getLength());
			OGCGeometry ogcObj = OGCGeometry.fromBinary(byteBuf);
			ogcObj.setSpatialReference(spatialReference);
			String gType = ogcObj.geometryType();
			// A single LineString is accepted and promoted into the result.
			if (gType.equals("MultiLineString") || gType.equals("LineString")) {
				return GeometryUtils.geometryToEsriShapeBytesWritable(ogcObj);
			} else {
				LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_MULTILINESTRING, GeometryUtils.OGCType.UNKNOWN);
				return null;
			}
		} catch (Exception e) {  // IllegalArgumentException, GeometryException
			LOG.error(e.getMessage());
			return null;
		}
	}

}
package com.esri.hadoop.hive;

import java.nio.ByteBuffer;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.io.BytesWritable;

import com.esri.core.geometry.SpatialReference;
import com.esri.core.geometry.ogc.OGCGeometry;

@Description(
	name = "ST_MPointFromWKB",
	value = "_FUNC_(wkb) - construct an ST_MultiPoint from OGC well-known binary",
	extended = "Example:\n"
	+ "  SELECT _FUNC_(ST_AsBinary(ST_GeomFromText('multipoint ((1 0), (2 3))'))) FROM src LIMIT 1;  -- constructs ST_MultiPoint\n"
	)
//@HivePdkUnitTests(
//	cases = {
//		@HivePdkUnitTest(
//			query = "select ST_GeometryType(ST_MPointFromWKB(ST_AsBinary(ST_GeomFromText('multipoint ((10 10), (20 20))')))) from onerow",
//			result = "ST_MULTIPOINT"
//			),
//		@HivePdkUnitTest(
//			query = "select ST_Equals(ST_MPointFromWKB(ST_AsBinary(ST_GeomFromText('multipoint ((10 10), (20 20))'))), ST_GeomFromText('multipoint ((10 10), (20 20))')) from onerow",
//			result = "true"
//			)
//	}
//	)

public class ST_MPointFromWKB extends ST_Geometry {

	static final Log LOG = LogFactory.getLog(ST_MPointFromWKB.class.getName());

	/**
	 * Construct an ST_MultiPoint from OGC well-known binary, with no
	 * spatial reference attached.
	 */
	public BytesWritable evaluate(BytesWritable wkb) throws UDFArgumentException {
		// Use the named sentinel rather than a bare 0 so the "no SRID" intent
		// stays in sync with the WKID_UNKNOWN comparison below.
		return evaluate(wkb, GeometryUtils.WKID_UNKNOWN);
	}

	/**
	 * Construct an ST_MultiPoint from OGC well-known binary.
	 *
	 * @param wkb  serialized well-known-binary geometry
	 * @param wkid well-known spatial-reference ID, or GeometryUtils.WKID_UNKNOWN for none
	 * @return serialized Esri shape; null if the WKB is neither MultiPoint nor
	 *         Point, or fails to parse
	 */
	public BytesWritable evaluate(BytesWritable wkb, int wkid) throws UDFArgumentException {

		try {
			SpatialReference spatialReference = null;
			if (wkid != GeometryUtils.WKID_UNKNOWN) {
				spatialReference = SpatialReference.create(wkid);
			}
			// BytesWritable.getBytes() exposes the backing array, which Hadoop may
			// reuse and which can be longer than the valid data (getLength()).
			// Wrap only the valid region so stale trailing bytes are never parsed,
			// and avoid the needless allocate+copy of the previous version.
			ByteBuffer byteBuf = ByteBuffer.wrap(wkb.getBytes(), 0, wkb.getLength());
			OGCGeometry ogcObj = OGCGeometry.fromBinary(byteBuf);
			ogcObj.setSpatialReference(spatialReference);
			String gType = ogcObj.geometryType();
			// A single Point is accepted and promoted into the result.
			if (gType.equals("MultiPoint") || gType.equals("Point")) {
				return GeometryUtils.geometryToEsriShapeBytesWritable(ogcObj);
			} else {
				// Fixed: previously reported the expected type as ST_LINESTRING
				// (copy-paste from ST_LineFromWKB); this UDF expects a multipoint.
				LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_MULTIPOINT, GeometryUtils.OGCType.UNKNOWN);
				return null;
			}
		} catch (Exception e) {  // IllegalArgumentException, GeometryException
			LOG.error(e.getMessage());
			return null;
		}
	}

}
package com.esri.hadoop.hive;

import java.nio.ByteBuffer;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.io.BytesWritable;

import com.esri.core.geometry.SpatialReference;
import com.esri.core.geometry.ogc.OGCGeometry;

@Description(
	name = "ST_MPolyFromWKB",
	value = "_FUNC_(wkb) - construct an ST_MultiPolygon from OGC well-known binary",
	extended = "Example:\n"
	+ "  SELECT _FUNC_(ST_AsBinary(ST_GeomFromText('multipolygon (((0 0, 1 0, 0 1, 0 0)), ((2 2, 1 2, 2 1, 2 2)))'))) FROM src LIMIT 1;  -- constructs ST_MultiPolygon\n"
	)
//@HivePdkUnitTests(
//	cases = {
//		@HivePdkUnitTest(
//			query = "select ST_GeometryType(ST_MPolyFromWKB(ST_AsBinary(ST_GeomFromText('multipolygon (((0 0, 1 0, 0 1, 0 0)), ((2 2, 1 2, 2 1, 2 2)))')))) from onerow",
//			result = "ST_MULTIPOLYGON"
//			),
//		@HivePdkUnitTest(
//			query = "select ST_Equals(ST_MPolyFromWKB(ST_AsBinary(ST_GeomFromText('multipolygon (((0 0, 1 0, 0 1, 0 0)), ((2 2, 1 2, 2 1, 2 2)))'))), ST_GeomFromText('multipolygon (((0 0, 1 0, 0 1, 0 0)), ((2 2, 1 2, 2 1, 2 2)))')) from onerow",
//			result = "true"
//			)
//	}
//	)

public class ST_MPolyFromWKB extends ST_Geometry {

	static final Log LOG = LogFactory.getLog(ST_MPolyFromWKB.class.getName());

	/**
	 * Construct an ST_MultiPolygon from OGC well-known binary, with no
	 * spatial reference attached.
	 */
	public BytesWritable evaluate(BytesWritable wkb) throws UDFArgumentException {
		// Use the named sentinel rather than a bare 0 so the "no SRID" intent
		// stays in sync with the WKID_UNKNOWN comparison below.
		return evaluate(wkb, GeometryUtils.WKID_UNKNOWN);
	}

	/**
	 * Construct an ST_MultiPolygon from OGC well-known binary.
	 *
	 * @param wkb  serialized well-known-binary geometry
	 * @param wkid well-known spatial-reference ID, or GeometryUtils.WKID_UNKNOWN for none
	 * @return serialized Esri shape; null if the WKB is neither MultiPolygon nor
	 *         Polygon, or fails to parse
	 */
	public BytesWritable evaluate(BytesWritable wkb, int wkid) throws UDFArgumentException {

		try {
			SpatialReference spatialReference = null;
			if (wkid != GeometryUtils.WKID_UNKNOWN) {
				spatialReference = SpatialReference.create(wkid);
			}
			// BytesWritable.getBytes() exposes the backing array, which Hadoop may
			// reuse and which can be longer than the valid data (getLength()).
			// Wrap only the valid region so stale trailing bytes are never parsed,
			// and avoid the needless allocate+copy of the previous version.
			ByteBuffer byteBuf = ByteBuffer.wrap(wkb.getBytes(), 0, wkb.getLength());
			OGCGeometry ogcObj = OGCGeometry.fromBinary(byteBuf);
			ogcObj.setSpatialReference(spatialReference);
			String gType = ogcObj.geometryType();
			// A single Polygon is accepted and promoted into the result.
			if (gType.equals("MultiPolygon") || gType.equals("Polygon")) {
				return GeometryUtils.geometryToEsriShapeBytesWritable(ogcObj);
			} else {
				LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_MULTIPOLYGON, GeometryUtils.OGCType.UNKNOWN);
				return null;
			}
		} catch (Exception e) {  // IllegalArgumentException, GeometryException
			LOG.error(e.getMessage());
			return null;
		}
	}

}
/hive/src/main/java/com/esri/hadoop/hive/ST_MaxM.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.io.BytesWritable; 7 | // DoubleWritable - must use hive-serde2; the other one produces struct {value:d.d} 8 | import org.apache.hadoop.hive.serde2.io.DoubleWritable; 9 | 10 | 11 | import com.esri.core.geometry.ogc.OGCGeometry; 12 | 13 | @Description(name = "ST_MaxM", 14 | value = "_FUNC_(geometry) - returns the maximum M coordinate of geometry", 15 | extended = "Example:\n" 16 | + " SELECT _FUNC_(ST_PointM(1.5, 2.5, 2)) FROM src LIMIT 1; -- 2\n" 17 | + " SELECT _FUNC_(ST_LineString('linestring m (1.5 2.5 2, 3.0 2.2 1)')) FROM src LIMIT 1; -- 1\n" 18 | ) 19 | //@HivePdkUnitTests( 20 | // cases = { 21 | // @HivePdkUnitTest( 22 | // query = "select ST_MaxM(ST_PointM(0., 3., 1.)) from onerow", 23 | // result = "1.0" 24 | // ), 25 | // @HivePdkUnitTest( 26 | // query = "select ST_MaxM(ST_GeomFromText('linestring m (10 10 2, 20 20 4)')) from onerow", 27 | // result = "4.0" 28 | // ), 29 | // @HivePdkUnitTest( 30 | // query = "select ST_MaxM(ST_MultiPoint('multipoint m((0 0 1), (2 2 3)')) from onerow", 31 | // result = "3.0" 32 | // ), 33 | // @HivePdkUnitTest( 34 | // query = "select ST_MaxM(ST_Point(1,2)) from onerow", 35 | // result = "null" 36 | // ), 37 | // @HivePdkUnitTest( 38 | // query = "select ST_MaxM(null) from onerow", 39 | // result = "null" 40 | // ) 41 | // } 42 | //) 43 | 44 | public class ST_MaxM extends ST_GeometryAccessor { 45 | final DoubleWritable resultDouble = new DoubleWritable(); 46 | static final Log LOG = LogFactory.getLog(ST_MaxM.class.getName()); 47 | 48 | public DoubleWritable evaluate(BytesWritable geomref) { 49 | if (geomref == null || geomref.getLength() == 0) { 50 | 
LogUtils.Log_ArgumentsNull(LOG); 51 | return null; 52 | } 53 | 54 | OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref); 55 | if (ogcGeometry == null) { 56 | LogUtils.Log_ArgumentsNull(LOG); 57 | return null; 58 | } 59 | if (!ogcGeometry.isMeasured()) { 60 | LogUtils.Log_NotMeasured(LOG); 61 | return null; 62 | } 63 | 64 | resultDouble.set(ogcGeometry.MaxMeasure()); 65 | return resultDouble; 66 | } 67 | 68 | } 69 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_MaxX.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.io.BytesWritable; 7 | // DoubleWritable - must use hive-serde2; the other one produces struct {value:d.d} 8 | import org.apache.hadoop.hive.serde2.io.DoubleWritable; 9 | 10 | 11 | import com.esri.core.geometry.Envelope; 12 | import com.esri.core.geometry.ogc.OGCGeometry; 13 | 14 | @Description(name = "ST_MaxX", 15 | value = "_FUNC_(geometry) - returns the maximum X coordinate of geometry", 16 | extended = "Example:\n" 17 | + " > SELECT _FUNC_(ST_Point(1.5, 2.5)) FROM src LIMIT 1; -- 1.5\n" 18 | + " > SELECT _FUNC_(ST_LineString(1.5,2.5, 3.0,2.2)) FROM src LIMIT 1; -- 3.0\n" 19 | ) 20 | //@HivePdkUnitTests( 21 | // cases = { 22 | // @HivePdkUnitTest( 23 | // query = "select ST_MaxX(ST_Point(1,2)) from onerow", 24 | // result = "1" 25 | // ), 26 | // @HivePdkUnitTest( 27 | // query = "select ST_MaxX(ST_LineString(1.5,2.5, 3.0,2.2)) from onerow", 28 | // result = "3.0" 29 | // ), 30 | // @HivePdkUnitTest( 31 | // query = "select ST_MaxX(ST_Polygon(1,1, 1,4, 4,4, 4,1)) from onerow", 32 | // result = "4" 33 | // ), 34 | // @HivePdkUnitTest( 35 | // query = "select ST_MaxX(ST_MultiPoint(0,0, 2,2)) from onerow", 36 | 
// result = "2" 37 | // ), 38 | // @HivePdkUnitTest( 39 | // query = "select ST_MaxX(ST_MultiLineString(array(1, 1, 2, 2), array(10, 10, 20, 20))) from onerow", 40 | // result = "20" 41 | // ), 42 | // @HivePdkUnitTest( 43 | // query = "select ST_MaxX(ST_MultiPolygon(array(1,1, 1,2, 2,2, 2,1), array(3,3, 3,4, 4,4, 4,3))) from onerow", 44 | // result = "4" 45 | // ), 46 | // @HivePdkUnitTest( 47 | // query = "select ST_MaxX(null) from onerow", 48 | // result = "null" 49 | // ) 50 | // } 51 | //) 52 | 53 | public class ST_MaxX extends ST_GeometryAccessor { 54 | final DoubleWritable resultDouble = new DoubleWritable(); 55 | static final Log LOG = LogFactory.getLog(ST_MaxX.class.getName()); 56 | 57 | public DoubleWritable evaluate(BytesWritable geomref) { 58 | if (geomref == null || geomref.getLength() == 0) { 59 | LogUtils.Log_ArgumentsNull(LOG); 60 | return null; 61 | } 62 | 63 | OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref); 64 | if (ogcGeometry == null) { 65 | LogUtils.Log_ArgumentsNull(LOG); 66 | return null; 67 | } 68 | 69 | Envelope envBound = new Envelope(); 70 | ogcGeometry.getEsriGeometry().queryEnvelope(envBound); 71 | resultDouble.set(envBound.getXMax()); 72 | return resultDouble; 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_MaxY.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.io.BytesWritable; 7 | // DoubleWritable - must use hive-serde2; the other one produces struct {value:d.d} 8 | import org.apache.hadoop.hive.serde2.io.DoubleWritable; 9 | 10 | 11 | import com.esri.core.geometry.Envelope; 12 | import com.esri.core.geometry.ogc.OGCGeometry; 13 | 14 | @Description(name = "ST_MaxY", 15 
| value = "_FUNC_(geometry) - returns the maximum Y coordinate of geometry", 16 | extended = "Example:\n" 17 | + " > SELECT _FUNC_(ST_Point(1.5, 2.5)) FROM src LIMIT 1; -- 2.5\n" 18 | + " > SELECT _FUNC_(ST_LineString(1.5,2.5, 3.0,2.2)) FROM src LIMIT 1; -- 2.5\n" 19 | ) 20 | //@HivePdkUnitTests( 21 | // cases = { 22 | // @HivePdkUnitTest( 23 | // query = "select ST_MaxY(ST_Point(1,2)) from onerow", 24 | // result = "2" 25 | // ), 26 | // @HivePdkUnitTest( 27 | // query = "select ST_MaxY(ST_LineString(1.5,2.5, 3.0,2.2)) from onerow", 28 | // result = "2.5" 29 | // ), 30 | // @HivePdkUnitTest( 31 | // query = "select ST_MaxY(ST_Polygon(1,1, 1,4, 4,4, 4,1)) from onerow", 32 | // result = "4" 33 | // ), 34 | // @HivePdkUnitTest( 35 | // query = "select ST_MaxY(ST_MultiPoint(0,0, 4,2)) from onerow", 36 | // result = "2" 37 | // ), 38 | // @HivePdkUnitTest( 39 | // query = "select ST_MaxY(ST_MultiLineString(array(1, 1, 2, 2), array(10, 10, 25, 20))) from onerow", 40 | // result = "20" 41 | // ), 42 | // @HivePdkUnitTest( 43 | // query = "select ST_MaxY(ST_MultiPolygon(array(1,1, 1,2, 2,2, 2,1), array(3,3, 3,4, 4,4, 4,3))) from onerow", 44 | // result = "4" 45 | // ), 46 | // @HivePdkUnitTest( 47 | // query = "select ST_MaxY(null) from onerow", 48 | // result = "null" 49 | // ) 50 | // } 51 | //) 52 | 53 | public class ST_MaxY extends ST_GeometryAccessor { 54 | final DoubleWritable resultDouble = new DoubleWritable(); 55 | static final Log LOG = LogFactory.getLog(ST_MaxY.class.getName()); 56 | 57 | public DoubleWritable evaluate(BytesWritable geomref) { 58 | if (geomref == null || geomref.getLength() == 0) { 59 | LogUtils.Log_ArgumentsNull(LOG); 60 | return null; 61 | } 62 | 63 | OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref); 64 | if (ogcGeometry == null) { 65 | LogUtils.Log_ArgumentsNull(LOG); 66 | return null; 67 | } 68 | 69 | Envelope envBound = new Envelope(); 70 | ogcGeometry.getEsriGeometry().queryEnvelope(envBound); 71 | 
resultDouble.set(envBound.getYMax()); 72 | return resultDouble; 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_MaxZ.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.io.BytesWritable; 7 | // DoubleWritable - must use hive-serde2; the other one produces struct {value:d.d} 8 | import org.apache.hadoop.hive.serde2.io.DoubleWritable; 9 | 10 | 11 | import com.esri.core.geometry.ogc.OGCGeometry; 12 | 13 | @Description(name = "ST_MaxZ", 14 | value = "_FUNC_(geometry) - returns the maximum Z coordinate of geometry", 15 | extended = "Example:\n" 16 | + " SELECT _FUNC_(ST_PointZ(1.5, 2.5, 2)) FROM src LIMIT 1; -- 2\n" 17 | + " SELECT _FUNC_(ST_LineString('linestring z (1.5 2.5 2, 3.0 2.2 1)')) FROM src LIMIT 1; -- 1\n" 18 | ) 19 | //@HivePdkUnitTests( 20 | // cases = { 21 | // @HivePdkUnitTest( 22 | // query = "select ST_MaxZ(ST_PointZ(0., 3., 1.)) from onerow", 23 | // result = "1.0" 24 | // ), 25 | // @HivePdkUnitTest( 26 | // query = "select ST_MaxZ(ST_GeomFromText('linestring z (10 10 2, 20 20 4)')) from onerow", 27 | // result = "4.0" 28 | // ), 29 | // @HivePdkUnitTest( 30 | // query = "select ST_MaxZ(ST_MultiPoint('multipoint z((0 0 1), (2 2 3))')) from onerow", 31 | // result = "3.0" 32 | // ), 33 | // @HivePdkUnitTest( 34 | // query = "select ST_MaxZ(ST_Point(1,2)) from onerow", 35 | // result = "null" 36 | // ), 37 | // @HivePdkUnitTest( 38 | // query = "select ST_MaxZ(null) from onerow", 39 | // result = "null" 40 | // ) 41 | // } 42 | //) 43 | 44 | public class ST_MaxZ extends ST_GeometryAccessor { 45 | final DoubleWritable resultDouble = new DoubleWritable(); 46 | static final Log LOG = 
LogFactory.getLog(ST_MaxZ.class.getName()); 47 | 48 | public DoubleWritable evaluate(BytesWritable geomref) { 49 | if (geomref == null || geomref.getLength() == 0) { 50 | LogUtils.Log_ArgumentsNull(LOG); 51 | return null; 52 | } 53 | 54 | OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref); 55 | if (ogcGeometry == null) { 56 | LogUtils.Log_ArgumentsNull(LOG); 57 | return null; 58 | } 59 | if (!ogcGeometry.is3D()) { 60 | LogUtils.Log_Not3D(LOG); 61 | return null; 62 | } 63 | 64 | resultDouble.set(ogcGeometry.MaxZ()); 65 | return resultDouble; 66 | } 67 | 68 | } 69 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_MinM.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.io.BytesWritable; 7 | // DoubleWritable - must use hive-serde2; the other one produces struct {value:d.d} 8 | import org.apache.hadoop.hive.serde2.io.DoubleWritable; 9 | 10 | 11 | import com.esri.core.geometry.ogc.OGCGeometry; 12 | 13 | @Description(name = "ST_MinM", 14 | value = "_FUNC_(geometry) - returns the minimum M coordinate of geometry", 15 | extended = "Example:\n" 16 | + " SELECT _FUNC_(ST_PointM(1.5, 2.5, 2)) FROM src LIMIT 1; -- 2\n" 17 | + " SELECT _FUNC_(ST_LineString('linestring m (1.5 2.5 2, 3.0 2.2 1)')) FROM src LIMIT 1; -- 1\n" 18 | ) 19 | //@HivePdkUnitTests( 20 | // cases = { 21 | // @HivePdkUnitTest( 22 | // query = "select ST_MinM(ST_PointM(0., 3., 1.)) from onerow", 23 | // result = "1.0" 24 | // ), 25 | // @HivePdkUnitTest( 26 | // query = "select ST_MinM(ST_GeomFromText('linestring m (10 10 2, 20 20 4)')) from onerow", 27 | // result = "2.0" 28 | // ), 29 | // @HivePdkUnitTest( 30 | // query = "select ST_MinM(ST_MultiPoint('multipoint 
m((0 0 1), (2 2 3)')) from onerow", 31 | // result = "1.0" 32 | // ), 33 | // @HivePdkUnitTest( 34 | // query = "select ST_MinM(ST_Point(1,2)) from onerow", 35 | // result = "null" 36 | // ), 37 | // @HivePdkUnitTest( 38 | // query = "select ST_MinM(null) from onerow", 39 | // result = "null" 40 | // ) 41 | // } 42 | //) 43 | 44 | public class ST_MinM extends ST_GeometryAccessor { 45 | final DoubleWritable resultDouble = new DoubleWritable(); 46 | static final Log LOG = LogFactory.getLog(ST_MinM.class.getName()); 47 | 48 | public DoubleWritable evaluate(BytesWritable geomref) { 49 | if (geomref == null || geomref.getLength() == 0) { 50 | LogUtils.Log_ArgumentsNull(LOG); 51 | return null; 52 | } 53 | 54 | OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref); 55 | if (ogcGeometry == null) { 56 | LogUtils.Log_ArgumentsNull(LOG); 57 | return null; 58 | } 59 | if (!ogcGeometry.isMeasured()) { 60 | LogUtils.Log_NotMeasured(LOG); 61 | return null; 62 | } 63 | 64 | resultDouble.set(ogcGeometry.MinMeasure()); 65 | return resultDouble; 66 | } 67 | 68 | } 69 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_MinX.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.io.BytesWritable; 7 | // DoubleWritable - must use hive-serde2; the other one produces struct {value:d.d} 8 | import org.apache.hadoop.hive.serde2.io.DoubleWritable; 9 | 10 | 11 | import com.esri.core.geometry.Envelope; 12 | import com.esri.core.geometry.ogc.OGCGeometry; 13 | 14 | @Description(name = "ST_MinX", 15 | value = "_FUNC_(geometry) - returns the minimum X coordinate of geometry", 16 | extended = "Example:\n" 17 | + " > SELECT _FUNC_(ST_Point(1.5, 2.5)) FROM src LIMIT 
1; -- 1.5\n" 18 | + " > SELECT _FUNC_(ST_LineString(1.5,2.5, 3.0,2.2)) FROM src LIMIT 1; -- 3.0\n" 19 | ) 20 | //@HivePdkUnitTests( 21 | // cases = { 22 | // @HivePdkUnitTest( 23 | // query = "select ST_MinX(ST_Point(1,2)) from onerow", 24 | // result = "1" 25 | // ), 26 | // @HivePdkUnitTest( 27 | // query = "select ST_MinX(ST_LineString(1.5,2.5, 3.0,2.2)) from onerow", 28 | // result = "1.5" 29 | // ), 30 | // @HivePdkUnitTest( 31 | // query = "select ST_MinX(ST_Polygon(1, 1, 1, 4, 4, 4, 4, 1)) from onerow", 32 | // result = "1" 33 | // ), 34 | // @HivePdkUnitTest( 35 | // query = "select ST_MinX(ST_MultiPoint(0,0, 2,2)) from onerow", 36 | // result = "0" 37 | // ), 38 | // @HivePdkUnitTest( 39 | // query = "select ST_MinX(ST_MultiLineString(array(1, 1, 2, 2), array(10, 10, 20, 20))) from onerow", 40 | // result = "1" 41 | // ), 42 | // @HivePdkUnitTest( 43 | // query = "select ST_MinX(ST_MultiPolygon(array(1,1, 1,2, 2,2, 2,1), array(3,3, 3,4, 4,4, 4,3))) from onerow", 44 | // result = "1" 45 | // ), 46 | // @HivePdkUnitTest( 47 | // query = "select ST_MinX(null) from onerow", 48 | // result = "null" 49 | // ) 50 | // } 51 | //) 52 | 53 | public class ST_MinX extends ST_GeometryAccessor { 54 | final DoubleWritable resultDouble = new DoubleWritable(); 55 | static final Log LOG = LogFactory.getLog(ST_MinX.class.getName()); 56 | 57 | public DoubleWritable evaluate(BytesWritable geomref) { 58 | if (geomref == null || geomref.getLength() == 0) { 59 | LogUtils.Log_ArgumentsNull(LOG); 60 | return null; 61 | } 62 | 63 | OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref); 64 | if (ogcGeometry == null) { 65 | LogUtils.Log_ArgumentsNull(LOG); 66 | return null; 67 | } 68 | 69 | Envelope envBound = new Envelope(); 70 | ogcGeometry.getEsriGeometry().queryEnvelope(envBound); 71 | resultDouble.set(envBound.getXMin()); 72 | return resultDouble; 73 | } 74 | } 75 | -------------------------------------------------------------------------------- 
/hive/src/main/java/com/esri/hadoop/hive/ST_MinY.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.io.BytesWritable; 7 | // DoubleWritable - must use hive-serde2; the other one produces struct {value:d.d} 8 | import org.apache.hadoop.hive.serde2.io.DoubleWritable; 9 | 10 | 11 | import com.esri.core.geometry.Envelope; 12 | import com.esri.core.geometry.ogc.OGCGeometry; 13 | 14 | @Description(name = "ST_MinY", 15 | value = "_FUNC_(geometry) - returns the minimum Y coordinate of geometry", 16 | extended = "Example:\n" 17 | + " > SELECT _FUNC_(ST_Point(1.5, 2.5)) FROM src LIMIT 1; -- 2.5\n" 18 | + " > SELECT _FUNC_(ST_LineString(1.5,2.5, 3.0,2.2)) FROM src LIMIT 1; -- 2.2\n" 19 | ) 20 | //@HivePdkUnitTests( 21 | // cases = { 22 | // @HivePdkUnitTest( 23 | // query = "select ST_MinY(ST_Point(1,2)) from onerow", 24 | // result = "2" 25 | // ), 26 | // @HivePdkUnitTest( 27 | // query = "select ST_MinY(ST_LineString(1.5,2.5, 3.0,2.2)) from onerow", 28 | // result = "2.2" 29 | // ), 30 | // @HivePdkUnitTest( 31 | // query = "select ST_MinY(ST_Polygon(1,1, 1,4, 4,4, 4,1)) from onerow", 32 | // result = "1" 33 | // ), 34 | // @HivePdkUnitTest( 35 | // query = "select ST_MinY(ST_MultiPoint(0,0, 2,2)) from onerow", 36 | // result = "0" 37 | // ), 38 | // @HivePdkUnitTest( 39 | // query = "select ST_MinY(ST_MultiLineString(array(1, 1, 2, 2), array(10, 10, 20, 20))) from onerow", 40 | // result = "1" 41 | // ), 42 | // @HivePdkUnitTest( 43 | // query = "select ST_MinY(ST_MultiPolygon(array(1,1, 1,2, 2,2, 2,1), array(3,3, 3,4, 4,4, 4,3))) from onerow", 44 | // result = "1" 45 | // ), 46 | // @HivePdkUnitTest( 47 | // query = "select ST_MinY(null) from onerow", 48 | // result = "null" 49 | // ) 50 | // } 51 | //) 52 | 53 | public 
class ST_MinY extends ST_GeometryAccessor { 54 | final DoubleWritable resultDouble = new DoubleWritable(); 55 | static final Log LOG = LogFactory.getLog(ST_MinY.class.getName()); 56 | 57 | public DoubleWritable evaluate(BytesWritable geomref) { 58 | if (geomref == null || geomref.getLength() == 0) { 59 | LogUtils.Log_ArgumentsNull(LOG); 60 | return null; 61 | } 62 | 63 | OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref); 64 | if (ogcGeometry == null) { 65 | LogUtils.Log_ArgumentsNull(LOG); 66 | return null; 67 | } 68 | 69 | Envelope envBound = new Envelope(); 70 | ogcGeometry.getEsriGeometry().queryEnvelope(envBound); 71 | resultDouble.set(envBound.getYMin()); 72 | return resultDouble; 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_MinZ.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.io.BytesWritable; 7 | // DoubleWritable - must use hive-serde2; the other one produces struct {value:d.d} 8 | import org.apache.hadoop.hive.serde2.io.DoubleWritable; 9 | 10 | 11 | import com.esri.core.geometry.ogc.OGCGeometry; 12 | 13 | @Description(name = "ST_MinZ", 14 | value = "_FUNC_(geometry) - returns the minimum Z coordinate of geometry", 15 | extended = "Example:\n" 16 | + " SELECT _FUNC_(ST_PointZ(1.5, 2.5, 2)) FROM src LIMIT 1; -- 2\n" 17 | + " SELECT _FUNC_(ST_LineString('linestring z (1.5 2.5 2, 3.0 2.2 1)')) FROM src LIMIT 1; -- 1\n" 18 | ) 19 | //@HivePdkUnitTests( 20 | // cases = { 21 | // @HivePdkUnitTest( 22 | // query = "select ST_MinZ(ST_PointZ(0., 3., 1.)) from onerow", 23 | // result = "1.0" 24 | // ), 25 | // @HivePdkUnitTest( 26 | // query = "select ST_MinZ(ST_GeomFromText('linestring z (10 10 2, 
20 20 4)')) from onerow", 27 | // result = "2.0" 28 | // ), 29 | // @HivePdkUnitTest( 30 | // query = "select ST_MinZ(ST_MultiPoint('multipoint z((0 0 1), (2 2 3))')) from onerow", 31 | // result = "1.0" 32 | // ), 33 | // @HivePdkUnitTest( 34 | // query = "select ST_MinZ(ST_Point(1,2)) from onerow", 35 | // result = "null" 36 | // ), 37 | // @HivePdkUnitTest( 38 | // query = "select ST_MinZ(null) from onerow", 39 | // result = "null" 40 | // ) 41 | // } 42 | //) 43 | 44 | public class ST_MinZ extends ST_GeometryAccessor { 45 | final DoubleWritable resultDouble = new DoubleWritable(); 46 | static final Log LOG = LogFactory.getLog(ST_MinZ.class.getName()); 47 | 48 | public DoubleWritable evaluate(BytesWritable geomref) { 49 | if (geomref == null || geomref.getLength() == 0) { 50 | LogUtils.Log_ArgumentsNull(LOG); 51 | return null; 52 | } 53 | 54 | OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref); 55 | if (ogcGeometry == null) { 56 | LogUtils.Log_ArgumentsNull(LOG); 57 | return null; 58 | } 59 | if (!ogcGeometry.is3D()) { 60 | LogUtils.Log_Not3D(LOG); 61 | return null; 62 | } 63 | 64 | resultDouble.set(ogcGeometry.MinZ()); 65 | return resultDouble; 66 | } 67 | 68 | } 69 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_NumGeometries.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.io.BytesWritable; 7 | import org.apache.hadoop.io.IntWritable; 8 | 9 | import com.esri.core.geometry.ogc.OGCGeometry; 10 | import com.esri.core.geometry.ogc.OGCMultiPoint; 11 | import com.esri.core.geometry.ogc.OGCMultiLineString; 12 | import com.esri.core.geometry.ogc.OGCMultiPolygon; 13 | 14 | @Description( 15 | name = "ST_NumGeometries", 16 | 
value = "_FUNC_(ST_GeometryCollection) - return the number of geometries in the geometry collection", 17 | extended = "Example:\n" 18 | + " SELECT _FUNC_(ST_GeomFromText('multipoint ((10 40), (40 30), (20 20), (30 10))')) FROM src LIMIT 1; -- 4\n" 19 | + " SELECT _FUNC_(ST_GeomFromText('multilinestring ((2 4, 10 10), (20 20, 7 8))')) FROM src LIMIT 1; -- 2\n" 20 | ) 21 | 22 | public class ST_NumGeometries extends ST_GeometryAccessor { 23 | final IntWritable resultInt = new IntWritable(); 24 | static final Log LOG = LogFactory.getLog(ST_NumGeometries.class.getName()); 25 | 26 | public IntWritable evaluate(BytesWritable geomref) { 27 | if (geomref == null || geomref.getLength() == 0) { 28 | LogUtils.Log_ArgumentsNull(LOG); 29 | return null; 30 | } 31 | 32 | OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref); 33 | if (ogcGeometry == null){ 34 | LogUtils.Log_ArgumentsNull(LOG); 35 | return null; 36 | } 37 | 38 | try { 39 | GeometryUtils.OGCType ogcType = GeometryUtils.getType(geomref); 40 | switch(ogcType) { 41 | case ST_POINT: 42 | LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_MULTIPOINT, ogcType); 43 | return null; 44 | case ST_LINESTRING: 45 | LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_MULTILINESTRING, ogcType); 46 | return null; 47 | case ST_POLYGON: 48 | LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_MULTIPOLYGON, ogcType); 49 | return null; 50 | case ST_MULTIPOINT: 51 | resultInt.set(((OGCMultiPoint)ogcGeometry).numGeometries()); 52 | break; 53 | case ST_MULTILINESTRING: 54 | resultInt.set(((OGCMultiLineString)ogcGeometry).numGeometries()); 55 | break; 56 | case ST_MULTIPOLYGON: 57 | resultInt.set(((OGCMultiPolygon)ogcGeometry).numGeometries()); 58 | break; default: /* unhandled OGCType: without this, the stale resultInt from a prior row would be returned */ LogUtils.Log_InternalError(LOG, "ST_NumGeometries: unexpected geometry type"); return null; 59 | } 60 | } catch (ClassCastException cce) { // single vs Multi geometry type 61 | resultInt.set(1); 62 | } catch (Exception e) { 63 | LogUtils.Log_InternalError(LOG, "ST_NumGeometries: " + e); 64 | return null; 65 | } 66 | return resultInt; 67 | } 68 | 69 | }
70 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_NumInteriorRing.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.io.BytesWritable; 7 | import org.apache.hadoop.io.IntWritable; 8 | 9 | 10 | import com.esri.core.geometry.ogc.OGCGeometry; 11 | import com.esri.core.geometry.ogc.OGCPolygon; 12 | 13 | @Description( 14 | name = "ST_NumInteriorRing", 15 | value = "_FUNC_(ST_Polygon) - return the number of interior rings in the polygon", 16 | extended = "Example:\n" 17 | + " SELECT _FUNC_(ST_Polygon(1,1, 1,4, 4,1)) FROM src LIMIT 1; -- 0\n" 18 | + " SELECT _FUNC_(ST_Polygon('polygon ((0 0, 8 0, 0 8, 0 0), (1 1, 1 5, 5 1, 1 1))')) FROM src LIMIT 1; -- 1\n" 19 | ) 20 | 21 | //@HivePdkUnitTests( 22 | // cases = { 23 | // @HivePdkUnitTest( 24 | // query = "select ST_NumInteriorRing(ST_Polygon('polygon ((1 1, 4 1, 1 4))')) from onerow", 25 | // result = "0" 26 | // ), 27 | // @HivePdkUnitTest( 28 | // query = "select ST_NumInteriorRing(ST_Polygon('polygon ((0 0, 8 0, 0 8, 0 0), (1 1, 1 5, 5 1, 1 1))')) from onerow", 29 | // result = "1" 30 | // ), 31 | // @HivePdkUnitTest( 32 | // query = "select ST_NumInteriorRing(null) from onerow", 33 | // result = "null" 34 | // ) 35 | // } 36 | //) 37 | 38 | public class ST_NumInteriorRing extends ST_GeometryAccessor { 39 | static final Log LOG = LogFactory.getLog(ST_NumInteriorRing.class.getName()); 40 | final IntWritable resultInt = new IntWritable(); 41 | 42 | public IntWritable evaluate(BytesWritable geomref) { 43 | if (geomref == null || geomref.getLength() == 0) { 44 | LogUtils.Log_ArgumentsNull(LOG); 45 | return null; 46 | } 47 | 48 | OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref); 
49 | if (ogcGeometry == null){ 50 | LogUtils.Log_ArgumentsNull(LOG); 51 | return null; 52 | } 53 | if (GeometryUtils.getType(geomref) == GeometryUtils.OGCType.ST_POLYGON) { 54 | try { 55 | resultInt.set(((OGCPolygon)(ogcGeometry)).numInteriorRing()); 56 | return resultInt; 57 | } catch (Exception e) { 58 | LogUtils.Log_InternalError(LOG, "ST_NumInteriorRing: " + e); 59 | return null; 60 | } 61 | } else { 62 | LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_POLYGON, GeometryUtils.getType(geomref)); 63 | return null; 64 | } 65 | } 66 | 67 | } 68 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_NumPoints.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.io.BytesWritable; 7 | import org.apache.hadoop.io.IntWritable; 8 | 9 | 10 | import com.esri.core.geometry.MultiPath; 11 | import com.esri.core.geometry.MultiPoint; 12 | import com.esri.core.geometry.Polygon; 13 | 14 | import com.esri.core.geometry.Geometry; 15 | import com.esri.core.geometry.ogc.OGCGeometry; 16 | 17 | @Description( 18 | name = "ST_NumPoints", 19 | value = "_FUNC_(geometry) - return the number of points in the geometry", 20 | extended = "Example:\n" 21 | + " > SELECT _FUNC_(ST_Point(1.5, 2.5)) FROM src LIMIT 1; -- 1\n" 22 | + " > SELECT _FUNC_(ST_LineString(1.5,2.5, 3.0,2.2)) FROM src LIMIT 1; -- 2\n" 23 | + " > SELECT _FUNC_(ST_GeomFromText('polygon ((0 0, 10 0, 0 10, 0 0))')) FROM src LIMIT 1; -- 4\n" 24 | ) 25 | //@HivePdkUnitTests( 26 | // cases = { 27 | // @HivePdkUnitTest( 28 | // query = "select ST_NumPoints(ST_Point(0., 3.)) from onerow", 29 | // result = "1" 30 | // ), 31 | // @HivePdkUnitTest( 32 | // query = "select ST_NumPoints(ST_LineString(0.,0., 3.,4.)) from 
onerow", 33 | // result = "2" 34 | // ), 35 | // @HivePdkUnitTest( 36 | // query = "select ST_NumPoints(ST_GeomFromText('polygon ((0 0, 10 0, 0 10, 0 0))')) from onerow", 37 | // result = "4" 38 | // ), 39 | // @HivePdkUnitTest( 40 | // query = "select ST_NumPoints(ST_GeomFromText('multipoint ((10 40), (40 30), (20 20), (30 10))', 0)) from onerow", 41 | // result = "4" 42 | // ), 43 | // @HivePdkUnitTest( 44 | // query = "select ST_NumPoints(ST_GeomFromText('multilinestring ((2 4, 10 10), (20 20, 7 8))')) from onerow", 45 | // result = "4" 46 | // ), 47 | // @HivePdkUnitTest( 48 | // query = "select ST_NumPoints(ST_Point('point empty')) from onerow", 49 | // result = "0" 50 | // ) 51 | // } 52 | // ) 53 | 54 | public class ST_NumPoints extends ST_GeometryAccessor { 55 | final IntWritable resultInt = new IntWritable(); 56 | static final Log LOG = LogFactory.getLog(ST_NumPoints.class.getName()); 57 | 58 | public IntWritable evaluate(BytesWritable geomref) { 59 | if (geomref == null || geomref.getLength() == 0) { 60 | LogUtils.Log_ArgumentsNull(LOG); 61 | return null; 62 | } 63 | 64 | OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref); 65 | if (ogcGeometry == null){ 66 | LogUtils.Log_ArgumentsNull(LOG); 67 | return null; 68 | } 69 | 70 | Geometry esriGeom = ogcGeometry.getEsriGeometry(); 71 | switch(esriGeom.getType()) { 72 | case Point: 73 | resultInt.set(esriGeom.isEmpty() ?
0 : 1); 74 | break; 75 | case MultiPoint: 76 | resultInt.set(((MultiPoint)(esriGeom)).getPointCount()); 77 | break; 78 | case Polygon: 79 | Polygon polygon = (Polygon)(esriGeom); 80 | resultInt.set(polygon.getPointCount() + polygon.getPathCount()); 81 | break; 82 | default: 83 | resultInt.set(((MultiPath)(esriGeom)).getPointCount()); 84 | break; 85 | } 86 | return resultInt; 87 | } 88 | } 89 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_Overlaps.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.hadoop.hive.ql.exec.Description; 4 | 5 | import com.esri.core.geometry.OperatorOverlaps; 6 | import com.esri.core.geometry.OperatorSimpleRelation; 7 | 8 | @Description( 9 | name = "ST_Overlaps", 10 | value = "_FUNC_(geometry1, geometry2) - return true if geometry1 overlaps geometry2", 11 | extended = "Example:\n" + 12 | "SELECT _FUNC_(st_polygon(2,0, 2,3, 3,0), st_polygon(1,1, 1,4, 4,4, 4,1)) from src LIMIT 1; -- return true\n" + 13 | "SELECT _FUNC_(st_polygon(2,0, 2,1, 3,1), ST_Polygon(1,1, 1,4, 4,4, 4,1)) from src LIMIT 1; -- return false" 14 | ) 15 | 16 | public class ST_Overlaps extends ST_GeometryRelational { 17 | 18 | @Override 19 | protected OperatorSimpleRelation getRelationOperator() { 20 | return OperatorOverlaps.local(); 21 | } 22 | 23 | @Override 24 | public String getDisplayString(String[] args) { 25 | return String.format("returns true if %s overlaps %s", args[0], args[1]); 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_Point.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import 
org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.hive.ql.exec.UDFArgumentException; 7 | import org.apache.hadoop.hive.serde2.io.DoubleWritable; 8 | import org.apache.hadoop.io.BytesWritable; 9 | import org.apache.hadoop.io.Text; 10 | 11 | 12 | import com.esri.core.geometry.Point; 13 | import com.esri.core.geometry.ogc.OGCGeometry; 14 | 15 | @Description( 16 | name = "ST_Point", 17 | value = "_FUNC_(x, y) - constructor for 2D point\n" + 18 | "_FUNC_('point (x y)') - constructor for 2D point", 19 | extended = "Example:\n" + 20 | " SELECT _FUNC_(longitude, latitude) from src LIMIT 1;\n" + 21 | " SELECT _FUNC_('point (0 0)') from src LIMIT 1;") 22 | //@HivePdkUnitTests( 23 | // cases = { 24 | // @HivePdkUnitTest( 25 | // query = "select ST_GeometryType(ST_Point('point (10.02 20.01)')) from onerow", 26 | // result = "ST_POINT" 27 | // ), 28 | // @HivePdkUnitTest( 29 | // query = "select ST_Equals(ST_Point('point (10.02 20.01)'), ST_GeomFromText('point (10.02 20.01)')) from onerow", 30 | // result = "true" 31 | // ) 32 | // } 33 | // ) 34 | 35 | public class ST_Point extends ST_Geometry { 36 | static final Log LOG = LogFactory.getLog(ST_Point.class.getName()); 37 | 38 | // Number-pair constructor - 2D 39 | public BytesWritable evaluate(DoubleWritable x, DoubleWritable y) { 40 | return evaluate(x, y, null, null); 41 | } 42 | 43 | // Number-triplet constructor - 3D 44 | public BytesWritable evaluate(DoubleWritable x, DoubleWritable y, DoubleWritable z) { 45 | return evaluate(x, y, z, null); 46 | } 47 | 48 | // Number-list constructor - ZM 49 | public BytesWritable evaluate(DoubleWritable x, DoubleWritable y, DoubleWritable z, DoubleWritable m) { 50 | if (x == null || y == null) { 51 | //LogUtils.Log_ArgumentsNull(LOG); 52 | return null; 53 | } 54 | try { 55 | Point stPt = new Point(x.get(), y.get()); 56 | if (z != null) 57 | stPt.setZ(z.get()); 58 | if (m != null) 59 | stPt.setM(m.get()); 60 | BytesWritable ret = 
GeometryUtils.geometryToEsriShapeBytesWritable(OGCGeometry.createFromEsriGeometry(stPt, null)); 61 | return ret; 62 | } catch (Exception e) { 63 | //LogUtils.Log_InternalError(LOG, "ST_Point: " + e); 64 | return null; 65 | } 66 | } 67 | 68 | // WKT constructor - can use SetSRID on constructed point 69 | public BytesWritable evaluate(Text wkwrap) throws UDFArgumentException { 70 | if (wkwrap == null) { LogUtils.Log_ArgumentsNull(LOG); return null; } String wkt = wkwrap.toString(); 71 | try { 72 | OGCGeometry ogcObj = OGCGeometry.fromText(wkt); 73 | ogcObj.setSpatialReference(null); 74 | if (ogcObj.geometryType().equals("Point")) { 75 | return GeometryUtils.geometryToEsriShapeBytesWritable(ogcObj); 76 | } else { 77 | LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_POINT, GeometryUtils.OGCType.UNKNOWN); 78 | return null; 79 | } 80 | 81 | } catch (Exception e) { // IllegalArgumentException, GeometryException 82 | LogUtils.Log_InvalidText(LOG, wkt); 83 | return null; 84 | } 85 | } 86 | } 87 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_PointFromWKB.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import java.nio.ByteBuffer; 4 | import org.apache.commons.logging.Log; 5 | import org.apache.commons.logging.LogFactory; 6 | import org.apache.hadoop.hive.ql.exec.Description; 7 | import org.apache.hadoop.hive.ql.exec.UDFArgumentException; 8 | import org.apache.hadoop.io.BytesWritable; 9 | 10 | 11 | import com.esri.core.geometry.SpatialReference; 12 | import com.esri.core.geometry.ogc.OGCGeometry; 13 | 14 | @Description( 15 | name = "ST_PointFromWKB", 16 | value = "_FUNC_(wkb) - construct an ST_Point from OGC well-known binary", 17 | extended = "Example:\n" 18 | + " SELECT _FUNC_(ST_AsBinary(ST_GeomFromText('point (1 0)'))) FROM src LIMIT 1; -- constructs ST_Point\n" 19 | ) 20 | //@HivePdkUnitTests( 21 | // cases = { 22 | // @HivePdkUnitTest( 23 | // query = "select
ST_GeometryType(ST_PointFromWKB(ST_AsBinary(ST_GeomFromText('point (10 10)')))) from onerow", 24 | // result = "ST_POINT" 25 | // ), 26 | // @HivePdkUnitTest( 27 | // query = "select ST_Equals(ST_PointFromWKB(ST_AsBinary(ST_GeomFromText('point (10 10)'))), ST_GeomFromText('point (10 10)')) from onerow", 28 | // result = "true" 29 | // ) 30 | // } 31 | // ) 32 | 33 | public class ST_PointFromWKB extends ST_Geometry { 34 | 35 | static final Log LOG = LogFactory.getLog(ST_PointFromWKB.class.getName()); 36 | 37 | public BytesWritable evaluate(BytesWritable wkb) throws UDFArgumentException { 38 | return evaluate(wkb, 0); 39 | } 40 | 41 | public BytesWritable evaluate(BytesWritable wkb, int wkid) throws UDFArgumentException { 42 | 43 | try { 44 | SpatialReference spatialReference = null; 45 | if (wkid != GeometryUtils.WKID_UNKNOWN) { 46 | spatialReference = SpatialReference.create(wkid); 47 | } 48 | byte [] byteArr = wkb.getBytes(); 49 | ByteBuffer byteBuf = ByteBuffer.allocate(byteArr.length); 50 | byteBuf.put(byteArr); 51 | OGCGeometry ogcObj = OGCGeometry.fromBinary(byteBuf); 52 | ogcObj.setSpatialReference(spatialReference); 53 | if (ogcObj.geometryType().equals("Point")) { 54 | return GeometryUtils.geometryToEsriShapeBytesWritable(ogcObj); 55 | } else { 56 | LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_POINT, GeometryUtils.OGCType.UNKNOWN); 57 | return null; 58 | } 59 | } catch (Exception e) { // IllegalArgumentException, GeometryException 60 | LOG.error(e.getMessage()); 61 | return null; 62 | } 63 | } 64 | 65 | } 66 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_PointZ.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.hadoop.hive.ql.exec.Description; 4 | 5 | import org.apache.hadoop.hive.serde2.io.DoubleWritable; 6 | import org.apache.hadoop.io.BytesWritable; 7 | 8 | import 
com.esri.core.geometry.Point; 9 | import com.esri.core.geometry.ogc.OGCGeometry; 10 | 11 | @Description( 12 | name = "ST_PointZ", 13 | value = "_FUNC_(x, y, z) - constructor for 3D point", 14 | extended = "Example:\n" + 15 | "SELECT _FUNC_(longitude, latitude, elevation) from src LIMIT 1;") 16 | public class ST_PointZ extends ST_Geometry { 17 | 18 | public BytesWritable evaluate(DoubleWritable x, DoubleWritable y, DoubleWritable z){ 19 | return evaluate(x, y, z, null); 20 | } 21 | 22 | // ZM 23 | public BytesWritable evaluate(DoubleWritable x, DoubleWritable y, DoubleWritable z, DoubleWritable m) { 24 | if (x == null || y == null || z == null) { 25 | return null; 26 | } 27 | Point stPt = new Point(x.get(), y.get(), z.get()); 28 | if (m != null) 29 | stPt.setM(m.get()); 30 | return GeometryUtils.geometryToEsriShapeBytesWritable(OGCGeometry.createFromEsriGeometry(stPt, null)); 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_PolyFromWKB.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import java.nio.ByteBuffer; 4 | import org.apache.commons.logging.Log; 5 | import org.apache.commons.logging.LogFactory; 6 | import org.apache.hadoop.hive.ql.exec.Description; 7 | import org.apache.hadoop.hive.ql.exec.UDFArgumentException; 8 | import org.apache.hadoop.io.BytesWritable; 9 | 10 | 11 | import com.esri.core.geometry.SpatialReference; 12 | import com.esri.core.geometry.ogc.OGCGeometry; 13 | 14 | @Description( 15 | name = "ST_PolyFromWKB", 16 | value = "_FUNC_(wkb) - construct an ST_Polygon from OGC well-known binary", 17 | extended = "Example:\n" 18 | + " SELECT _FUNC_(ST_AsBinary(ST_GeomFromText('polygon ((0 0, 10 0, 0 10, 0 0))'))) FROM src LIMIT 1; -- constructs ST_Polygon\n" 19 | ) 20 | //@HivePdkUnitTests( 21 | // cases = { 22 | // @HivePdkUnitTest( 23 | // query = "select 
ST_GeometryType(ST_PolyFromWKB(ST_AsBinary(ST_GeomFromText('polygon ((0 0, 1 0, 0 1, 0 0))')))) from onerow", 24 | // result = "ST_POLYGON" 25 | // ), 26 | // @HivePdkUnitTest( 27 | // query = "select ST_Equals(ST_PolyFromWKB(ST_AsBinary(ST_GeomFromText('polygon ((0 0, 1 0, 0 1, 0 0))'))), ST_GeomFromText('polygon ((0 0, 1 0, 0 1, 0 0))')) from onerow", 28 | // result = "true" 29 | // ) 30 | // } 31 | // ) 32 | 33 | public class ST_PolyFromWKB extends ST_Geometry { 34 | 35 | static final Log LOG = LogFactory.getLog(ST_PolyFromWKB.class.getName()); 36 | 37 | public BytesWritable evaluate(BytesWritable wkb) throws UDFArgumentException { 38 | return evaluate(wkb, 0); 39 | } 40 | 41 | public BytesWritable evaluate(BytesWritable wkb, int wkid) throws UDFArgumentException { 42 | 43 | try { 44 | SpatialReference spatialReference = null; 45 | if (wkid != GeometryUtils.WKID_UNKNOWN) { 46 | spatialReference = SpatialReference.create(wkid); 47 | } 48 | byte [] byteArr = wkb.getBytes(); 49 | ByteBuffer byteBuf = ByteBuffer.allocate(byteArr.length); 50 | byteBuf.put(byteArr); 51 | OGCGeometry ogcObj = OGCGeometry.fromBinary(byteBuf); 52 | ogcObj.setSpatialReference(spatialReference); 53 | if (ogcObj.geometryType().equals("Polygon")) { 54 | return GeometryUtils.geometryToEsriShapeBytesWritable(ogcObj); 55 | } else { 56 | LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_POLYGON, GeometryUtils.OGCType.UNKNOWN); 57 | return null; 58 | } 59 | } catch (Exception e) { // IllegalArgumentException, GeometryException 60 | LOG.error(e.getMessage()); 61 | return null; 62 | } 63 | } 64 | 65 | } 66 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_Relate.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import 
org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.io.BooleanWritable; 7 | import org.apache.hadoop.io.BytesWritable; 8 | 9 | import com.esri.core.geometry.ogc.OGCGeometry; 10 | 11 | @Description( 12 | name = "ST_Relate", 13 | value = "_FUNC_(ST_Geometry1, ST_Geometry2) - return true if ST_Geometry1 has the specified DE-9IM relationship with ST_Geometry2", 14 | extended = "Example:\n" + 15 | " SELECT _FUNC_(st_polygon(2,0, 2,1, 3,1), ST_Polygon(1,1, 1,4, 4,4, 4,1), '****T****') from src LIMIT 1; -- true\n" + 16 | " SELECT _FUNC_(st_polygon(2,0, 2,1, 3,1), ST_Polygon(1,1, 1,4, 4,4, 4,1), 'T********') from src LIMIT 1; -- false\n" + 17 | " SELECT _FUNC_(st_linestring(0,0, 3,3), ST_linestring(1,1, 4,4), 'T********') from src LIMIT 1; -- true\n" + 18 | " SELECT _FUNC_(st_linestring(0,0, 3,3), ST_linestring(1,1, 4,4), '****T****') from src LIMIT 1; -- false\n" 19 | ) 20 | 21 | public class ST_Relate extends ST_Geometry { 22 | 23 | final BooleanWritable resultBoolean = new BooleanWritable(); 24 | static final Log LOG = LogFactory.getLog(ST_Relate.class.getName()); 25 | 26 | public BooleanWritable evaluate(BytesWritable geometryref1, BytesWritable geometryref2, String relation) 27 | { 28 | if (geometryref1 == null || geometryref2 == null || relation == null || 29 | geometryref1.getLength() == 0 || geometryref2.getLength() == 0) { 30 | LogUtils.Log_ArgumentsNull(LOG); 31 | return null; 32 | } 33 | if (!GeometryUtils.compareSpatialReferences(geometryref1, geometryref2)) { 34 | LogUtils.Log_SRIDMismatch(LOG, geometryref1, geometryref2); 35 | return null; 36 | } 37 | 38 | OGCGeometry ogcGeom1 = GeometryUtils.geometryFromEsriShape(geometryref1); 39 | OGCGeometry ogcGeom2 = GeometryUtils.geometryFromEsriShape(geometryref2); 40 | if (ogcGeom1 == null || ogcGeom2 == null){ 41 | LogUtils.Log_ArgumentsNull(LOG); 42 | return null; 43 | } 44 | 45 | try { 46 | resultBoolean.set(ogcGeom1.relate(ogcGeom2, relation)); 47 | return resultBoolean; 48 | } catch 
(Exception e) { 49 | LogUtils.Log_InternalError(LOG, "ST_Relate: " + e); 50 | return null; 51 | } 52 | } 53 | 54 | } 55 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_SRID.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.io.BytesWritable; 7 | import org.apache.hadoop.io.IntWritable; 8 | 9 | 10 | @Description(name = "ST_SRID", 11 | value = "_FUNC_(ST_Geometry) - get the Spatial Reference ID of the geometry", 12 | extended = "Example:\n" 13 | + " SELECT _FUNC_(ST_Point(1.5, 2.5)) FROM src LIMIT 1 -- returns SRID 0" 14 | ) 15 | //@HivePdkUnitTests( 16 | // cases = { 17 | // @HivePdkUnitTest( 18 | // query = "select ST_SRID(ST_SetSRID(ST_Point(1.1, 2.2), 4326)) FROM onerow", 19 | // result = "4326" 20 | // ) 21 | // } 22 | //) 23 | 24 | public class ST_SRID extends ST_GeometryAccessor { 25 | static final Log LOG = LogFactory.getLog(ST_SRID.class.getName()); 26 | 27 | IntWritable resultInt = new IntWritable(); 28 | 29 | public IntWritable evaluate(BytesWritable geomref){ 30 | if (geomref == null || geomref.getLength() == 0){ 31 | LogUtils.Log_ArgumentsNull(LOG); 32 | return null; 33 | } 34 | 35 | resultInt.set(GeometryUtils.getWKID(geomref)); 36 | return resultInt; 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_SetSRID.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.io.BytesWritable; 7 | import 
org.apache.hadoop.io.IntWritable; 8 | 9 | import com.esri.core.geometry.SpatialReference; 10 | import com.esri.core.geometry.ogc.OGCGeometry; 11 | 12 | @Description(name = "ST_SetSRID", 13 | value = "_FUNC_(geometry, SRID) - set the Spatial Reference ID of the geometry", 14 | extended = "Example:\n" 15 | + " > SELECT _FUNC_(ST_SetSRID(ST_Point(1.5, 2.5), 4326)) FROM src LIMIT 1;\n" 16 | + " -- create a point and then set its SRID to 4326" 17 | ) 18 | 19 | public class ST_SetSRID extends ST_Geometry { 20 | static final Log LOG = LogFactory.getLog(ST_SetSRID.class.getName()); 21 | 22 | public BytesWritable evaluate(BytesWritable geomref, IntWritable wkwrap){ 23 | if (geomref == null || geomref.getLength() == 0){ 24 | LogUtils.Log_ArgumentsNull(LOG); 25 | return null; 26 | } 27 | 28 | // just return the geometry ref without setting anything if wkid is null 29 | if (wkwrap == null){ 30 | return geomref; 31 | } 32 | 33 | int wkid = wkwrap.get(); 34 | if (GeometryUtils.getWKID(geomref) != wkid) { 35 | GeometryUtils.setWKID(geomref, wkid); 36 | } 37 | 38 | return geomref; 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_StartPoint.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.io.BytesWritable; 7 | 8 | import com.esri.core.geometry.MultiPath; 9 | import com.esri.core.geometry.SpatialReference; 10 | 11 | import com.esri.core.geometry.ogc.OGCGeometry; 12 | 13 | @Description( 14 | name = "ST_StartPoint", 15 | value = "_FUNC_(geometry) - returns the first point of an ST_Linestring", 16 | extended = "Example:\n" 17 | + " > SELECT _FUNC_(ST_LineString(1.5,2.5, 3.0,2.2)) FROM src LIMIT 1; -- POINT(1.5 2.5)\n" 18 | ) 19 | 20 | public class
ST_StartPoint extends ST_GeometryAccessor {
	static final Log LOG = LogFactory.getLog(ST_StartPoint.class.getName());

	/**
	 * Return the first point of the ST_Linestring.
	 * @param geomref hive geometry bytes
	 * @return byte-reference of the first ST_Point, or null for null/empty or non-linestring input
	 */
	public BytesWritable evaluate(BytesWritable geomref) {
		if (geomref == null || geomref.getLength() == 0) {
			LogUtils.Log_ArgumentsNull(LOG);
			return null;
		}

		OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
		if (ogcGeometry == null) {
			LogUtils.Log_ArgumentsNull(LOG);
			return null;
		}

		if (GeometryUtils.getType(geomref) == GeometryUtils.OGCType.ST_LINESTRING) {
			MultiPath lines = (MultiPath)(ogcGeometry.getEsriGeometry());
			int wkid = GeometryUtils.getWKID(geomref);
			// WKID_UNKNOWN means the stored shape carries no spatial reference.
			SpatialReference spatialReference = null;
			if (wkid != GeometryUtils.WKID_UNKNOWN) {
				spatialReference = SpatialReference.create(wkid);
			}
			return GeometryUtils.geometryToEsriShapeBytesWritable(
					OGCGeometry.createFromEsriGeometry(lines.getPoint(0), spatialReference));
		} else {
			LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_LINESTRING, GeometryUtils.getType(geomref));
			return null;
		}
	}
}
-------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_SymmetricDiff.java: --------------------------------------------------------------------------------
package com.esri.hadoop.hive;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.io.BytesWritable;


import com.esri.core.geometry.ogc.OGCGeometry;

// Fixed the second extended example: it redundantly nested ST_SymmetricDiff inside
// _FUNC_ and had unbalanced parentheses.
@Description(
	name = "ST_SymmetricDiff",
	value = "_FUNC_(ST_Geometry1, ST_Geometry2) - return the symmetric difference between ST_Geometry1 & ST_Geometry2",
	extended = "Examples:\n"
	+ "  > SELECT ST_AsText(_FUNC_(ST_LineString('linestring(0 2, 2 2)'), ST_LineString('linestring(1 2, 3 2)'))) FROM onerow; \n"
	+ "  MULTILINESTRING((0 2, 1 2), (2 2, 3 2))\n"
	+ "  > SELECT ST_AsText(_FUNC_(ST_Polygon('polygon((0 0, 2 0, 2 2, 0 2, 0 0))'), ST_Polygon('polygon((1 1, 3 1, 3 3, 1 3, 1 1))'))) from onerow;\n"
	+ "  MULTIPOLYGON (((0 0, 2 0, 2 1, 1 1, 1 2, 0 2, 0 0)), ((3 1, 3 3, 1 3, 1 2, 2 2, 2 1, 3 1)))\n"
	)
//@HivePdkUnitTests(
//	cases = {
//		@HivePdkUnitTest(
//			query = "SELECT ST_Equals(ST_SymmetricDiff(ST_LineString('linestring(0 2, 2 2)'), ST_LineString('linestring(1 2, 3 2)')), ST_GeomFromText('multilinestring((0 2, 1 2), (2 2, 3 2))')) FROM onerow",
//			result = "true"
//			),
//		@HivePdkUnitTest(
//			query = "SELECT ST_Equals(ST_SymmetricDiff(ST_Polygon('polygon((0 0, 2 0, 2 2, 0 2, 0 0))'), ST_Polygon('polygon((1 1, 3 1, 3 3, 1 3, 1 1))')), ST_MultiPolygon('multipolygon(((0 0, 2 0, 2 1, 1 1, 1 2, 0 2, 0 0)), ((3 1, 3 3, 1 3, 1 2, 2 2, 2 1, 3 1)))')) FROM onerow",
//			result = "true"
//			),
//		@HivePdkUnitTest(
//			query = "SELECT ST_SymmetricDiff(ST_Point(0,0), null) from onerow",
//			result = "null"
//			)
//	}
//	)

public class ST_SymmetricDiff extends ST_GeometryProcessing {

	static final Log LOG = LogFactory.getLog(ST_SymmetricDiff.class.getName());

	/**
	 * Compute the symmetric difference of two geometries.
	 * @param geometryref1 hive geometry bytes
	 * @param geometryref2 hive geometry bytes; must share geometryref1's spatial reference
	 * @return byte-reference of the symmetric difference, or null for null/empty input,
	 *         SRID mismatch, or an internal geometry error
	 */
	public BytesWritable evaluate(BytesWritable geometryref1, BytesWritable geometryref2)
	{
		if (geometryref1 == null || geometryref2 == null ||
		    geometryref1.getLength() == 0 || geometryref2.getLength() == 0) {
			LogUtils.Log_ArgumentsNull(LOG);
			return null;
		}

		if (!GeometryUtils.compareSpatialReferences(geometryref1, geometryref2)) {
			LogUtils.Log_SRIDMismatch(LOG, geometryref1, geometryref2);
			return null;
		}

		OGCGeometry ogcGeom1 = GeometryUtils.geometryFromEsriShape(geometryref1);
		OGCGeometry ogcGeom2 = GeometryUtils.geometryFromEsriShape(geometryref2);
		if (ogcGeom1 == null || ogcGeom2 == null) {
			LogUtils.Log_ArgumentsNull(LOG);
			return null;
		}

		try {
			OGCGeometry diffGeometry = ogcGeom1.symDifference(ogcGeom2);
			return GeometryUtils.geometryToEsriShapeBytesWritable(diffGeometry);
		} catch (Exception e) {
			// symDifference can throw on degenerate input; report rather than fail the query.
			LogUtils.Log_InternalError(LOG, "ST_SymmetricDiff: " + e);
			return null;
		}
	}

}
-------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_Touches.java: --------------------------------------------------------------------------------
package com.esri.hadoop.hive;

import org.apache.hadoop.hive.ql.exec.Description;

import com.esri.core.geometry.OperatorSimpleRelation;
import com.esri.core.geometry.OperatorTouches;

@Description(
	name = "ST_Touches",
	value = "_FUNC_(geometry1, geometry2) - return true if geometry1 touches geometry2",
	extended = "Example:\n" +
	"SELECT _FUNC_(st_point(1, 2), st_polygon(1, 1, 1, 4, 4, 4, 4, 1)) from src LIMIT 1;  -- return true\n" +
	"SELECT _FUNC_(st_point(8, 8), st_polygon(1, 1, 1, 4, 4, 4, 4, 1)) from src LIMIT 1;  -- return false"
	)

public class ST_Touches extends ST_GeometryRelational {

	/** Supplies the "touches" relation to the base class's shared evaluation logic. */
	@Override
	protected OperatorSimpleRelation getRelationOperator() {
		return OperatorTouches.local();
	}

	@Override
	public String getDisplayString(String[] args) {
		return String.format("returns true if %s touches %s", args[0], args[1]);
	}
}
-------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_Within.java: --------------------------------------------------------------------------------
package com.esri.hadoop.hive;

import
org.apache.hadoop.hive.ql.exec.Description; 4 | 5 | import com.esri.core.geometry.OperatorSimpleRelation; 6 | import com.esri.core.geometry.OperatorWithin; 7 | 8 | @Description( 9 | name = "ST_Within", 10 | value = "_FUNC_(geometry1, geometry2) - return true if geometry1 is within geometry2", 11 | extended = "Example:\n" + 12 | "SELECT _FUNC_(st_point(2, 3), st_polygon(1,1, 1,4, 4,4, 4,1)) from src LIMIT 1; -- return true\n" + 13 | "SELECT _FUNC_(st_point(8, 8), st_polygon(1,1, 1,4, 4,4, 4,1)) from src LIMIT 1; -- return false" 14 | ) 15 | 16 | public class ST_Within extends ST_GeometryRelational { 17 | 18 | @Override 19 | protected OperatorSimpleRelation getRelationOperator() { 20 | return OperatorWithin.local(); 21 | } 22 | 23 | @Override 24 | public String getDisplayString(String[] args) { 25 | return String.format("returns true if %s within %s", args[0], args[1]); 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_X.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.io.BytesWritable; 7 | // DoubleWritable - must use hive-serde2; the other one produces struct {value:d.d} 8 | import org.apache.hadoop.hive.serde2.io.DoubleWritable; 9 | 10 | 11 | import com.esri.core.geometry.ogc.OGCGeometry; 12 | import com.esri.core.geometry.ogc.OGCPoint; 13 | 14 | @Description(name = "ST_X", 15 | value = "_FUNC_(point) - returns the X coordinate of point", 16 | extended = "Example:\n" 17 | + " SELECT _FUNC_(ST_Point(1.5, 2.5)) FROM src LIMIT 1; -- 1.5" 18 | ) 19 | //@HivePdkUnitTests( 20 | // cases = { 21 | // @HivePdkUnitTest( 22 | // query = "select ST_X(ST_Point(1,2)) from onerow", 23 | // result = "1" 24 | // ), 25 | // @HivePdkUnitTest( 
26 | // query = "select ST_X(ST_LineString(1.5,2.5, 3.0,2.2)) from onerow", 27 | // result = "null" 28 | // ), 29 | // @HivePdkUnitTest( 30 | // query = "select ST_X(null) from onerow", 31 | // result = "null" 32 | // ) 33 | // } 34 | //) 35 | 36 | public class ST_X extends ST_GeometryAccessor { 37 | final DoubleWritable resultDouble = new DoubleWritable(); 38 | static final Log LOG = LogFactory.getLog(ST_X.class.getName()); 39 | 40 | public DoubleWritable evaluate(BytesWritable geomref) { 41 | if (geomref == null || geomref.getLength() == 0) { 42 | LogUtils.Log_ArgumentsNull(LOG); 43 | return null; 44 | } 45 | 46 | OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref); 47 | if (ogcGeometry == null){ 48 | return null; 49 | } 50 | 51 | switch(GeometryUtils.getType(geomref)) { 52 | case ST_POINT: 53 | OGCPoint pt = (OGCPoint)ogcGeometry; 54 | resultDouble.set(pt.X()); 55 | return resultDouble; 56 | default: 57 | LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_POINT, GeometryUtils.getType(geomref)); 58 | return null; 59 | } 60 | } 61 | 62 | } 63 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_Y.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import org.apache.hadoop.hive.ql.exec.Description; 6 | import org.apache.hadoop.io.BytesWritable; 7 | // DoubleWritable - must use hive-serde2; the other one produces struct {value:d.d} 8 | import org.apache.hadoop.hive.serde2.io.DoubleWritable; 9 | 10 | 11 | import com.esri.core.geometry.ogc.OGCGeometry; 12 | import com.esri.core.geometry.ogc.OGCPoint; 13 | 14 | @Description(name = "ST_Y", 15 | value = "_FUNC_(point) - returns the Y coordinate of point", 16 | extended = "Example:\n" 17 | + " SELECT _FUNC_(ST_Point(1.5, 2.5)) FROM src LIMIT 1; -- 2.5" 18 | ) 
//@HivePdkUnitTests(
//	cases = {
//		@HivePdkUnitTest(
//			query = "select ST_Y(ST_Point(1,2)) from onerow",
//			result = "2"
//			),
//		@HivePdkUnitTest(
//			query = "select ST_Y(ST_LineString(1.5,2.5, 3.0,2.2)) from onerow",
//			result = "null"
//			),
//		@HivePdkUnitTest(
//			query = "select ST_Y(null) from onerow",
//			result = "null"
//			)
//	}
//)

public class ST_Y extends ST_GeometryAccessor {
	final DoubleWritable resultDouble = new DoubleWritable();
	static final Log LOG = LogFactory.getLog(ST_Y.class.getName());

	/**
	 * Return the Y coordinate of an ST_Point.
	 * @param geomref hive geometry bytes
	 * @return Y coordinate, or null for null/empty or non-point input
	 */
	public DoubleWritable evaluate(BytesWritable geomref) {
		if (geomref == null || geomref.getLength() == 0) {
			LogUtils.Log_ArgumentsNull(LOG);
			return null;
		}

		OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
		if (ogcGeometry == null) {
			return null;
		}

		switch(GeometryUtils.getType(geomref)) {
		case ST_POINT:
			OGCPoint pt = (OGCPoint)ogcGeometry;
			resultDouble.set(pt.Y());
			return resultDouble;
		default:
			LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_POINT, GeometryUtils.getType(geomref));
			return null;
		}
	}

}
-------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/ST_Z.java: --------------------------------------------------------------------------------
package com.esri.hadoop.hive;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.io.BytesWritable;
// DoubleWritable - must use hive-serde2; the other one produces struct {value:d.d}
import org.apache.hadoop.hive.serde2.io.DoubleWritable;


import com.esri.core.geometry.ogc.OGCGeometry;
import com.esri.core.geometry.ogc.OGCPoint;

// Fixed the extended example: it showed a 2-D ST_Point returning 1.5, but evaluate()
// returns null for any geometry that is not 3D (copy-paste from ST_X).
@Description(name = "ST_Z",
	value = "_FUNC_(point) - returns the Z coordinate of point",
	extended = "Example:\n"
	+ "  SELECT _FUNC_(ST_PointZ(1.5, 2.5, 3.5)) FROM src LIMIT 1;  --  3.5"
	)
//@HivePdkUnitTests(
//	cases = {
//		@HivePdkUnitTest(
//			query = "select ST_Z(ST_Point(1,2,3)) from onerow",
//			result = "3.0"
//			),
//		@HivePdkUnitTest(
//			query = "select ST_Z(ST_PointZ(0., 3., 1)) from onerow",
//			result = "1.0"
//			),
//		@HivePdkUnitTest(
//			query = "select ST_Z(ST_Point('pointzm (0. 3. 1. 2.)')) from onerow",
//			result = "1.0"
//			),
//		@HivePdkUnitTest(
//			query = "select ST_Z(ST_Point(1,2)) from onerow",
//			result = "null"
//			),
//		@HivePdkUnitTest(
//			query = "select ST_Z(null) from onerow",
//			result = "null"
//			)
//	}
//)

public class ST_Z extends ST_GeometryAccessor {
	final DoubleWritable resultDouble = new DoubleWritable();
	static final Log LOG = LogFactory.getLog(ST_Z.class.getName());

	/**
	 * Return the Z coordinate of a 3-D ST_Point.
	 * @param geomref hive geometry bytes
	 * @return Z coordinate, or null for null/empty input, non-3D geometry, or non-point input
	 */
	public DoubleWritable evaluate(BytesWritable geomref) {
		if (geomref == null || geomref.getLength() == 0) {
			LogUtils.Log_ArgumentsNull(LOG);
			return null;
		}

		OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
		if (ogcGeometry == null) {
			return null;
		}
		// A 2-D geometry has no Z; log and return null rather than report 0.
		if (!ogcGeometry.is3D()) {
			LogUtils.Log_Not3D(LOG);
			return null;
		}

		switch(GeometryUtils.getType(geomref)) {
		case ST_POINT:
			OGCPoint pt = (OGCPoint)ogcGeometry;
			resultDouble.set(pt.Z());
			return resultDouble;
		default:
			LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_POINT, GeometryUtils.getType(geomref));
			return null;
		}
	}

}
-------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/serde/EsriJsonSerDe.java:
-------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive.serde; 2 | 3 | import org.apache.commons.logging.Log; 4 | import org.apache.commons.logging.LogFactory; 5 | import com.fasterxml.jackson.core.JsonParser; 6 | 7 | import com.esri.core.geometry.GeometryEngine; 8 | import com.esri.core.geometry.MapGeometry; 9 | import com.esri.core.geometry.ogc.OGCGeometry; 10 | 11 | 12 | public class EsriJsonSerDe extends BaseJsonSerDe { 13 | 14 | static final Log LOG = LogFactory.getLog(EsriJsonSerDe.class.getName()); 15 | 16 | @Override 17 | protected String outGeom(OGCGeometry geom) { 18 | return geom.asJson(); 19 | } 20 | 21 | @Override 22 | protected OGCGeometry parseGeom(JsonParser parser) { 23 | MapGeometry mapGeom = GeometryEngine.jsonToGeometry(parser); 24 | return OGCGeometry.createFromEsriGeometry(mapGeom.getGeometry(), mapGeom.getSpatialReference()); 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/serde/GeoJsonSerDe.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive.serde; 2 | 3 | import java.io.IOException; 4 | 5 | import org.apache.commons.logging.Log; 6 | import org.apache.commons.logging.LogFactory; 7 | import com.fasterxml.jackson.core.JsonParser; 8 | import com.fasterxml.jackson.core.JsonProcessingException; 9 | import com.fasterxml.jackson.databind.ObjectMapper; 10 | import com.fasterxml.jackson.databind.node.ObjectNode; 11 | 12 | import com.esri.core.geometry.ogc.OGCGeometry; 13 | 14 | 15 | public class GeoJsonSerDe extends BaseJsonSerDe { 16 | 17 | static final Log LOG = LogFactory.getLog(GeoJsonSerDe.class.getName()); 18 | 19 | ObjectMapper mapper = null; 20 | 21 | public GeoJsonSerDe() { 22 | super(); 23 | attrLabel = "properties"; 24 | mapper = new ObjectMapper(); 25 | } 26 | 27 | @Override 28 | protected String outGeom(OGCGeometry geom) { 
29 | return geom.asGeoJson(); 30 | } 31 | 32 | @Override 33 | protected OGCGeometry parseGeom(JsonParser parser) { 34 | try { 35 | ObjectNode node = mapper.readTree(parser); 36 | return OGCGeometry.fromGeoJson(node.toString()); 37 | } catch (JsonProcessingException e1) { 38 | e1.printStackTrace(); // TODO Auto-generated catch block 39 | } catch (IOException e1) { 40 | e1.printStackTrace(); // TODO Auto-generated catch block 41 | } 42 | return null; // ? 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /hive/src/main/java/com/esri/hadoop/hive/serde/JsonSerde.java: -------------------------------------------------------------------------------- 1 | // Obsoleted 2 | // package com.esri.hadoop.hive.serde; 3 | // @Deprecated in v1.2 4 | // public class JsonSerde extends EsriJsonSerDe {} 5 | -------------------------------------------------------------------------------- /hive/src/test/java/com/esri/hadoop/hive/TestStAsShape.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import static org.junit.Assert.*; 4 | 5 | import org.apache.hadoop.hive.serde2.io.DoubleWritable; 6 | import org.apache.hadoop.io.BytesWritable; 7 | import org.junit.Test; 8 | 9 | import com.esri.core.geometry.GeometryEngine; 10 | import com.esri.core.geometry.Point; 11 | import com.esri.core.geometry.Geometry; 12 | import com.esri.core.geometry.Geometry.Type; 13 | 14 | public class TestStAsShape { 15 | 16 | private final static double Epsilon = 0.0001; 17 | 18 | @Test 19 | public void testPointAsShape() { 20 | ST_Point point = new ST_Point(); 21 | final double longitude = 12.224; 22 | final double latitude = 51.829; 23 | BytesWritable pointAsWritable = point.evaluate(new DoubleWritable(longitude), new DoubleWritable(latitude)); 24 | assertNotNull("The point writable must not be null!", pointAsWritable); 25 | 26 | ST_AsShape asShape = new ST_AsShape(); 27 | BytesWritable 
shapeAsWritable = asShape.evaluate(pointAsWritable); 28 | assertNotNull("The shape writable must not be null!", pointAsWritable); 29 | 30 | byte[] esriShapeBuffer = shapeAsWritable.getBytes(); 31 | Geometry esriGeometry = GeometryEngine.geometryFromEsriShape(esriShapeBuffer, Type.Point); 32 | assertNotNull("The geometry must not be null!", esriGeometry); 33 | assertTrue("Geometry type point expected!", esriGeometry instanceof Point); 34 | 35 | Point esriPoint = (Point) esriGeometry; 36 | assertEquals("Longitude is different!", longitude, esriPoint.getX(), Epsilon); 37 | assertEquals("Latitude is different!", latitude, esriPoint.getY(), Epsilon); 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /hive/src/test/java/com/esri/hadoop/hive/TestStGeometryType.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import static org.junit.Assert.*; 4 | import org.junit.Test; 5 | 6 | import org.apache.hadoop.io.BytesWritable; 7 | import org.apache.hadoop.io.Text; 8 | import org.apache.hadoop.hive.serde2.io.DoubleWritable; 9 | 10 | // select ST_GeometryType(ST_Point(0, 0)) from onerow; 11 | // select ST_GeometryType(ST_Point('point (10.02 20.01)')) from onerow; 12 | // select ST_GeometryType(ST_Point('point z (10.02 20.01 2)')) from onerow; 13 | // select ST_GeometryType(ST_MultiPoint('multipoint ((1 2))')) from onerow; 14 | // select ST_GeometryType(ST_Linestring(10,10, 20,20)) from onerow; 15 | // select ST_GeometryType(ST_Linestring('linestring (10 10, 20 20)')) from onerow; 16 | // select ST_GeometryType(ST_Linestring('linestring z (10 10 2, 20 20 4)')) from onerow; 17 | // select ST_GeometryType(ST_GeomFromText('polygon ((0 0, 0 10, 10 0, 0 0))')) from onerow; 18 | // select ST_GeometryType(ST_Polygon('polygon ((0 0, 0 10, 10 0, 0 0))')) from onerow; 19 | // select ST_GeometryType(ST_Polygon(1,1, 1,4, 4,1)) from onerow; 20 | // select 
ST_GeometryType(ST_Polygon(1,1, 4,1, 1,4)) from onerow; 21 | // select ST_GeometryType(ST_Polygon(1,1, 1,4, 4,1, 1,1)) from onerow; 22 | // select ST_GeometryType(ST_Polygon(1,1, 4,1, 1,4, 1,1)) from onerow; 23 | // select ST_GeometryType(ST_GeomFromGeoJson('{"type":"Point", "coordinates":[1.2, 2.4]}')) from onerow; 24 | 25 | public class TestStGeometryType { 26 | 27 | @Test 28 | public void TestStGeometryType() throws Exception { 29 | ST_GeometryType typer = new ST_GeometryType(); 30 | ST_Point stPt = new ST_Point(); 31 | ST_MultiPoint stMp = new ST_MultiPoint(); 32 | ST_LineString stLn = new ST_LineString(); 33 | ST_Polygon stPoly = new ST_Polygon(); 34 | BytesWritable bwGeom = stPt.evaluate(new DoubleWritable(0), 35 | new DoubleWritable(0)); 36 | Text gty = typer.evaluate(bwGeom); 37 | assertEquals("ST_POINT", gty.toString()); 38 | bwGeom = stPt.evaluate(new Text("point z (10.02 20.01 2)")); 39 | gty = typer.evaluate(bwGeom); 40 | assertEquals("ST_POINT", gty.toString()); 41 | bwGeom = stLn.evaluate(new Text("linestring (10 10, 20 20)")); 42 | gty = typer.evaluate(bwGeom); 43 | assertEquals("ST_LINESTRING", gty.toString()); 44 | bwGeom = stPoly.evaluate(new Text("polygon ((0 0, 0 10, 10 0, 0 0))")); 45 | gty = typer.evaluate(bwGeom); 46 | assertEquals("ST_POLYGON", gty.toString()); 47 | } 48 | 49 | } 50 | 51 | -------------------------------------------------------------------------------- /hive/src/test/java/com/esri/hadoop/hive/TestStLineString.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import static org.junit.Assert.*; 4 | import org.junit.Test; 5 | 6 | import java.util.ArrayList; 7 | import java.util.Arrays; 8 | import org.apache.hadoop.io.BytesWritable; 9 | import org.apache.hadoop.io.Text; 10 | import org.apache.hadoop.hive.serde2.io.DoubleWritable; 11 | 12 | // select ST_GeometryType(ST_Linestring(10,10, 20,20)) from onerow; 13 | // select 
ST_GeometryType(ST_Linestring('linestring (10 10, 20 20)')) from onerow;
// select ST_GeometryType(ST_Linestring('linestring z (10 10 2, 20 20 4)')) from onerow;

public class TestStLineString {

	@Test
	public void test() throws Exception {
		// Exercises every ST_LineString constructor overload and checks the result type.
		ST_GeometryType typer = new ST_GeometryType();
		ST_LineString stLn = new ST_LineString();
		//ST_Equals stEq = new ST_Equals();
		DoubleWritable ten = new DoubleWritable(10);
		DoubleWritable twenty = new DoubleWritable(20);
		BytesWritable bwGeom = stLn.evaluate(ten,ten, twenty);
		assertEquals(null, bwGeom);  // odd arguments
		bwGeom = stLn.evaluate(ten,ten, twenty,twenty);
		Text gty = typer.evaluate(bwGeom);
		assertEquals("ST_LINESTRING", gty.toString());
		Text wkt = new Text("linestring (10 10, 20 20)");
		bwGeom = stLn.evaluate(wkt);
		gty = typer.evaluate(bwGeom);
		assertEquals("ST_LINESTRING", gty.toString());
		//GUDF assertTrue(stEq.~eval~(new ST_GeomFromText().evaluate(wkt), bwGeom));
		bwGeom = stLn.evaluate(new Text("linestring z (10 10 2, 20 20 4)"));
		gty = typer.evaluate(bwGeom);
		assertEquals("ST_LINESTRING", gty.toString());
		// Parameterized the raw ArrayList types to eliminate unchecked warnings.
		ArrayList<DoubleWritable> xs = new ArrayList<DoubleWritable>(Arrays.asList(ten,twenty));
		ArrayList<DoubleWritable> ys = new ArrayList<DoubleWritable>(Arrays.asList(twenty,ten));
		bwGeom = stLn.evaluate(xs, ys);
		gty = typer.evaluate(bwGeom);
		assertEquals("ST_LINESTRING", gty.toString());
		BytesWritable pt1020 = new ST_Point().evaluate(ten,twenty);
		BytesWritable pt2010 = new ST_Point().evaluate(twenty,ten);
		ArrayList<BytesWritable> pts = new ArrayList<BytesWritable>(Arrays.asList(pt1020, pt2010));
		bwGeom = stLn.evaluate(pts);
		gty = typer.evaluate(bwGeom);
		assertEquals("ST_LINESTRING", gty.toString());
	}

}

-------------------------------------------------------------------------------- /hive/src/test/java/com/esri/hadoop/hive/TestStMinX.java: --------------------------------------------------------------------------------
package com.esri.hadoop.hive;

import static org.junit.Assert.*;
import org.junit.Test;

import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;

// select ST_MinX(ST_Point(1,2)) from onerow;
// select ST_MinX(ST_LineString(1.5,2.5, 3.0,2.2)) from onerow;
// select ST_MinX(ST_Polygon(1, 1, 1, 4, 4, 4, 4, 1)) from onerow;
// select ST_MinX(ST_MultiPoint(0,0, 2,2)) from onerow;
// select ST_MinX(ST_MultiLineString(array(1, 1, 2, 2), array(10, 10, 20, 20))) from onerow;
// select ST_MinX(ST_MultiPolygon(array(1,1, 1,2, 2,2, 2,1), array(3,3, 3,4, 4,4, 4,3))) from onerow;

public class TestStMinX {

	@Test
	public void TestStMinX() {
		ST_MinX stMinX = new ST_MinX();
		ST_Point stPt = new ST_Point();
		BytesWritable bwGeom = stPt.evaluate(new DoubleWritable(1.2),
											 new DoubleWritable(3.4));
		DoubleWritable dwx = stMinX.evaluate(bwGeom);
		assertEquals(1.2, dwx.get(), .000001);
		bwGeom = stPt.evaluate(new DoubleWritable(6.5),
							   new DoubleWritable(4.3),
							   new DoubleWritable(2.1));
		dwx = stMinX.evaluate(bwGeom);
		assertEquals(6.5, dwx.get(), 0.0);
	}

}
-------------------------------------------------------------------------------- /hive/src/test/java/com/esri/hadoop/hive/TestStMinY.java: --------------------------------------------------------------------------------
package com.esri.hadoop.hive;

import static org.junit.Assert.*;
import org.junit.Test;

import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;

// select ST_MinY(ST_GeomFromGeoJson('{"type":"LineString", "coordinates":[[1,2], [3,4]]}')) from onerow;
// select ST_MinY(ST_Point(1,2)) from onerow;
// select ST_MinY(ST_LineString(1.5,2.5, 3.0,2.2)) from onerow;
// select ST_MinY(ST_Polygon(1, 1, 1, 4, 4, 4, 4, 1)) from onerow;
// select
ST_MinY(ST_MultiPoint(0,0, 2,2)) from onerow; 14 | // select ST_MinY(ST_MultiLineString(array(1, 1, 2, 2), array(10, 10, 20, 20))) from onerow; 15 | // select ST_MinY(ST_MultiPolygon(array(1,1, 1,2, 2,2, 2,1), array(3,3, 3,4, 4,4, 4,3))) from onerow; 16 | 17 | public class TestStMinY { 18 | 19 | @Test 20 | public void TestStMinY() { 21 | ST_MinY stMinY = new ST_MinY(); 22 | ST_Point stPt = new ST_Point(); 23 | } 24 | 25 | } 26 | -------------------------------------------------------------------------------- /hive/src/test/java/com/esri/hadoop/hive/TestStMultiPoint.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import static org.junit.Assert.*; 4 | import org.junit.Test; 5 | 6 | import org.apache.hadoop.io.BytesWritable; 7 | import org.apache.hadoop.io.Text; 8 | import org.apache.hadoop.hive.serde2.io.DoubleWritable; 9 | 10 | // select ST_GeometryType(ST_MultiPoint('multipoint ((1 2))')) from onerow; 11 | 12 | public class TestStMultiPoint { 13 | 14 | @Test 15 | public void test() throws Exception { 16 | ST_GeometryType typer = new ST_GeometryType(); 17 | ST_MultiPoint stMp = new ST_MultiPoint(); 18 | BytesWritable bwGeom = stMp.evaluate(new Text("multipoint ((1 2))")); 19 | Text gty = typer.evaluate(bwGeom); 20 | assertEquals("ST_MULTIPOINT", gty.toString()); 21 | } 22 | 23 | } 24 | 25 | -------------------------------------------------------------------------------- /hive/src/test/java/com/esri/hadoop/hive/TestStPoint.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import static org.junit.Assert.*; 4 | import org.junit.Test; 5 | 6 | import org.apache.hadoop.io.BytesWritable; 7 | import org.apache.hadoop.io.Text; 8 | import org.apache.hadoop.hive.serde2.io.DoubleWritable; 9 | 10 | // select ST_GeometryType(ST_Point(0, 0)) from onerow; 11 | // select ST_GeometryType(ST_Point('point (10.02 20.01)')) from 
onerow; 12 | // select ST_GeometryType(ST_Point('point z (10.02 20.01 2)')) from onerow; 13 | 14 | public class TestStPoint { 15 | 16 | @Test 17 | public void TestStPoint() throws Exception { 18 | ST_GeometryType typer = new ST_GeometryType(); 19 | ST_X stX = new ST_X(); 20 | ST_Y stY = new ST_Y(); 21 | ST_Point stPt = new ST_Point(); 22 | BytesWritable bwGeom = stPt.evaluate(new DoubleWritable(1.2), 23 | new DoubleWritable(3.4)); 24 | DoubleWritable dwx = stX.evaluate(bwGeom); 25 | DoubleWritable dwy = stY.evaluate(bwGeom); 26 | assertEquals(1.2, dwx.get(), .000001); 27 | assertEquals(3.4, dwy.get(), .000001); 28 | Text gty = typer.evaluate(bwGeom); 29 | assertEquals("ST_POINT", gty.toString()); 30 | bwGeom = stPt.evaluate(new DoubleWritable(6.5), 31 | new DoubleWritable(4.3), 32 | new DoubleWritable(2.1)); 33 | gty = typer.evaluate(bwGeom); 34 | assertEquals("ST_POINT", gty.toString()); 35 | bwGeom = stPt.evaluate(new Text("point (10.02 20.01)")); 36 | gty = typer.evaluate(bwGeom); 37 | assertEquals("ST_POINT", gty.toString()); 38 | bwGeom = stPt.evaluate(new Text("point z (10.02 20.01 2)")); 39 | gty = typer.evaluate(bwGeom); 40 | assertEquals("ST_POINT", gty.toString()); 41 | } 42 | 43 | } 44 | 45 | -------------------------------------------------------------------------------- /hive/src/test/java/com/esri/hadoop/hive/TestStX.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import static org.junit.Assert.*; 4 | import org.junit.Test; 5 | 6 | import org.apache.hadoop.io.BytesWritable; 7 | import org.apache.hadoop.hive.serde2.io.DoubleWritable; 8 | 9 | public class TestStX { 10 | 11 | @Test 12 | public void TestStX() { 13 | ST_X stX = new ST_X(); 14 | ST_Point stPt = new ST_Point(); 15 | BytesWritable bwGeom = stPt.evaluate(new DoubleWritable(1.2), 16 | new DoubleWritable(3.4)); 17 | DoubleWritable dwx = stX.evaluate(bwGeom); 18 | assertEquals(1.2, dwx.get(), .000001); 19 | bwGeom = 
stPt.evaluate(new DoubleWritable(6.5), 20 | new DoubleWritable(4.3), 21 | new DoubleWritable(2.1)); 22 | dwx = stX.evaluate(bwGeom); 23 | assertEquals(6.5, dwx.get(), 0.0); 24 | } 25 | 26 | } 27 | 28 | -------------------------------------------------------------------------------- /hive/src/test/java/com/esri/hadoop/hive/TestStY.java: -------------------------------------------------------------------------------- 1 | package com.esri.hadoop.hive; 2 | 3 | import static org.junit.Assert.*; 4 | import org.junit.Test; 5 | 6 | import org.apache.hadoop.io.BytesWritable; 7 | import org.apache.hadoop.hive.serde2.io.DoubleWritable; 8 | 9 | public class TestStY { 10 | 11 | @Test 12 | public void TestStY() { 13 | ST_Y stY = new ST_Y(); 14 | ST_Point stPt = new ST_Point(); 15 | BytesWritable bwGeom = stPt.evaluate(new DoubleWritable(1.2), 16 | new DoubleWritable(3.4)); 17 | DoubleWritable dwy = stY.evaluate(bwGeom); 18 | assertEquals(3.4, dwy.get(), .000001); 19 | bwGeom = stPt.evaluate(new DoubleWritable(6.5), 20 | new DoubleWritable(4.3), 21 | new DoubleWritable(2.1)); 22 | dwy = stY.evaluate(bwGeom); 23 | assertEquals(4.3, dwy.get(), 0.0); 24 | } 25 | 26 | } 27 | 28 | -------------------------------------------------------------------------------- /hive/test/README.md: -------------------------------------------------------------------------------- 1 | ## Examples/Tests 2 | 3 | Set up with `st-geom-onerow.txt` and `st-geom-aggr.txt`. 
4 | 5 | ```sh 6 | hive -S -f st-geom-desc.sql >& st-geom-desc.out 7 | diff -q st-geom-desc.ref st-geom-desc.out 8 | hive -S -f st-geom-text.sql >& st-geom-text.out 9 | diff -wq st-geom-text.ref st-geom-text.out 10 | hive -S -f st-geom-exact.sql >& st-geom-exact.out 11 | diff -q st-geom-exact.ref st-geom-exact.out 12 | hive -S -f st-geom-bins.sql >& st-geom-bins.out 13 | diff -wq st-geom-bins.ref st-geom-bins.out 14 | hive -S -f st-geom-multi-call.sql >& st-geom-multi-call.out 15 | diff -q st-geom-multi-call.ref st-geom-multi-call.out 16 | hive -S -f st-geom-aggr.sql >& st-geom-aggr.out 17 | diff -q st-geom-aggr.ref st-geom-aggr.out 18 | ``` 19 | -------------------------------------------------------------------------------- /hive/test/st-geom-aggr.ref: -------------------------------------------------------------------------------- 1 | POLYGON ((1 1, 2 1, 2 2, 1 2, 1 1)) 2 | POINT (2 2) 3 | LINESTRING (0 2, 0 3) 4 | MULTIPOLYGON EMPTY 5 | POLYGON ((2 0, 3 0, 3 1, 3 2, 3 3, 2 3, 1 3, 0 3, 0 2, 0 1, 0 0, 1 0, 2 0)) 6 | MULTIPOLYGON (((0 0, 1 0, 2 2, 0 1, 0 0)), ((3 0, 4 0, 4 1, 2 2, 3 0)), ((2 2, 4 3, 4 4, 3 4, 2 2)), ((2 2, 1 4, 0 4, 0 3, 2 2))) 7 | LINESTRING (0 0, 0 1, 0 2, 0 3, 0 4, 0 5) 8 | MULTIPOINT ((0 0), (2 0), (1 1), (0 2), (2 2)) 9 | MULTIPOLYGON (((-1 -1, 0 -1, -1 0, -1 -1)), ((1 0, 1 1, 0 1, 1 0))) 10 | POLYGON ((0 0, 3 0, 3 3, 0 3, 0 0)) 11 | POLYGON ((0 0, 4 0, 4 4, 0 4, 0 0)) 12 | NULL 13 | POLYGON ((0 0, 2 0, 2 2, 0 2, 0 0)) 14 | -------------------------------------------------------------------------------- /hive/test/st-geom-aggr.sql: -------------------------------------------------------------------------------- 1 | select ST_AsText(ST_Aggr_Intersection(ST_Polygon(wkt))) from com_esri_hadoop_hive_aggr_tests where testcase = 0; 2 | select ST_AsText(ST_Aggr_Intersection(ST_Polygon(wkt))) from com_esri_hadoop_hive_aggr_tests where testcase = 1; 3 | select ST_AsText(ST_Aggr_Intersection(ST_LineString(wkt))) from com_esri_hadoop_hive_aggr_tests 
where testcase = 2; 4 | select ST_AsText(ST_Aggr_Intersection(ST_Point(wkt))) from com_esri_hadoop_hive_aggr_tests where testcase = 3; 5 | select ST_AsText(ST_Aggr_Union(ST_Polygon(wkt))) from com_esri_hadoop_hive_aggr_tests where testcase = 0; 6 | select ST_AsText(ST_Aggr_Union(ST_Polygon(wkt))) from com_esri_hadoop_hive_aggr_tests where testcase = 1; 7 | select ST_AsText(ST_Aggr_Union(ST_LineString(wkt))) from com_esri_hadoop_hive_aggr_tests where testcase = 2; 8 | select ST_AsText(ST_Aggr_Union(ST_Point(wkt))) from com_esri_hadoop_hive_aggr_tests where testcase = 3; 9 | select ST_AsText(ST_Aggr_Union(ST_Polygon(wkt))) from com_esri_hadoop_hive_aggr_tests where testcase = 4; 10 | select ST_AsText(ST_Aggr_ConvexHull(ST_Polygon(wkt))) from com_esri_hadoop_hive_aggr_tests where testcase = 0; 11 | select ST_AsText(ST_Aggr_ConvexHull(ST_Polygon(wkt))) from com_esri_hadoop_hive_aggr_tests where testcase = 1; 12 | select ST_AsText(ST_Aggr_ConvexHull(ST_LineString(wkt))) from com_esri_hadoop_hive_aggr_tests where testcase = 2; 13 | select ST_AsText(ST_Aggr_ConvexHull(ST_Point(wkt))) from com_esri_hadoop_hive_aggr_tests where testcase = 3; 14 | -------------------------------------------------------------------------------- /hive/test/st-geom-aggr.tsv: -------------------------------------------------------------------------------- 1 | 0 polygon ((0 0, 2 0, 2 2, 0 2, 0 0)) 2 | 0 polygon ((1 0, 3 0, 3 2, 1 2, 1 0)) 3 | 0 polygon ((1 1, 3 1, 3 3, 1 3, 1 1)) 4 | 0 polygon ((0 1, 2 1, 2 3, 0 3, 0 1)) 5 | 1 polygon ((0 0, 1 0, 2 2, 0 1, 0 0)) 6 | 1 polygon ((4 0, 4 1, 2 2, 3 0, 4 0)) 7 | 1 polygon ((4 4, 3 4, 2 2, 4 3, 4 4)) 8 | 1 polygon ((0 4, 0 3, 2 2, 1 4, 0 4)) 9 | 2 linestring (0 0, 0 3) 10 | 2 linestring (0 1, 0 4) 11 | 2 linestring (0 2, 0 5) 12 | 3 point (1 1) 13 | 3 point (0 0) 14 | 3 point (0 2) 15 | 3 point (2 2) 16 | 3 point (2 0) 17 | 4 polygon ((1 0, 1 1, 0 1, 1 0)) 18 | 4 polygon ((-1 0, -1 -1, 0 -1, -1 0)) 19 | 
-------------------------------------------------------------------------------- /hive/test/st-geom-aggr.txt: -------------------------------------------------------------------------------- 1 | create table com_esri_hadoop_hive_aggr_tests(testcase int, wkt string) 2 | row format delimited fields terminated by '\t'; 3 | load data local inpath 'st-geom-aggr.tsv' 4 | overwrite into table com_esri_hadoop_hive_aggr_tests; 5 | -------------------------------------------------------------------------------- /hive/test/st-geom-bins.ref: -------------------------------------------------------------------------------- 1 | 1.0 2 | POLYGON ((-0.5 -0.5, 0.5 -0.5, 0.5 0.5, -0.5 0.5, -0.5 -0.5)) 3 | -------------------------------------------------------------------------------- /hive/test/st-geom-bins.sql: -------------------------------------------------------------------------------- 1 | select ST_Area(ST_BinEnvelope(1.0, ST_Bin(1.0, ST_Point(0, 0)))) from onerow; 2 | select ST_AsText(ST_BinEnvelope(1.0, ST_Bin(1.0, ST_Point(0, 0)))) from onerow; 3 | -------------------------------------------------------------------------------- /hive/test/st-geom-multi-call.ref: -------------------------------------------------------------------------------- 1 | 3.0 9.0 18.0 2 | 1.0 9.0 3 | true false 4 | 2 3 5 | true false 6 | 0 1 7 | false true 8 | true false 9 | true false 10 | true false 11 | false true 12 | true false 13 | true false 14 | true false 15 | false 16 | false 17 | -------------------------------------------------------------------------------- /hive/test/st-geom-multi-call.sql: -------------------------------------------------------------------------------- 1 | select ST_Length(ST_Linestring(1,1, 1,2, 2,2, 2,1)), 2 | ST_Length(ST_Linestring(1,1, 1,4, 4,4, 4,1)), 3 | ST_Length(ST_Linestring(1,1, 1,7, 7,7, 7,1)) from onerow; 4 | select ST_Area(ST_Polygon(1,1, 1,2, 2,2, 2,1)), 5 | ST_Area(ST_Polygon(1,1, 1,4, 4,4, 4,1)) from onerow; 6 | select ST_Contains(ST_Polygon(1, 1, 1, 
4, 4, 4, 4, 1), ST_Point(2, 3)), 7 | ST_Contains(ST_Polygon(1, 1, 1, 4, 4, 4, 4, 1), ST_Point(8, 8)) from onerow; 8 | select ST_CoordDim(ST_Point(0., 3.)), 9 | ST_CoordDim(ST_PointZ(0., 3., 1)) from onerow; 10 | select ST_Crosses(st_linestring(2,0, 2,3), ST_Polygon(1,1, 1,4, 4,4, 4,1)), 11 | ST_Crosses(st_linestring(8,7, 7,8), ST_Polygon(1,1, 1,4, 4,4, 4,1)) from onerow; 12 | select ST_Dimension(ST_Point(0,0)), 13 | ST_Dimension(ST_LineString(1.5,2.5, 3.0,2.2)) from onerow; 14 | select ST_Disjoint(st_point(1,1), ST_Point(1,1)), 15 | ST_Disjoint(st_point(2,0), ST_Point(1,1)) from onerow; 16 | select ST_EnvIntersects(st_point(1,1), ST_Point(1,1)), 17 | ST_EnvIntersects(st_point(2,0), ST_Point(1,1)) from onerow; 18 | select ST_Equals(st_point(1,1), ST_Point(1,1)), 19 | ST_Equals(st_point(2,0), ST_Point(1,1)) from onerow; 20 | select ST_Intersects(st_point(1,1), ST_Point(1,1)), 21 | ST_Intersects(st_point(2,0), ST_Point(1,1)) from onerow; 22 | select ST_Is3D(ST_Point(0., 3.)), 23 | ST_Is3D(ST_PointZ(0., 3., 1)) from onerow; 24 | select ST_Overlaps(st_polygon(2,0, 2,3, 3,0), ST_Polygon(1,1, 1,4, 4,4, 4,1)), 25 | ST_Overlaps(st_polygon(2,0, 2,1, 3,1), ST_Polygon(1,1, 1,4, 4,4, 4,1)) from onerow; 26 | select ST_Touches(ST_Point(1, 3), ST_Polygon(1, 1, 1, 4, 4, 4, 4, 1)), 27 | ST_Touches(ST_Point(8, 8), ST_Polygon(1, 1, 1, 4, 4, 4, 4, 1)) from onerow; 28 | select ST_Within(ST_Point(2, 3), ST_Polygon(1, 1, 1, 4, 4, 4, 4, 1)), 29 | ST_Within(ST_Point(8, 8), ST_Polygon(1, 1, 1, 4, 4, 4, 4, 1)) from onerow; 30 | SELECT ST_Intersects(ST_GeomFromGeoJson('{"type": "LineString", "coordinates": [[2.5,2.5], [8.0,0.0]]}'), 31 | ST_GeomFromGeoJson('{"type": "LineString", "coordinates": [[1.5,1.5], [0.0,7.0]]}')) from onerow; 32 | SELECT ST_Intersects(ST_GeomFromJson('{"paths":[[[2.5,2.5],[8,0]]],"spatialReference":{"wkid":4326}}'), 33 | ST_GeomFromJson('{"paths":[[[1.5,1.5],[0,7]]],"spatialReference":{"wkid":4326}}')) from onerow; 34 | 
-------------------------------------------------------------------------------- /hive/test/st-geom-onerow.txt: -------------------------------------------------------------------------------- 1 | echo onerow > st-geom-onerow.tsv 2 | create table onerow(s string) 3 | row format delimited fields terminated by '\t'; 4 | load data local inpath 'st-geom-onerow.tsv' 5 | overwrite into table onerow; 6 | -------------------------------------------------------------------------------- /hive/test/st-geom-text.ref: -------------------------------------------------------------------------------- 1 | {"rings":[[[1.5,2.5],[3,2.2],[2.2,1.1],[1.5,2.5]]]} 2 | {"rings":[[[1.5,2.5],[3,2.2],[2.2,1.1],[1.5,2.5]]]} 3 | {"rings":[[[0.1,2.2],[3,2.2],[2.2,1.1],[0.1,2.2]]]} 4 | {"rings":[[[1,1],[1,4],[4,4],[4,1],[1,1]]]} 5 | POLYGON ((1 1, 4 1, 1 4, 1 1)) 6 | POLYGON ((1 1, 4 1, 1 4, 1 1)) 7 | POLYGON ((1 1, 4 1, 1 4, 1 1)) 8 | POLYGON ((1 1, 4 1, 1 4, 1 1)) 9 | LINESTRING (0 2, 0 2) 10 | LINESTRING (0 0, 2 2) 11 | POLYGON ((0 0, 2 0, 2 2, 0 2, 0 0)) 12 | POLYGON ((1 1, 4 1, 4 4, 1 4, 1 1)) 13 | POLYGON ((2 0, 3 0, 3 3, 2 3, 2 0)) 14 | POINT (1 1) 15 | POINT (0 3) 16 | MULTILINESTRING ((1 0, 2 0), (0 2, 0 1)) 17 | LINESTRING (0 2, 0 4) 18 | LINESTRING (0 3, 0 4) 19 | LINESTRING (0 3, 0 4) 20 | LINESTRING (1 2.5, 2 3) 21 | LINESTRING (1 1, 1 4) 22 | LINESTRING (1 3, 1 4) 23 | POLYGON ((2 1, 2.666666666666667 1, 2 3, 2 1)) 24 | POLYGON ((1 1, 2 1, 1 2, 1 1)) 25 | POLYGON ((1 1, 2 1, 1 2, 1 1)) 26 | POLYGON ((2 2, 3 2, 3 3, 2 3, 2 2)) 27 | POLYGON ((1 1, 2 1, 2 2, 1 2, 1 1)) 28 | POLYGON ((0 0, 1 0, 0 1, 0 0)) 29 | POLYGON ((1 1, 2 1, 1 2, 1 1)) 30 | POLYGON ((1 0, 1 1, 0 1, 1 0)) 31 | POLYGON ((0 0, 1 0, 0 1, 0 0)) 32 | MULTIPOLYGON (((2 0, 3 0, 2 1, 2 0)), ((0 2, 1 2, 0 3, 0 2))) 33 | POINT (2 3) 34 | POINT (2 0) 35 | POINT (3 4) 36 | POINT (4 4) 37 | POINT (3 2) 38 | 3.139350203046865 39 | 3438.190416575652 40 | 3438.190416575652 41 | 5552.589421311623 42 | MULTIPOLYGON (((3 3, 4 3, 4 4, 3 
4, 3 3)), ((1 1, 2 1, 2 2, 1 2, 1 1))) 43 | POINT (3 3) 44 | POINT (4 4) 45 | POINT (2.5 2.5) 46 | MULTIPOINT ((1 1), (2 2)) 47 | POLYGON ((5 0, 10 0, 10 10, 0 10, 0 5, 5 5, 5 0)) 48 | MULTIPOINT ((0 0), (2 2)) 49 | MULTIPOINT ((0 0), (2 2)) 50 | MULTILINESTRING ((0 2, 1 2), (2 2, 3 2)) 51 | MULTIPOLYGON (((0 0, 2 0, 2 1, 1 1, 1 2, 0 2, 0 0)), ((2 1, 3 1, 3 3, 1 3, 1 2, 2 2, 2 1))) 52 | LINESTRING (0 80, 0.03 80.04) 53 | LINESTRING (0 80, 0.03 80.04) 54 | LINESTRING (0 80, 0.03 80.04) 55 | MULTILINESTRING ((0 80, 0.03 80.04)) 56 | {"rings":[]} 57 | {"x":0,"y":0} 58 | POINT (1.2 2.4) 59 | {"x":1.2,"y":2.4,"spatialReference":{"wkid":4326}} 60 | LINESTRING (1 2, 3 4) 61 | {"x":1.2,"y":2.4,"spatialReference":{"wkid":4326}} 62 | {"x":1.2,"y":2.4} 63 | {"type":"Point","coordinates":[1.2,2.4]} 64 | {"type":"Point","coordinates":[1.2,2.4]} 65 | {"type":"MultiLineString","coordinates":[[[1.0,1.0],[2.0,2.0],[3.0,3.0]],[[10.0,10.0],[11.0,11.0],[12.0,12.0]]]} 66 | POINT (1.2 2.4) 67 | LINESTRING (1 2, 3 4) 68 | MULTIPOINT ((0 1), (1 0)) 69 | LINESTRING (1 1, 4 1, 1 4, 1 1) 70 | MULTILINESTRING ((0 0, 8 0, 0 8, 0 0), (1 1, 1 5, 5 1, 1 1)) 71 | {"x":10,"y":40} 72 | {"x":10,"y":40} 73 | {"x":40,"y":30} 74 | {"x":30,"y":10} 75 | {"x":10.02,"y":20.01} 76 | {"x":10.02,"y":20.01} 77 | {"x":10.32,"y":23.98} 78 | {"x":11.92,"y":25.64} 79 | LINESTRING (1 1, 4 1, 1 4, 1 1) 80 | LINESTRING (1 1, 4 1, 1 4, 1 1) 81 | LINESTRING (0 0, 8 0, 0 8, 0 0) 82 | POINT (20 20) 83 | LINESTRING (20 20, 7 8) 84 | {"paths":[[[20,20],[7,8]]]} 85 | POLYGON ((3 3, 5 3, 4 6, 3 3)) 86 | LINESTRING (1 1, 1 5, 5 1, 1 1) 87 | -------------------------------------------------------------------------------- /json/build.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 
-------------------------------------------------------------------------------- /json/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 4.0.0 3 | 4 | 5 | com.esri.hadoop 6 | spatial-sdk-hadoop 7 | 2.2.1-SNAPSHOT 8 | ../ 9 | 10 | 11 | spatial-sdk-json 12 | Spatial JSON Utilities 13 | 14 | 15 | 16 | org.apache.hadoop 17 | hadoop-client 18 | 19 | 20 | 21 | com.fasterxml.jackson.core 22 | jackson-core 23 | 24 | 25 | 26 | com.fasterxml.jackson.core 27 | jackson-databind 28 | 29 | 30 | 31 | com.esri.geometry 32 | esri-geometry-api 33 | 34 | 35 | 36 | junit 37 | junit 38 | test 39 | 40 | 41 | 42 | org.apache.hive 43 | hive-exec 44 | ${hive.version} 45 | test 46 | 47 | 48 | 49 | 50 | 51 | 52 | org.apache.maven.plugins 53 | maven-compiler-plugin 54 | 55 | 56 | org.apache.maven.plugins 57 | maven-surefire-plugin 58 | 59 | 60 | true 61 | org.apache.maven.plugins 62 | maven-javadoc-plugin 63 | 64 | 65 | 66 | 67 | 68 | -------------------------------------------------------------------------------- /json/src/main/java/com/esri/json/EsriFeature.java: -------------------------------------------------------------------------------- 1 | package com.esri.json; 2 | import java.io.IOException; 3 | import java.io.InputStream; 4 | import java.util.Map; 5 | 6 | import com.fasterxml.jackson.core.JsonGenerationException; 7 | import com.fasterxml.jackson.core.JsonParseException; 8 | import com.fasterxml.jackson.core.JsonParser; 9 | import com.fasterxml.jackson.databind.JsonMappingException; 10 | 11 | import com.esri.core.geometry.Geometry; 12 | 13 | 14 | public class EsriFeature { 15 | /** 16 | * Map of attributes 17 | */ 18 | public Map attributes; 19 | 20 | /** 21 | * Geometry associated with this feature 22 | */ 23 | public Geometry geometry; 24 | 25 | public String toJson() throws JsonGenerationException, JsonMappingException, IOException{ 26 | return EsriJsonFactory.JsonFromFeature(this); 27 | } 28 | 29 | /** 30 | * @param jsonStream 
JSON input stream 31 | * @return EsriFeature instance that describes the fully parsed JSON representation 32 | * @throws JsonParseException 33 | * @throws IOException 34 | */ 35 | public static EsriFeature fromJson(InputStream jsonStream) throws JsonParseException, IOException 36 | { 37 | return EsriJsonFactory.FeatureFromJson(jsonStream); 38 | } 39 | 40 | /** 41 | * 42 | * @param JsonParser parser that is pointed at the root of the JSON file created by ArcGIS 43 | * @return EsriFeature instance that describes the fully parsed JSON representation 44 | * @throws JsonParseException 45 | * @throws IOException 46 | */ 47 | public static EsriFeature fromJson(JsonParser parser) throws JsonParseException, IOException 48 | { 49 | return EsriJsonFactory.FeatureFromJson(parser); 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /json/src/main/java/com/esri/json/EsriFeatureClass.java: -------------------------------------------------------------------------------- 1 | package com.esri.json; 2 | 3 | import java.io.IOException; 4 | import java.io.InputStream; 5 | import java.util.Map; 6 | 7 | import com.fasterxml.jackson.core.JsonGenerationException; 8 | import com.fasterxml.jackson.core.JsonParseException; 9 | import com.fasterxml.jackson.core.JsonParser; 10 | import com.fasterxml.jackson.annotation.JsonIgnoreProperties; 11 | import com.fasterxml.jackson.databind.JsonMappingException; 12 | 13 | import com.esri.core.geometry.Geometry; 14 | import com.esri.core.geometry.SpatialReference; 15 | 16 | 17 | @JsonIgnoreProperties(ignoreUnknown = true) 18 | public class EsriFeatureClass { 19 | public String displayFieldName; 20 | 21 | /** 22 | * Map of field aliases for applicable fields in this feature class 23 | */ 24 | public Map fieldAliases; 25 | 26 | /** 27 | * Esri geometry type (Polygon, Point, ...) 
28 | */ 29 | public Geometry.Type geometryType; 30 | 31 | /** 32 | * Spatial reference for the feature class (null, if undefined) 33 | */ 34 | public SpatialReference spatialReference; 35 | 36 | /** 37 | * Array of field definitions (name, type, alias, ...) 38 | */ 39 | public EsriField [] fields; 40 | 41 | /** 42 | * Array of features (attributes, geometry) 43 | */ 44 | public EsriFeature [] features; 45 | 46 | 47 | /** 48 | * 49 | * @return JSON string representation of this feature class 50 | * @throws JsonGenerationException 51 | * @throws JsonMappingException 52 | * @throws IOException 53 | */ 54 | public String toJson() throws JsonGenerationException, JsonMappingException, IOException{ 55 | return EsriJsonFactory.JsonFromFeatureClass(this); 56 | } 57 | 58 | /** 59 | * 60 | * @param jsonStream JSON input stream 61 | * @return EsriFeatureClass instance that describes the fully parsed JSON representation 62 | * @throws JsonParseException 63 | * @throws IOException 64 | */ 65 | public static EsriFeatureClass fromJson(InputStream jsonStream) throws JsonParseException, IOException 66 | { 67 | return EsriJsonFactory.FeatureClassFromJson(jsonStream); 68 | } 69 | 70 | /** 71 | * 72 | * @param JsonParser parser that is pointed at the root of the JSON file created by ArcGIS 73 | * @return EsriFeatureClass instance that describes the fully parsed JSON representation 74 | * @throws JsonParseException 75 | * @throws IOException 76 | */ 77 | public static EsriFeatureClass fromJson(JsonParser parser) throws JsonParseException, IOException 78 | { 79 | return EsriJsonFactory.FeatureClassFromJson(parser); 80 | } 81 | } 82 | -------------------------------------------------------------------------------- /json/src/main/java/com/esri/json/EsriField.java: -------------------------------------------------------------------------------- 1 | package com.esri.json; 2 | 3 | import com.fasterxml.jackson.annotation.JsonIgnoreProperties; 4 | 5 | @JsonIgnoreProperties(ignoreUnknown = true) 
6 | public class EsriField { 7 | /** 8 | * Actual name of the field 9 | */ 10 | public String name; 11 | 12 | /** 13 | * Field value type (i.e. esriFieldTypeString) 14 | */ 15 | public EsriFieldType type; 16 | 17 | /** 18 | * Aliased name of the field 19 | */ 20 | public String alias; 21 | 22 | /** 23 | * Field maximum length (for value types like esriFieldTypeString) 24 | */ 25 | public int length; 26 | } 27 | -------------------------------------------------------------------------------- /json/src/main/java/com/esri/json/EsriFieldType.java: -------------------------------------------------------------------------------- 1 | package com.esri.json; 2 | 3 | /** 4 | * 5 | * Enumeration of Esri field types. These are not upper cased as they are a direct 6 | * string representation of what would is in the JSON 7 | */ 8 | public enum EsriFieldType { 9 | esriFieldTypeSmallInteger, 10 | esriFieldTypeInteger, 11 | esriFieldTypeSingle, 12 | esriFieldTypeDouble, 13 | esriFieldTypeString, 14 | esriFieldTypeDate, 15 | esriFieldTypeOID, 16 | esriFieldTypeGeometry, 17 | esriFieldTypeBlob, 18 | esriFieldTypeRaster, 19 | esriFieldTypeGUID, 20 | esriFieldTypeGlobalID, 21 | esriFieldTypeXML, 22 | } -------------------------------------------------------------------------------- /json/src/main/java/com/esri/json/deserializer/GeometryJsonDeserializer.java: -------------------------------------------------------------------------------- 1 | package com.esri.json.deserializer; 2 | import java.io.IOException; 3 | import com.fasterxml.jackson.core.JsonParser; 4 | import com.fasterxml.jackson.core.JsonProcessingException; 5 | import com.fasterxml.jackson.databind.DeserializationContext; 6 | import com.fasterxml.jackson.databind.JsonDeserializer; 7 | 8 | import com.esri.core.geometry.Geometry; 9 | import com.esri.core.geometry.GeometryEngine; 10 | 11 | /** 12 | * 13 | * Deserializes a JSON geometry definition into a Geometry instance 14 | */ 15 | public class GeometryJsonDeserializer 
extends JsonDeserializer { 16 | 17 | public GeometryJsonDeserializer(){} 18 | 19 | @Override 20 | public Geometry deserialize(JsonParser arg0, DeserializationContext arg1) 21 | throws IOException, JsonProcessingException { 22 | return GeometryEngine.jsonToGeometry(arg0).getGeometry(); 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /json/src/main/java/com/esri/json/deserializer/GeometryTypeJsonDeserializer.java: -------------------------------------------------------------------------------- 1 | package com.esri.json.deserializer; 2 | 3 | import java.io.IOException; 4 | 5 | import com.fasterxml.jackson.core.JsonParser; 6 | import com.fasterxml.jackson.core.JsonProcessingException; 7 | import com.fasterxml.jackson.databind.DeserializationContext; 8 | import com.fasterxml.jackson.databind.JsonDeserializer; 9 | 10 | import com.esri.core.geometry.Geometry; 11 | 12 | /** 13 | * 14 | * Deserializes a JSON geometry type enumeration into a Geometry.Type.* enumeration 15 | */ 16 | public class GeometryTypeJsonDeserializer extends JsonDeserializer { 17 | 18 | public GeometryTypeJsonDeserializer(){} 19 | 20 | @Override 21 | public Geometry.Type deserialize(JsonParser parser, DeserializationContext arg1) 22 | throws IOException, JsonProcessingException { 23 | 24 | String type_text = parser.getText(); 25 | 26 | // geometry type enumerations coming from the JSON are prepended with "esriGeometry" (i.e. 
esriGeometryPolygon) 27 | // while the geometry-java-api uses the form Geometry.Type.Polygon 28 | if (type_text.startsWith("esriGeometry")) 29 | { 30 | // cut out esriGeometry to match Geometry.Type enumeration values 31 | type_text = type_text.substring(12); 32 | 33 | try { 34 | return Enum.valueOf(Geometry.Type.class, type_text); 35 | } catch (Exception e){ 36 | // parsing failed, fall through to unknown geometry type 37 | } 38 | } 39 | 40 | 41 | return Geometry.Type.Unknown; 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /json/src/main/java/com/esri/json/deserializer/SpatialReferenceJsonDeserializer.java: -------------------------------------------------------------------------------- 1 | package com.esri.json.deserializer; 2 | 3 | import java.io.IOException; 4 | 5 | import com.fasterxml.jackson.core.JsonParser; 6 | import com.fasterxml.jackson.core.JsonProcessingException; 7 | import com.fasterxml.jackson.databind.DeserializationContext; 8 | import com.fasterxml.jackson.databind.JsonDeserializer; 9 | 10 | import com.esri.core.geometry.SpatialReference; 11 | 12 | /** 13 | * 14 | * Deserializes a JSON spatial reference definition into a SpatialReference instance 15 | */ 16 | public class SpatialReferenceJsonDeserializer extends JsonDeserializer { 17 | 18 | public SpatialReferenceJsonDeserializer(){} 19 | 20 | @Override 21 | public SpatialReference deserialize(JsonParser parser, DeserializationContext arg1) 22 | throws IOException, JsonProcessingException { 23 | try { 24 | return SpatialReference.fromJson(parser); 25 | } catch (Exception e) { 26 | return null; 27 | } 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /json/src/main/java/com/esri/json/hadoop/EnclosedEsriJsonInputFormat.java: -------------------------------------------------------------------------------- 1 | package com.esri.json.hadoop; 2 | 3 | import java.io.IOException; 4 | 5 | import 
org.apache.hadoop.fs.FileSystem; 6 | import org.apache.hadoop.fs.Path; 7 | import org.apache.hadoop.io.LongWritable; 8 | import org.apache.hadoop.io.Text; 9 | import org.apache.hadoop.mapreduce.RecordReader; 10 | import org.apache.hadoop.mapreduce.TaskAttemptContext; 11 | import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; 12 | import org.apache.hadoop.mapreduce.InputSplit; 13 | import org.apache.hadoop.mapreduce.JobContext; 14 | 15 | /** 16 | * FileInputFormat for reading features from a feature exported as JSON in Esri standard format. 17 | */ 18 | public class EnclosedEsriJsonInputFormat extends FileInputFormat 19 | implements org.apache.hadoop.mapred.InputFormat { 20 | 21 | // Mrv1 implementation member will be used only for getSplits(), and 22 | // will be instantiated only when Mrv1 in use. 23 | private org.apache.hadoop.mapred.FileInputFormat ifmtMrv1 = null; 24 | 25 | @Override 26 | public RecordReader createRecordReader(InputSplit arg0, TaskAttemptContext arg1) 27 | throws IOException, InterruptedException { // MRv2 28 | return new EnclosedEsriJsonRecordReader(); 29 | } 30 | 31 | @Override 32 | public org.apache.hadoop.mapred.RecordReader getRecordReader( // MRv1 33 | org.apache.hadoop.mapred.InputSplit arg0, 34 | org.apache.hadoop.mapred.JobConf arg1, 35 | org.apache.hadoop.mapred.Reporter arg2) throws IOException { 36 | return new EnclosedEsriJsonRecordReader(arg0, arg1); 37 | } 38 | 39 | @Override 40 | public org.apache.hadoop.mapred.InputSplit[] getSplits( // MRv1 41 | org.apache.hadoop.mapred.JobConf arg0, 42 | int arg1) throws IOException { 43 | ifmtMrv1 = (ifmtMrv1!=null) ? 
ifmtMrv1 : 44 | new org.apache.hadoop.mapred.FileInputFormat() { 45 | @Override 46 | public boolean isSplitable(FileSystem fs, Path filename) { 47 | return false; 48 | } 49 | // Dummy method to satisfy interface but not meant to be called 50 | public org.apache.hadoop.mapred.RecordReader getRecordReader( 51 | org.apache.hadoop.mapred.InputSplit ign0, 52 | org.apache.hadoop.mapred.JobConf ign1, 53 | org.apache.hadoop.mapred.Reporter ign2) throws IOException { 54 | throw new UnsupportedOperationException("not meant to be called"); 55 | } 56 | }; 57 | return ifmtMrv1.getSplits(arg0, arg1); 58 | } 59 | 60 | @Override 61 | public boolean isSplitable(JobContext jc, Path filename) { 62 | return false; 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /json/src/main/java/com/esri/json/hadoop/EnclosedEsriJsonRecordReader.java: -------------------------------------------------------------------------------- 1 | package com.esri.json.hadoop; 2 | 3 | import java.io.IOException; 4 | 5 | import org.apache.hadoop.conf.Configuration; 6 | import org.apache.hadoop.mapred.InputSplit; 7 | 8 | /** 9 | * Record reader for reading features from a feature exported as JSON in Esri standard format. 10 | * 11 | * Each record returned is a string { "attributes" : [...], "geometry" : ... 
} 12 | * 13 | */ 14 | public class EnclosedEsriJsonRecordReader extends EnclosedBaseJsonRecordReader { 15 | 16 | public EnclosedEsriJsonRecordReader() throws IOException { // explicit just to declare exception 17 | super(); 18 | } 19 | 20 | public EnclosedEsriJsonRecordReader(InputSplit split, Configuration conf) throws IOException 21 | { 22 | super(split, conf); 23 | } 24 | 25 | } 26 | -------------------------------------------------------------------------------- /json/src/main/java/com/esri/json/hadoop/EnclosedGeoJsonInputFormat.java: -------------------------------------------------------------------------------- 1 | package com.esri.json.hadoop; 2 | 3 | import java.io.IOException; 4 | 5 | import org.apache.hadoop.fs.FileSystem; 6 | import org.apache.hadoop.fs.Path; 7 | import org.apache.hadoop.io.LongWritable; 8 | import org.apache.hadoop.io.Text; 9 | import org.apache.hadoop.mapreduce.RecordReader; 10 | import org.apache.hadoop.mapreduce.TaskAttemptContext; 11 | import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; 12 | import org.apache.hadoop.mapreduce.InputSplit; 13 | import org.apache.hadoop.mapreduce.JobContext; 14 | 15 | /** 16 | * FileInputFormat for reading features in GeoJSON FeatureCollection. 17 | */ 18 | public class EnclosedGeoJsonInputFormat extends FileInputFormat 19 | implements org.apache.hadoop.mapred.InputFormat { 20 | 21 | // Mrv1 implementation member will be used only for getSplits(), and 22 | // will be instantiated only when Mrv1 in use. 
23 | private org.apache.hadoop.mapred.FileInputFormat ifmtMrv1 = null; 24 | 25 | @Override 26 | public RecordReader createRecordReader(InputSplit arg0, TaskAttemptContext arg1) 27 | throws IOException, InterruptedException { // MRv2 28 | return new EnclosedGeoJsonRecordReader(); 29 | } 30 | 31 | @Override 32 | public org.apache.hadoop.mapred.RecordReader getRecordReader( // MRv1 33 | org.apache.hadoop.mapred.InputSplit arg0, 34 | org.apache.hadoop.mapred.JobConf arg1, 35 | org.apache.hadoop.mapred.Reporter arg2) throws IOException { 36 | return new EnclosedGeoJsonRecordReader(arg0, arg1); 37 | } 38 | 39 | @Override 40 | public org.apache.hadoop.mapred.InputSplit[] getSplits( // MRv1 41 | org.apache.hadoop.mapred.JobConf arg0, 42 | int arg1) throws IOException { 43 | ifmtMrv1 = (ifmtMrv1!=null) ? ifmtMrv1 : 44 | new org.apache.hadoop.mapred.FileInputFormat() { 45 | @Override 46 | public boolean isSplitable(FileSystem fs, Path filename) { 47 | return false; 48 | } 49 | // Dummy method to satisfy interface but not meant to be called 50 | public org.apache.hadoop.mapred.RecordReader getRecordReader( 51 | org.apache.hadoop.mapred.InputSplit ign0, 52 | org.apache.hadoop.mapred.JobConf ign1, 53 | org.apache.hadoop.mapred.Reporter ign2) throws IOException { 54 | throw new UnsupportedOperationException("not meant to be called"); 55 | } 56 | }; 57 | return ifmtMrv1.getSplits(arg0, arg1); 58 | } 59 | 60 | @Override 61 | public boolean isSplitable(JobContext jc, Path filename) { 62 | return false; 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /json/src/main/java/com/esri/json/hadoop/EnclosedGeoJsonRecordReader.java: -------------------------------------------------------------------------------- 1 | package com.esri.json.hadoop; 2 | 3 | import java.io.IOException; 4 | 5 | import org.apache.hadoop.conf.Configuration; 6 | import org.apache.hadoop.mapred.InputSplit; 7 | 8 | /** 9 | * Record reader for reading features from 
GeoJSON FeatureCollection. 10 | * 11 | * Each record returned is a string { "type" : [...], "properties" : [...], "geometry" : ... } 12 | */ 13 | public class EnclosedGeoJsonRecordReader extends EnclosedBaseJsonRecordReader { 14 | 15 | public EnclosedGeoJsonRecordReader() throws IOException { // explicit just to declare exception 16 | super(); 17 | } 18 | 19 | public EnclosedGeoJsonRecordReader(InputSplit split, Configuration conf) throws IOException 20 | { 21 | super(split, conf); 22 | } 23 | 24 | } 25 | -------------------------------------------------------------------------------- /json/src/main/java/com/esri/json/hadoop/EnclosedJsonInputFormat.java: -------------------------------------------------------------------------------- 1 | // package com.esri.json.hadoop; 2 | // Obsolete - @Deprecated in v1.2 - superseded by EnclosedEsriJsonInputFormat 3 | // public class EnclosedJsonInputFormat extends EnclosedEsriJsonInputFormat {} 4 | -------------------------------------------------------------------------------- /json/src/main/java/com/esri/json/hadoop/EnclosedJsonRecordReader.java: -------------------------------------------------------------------------------- 1 | /* * 2 | package com.esri.json.hadoop; 3 | import java.io.IOException; 4 | import org.apache.hadoop.conf.Configuration; 5 | * Obsolete - renamed to and superseded by EnclosedEsriJsonRecordReader 6 | @Deprecated in v1.2 7 | public class EnclosedJsonRecordReader extends EnclosedEsriJsonRecordReader { 8 | public EnclosedJsonRecordReader() throws IOException { // explicit just to declare exception 9 | super(); 10 | } 11 | public EnclosedJsonRecordReader(org.apache.hadoop.mapred.InputSplit split, 12 | Configuration conf) throws IOException { 13 | super(split, conf); 14 | } 15 | } 16 | * */ 17 | -------------------------------------------------------------------------------- /json/src/main/java/com/esri/json/hadoop/UnenclosedEsriJsonInputFormat.java: 
-------------------------------------------------------------------------------- 1 | package com.esri.json.hadoop; 2 | 3 | import java.io.IOException; 4 | 5 | import org.apache.hadoop.io.LongWritable; 6 | import org.apache.hadoop.io.Text; 7 | import org.apache.hadoop.mapreduce.RecordReader; 8 | import org.apache.hadoop.mapreduce.TaskAttemptContext; 9 | import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; 10 | import org.apache.hadoop.mapreduce.InputSplit; 11 | 12 | // MRv2 by inheritance; MRv1 by composition/hybrid 13 | public class UnenclosedEsriJsonInputFormat extends FileInputFormat 14 | implements org.apache.hadoop.mapred.InputFormat { 15 | 16 | // Mrv1 implementation member will be used only for getSplits(). 17 | // Will be instantiated only when Mrv1 in use. 18 | private org.apache.hadoop.mapred.FileInputFormat baseIfmt = null; 19 | 20 | @Override 21 | public RecordReader createRecordReader(InputSplit arg0, TaskAttemptContext arg1) 22 | throws IOException, InterruptedException { // MRv2 23 | return new UnenclosedEsriJsonRecordReader(); 24 | } 25 | 26 | @Override 27 | public org.apache.hadoop.mapred.RecordReader getRecordReader( // MRv1 28 | org.apache.hadoop.mapred.InputSplit arg0, 29 | org.apache.hadoop.mapred.JobConf arg1, 30 | org.apache.hadoop.mapred.Reporter arg2) throws IOException { 31 | return new UnenclosedEsriJsonRecordReader(arg0, arg1); 32 | } 33 | 34 | @Override 35 | public org.apache.hadoop.mapred.InputSplit[] getSplits( // MRv1 36 | org.apache.hadoop.mapred.JobConf arg0, 37 | int arg1) throws IOException { 38 | baseIfmt = (baseIfmt!=null) ? 
baseIfmt : 39 | new org.apache.hadoop.mapred.FileInputFormat() { 40 | // Dummy method to satisfy interface but not meant to be called 41 | public org.apache.hadoop.mapred.RecordReader getRecordReader( 42 | org.apache.hadoop.mapred.InputSplit ign0, 43 | org.apache.hadoop.mapred.JobConf ign1, 44 | org.apache.hadoop.mapred.Reporter ign2) throws IOException { 45 | throw new UnsupportedOperationException("not meant to be called"); 46 | } 47 | }; 48 | return baseIfmt.getSplits(arg0, arg1); 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /json/src/main/java/com/esri/json/hadoop/UnenclosedGeoJsonInputFormat.java: -------------------------------------------------------------------------------- 1 | package com.esri.json.hadoop; 2 | 3 | import java.io.IOException; 4 | 5 | import org.apache.hadoop.io.LongWritable; 6 | import org.apache.hadoop.io.Text; 7 | import org.apache.hadoop.mapreduce.RecordReader; 8 | import org.apache.hadoop.mapreduce.TaskAttemptContext; 9 | import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; 10 | import org.apache.hadoop.mapreduce.InputSplit; 11 | 12 | // MRv2 by inheritance; MRv1 by composition/hybrid 13 | public class UnenclosedGeoJsonInputFormat extends FileInputFormat 14 | implements org.apache.hadoop.mapred.InputFormat { 15 | 16 | // Mrv1 implementation member will be used only for getSplits(). 17 | // Will be instantiated only when Mrv1 in use. 
18 | private org.apache.hadoop.mapred.FileInputFormat ifmtMrv1 = null; 19 | 20 | @Override 21 | public RecordReader createRecordReader(InputSplit arg0, TaskAttemptContext arg1) 22 | throws IOException, InterruptedException { // MRv2 23 | return new UnenclosedGeoJsonRecordReader(); 24 | } 25 | 26 | @Override 27 | public org.apache.hadoop.mapred.RecordReader getRecordReader( // MRv1 28 | org.apache.hadoop.mapred.InputSplit arg0, 29 | org.apache.hadoop.mapred.JobConf arg1, 30 | org.apache.hadoop.mapred.Reporter arg2) throws IOException { 31 | return new UnenclosedGeoJsonRecordReader(arg0, arg1); 32 | } 33 | 34 | @Override 35 | public org.apache.hadoop.mapred.InputSplit[] getSplits( // MRv1 36 | org.apache.hadoop.mapred.JobConf arg0, 37 | int arg1) throws IOException { 38 | ifmtMrv1 = (ifmtMrv1!=null) ? ifmtMrv1 : 39 | new org.apache.hadoop.mapred.FileInputFormat() { 40 | // Dummy method to satisfy interface but not meant to be called 41 | public org.apache.hadoop.mapred.RecordReader getRecordReader( 42 | org.apache.hadoop.mapred.InputSplit ign0, 43 | org.apache.hadoop.mapred.JobConf ign1, 44 | org.apache.hadoop.mapred.Reporter ign2) throws IOException { 45 | throw new UnsupportedOperationException("not meant to be called"); 46 | } 47 | }; 48 | return ifmtMrv1.getSplits(arg0, arg1); 49 | } 50 | 51 | } 52 | -------------------------------------------------------------------------------- /json/src/main/java/com/esri/json/hadoop/UnenclosedJsonInputFormat.java: -------------------------------------------------------------------------------- 1 | // package com.esri.json.hadoop; 2 | // Obsolete - @Deprecated in v1.2 - superseded by UnenclosedEsriJsonInputFormat 3 | // public class UnenclosedJsonInputFormat extends UnenclosedEsriJsonInputFormat {} 4 | -------------------------------------------------------------------------------- /json/src/main/java/com/esri/json/hadoop/UnenclosedJsonRecordReader.java: 
-------------------------------------------------------------------------------- 1 | /* * 2 | package com.esri.json.hadoop; 3 | import java.io.IOException; 4 | import org.apache.hadoop.conf.Configuration; 5 | * Obsolete renamed to and superseded by UnenclosedEsriJsonRecordReader 6 | @Deprecated in v1.2 7 | public class UnenclosedJsonRecordReader extends UnenclosedEsriJsonRecordReader { 8 | public UnenclosedJsonRecordReader() throws IOException { // explicit just to declare exception 9 | super(); 10 | } 11 | public UnenclosedJsonRecordReader(org.apache.hadoop.mapred.InputSplit split, 12 | Configuration conf) throws IOException { 13 | super(split, conf); 14 | } 15 | } 16 | * */ 17 | -------------------------------------------------------------------------------- /json/src/main/java/com/esri/json/serializer/GeometryJsonSerializer.java: -------------------------------------------------------------------------------- 1 | package com.esri.json.serializer; 2 | 3 | import java.io.IOException; 4 | 5 | import com.fasterxml.jackson.core.JsonGenerator; 6 | import com.fasterxml.jackson.core.JsonProcessingException; 7 | import com.fasterxml.jackson.databind.JsonSerializer; 8 | import com.fasterxml.jackson.databind.SerializerProvider; 9 | 10 | import com.esri.core.geometry.Geometry; 11 | import com.esri.core.geometry.GeometryEngine; 12 | 13 | public class GeometryJsonSerializer extends JsonSerializer { 14 | 15 | @Override 16 | public void serialize(Geometry geometry, JsonGenerator jsonGenerator, 17 | SerializerProvider arg2) throws IOException, 18 | JsonProcessingException { 19 | 20 | jsonGenerator.writeRawValue(GeometryEngine.geometryToJson(null, geometry)); 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /json/src/main/java/com/esri/json/serializer/GeometryTypeJsonSerializer.java: -------------------------------------------------------------------------------- 1 | package com.esri.json.serializer; 2 | 3 | import 
java.io.IOException; 4 | 5 | import com.fasterxml.jackson.core.JsonGenerator; 6 | import com.fasterxml.jackson.core.JsonProcessingException; 7 | import com.fasterxml.jackson.databind.JsonSerializer; 8 | import com.fasterxml.jackson.databind.SerializerProvider; 9 | 10 | import com.esri.core.geometry.Geometry; 11 | import com.esri.core.geometry.Geometry.Type; 12 | 13 | public class GeometryTypeJsonSerializer extends JsonSerializer<Type>{ 14 | 15 | @Override 16 | public void serialize(Type geometryType, JsonGenerator jsonGenerator, SerializerProvider arg2) 17 | throws IOException, JsonProcessingException { 18 | jsonGenerator.writeString("esriGeometry" + geometryType); 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /json/src/main/java/com/esri/json/serializer/SpatialReferenceJsonSerializer.java: -------------------------------------------------------------------------------- 1 | package com.esri.json.serializer; 2 | 3 | import java.io.IOException; 4 | 5 | import com.fasterxml.jackson.core.JsonGenerator; 6 | import com.fasterxml.jackson.core.JsonProcessingException; 7 | import com.fasterxml.jackson.databind.JsonSerializer; 8 | import com.fasterxml.jackson.databind.SerializerProvider; 9 | 10 | import com.esri.core.geometry.SpatialReference; 11 | 12 | public class SpatialReferenceJsonSerializer extends JsonSerializer<SpatialReference>{ 13 | 14 | @Override 15 | public void serialize(SpatialReference spatialReference, JsonGenerator jsonGenerator, 16 | SerializerProvider arg2) throws IOException, 17 | JsonProcessingException { 18 | 19 | int wkid = spatialReference.getID(); 20 | 21 | jsonGenerator.writeStartObject(); 22 | jsonGenerator.writeObjectField("wkid", wkid); 23 | jsonGenerator.writeEndObject(); 24 | } 25 | 26 | } 27 | -------------------------------------------------------------------------------- /json/src/test/resources/com/esri/json/hadoop/sample-geojs-area.json: 
-------------------------------------------------------------------------------- 1 | {"type": "FeatureCollection", 2 | "crs" : { 3 | "type" : "name", 4 | "properties" : { 5 | "name" : "urn:ogc:def:crs:EPSG:4301" 6 | } 7 | }, 8 | "features": [{ 9 | "properties":{}, 10 | "geometry" : { 11 | "type":"Polygon", 12 | "coordinates":[[[135.0,38.0],[136.0,39.0],[137.0,38.0],[136.0,37.0],[135.0,38.0]]] 13 | } 14 | }] 15 | } 16 | -------------------------------------------------------------------------------- /json/src/test/resources/com/esri/json/hadoop/sample-study-area.json: -------------------------------------------------------------------------------- 1 | {"fields": [], 2 | "hasZ": false, 3 | "hasM": false, 4 | "spatialReference": {"wkid": 4301}, 5 | "features": [{ 6 | "attributes":{}, 7 | "geometry":{"rings":[[[135.0,38.0],[136.0,39.0],[137.0,38.0],[136.0,37.0],[135.0,38.0]]]} 8 | }] 9 | } 10 | -------------------------------------------------------------------------------- /json/src/test/resources/com/esri/json/hadoop/unenclosed-geojs-escape.json: -------------------------------------------------------------------------------- 1 | {"type":"Feature","properties":{"text":"0\"b"},"geometry":{}}{"type":"Feature","properties":{"text":"1\"d"},"geometry":{}}{"type":"Feature","properties":{"text":"2\"blah\""},"geometry":{}}{"type":"Feature","properties":{"text":"3\\f"},"geometry":{}}{"type":"Feature","properties":{"text":"4\"\\\"h"},"geometry":{}}{"type":"Feature","properties":{"text":"5"},"geometry":{}}{"type":"Feature","properties":{"text":"6{}"},"geometry":{}}{"type":"Feature","properties":{"text":"7}{"},"geometry":{}}{"type":"Feature","properties":{"text":"8","test":"}{"},"geometry":{}} -------------------------------------------------------------------------------- /json/src/test/resources/com/esri/json/hadoop/unenclosed-geojs-simple.json: -------------------------------------------------------------------------------- 1 | 
{"type":"Feature","properties":{"index":0},"geometry":{}}{"type":"Feature","properties":{"index":1},"geometry":{}}{"type":"Feature","properties":{"index":2},"geometry":{}}{"type":"Feature","properties":{"index":3},"geometry":{}}{"type":"Feature","properties":{"index":4},"geometry":{}}{"type":"Feature","properties":{"index":5},"geometry":{}}{"type":"Feature","properties":{"index":6},"geometry":{}}{"type":"Feature","properties":{"index":7},"geometry":{}}{"type":"Feature","properties":{"index":8,"test":"}{"},"geometry":{}}{"type":"Feature","properties":{"index":9},"geometry":{}} -------------------------------------------------------------------------------- /json/src/test/resources/com/esri/json/hadoop/unenclosed-geojs-type.json: -------------------------------------------------------------------------------- 1 | {"type":"Feature","properties":{"type":"Feature","index":0},"geometry":{}}{"type":"Feature","properties":{"type":"Feature","index":1},"geometry":{}}{"type":"Feature","properties":{"type":"Feature","index":2},"geometry":{}}{"type":"Feature","properties":{"type":"Feature","index":3},"geometry":{}}{"type":"Feature","properties":{"type":"Feature","index":4},"geometry":{}}{"type":"Feature","properties":{"type":"Feature","index":5},"geometry":{}}{"type":"Feature","properties":{"type":"Feature","index":6},"geometry":{}}{"properties":{"type":"Feature","index":7},"type":"Feature","geometry":{}} -------------------------------------------------------------------------------- /json/src/test/resources/com/esri/json/hadoop/unenclosed-json-attrs.json: -------------------------------------------------------------------------------- 1 | {"attributes":{"attributes":0},"geometry":{}}{"attributes":{"geometry":1},"geometry":{}}{"attributes":{"attributes":2},"geometry":{}}{"attributes":{"geometry":3},"geometry":{}}{"attributes":{"attributes":4},"geometry":{}} -------------------------------------------------------------------------------- 
/json/src/test/resources/com/esri/json/hadoop/unenclosed-json-chars.json: -------------------------------------------------------------------------------- 1 | {"attributes":{"text":"0á"},"geometry":{}}{"attributes":{"text":"1é"},"geometry":{}}{"attributes":{"text":"2Í"},"geometry":{}}{"attributes":{"text":"3ò"},"geometry":{}}{"attributes":{"text":"4ü"},"geometry":{}}{"attributes":{"text":"5ñ"},"geometry":{}}{"attributes":{"text":"6«"},"geometry":{}}{"attributes":{"text":"7»"},"geometry":{}}{"attributes":{"text":"8õ"},"geometry":{}} -------------------------------------------------------------------------------- /json/src/test/resources/com/esri/json/hadoop/unenclosed-json-comma.json: -------------------------------------------------------------------------------- 1 | {"attributes":{"rowid": 0},"geometry":{"x":15.0,"y":5.0}},{"attributes":{"rowid": 1},"geometry":{"x":5,"y":35}},{"attributes":{"rowid": 2},"geometry":{"x":23,"y":23}},{"attributes":{"rowid": 3},"geometry":{"x":32,"y":22}} 2 | -------------------------------------------------------------------------------- /json/src/test/resources/com/esri/json/hadoop/unenclosed-json-esc-points.json: -------------------------------------------------------------------------------- 1 | {"attributes":{"rowid": 1505, "text": "\""},"geometry":{"x":15.0,"y":5.0}} 2 | {"attributes":{"rowid": 535, "text": "\'"},"geometry":{"x":5,"y":35}} 3 | {"attributes":{"rowid": 2323, "text": "\\"},"geometry":{"x":23,"y":23}} 4 | {"attributes":{"rowid": 3222, "text": "\}"},"geometry":{"x":32,"y":22}} 5 | {"attributes":{"rowid": 3728, "text": "\{"},"geometry":{"x":37,"y":28}} 6 | {"attributes":{"rowid": 2233, "text": "\,"},"geometry":{"x":22,"y":33}} 7 | {"attributes":{"rowid": 2838, "text": "\:"},"geometry":{"x":28,"y":38}} 8 | {"attributes":{"rowid": 3434, "text": "\1"},"geometry":{"x":34,"y":34}} 9 | {"attributes":{"rowid": 6219, "text": "\2"},"geometry":{"x":62,"y":19}} 10 | {"attributes":{"rowid": 7114, "text": 
"\3"},"geometry":{"x":71,"y":14}} 11 | {"attributes":{"rowid": 7525, "text": "\a"},"geometry":{"x":75,"y":25}} 12 | {"attributes":{"rowid": 6535, "text": "\b"},"geometry":{"x":65,"y":35}} 13 | {"attributes":{"rowid": 5549, "text": "\C"},"geometry":{"x":55,"y":49}} 14 | {"attributes":{"rowid": 6545, "text": "\D"},"geometry":{"x":65,"y":45}} 15 | {"attributes":{"rowid": 4566, "text": "x\"y\"z"},"geometry":{"x":45,"y":66}} 16 | -------------------------------------------------------------------------------- /json/src/test/resources/com/esri/json/hadoop/unenclosed-json-esc1.json: -------------------------------------------------------------------------------- 1 | {"attributes":{"text":"0b\""},"geometry":{}}{"attributes":{"text":"1d\""},"geometry":{}}{"attributes":{"text":"2\"blah\""},"geometry":{}}{"attributes":{"text":"3\\f"},"geometry":{}}{"attributes":{"text":"4\"\\\"h"},"geometry":{}}{"attributes":{"text":"5"},"geometry":{}}{"attributes":{"text":"6"},"geometry":{}}{"attributes":{"text":"7{}"},"geometry":{}}{"attributes":{"text":"8"},"geometry":{}} -------------------------------------------------------------------------------- /json/src/test/resources/com/esri/json/hadoop/unenclosed-json-esc2.json: -------------------------------------------------------------------------------- 1 | {"attributes":{"text":"0b\'"},"geometry":{}}{"attributes":{"text":"1d\'"},"geometry":{}}{"attributes":{"text":"2\"blah\""},"geometry":{}}{"attributes":{"text":"3\\f"},"geometry":{}}{"attributes":{"text":"4\"\\\"h"},"geometry":{}}{"attributes":{"text":"5"},"geometry":{}}{"attributes":{"text":"6"},"geometry":{}}{"attributes":{"text":"7{}"},"geometry":{}}{"attributes":{"text":"8"},"geometry":{}} -------------------------------------------------------------------------------- /json/src/test/resources/com/esri/json/hadoop/unenclosed-json-esc3.json: -------------------------------------------------------------------------------- 1 | 
{"attributes":{"text":"0b\\"},"geometry":{}}{"attributes":{"text":"1d\\"},"geometry":{}}{"attributes":{"text":"2\"blah\""},"geometry":{}}{"attributes":{"text":"3\\f"},"geometry":{}}{"attributes":{"text":"4\"\\\"h"},"geometry":{}}{"attributes":{"text":"5"},"geometry":{}}{"attributes":{"text":"6"},"geometry":{}}{"attributes":{"text":"7{}"},"geometry":{}}{"attributes":{"text":"8"},"geometry":{}} -------------------------------------------------------------------------------- /json/src/test/resources/com/esri/json/hadoop/unenclosed-json-esc4.json: -------------------------------------------------------------------------------- 1 | {"attributes":{"text":"0b\}"},"geometry":{}}{"attributes":{"text":"1d\}"},"geometry":{}}{"attributes":{"text":"2\"blah\""},"geometry":{}}{"attributes":{"text":"3\\f"},"geometry":{}}{"attributes":{"text":"4\"\\\"h"},"geometry":{}}{"attributes":{"text":"5"},"geometry":{}}{"attributes":{"text":"6"},"geometry":{}}{"attributes":{"text":"7{}"},"geometry":{}}{"attributes":{"text":"8"},"geometry":{}} -------------------------------------------------------------------------------- /json/src/test/resources/com/esri/json/hadoop/unenclosed-json-esc5.json: -------------------------------------------------------------------------------- 1 | {"attributes":{"text":"0b\{"},"geometry":{}}{"attributes":{"text":"1d\{"},"geometry":{}}{"attributes":{"text":"2\"blah\""},"geometry":{}}{"attributes":{"text":"3\\f"},"geometry":{}}{"attributes":{"text":"4\"\\\"h"},"geometry":{}}{"geometry":{},"attributes":{"text":"5\{"}}{"attributes":{"text":"6"},"geometry":{}}{"attributes":{"text":"7{}"},"geometry":{}}{"attributes":{"text":"8"},"geometry":{}} -------------------------------------------------------------------------------- /json/src/test/resources/com/esri/json/hadoop/unenclosed-json-escape.json: -------------------------------------------------------------------------------- 1 | 
{"attributes":{"text":"0\"b"},"geometry":{}}{"attributes":{"text":"1\"d"},"geometry":{}}{"attributes":{"text":"2\"blah\""},"geometry":{}}{"attributes":{"text":"3\\f"},"geometry":{}}{"attributes":{"text":"4\"\\\"h"},"geometry":{}}{"attributes":{"text":"5"},"geometry":{}}{"attributes":{"text":"6"},"geometry":{}}{"attributes":{"text":"7{}"},"geometry":{}}{"attributes":{"text":"8"},"geometry":{}} -------------------------------------------------------------------------------- /json/src/test/resources/com/esri/json/hadoop/unenclosed-json-geom-first.json: -------------------------------------------------------------------------------- 1 | {"geometry":{},"attributes":{"index":0,"text":"\""}}{"attributes":{"index":1},"geometry":{}} -------------------------------------------------------------------------------- /json/src/test/resources/com/esri/json/hadoop/unenclosed-json-return.json: -------------------------------------------------------------------------------- 1 | {"attributes":{"rowid": 0},"geometry":{"x":15.0,"y":5.0}} 2 | {"attributes":{"rowid": 1},"geometry":{"x":5,"y":35}} 3 | {"attributes":{"rowid": 2},"geometry":{"x":23,"y":23}} 4 | {"attributes":{"rowid": 3},"geometry":{"x":32,"y":22}} 5 | -------------------------------------------------------------------------------- /json/src/test/resources/com/esri/json/hadoop/unenclosed-json-simple.json: -------------------------------------------------------------------------------- 1 | {"attributes":{"index":0},"geometry":{}}{"attributes":{"index":1},"geometry":{}}{"attributes":{"index":2},"geometry":{}}{"attributes":{"index":3},"geometry":{}}{"attributes":{"index":4},"geometry":{}}{"attributes":{"index":5},"geometry":{}}{"attributes":{"index":6},"geometry":{}}{"attributes":{"index":7},"geometry":{}}{"attributes":{"index":8,"test":"}{"},"geometry":{}}{"attributes":{"index":9},"geometry":{}} --------------------------------------------------------------------------------