├── .gitignore ├── .sbtopts ├── .travis.yml ├── .travis ├── build-and-test.sh ├── deploy.sh ├── publish.sh └── run.sh ├── LICENSE ├── README.md ├── benchmark ├── README.md └── src │ └── main │ ├── resources │ └── log4j.properties │ └── scala │ └── geotrellis │ └── pointcloud │ └── raster │ └── ept │ ├── GeoTrellisTINRasterSource.scala │ ├── IDWRasterSourceBench.scala │ ├── JavaTINRasterSource.scala │ ├── JavaTINReprojectRasterSource.scala │ ├── JavaTINResampleRasterSource.scala │ ├── ReadEPTBench.scala │ └── TINRasterSourceBench.scala ├── build.sbt ├── pointcloud └── src │ ├── main │ ├── resources │ │ └── META-INF │ │ │ └── services │ │ │ └── geotrellis.raster.RasterSourceProvider │ └── scala │ │ └── geotrellis │ │ └── pointcloud │ │ ├── layer │ │ ├── DepthKey.scala │ │ ├── LayoutDefinition3D.scala │ │ ├── MapKeyTransform3D.scala │ │ ├── VoxelKey.scala │ │ └── package.scala │ │ ├── raster │ │ ├── GridBounds3D.scala │ │ ├── GridExtent3D.scala │ │ ├── RasterExtent3D.scala │ │ ├── VolumetricGrid.scala │ │ ├── VoxelSize.scala │ │ ├── ept │ │ │ ├── EPTMetadata.scala │ │ │ ├── EPTPath.scala │ │ │ ├── EPTRasterSourceProvider.scala │ │ │ ├── Field.scala │ │ │ ├── IDWRasterSource.scala │ │ │ ├── IDWReprojectRasterSource.scala │ │ │ ├── IDWResampleRasterSource.scala │ │ │ ├── Raw.scala │ │ │ ├── SRS.scala │ │ │ ├── TINRasterSource.scala │ │ │ ├── TINReprojectRasterSource.scala │ │ │ └── TINResampleRasterSource.scala │ │ ├── package.scala │ │ └── rasterize │ │ │ ├── StitchedDelaunayRasterizeMethods.scala │ │ │ ├── points │ │ │ └── IDWRasterizer.scala │ │ │ ├── polygon │ │ │ └── Polygon3DRasterizer.scala │ │ │ └── triangles │ │ │ ├── PDALTrianglesRasterizer.scala │ │ │ └── TrianglesRasterizer.scala │ │ ├── spark │ │ ├── buffer │ │ │ └── BufferUnionable.scala │ │ ├── datasource │ │ │ ├── DataSourceOptions.scala │ │ │ ├── DefaultSource.scala │ │ │ ├── PointCloudRelation.scala │ │ │ └── package.scala │ │ ├── dem │ │ │ ├── Implicits.scala │ │ │ ├── PointCloudDemMethods.scala │ │ │ 
├── PointCloudToDem.scala │ │ │ ├── PointCloudToDemMethods.scala │ │ │ └── PointToGrid.scala │ │ ├── package.scala │ │ ├── pyramid │ │ │ └── Pyramid.scala │ │ ├── store │ │ │ ├── PointCloudHeader.scala │ │ │ ├── hadoop │ │ │ │ ├── HadoopPointCloudHeader.scala │ │ │ │ ├── HadoopPointCloudRDD.scala │ │ │ │ └── formats │ │ │ │ │ ├── FileStreamRecordReader.scala │ │ │ │ │ └── PointCloudInputFormat.scala │ │ │ ├── package.scala │ │ │ └── s3 │ │ │ │ ├── S3PointCloudHeader.scala │ │ │ │ ├── S3PointCloudInputFormat.scala │ │ │ │ ├── S3PointCloudRDD.scala │ │ │ │ ├── S3StreamRecordReader.scala │ │ │ │ └── S3URIRecordReader.scala │ │ ├── tiling │ │ │ ├── CutPointCloud.scala │ │ │ ├── Implicits.scala │ │ │ └── TilerMethods.scala │ │ └── triangulation │ │ │ └── TinToDem.scala │ │ ├── store │ │ └── avro │ │ │ ├── Implicits.scala │ │ │ ├── codecs │ │ │ └── PointCloudCodecs.scala │ │ │ └── package.scala │ │ ├── util │ │ ├── EitherMethods.scala │ │ └── Filesystem.scala │ │ └── vector │ │ ├── Extent3D.scala │ │ ├── ProjectedExtent3D.scala │ │ └── triangulation │ │ ├── DelaunayPointSet.scala │ │ └── LightPoint.scala │ └── test │ ├── resources │ ├── csv │ │ └── test-pdal.csv │ ├── las │ │ ├── 1.2-with-color.las │ │ └── files │ │ │ ├── 1.2-with-color_1.las │ │ │ ├── 1.2-with-color_2.las │ │ │ ├── 1.2-with-color_3.las │ │ │ └── 1.2-with-color_4.las │ ├── red-rocks │ │ ├── ept-build.json │ │ ├── ept-data │ │ │ ├── 0-0-0-0.laz │ │ │ ├── 1-0-0-0.laz │ │ │ ├── 1-0-0-1.laz │ │ │ ├── 1-0-1-0.laz │ │ │ ├── 1-0-1-1.laz │ │ │ ├── 1-1-0-0.laz │ │ │ ├── 1-1-1-0.laz │ │ │ ├── 1-1-1-1.laz │ │ │ ├── 2-0-0-2.laz │ │ │ ├── 2-0-1-2.laz │ │ │ ├── 2-0-2-2.laz │ │ │ ├── 2-0-3-2.laz │ │ │ ├── 2-1-0-1.laz │ │ │ ├── 2-1-1-1.laz │ │ │ ├── 2-1-1-2.laz │ │ │ ├── 2-1-2-2.laz │ │ │ ├── 2-1-3-1.laz │ │ │ ├── 2-1-3-2.laz │ │ │ ├── 2-2-0-1.laz │ │ │ ├── 2-2-1-1.laz │ │ │ ├── 2-2-2-1.laz │ │ │ ├── 2-2-3-1.laz │ │ │ ├── 2-3-1-1.laz │ │ │ ├── 2-3-2-1.laz │ │ │ ├── 2-3-3-1.laz │ │ │ ├── 3-1-2-4.laz │ │ │ ├── 3-1-3-4.laz 
│ │ │ ├── 3-1-4-4.laz │ │ │ ├── 3-1-5-4.laz │ │ │ ├── 3-2-0-3.laz │ │ │ ├── 3-2-1-3.laz │ │ │ ├── 3-2-2-3.laz │ │ │ ├── 3-2-3-3.laz │ │ │ ├── 3-2-3-4.laz │ │ │ ├── 3-2-4-4.laz │ │ │ ├── 3-2-5-4.laz │ │ │ ├── 3-2-6-4.laz │ │ │ ├── 3-2-7-4.laz │ │ │ ├── 3-3-1-3.laz │ │ │ ├── 3-3-2-3.laz │ │ │ ├── 3-3-3-3.laz │ │ │ ├── 3-3-3-4.laz │ │ │ ├── 3-3-4-4.laz │ │ │ ├── 3-3-5-4.laz │ │ │ ├── 3-3-6-4.laz │ │ │ ├── 3-3-7-3.laz │ │ │ ├── 3-3-7-4.laz │ │ │ ├── 3-4-1-3.laz │ │ │ ├── 3-4-2-3.laz │ │ │ ├── 3-4-3-3.laz │ │ │ ├── 3-4-4-3.laz │ │ │ ├── 3-4-5-3.laz │ │ │ ├── 3-4-6-3.laz │ │ │ ├── 3-4-7-3.laz │ │ │ ├── 3-5-2-3.laz │ │ │ ├── 3-5-3-3.laz │ │ │ ├── 3-5-4-3.laz │ │ │ ├── 3-5-5-3.laz │ │ │ ├── 3-5-6-3.laz │ │ │ ├── 3-5-7-3.laz │ │ │ ├── 3-6-2-3.laz │ │ │ ├── 3-6-3-3.laz │ │ │ ├── 3-6-4-3.laz │ │ │ ├── 3-6-5-3.laz │ │ │ ├── 3-6-6-3.laz │ │ │ ├── 3-6-7-3.laz │ │ │ ├── 4-10-10-7.laz │ │ │ ├── 4-10-11-7.laz │ │ │ ├── 4-10-13-7.laz │ │ │ ├── 4-10-9-7.laz │ │ │ ├── 4-11-13-7.laz │ │ │ ├── 4-11-14-7.laz │ │ │ ├── 4-11-4-6.laz │ │ │ ├── 4-11-5-6.laz │ │ │ ├── 4-11-6-6.laz │ │ │ ├── 4-11-7-6.laz │ │ │ ├── 4-11-9-7.laz │ │ │ ├── 4-12-10-6.laz │ │ │ ├── 4-12-11-6.laz │ │ │ ├── 4-12-12-6.laz │ │ │ ├── 4-12-7-6.laz │ │ │ ├── 4-12-9-6.laz │ │ │ ├── 4-13-7-6.laz │ │ │ ├── 4-13-9-6.laz │ │ │ ├── 4-3-10-8.laz │ │ │ ├── 4-3-11-8.laz │ │ │ ├── 4-3-7-8.laz │ │ │ ├── 4-3-8-8.laz │ │ │ ├── 4-3-9-8.laz │ │ │ ├── 4-4-11-8.laz │ │ │ ├── 4-4-13-8.laz │ │ │ ├── 4-4-9-8.laz │ │ │ ├── 4-5-1-7.laz │ │ │ ├── 4-5-11-8.laz │ │ │ ├── 4-5-13-8.laz │ │ │ ├── 4-5-14-8.laz │ │ │ ├── 4-5-3-7.laz │ │ │ ├── 4-5-4-7.laz │ │ │ ├── 4-5-5-7.laz │ │ │ ├── 4-5-8-8.laz │ │ │ ├── 4-6-10-8.laz │ │ │ ├── 4-6-3-7.laz │ │ │ ├── 4-6-4-7.laz │ │ │ ├── 4-6-7-8.laz │ │ │ ├── 4-6-8-8.laz │ │ │ ├── 4-7-3-7.laz │ │ │ ├── 4-8-11-7.laz │ │ │ ├── 4-8-14-7.laz │ │ │ ├── 4-8-5-7.laz │ │ │ ├── 4-8-7-7.laz │ │ │ ├── 4-8-9-7.laz │ │ │ ├── 4-9-11-7.laz │ │ │ ├── 4-9-12-7.laz │ │ │ ├── 4-9-13-7.laz │ │ │ ├── 4-9-14-7.laz │ │ │ ├── 4-9-3-7.laz │ 
│ │ ├── 4-9-5-7.laz │ │ │ ├── 4-9-7-7.laz │ │ │ └── 4-9-9-7.laz │ │ ├── ept-hierarchy │ │ │ └── 0-0-0-0.json │ │ ├── ept-sources │ │ │ ├── manifest.json │ │ │ └── red-rocks.json │ │ └── ept.json │ ├── tiff │ │ ├── dem-rasterizer-bug.tiff │ │ └── dem-reprojection-bug.tiff │ └── wkt │ │ └── erringPoints.wkt │ └── scala │ └── geotrellis │ └── pointcloud │ ├── raster │ ├── ept │ │ ├── EPTMetadataSpec.scala │ │ ├── EPTPathSpec.scala │ │ ├── EPTRasterSourceProviderSpec.scala │ │ ├── IDWRasterSourceSpec.scala │ │ └── TINRasterSourceSpec.scala │ └── rasterize │ │ └── triangles │ │ └── TrianglesRasterizerSpec.scala │ └── spark │ ├── PointCloudSpatialTestFiles.scala │ ├── PointCloudTestEnvironment.scala │ ├── buffer │ └── BufferUnionableSpec.scala │ ├── datasource │ └── PointCloudDatasourceSpec.scala │ ├── dem │ └── PointCloudDemSpec.scala │ ├── pyramid │ └── PyramidSpec.scala │ ├── store │ ├── file │ │ ├── FileArrayCoordinateSpatialSpec.scala │ │ └── FilePointCloudSpatialSpec.scala │ ├── hadoop │ │ ├── HadoopArrayCoordinateSpatialSpec.scala │ │ ├── HadoopPackedPointsRDDSpec.scala │ │ └── HadoopPointCloudSpatialSpec.scala │ └── s3 │ │ ├── MockS3Client.scala │ │ ├── S3ArrayCoordinateSpatialSpec.scala │ │ ├── S3PackedPointsRDDSpec.scala │ │ ├── S3PointCloudSpatialSpec.scala │ │ └── S3TestUtils.scala │ └── tiling │ └── PointCloudTilingSpec.scala ├── project ├── Dependencies.scala ├── Environment.scala ├── build.properties └── plugins.sbt ├── sbt └── scripts └── docker └── 1.8.0 └── debian └── spark ├── Dockerfile ├── Dockerfile.mbio └── fs └── opt └── spark └── conf ├── fairscheduler.xml ├── log4j.properties ├── metrics.properties ├── spark-defaults.conf └── spark-env.sh /.gitignore: -------------------------------------------------------------------------------- 1 | index.html 2 | index.js 3 | package.html 4 | lib 5 | site/ 6 | docs/_build/ 7 | 8 | project/boot 9 | project/plugins/project 10 | project/plugins/target 11 | project/target 12 | target 13 | .ensime 14 | \#*# 15 | 
*~ 16 | .#* 17 | .lib 18 | *.aux.xml 19 | *.jar 20 | *.crc 21 | _SUCCESS 22 | hs_err_*.log 23 | core 24 | 25 | *.pyc 26 | .project 27 | .classpath 28 | .cache 29 | .settings 30 | .history 31 | .idea 32 | .DS_Store 33 | *.iml 34 | *.swp 35 | *.swo 36 | *.sublime-* 37 | .vagrant 38 | .metals 39 | 40 | lib 41 | index.html 42 | index.js 43 | .ensime* 44 | 45 | nohup.out 46 | 47 | site/ 48 | -------------------------------------------------------------------------------- /.sbtopts: -------------------------------------------------------------------------------- 1 | -J-Xmx2G 2 | -J-Xms1G 3 | -J-Xss2M 4 | -Djava.awt.headless=true 5 | -Dsbt.color=always 6 | -Dsbt.supershell=false 7 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | # See https://docs.travis-ci.com/user/reference/overview/#Virtualisation-Environment-vs-Operating-System 2 | dist: xenial 3 | 4 | language: scala 5 | 6 | addons: 7 | hostname: localhost 8 | 9 | services: 10 | - docker 11 | 12 | jdk: 13 | - openjdk8 14 | 15 | scala: 16 | - "2.11.12" 17 | - "2.12.12" 18 | 19 | before_install: 20 | - docker run -d --restart=always -p 9091:9000 -e MINIO_ACCESS_KEY=minio -e MINIO_SECRET_KEY=password minio/minio:RELEASE.2019-05-02T19-07-09Z server /data 21 | 22 | before_cache: 23 | - find $HOME/.ivy2/cache -name "ivydata-*.properties" -type f -delete 24 | - find $HOME/.sbt -name "*.lock" -type f -delete 25 | - find $HOME/.coursier/cache -name "*.lock" -type f -delete 26 | 27 | cache: 28 | directories: 29 | - $HOME/.ivy2 30 | - $HOME/.sbt 31 | - $HOME/.coursier 32 | 33 | script: 34 | - .travis/run.sh 35 | 36 | notifications: 37 | email: 38 | recipients: 39 | - lossyrob@gmail.com 40 | - echeipesh@gmail.com 41 | - gr.pomadchin@gmail.com 42 | slack: 43 | rooms: 44 | secure:
jsY+v/UWRwknxzxuOt3SWUXmfhVm5yE1v1sMtZqM+Vag6vUb8E+oL9XfGYbn7AmYcViP5ZsLPU88rZqwwHg5GgWKfD+34r3C2ZxG+zRybm9q1m3VwsG/ykmHrK47wuSiqBoX4/0rcX86drtL413gQ3bKWt7Vl80ff0AVCw/3wSnQjcHHvfJLBhwUpwD8AX791E8/ITT98+B1+kaIt1Lw13QJ1MsoiIuB8KnfRABA4DIuRkXPAWwr3zt0dcwHqIfrow97KodHQnLr19VU9D7Rfv6pDGRBSMTObXh1/z/4Lk5k+4oDXJhH6wzDDAi5w/2/fb9q0tphx/XCUHolYIJDVNokkJRfoLrvwn2a8iAI/rfqDVjM8nnVIIlYhuodSrlJvgzig7IMCVo4qIp31XJNzNlI5D7OK8Vm7VBEVhm1rUt+d9+oz5vN4KXk4lozQ37vNmRdZzdqvEAeuHWcOSZ/HJE7TG8josCXK/TIvI14wZ+xguPqUlEuHs21EIQQzDxL1pD8VW+DXPvzKca/ZeH+BSYyHHzW+ijkT/Yyr8aGfbofp2aXrLHZdelYejPSHdgiOZVnQcFdMjIqPYHWwHe6V/lo7QvYlbwMg98432Sgne6GKUy96hMT3JHxRKG7E+6XpJ3g4d2U4B286Td1Vbm0pu1hV0HRdrYkgkSO3w1UWC4= 45 | 46 | before_deploy: 47 | - export VERSION_SUFFIX="-${TRAVIS_COMMIT:0:7}" 48 | 49 | deploy: 50 | - provider: script 51 | script: .travis/deploy.sh 52 | skip_cleanup: true 53 | on: 54 | repo: geotrellis/geotrellis-pointcloud 55 | branch: master 56 | jdk: openjdk8 57 | 58 | after_deploy: 59 | - rm -f "${HOME}/.bintray/.credentials" 60 | -------------------------------------------------------------------------------- /.travis/build-and-test.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | ./sbt -J-Xmx2G "++$TRAVIS_SCALA_VERSION" "project pointcloud" test || { exit 1; } 3 | -------------------------------------------------------------------------------- /.travis/deploy.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | docker run -it --net=host \ 4 | -v $HOME/.ivy2:/root/.ivy2 \ 5 | -v $HOME/.coursier:/root/.coursier \ 6 | -v $HOME/.sbt:/root/.sbt \ 7 | -v $TRAVIS_BUILD_DIR:/geotrellis-pointcloud \ 8 | -e TRAVIS_SCALA_VERSION=$TRAVIS_SCALA_VERSION \ 9 | -e TRAVIS_COMMIT=$TRAVIS_COMMIT \ 10 | -e TRAVIS_JDK_VERSION=$TRAVIS_JDK_VERSION \ 11 | -e BINTRAY_USER=$BINTRAY_USER \ 12 | -e BINTRAY_API_KEY=$BINTRAY_API_KEY \ 13 | -e BINTRAY_PASS=$BINTRAY_API_KEY \ 14 | -e 
VERSION_SUFFIX=$VERSION_SUFFIX daunnc/pdal-ubuntu:2.2.0 /bin/bash -c "cd /geotrellis-pointcloud; .travis/publish.sh" 15 | -------------------------------------------------------------------------------- /.travis/publish.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | ./sbt "++$TRAVIS_SCALA_VERSION" "project pointcloud" publish 4 | -------------------------------------------------------------------------------- /.travis/run.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | docker run -it --net=host \ 4 | -v $HOME/.ivy2:/root/.ivy2 \ 5 | -v $HOME/.coursier:/root/.coursier \ 6 | -v $HOME/.sbt:/root/.sbt \ 7 | -v $TRAVIS_BUILD_DIR:/geotrellis-pointcloud \ 8 | -e TRAVIS_SCALA_VERSION=$TRAVIS_SCALA_VERSION \ 9 | -e TRAVIS_COMMIT=$TRAVIS_COMMIT \ 10 | -e TRAVIS_JDK_VERSION=$TRAVIS_JDK_VERSION daunnc/pdal-ubuntu:2.2.0 /bin/bash -c "cd /geotrellis-pointcloud; .travis/build-and-test.sh" 11 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # GeoTrellis PointCloud 2 | 3 | [![Build Status](https://travis-ci.org/geotrellis/geotrellis-pointcloud.svg?branch=master)](https://travis-ci.org/geotrellis/geotrellis-pointcloud) 4 | [![Bintray](https://api.bintray.com/packages/azavea/geotrellis/geotrellis-pointcloud/images/download.svg)](https://bintray.com/azavea/geotrellis/geotrellis-pointcloud) 5 | [![Join the chat at https://gitter.im/geotrellis/geotrellis](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/geotrellis/geotrellis?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) 6 | 7 | GeoTrellis PointCloud uses PDAL bindings to work with PointCloud data. 8 | 9 | > PDAL is Point Data Abstraction Library. 10 | > GDAL for point cloud data. 
11 | - [pdal.io](https://pdal.io/) 12 | 13 | PDAL supports reading pointcloud data in a variety of formats. 14 | GeoTrellis PDAL allows reading PointCloud data in any PDAL-supported format into RDDs 15 | and rasterizing this data. It's also possible to store data as a GeoTrellis layer 16 | without rasterizing; this feature allows rasterizing data on demand. 17 | 18 | ## GeoTrellis PointCloud with SBT 19 | 20 | ```scala 21 | scalaVersion := "2.12.12" 22 | 23 | libraryDependencies ++= Seq( 24 | "com.azavea.geotrellis" %% "geotrellis-pointcloud" % "" 25 | ) 26 | 27 | resolvers ++= Seq( 28 | "GeoTrellis Bintray Repository" at "http://dl.bintray.com/azavea/geotrellis/" 29 | ) 30 | ``` 31 | 32 | NOTE: Using GeoTrellis PointCloud requires a working installation of 33 | [PDAL](https://pdal.io/). 34 | -------------------------------------------------------------------------------- /benchmark/src/main/resources/log4j.properties: -------------------------------------------------------------------------------- 1 | # Set everything to be logged to the console 2 | log4j.rootCategory=INFO, console 3 | log4j.appender.console=org.apache.log4j.ConsoleAppender 4 | log4j.appender.console.target=System.out 5 | log4j.appender.console.layout=org.apache.log4j.PatternLayout 6 | log4j.appender.console.layout.ConversionPattern=%d{HH:mm:ss} %c{1}: [%X{IMAGE_ID},%X{FIELD_GEOHASH},%X{TRACE_ID}] %m%n 7 | -------------------------------------------------------------------------------- /benchmark/src/main/scala/geotrellis/pointcloud/raster/ept/IDWRasterSourceBench.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License.
package geotrellis.pointcloud.raster.ept

import geotrellis.raster.{CellSize, GridExtent, MultibandTile, Raster, RasterExtent}
import geotrellis.raster.io.geotiff.Auto
import geotrellis.proj4.LatLng

import org.openjdk.jmh.annotations._
import java.util.concurrent.TimeUnit

/** JMH benchmark comparing full-extent reads of [[TINRasterSource]] (Delaunay/TIN
  * interpolation) against [[IDWRasterSource]] (inverse-distance weighting) over the
  * same EPT catalog — the red-rocks test data set bundled with the project.
  *
  * Both sources use `overviewStrategy = Auto(6)`, i.e. a coarse overview level, so
  * the comparison is between interpolation strategies rather than data volumes.
  */
@BenchmarkMode(Array(Mode.AverageTime))
@State(Scope.Benchmark)
@OutputTimeUnit(TimeUnit.MILLISECONDS)
class IDWRasterSourceBench {
  // Path is relative to the benchmark subproject's working directory.
  val catalogPath = "../pointcloud/src/test/resources/red-rocks"

  /** This bench benchmarks the actual PDAL reads, not the RasterSource initialization time. */
  val tin = TINRasterSource(catalogPath, overviewStrategy = Auto(6))
  val idw = IDWRasterSource(catalogPath, overviewStrategy = Auto(6))

  /**
   * jmh:run -i 10 -wi 5 -f1 -t1 .*IDWRasterSourceBench.*
   *
   * [info] Benchmark                                    Mode  Cnt     Score      Error  Units
   * [info] IDWRasterSourceBench.IDWRasterSourceReadAll  avgt   10  7923.583 ± 2071.873  ms/op
   * [info] IDWRasterSourceBench.TINRasterSourceReadAll  avgt   10   734.133 ±  237.725  ms/op
   */

  @Benchmark
  def TINRasterSourceReadAll(): Option[Raster[MultibandTile]] = tin.read()

  @Benchmark
  def IDWRasterSourceReadAll(): Option[Raster[MultibandTile]] = idw.read()

}
package geotrellis.pointcloud.raster.ept

import geotrellis.pointcloud.raster.rasterize.triangles.PDALTrianglesRasterizer
import geotrellis.proj4._
import geotrellis.raster._
import geotrellis.raster.io.geotiff.OverviewStrategy
import geotrellis.vector._

import _root_.io.circe.syntax._
import _root_.io.pdal.pipeline._
import cats.syntax.option._
import org.log4s._

import scala.collection.JavaConverters._

/** RasterSource that reads an Entwine Point Tile (EPT) catalog, triangulates the
  * points with PDAL's Delaunay filter, and rasterizes the resulting mesh on the
  * JVM side via [[PDALTrianglesRasterizer]] (hence the "Java" prefix).
  *
  * @param eptSource      path / URI of the EPT catalog root
  * @param resampleTarget target grid for resampled views of this source
  * @param sourceMetadata precomputed metadata; when absent it is derived from `eptSource`
  * @param threads        PDAL reader thread count; PDAL's default is used when empty
  * @param targetCellType requested output cell type (propagated to derived sources;
  *                       not consulted by `read` in this implementation)
  */
case class JavaTINRasterSource(
  eptSource: String,
  resampleTarget: ResampleTarget = DefaultTarget,
  sourceMetadata: Option[EPTMetadata] = None,
  threads: Option[Int] = None,
  targetCellType: Option[TargetCellType] = None
) extends RasterSource {
  @transient private[this] lazy val logger = getLogger

  // Resolved lazily so constructing the source performs no I/O.
  lazy val metadata: EPTMetadata = sourceMetadata.getOrElse(EPTMetadata(eptSource))

  // Grid / CRS accessors all delegate to the EPT metadata.
  def attributes: Map[String, String] = metadata.attributes
  def attributesForBand(band: Int): Map[String, String] = metadata.attributesForBand(band)
  def bandCount: Int = metadata.bandCount
  def cellType: CellType = metadata.cellType
  def crs: CRS = metadata.crs
  def gridExtent: GridExtent[Long] = metadata.gridExtent
  def name: SourceName = metadata.name
  def resolutions: List[CellSize] = metadata.resolutions

  /** Reprojecting view over the same catalog; already-resolved metadata is reused. */
  def reprojection(targetCRS: CRS, resampleTarget: ResampleTarget, method: ResampleMethod, strategy: OverviewStrategy): JavaTINReprojectRasterSource =
    JavaTINReprojectRasterSource(eptSource, targetCRS, resampleTarget, sourceMetadata = metadata.some, threads = threads, method, targetCellType = targetCellType)

  /** Resampling view over the same catalog; already-resolved metadata is reused. */
  def resample(resampleTarget: ResampleTarget, method: ResampleMethod, strategy: OverviewStrategy): JavaTINResampleRasterSource =
    JavaTINResampleRasterSource(eptSource, resampleTarget, metadata.some, threads, method, targetCellType)

  /** Reads `bounds` by running a `readers.ept ~ filters.delaunay` PDAL pipeline
    * restricted to the corresponding extent, then rasterizing the triangle mesh.
    *
    * NOTE(review): the `bands` argument is ignored — the result is always the single
    * interpolated band wrapped into a [[MultibandTile]].
    */
  def read(bounds: GridBounds[Long], bands: Seq[Int]): Option[Raster[MultibandTile]] = {
    // clamp = false: callers may legitimately request bounds partially outside the grid.
    val targetRegion = gridExtent.extentFor(bounds, clamp = false)
    val Extent(exmin, eymin, exmax, eymax) = targetRegion.extent

    val expression = ReadEpt(
      filename = eptSource,
      resolution = gridExtent.cellSize.resolution.some,
      // PDAL bounds syntax: ([xmin, xmax] pairs per axis as the reader expects)
      bounds = s"([$exmin, $eymin], [$exmax, $eymax])".some,
      threads = threads
    ) ~ FilterDelaunay()

    logger.debug(expression.asJson.spaces4)

    val pipeline = expression toPipeline

    try {
      if(pipeline.validate()) {
        pipeline.execute

        val pointViews = pipeline.getPointViews().asScala.toList
        assert(pointViews.length == 1, "Triangulation pipeline should have single resulting point view")

        // Rasterization happens while the native pipeline is still open.
        pointViews.headOption.map { pv =>
          PDALTrianglesRasterizer
            .apply(pv, RasterExtent(targetRegion, bounds.width.toInt, bounds.height.toInt))
            .mapTile(MultibandTile(_))
        }
      } else None
    } finally pipeline.close() // always release the native PDAL pipeline
  }

  /** Reads the cells covered by `extent`.
    *
    * The extent is shrunk by half a cell on each side before converting to grid
    * bounds — presumably so only cells whose centers fall inside `extent` are
    * selected; confirm against `GridExtent.gridBoundsFor` semantics.
    */
  def read(extent: Extent, bands: Seq[Int]): Option[Raster[MultibandTile]] = {
    val bounds = gridExtent.gridBoundsFor(extent.buffer(- cellSize.width / 2, - cellSize.height / 2), clamp = false)
    read(bounds, bands)
  }

  /** Cell type conversion is deliberately unsupported for DEM-style output. */
  def convert(targetCellType: TargetCellType): RasterSource =
    throw new UnsupportedOperationException("DEM height fields may only be of floating point type")


}
package geotrellis.pointcloud.raster.ept

import geotrellis.pointcloud.raster.rasterize.triangles.PDALTrianglesRasterizer

import geotrellis.raster.{MultibandTile, Raster, RasterExtent}
import geotrellis.vector.Extent
import io.pdal.{PointView, TriangularMesh}
import io.pdal.pipeline.{ENil, FilterDelaunay, ReadEpt}
import cats.syntax.option._
import org.openjdk.jmh.annotations._

import scala.collection.JavaConverters._
import java.util.concurrent.TimeUnit

/** JMH benchmarks for raw EPT access: a plain read, read + Delaunay triangulation,
  * and read + triangulation + rasterization over the bundled red-rocks catalog.
  *
  * The original implementation repeated the validate/execute/headOption/close
  * pipeline boilerplate in all three benchmarks; it is factored into
  * [[withFirstPointView]] below (one extra function call per invocation, which is
  * negligible relative to the native PDAL work being measured).
  */
@BenchmarkMode(Array(Mode.AverageTime))
@State(Scope.Benchmark)
@OutputTimeUnit(TimeUnit.MILLISECONDS)
class ReadEPTBench {
  import io.pdal.Pipeline

  // Path is relative to the benchmark subproject's working directory.
  val catalogPath = "../pointcloud/src/test/resources/red-rocks"

  /** Params are used to match [[DEMRasterSourceBench]] behavior. **/
  val extent @ Extent(exmin, eymin, exmax, eymax) = Extent(481968.0, 4390186.0, 482856.0, 4391074.0)
  val readEpt: ReadEpt = ReadEpt(
    filename = catalogPath,
    bounds = s"([$exmin, $eymin], [$exmax, $eymax])".some,
    resolution = 6.9375.some
  )

  /** Validates and executes `pipeline`, applies `f` to the first resulting point
    * view while the native pipeline is still open, and always closes the pipeline.
    *
    * @return None when validation fails or the pipeline yields no point views
    */
  private def withFirstPointView[T](pipeline: Pipeline)(f: PointView => T): Option[T] =
    try {
      if (pipeline.validate()) {
        pipeline.execute

        pipeline
          .getPointViews()
          .asScala
          .toList
          .headOption
          .map(f)
      } else None
    } finally pipeline.close()

  /**
   * jmh:run -i 10 -wi 5 -f1 -t1 .*ReadEPTBench.*
   *
   * 03/23/2020 #1
   * [info] Benchmark                          Mode  Cnt    Score    Error  Units
   * [info] ReadEPTBench.EPTRasterize          avgt   10  419.780 ± 63.084  ms/op
   * [info] ReadEPTBench.EPTRead               avgt   10  207.858 ± 18.166  ms/op
   * [info] ReadEPTBench.EPTReadTriangulation  avgt   10  388.973 ± 76.595  ms/op
   */
  @Benchmark
  def EPTReadTriangulation(): Option[TriangularMesh] =
    withFirstPointView((readEpt ~ FilterDelaunay()).toPipeline) { _.getTriangularMesh() }

  @Benchmark
  def EPTRead(): Option[PointView] =
    withFirstPointView((readEpt ~ ENil).toPipeline)(identity)

  @Benchmark
  def EPTRasterize(): Option[Raster[MultibandTile]] =
    withFirstPointView((readEpt ~ FilterDelaunay()).toPipeline) {
      PDALTrianglesRasterizer
        .native(_, RasterExtent(extent, 128, 128))
        .mapTile(MultibandTile(_))
    }
}
package geotrellis.pointcloud.layer

/** Key addressing the vertical (depth) dimension of a 3D layer.
  *
  * Complements `SpatialKey`: a [[VoxelKey]] decomposes into a SpatialKey
  * (col, row) plus a DepthKey (layer).
  */
case class DepthKey(depth: Int)
package geotrellis.pointcloud.layer

import geotrellis.raster.TileLayout
import geotrellis.layer._
import geotrellis.vector.Extent

/** A `LayoutDefinition` extended into the third (vertical) dimension.
  *
  * @param extent     the 2D footprint covered by the layout
  * @param tileLayout the 2D tiling of `extent`
  * @param cellDepth  vertical size of a single voxel
  * @param tileLayers number of voxel layers stacked in one tile
  * @param datum      vertical reference passed to [[MapKeyTransform3D]]
  *                   (presumably the z-origin — confirm against its implementation)
  */
case class LayoutDefinition3D(extent: Extent,
                              tileLayout: TileLayout,
                              cellDepth: Double,
                              tileLayers: Int,
                              datum: Double = 0.0) {

  def tileCols = tileLayout.tileCols
  def tileRows = tileLayout.tileRows
  def layoutCols = tileLayout.layoutCols
  def layoutRows = tileLayout.layoutRows

  // Total vertical span of one tile: layer count times per-voxel depth.
  val layerThickness: Double = tileLayers * cellDepth

  lazy val mapTransform = new MapKeyTransform3D(extent, tileLayout.layoutCols, tileLayout.layoutRows, layerThickness, datum)

  /** Projection of this layout back onto 2D, discarding the vertical dimension. */
  def layout2D = LayoutDefinition(extent, tileLayout)
}

object LayoutDefinition3D {
  // NOTE(review): the parameter named `layerThickness` in both overloads below is
  // forwarded into the case class's `cellDepth` slot, yet inside the class
  // `layerThickness = tileLayers * cellDepth`. Either the parameter is misnamed or
  // the wrong value is being passed — confirm against callers before relying on
  // these overloads.
  def apply(layoutDefinition: LayoutDefinition, layerThickness: Double, tileLayers: Int): LayoutDefinition3D =
    LayoutDefinition3D(layoutDefinition.extent, layoutDefinition.tileLayout, layerThickness, tileLayers)

  def apply(layoutDefinition: LayoutDefinition, layerThickness: Double, tileLayers: Int, datum: Double): LayoutDefinition3D =
    LayoutDefinition3D(layoutDefinition.extent, layoutDefinition.tileLayout, layerThickness, tileLayers, datum)
}
package geotrellis.pointcloud.layer

import geotrellis.layer._
import geotrellis.util._

import io.circe.generic.JsonCodec

// --- //

/** A three-dimensional spatial key. A ''voxel'' is the 3D equivalent of a pixel. */
@JsonCodec
case class VoxelKey(col: Int, row: Int, layer: Int) {
  def spatialKey = SpatialKey(col, row)
  def depthKey = DepthKey(layer)
}

/** Typeclass instances for [[VoxelKey]]. These (notably [[Boundable]]) are what
  * allow generic layer code to abstract over the key type ''K''.
  */
object VoxelKey {
  /** Keys sort lexicographically: column first, then row, then layer. */
  implicit def ordering[A <: VoxelKey]: Ordering[A] =
    Ordering.by(key => (key.col, key.row, key.layer))

  implicit object Boundable extends Boundable[VoxelKey] {
    /** Component-wise minimum of the two keys. */
    def minBound(a: VoxelKey, b: VoxelKey) =
      VoxelKey(a.col min b.col, a.row min b.row, a.layer min b.layer)

    /** Component-wise maximum of the two keys. */
    def maxBound(a: VoxelKey, b: VoxelKey) =
      VoxelKey(a.col max b.col, a.row max b.row, a.layer max b.layer)
  }

  /** Lens onto the horizontal (col, row) part of a key. Because [[VoxelKey]] has
    * x and y coordinates it participates in the [[SpatialComponent]] machinery —
    * essentially generic "getters and setters" over key types.
    */
  implicit val spatialComponent: Component[VoxelKey, SpatialKey] = {
    Component[VoxelKey, SpatialKey](
      /* project the SpatialKey out of a VoxelKey */
      key => key.spatialKey,
      /* rebuild a VoxelKey with replaced (x, y), keeping the layer */
      (key, sk) => key.copy(col = sk.col, row = sk.row)
    )
  }

  /** Lens onto the vertical (layer) part of a key. */
  implicit val depthComponent: Component[VoxelKey, DepthKey] = {
    Component[VoxelKey, DepthKey](
      key => key.depthKey,
      (key, dk) => key.copy(layer = dk.depth)
    )
  }
}
15 | */ 16 | 17 | package geotrellis.pointcloud 18 | 19 | import geotrellis.util.Component 20 | 21 | package object layer { 22 | type DepthComponent[K] = Component[K, DepthKey] 23 | 24 | } 25 | -------------------------------------------------------------------------------- /pointcloud/src/main/scala/geotrellis/pointcloud/raster/GridBounds3D.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package geotrellis.pointcloud.raster 18 | 19 | import geotrellis.raster.GridBounds 20 | import spire.math._ 21 | import spire.implicits._ 22 | 23 | case class GridBounds3D[@specialized(Int, Long) N: Integral](colMin: N, rowMin: N, layerMin: N, colMax: N, rowMax: N, layerMax: N) { 24 | def width: N = colMax - colMin + 1 25 | def height: N = rowMax - rowMin + 1 26 | def depth: N = layerMax - layerMin + 1 27 | 28 | def size: N = width * height * depth 29 | } 30 | 31 | object GridBounds3D { 32 | def apply[@specialized(Int, Long) N: Integral](gridBounds: GridBounds[N], layerMin: N, layerMax: N): GridBounds3D[N] = 33 | GridBounds3D(gridBounds.colMin, gridBounds.rowMin, layerMin, gridBounds.colMax, gridBounds.rowMax, layerMax) 34 | } 35 | -------------------------------------------------------------------------------- /pointcloud/src/main/scala/geotrellis/pointcloud/raster/GridExtent3D.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package geotrellis.pointcloud.raster 18 | 19 | import geotrellis.pointcloud.vector.Extent3D 20 | import geotrellis.raster.{CellSize, GeoAttrsError} 21 | 22 | class GridExtent3D(val extent: Extent3D, val cellwidth: Double, val cellheight: Double, val celldepth: Double) extends Serializable { 23 | def this(extent: Extent3D, voxelSize: VoxelSize) = 24 | this(extent, voxelSize.width, voxelSize.height, voxelSize.depth) 25 | 26 | def toRasterExtent3D: RasterExtent3D = { 27 | val targetCols = math.max(1L, math.round(extent.width / cellwidth)) 28 | val targetRows = math.max(1L, math.round(extent.height / cellheight)) 29 | val targetLayers = math.max(1L, math.round(extent.depth / celldepth)) 30 | if(targetCols > Int.MaxValue) { 31 | throw new GeoAttrsError(s"Cannot convert GridExtent into a RasterExtent: number of columns exceeds maximum integer value ($targetCols > ${Int.MaxValue})") 32 | } 33 | if(targetRows > Int.MaxValue) { 34 | throw new GeoAttrsError(s"Cannot convert GridExtent into a RasterExtent: number of rows exceeds maximum integer value ($targetRows > ${Int.MaxValue})") 35 | } 36 | if(targetLayers > Int.MaxValue) { 37 | throw new GeoAttrsError(s"Cannot convert GridExtent into a RasterExtent: number of layers exceeds maximum integer value ($targetLayers > ${Int.MaxValue})") 38 | } 39 | 40 | RasterExtent3D(extent, cellwidth, cellheight, celldepth, targetCols.toInt, targetRows.toInt, targetLayers.toInt) 41 | } 42 | } 43 | 44 | object GridExtent3D { 45 | def apply(extent: Extent3D, voxelSize: VoxelSize) = 46 | new GridExtent3D(extent, voxelSize.width, voxelSize.height, voxelSize.depth) 47 | 48 | def apply(extent: Extent3D, cellSize: CellSize, zResolution: Double) = 49 | new GridExtent3D(extent, cellSize.width, cellSize.height, zResolution) 50 | } 51 | -------------------------------------------------------------------------------- /pointcloud/src/main/scala/geotrellis/pointcloud/raster/VolumetricGrid.scala: 
--------------------------------------------------------------------------------
/*
 * Copyright 2020 Azavea
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package geotrellis.pointcloud.raster

/** The (cols, rows, layers) dimensions of a regular voxel grid. */
trait VolumetricGrid extends Serializable {
  def cols: Int
  def rows: Int
  def layers: Int

  /**
   * The size of the grid, e.g. cols * rows * layers.
   * NOTE(review): computed in Int arithmetic — may overflow for very large grids; confirm expected ranges.
   */
  def size: Int = cols * rows * layers

  /** The grid's dimensions as a (cols, rows, layers) triple. */
  def dimensions: (Int, Int, Int) = (cols, rows, layers)

  /** Zero-based, inclusive bounds covering the whole grid. */
  def gridBounds: GridBounds3D[Int] = GridBounds3D(0, 0, 0, cols - 1, rows - 1, layers - 1)
}
--------------------------------------------------------------------------------
/pointcloud/src/main/scala/geotrellis/pointcloud/raster/VoxelSize.scala:
--------------------------------------------------------------------------------
/*
 * Copyright 2020 Azavea
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package geotrellis.pointcloud.raster 18 | 19 | import geotrellis.pointcloud.vector.Extent3D 20 | import geotrellis.raster.CellSize 21 | 22 | /** 23 | * A case class containing the width and height of a cell. 24 | * 25 | * @param width The width of a cell 26 | * @param height The height of a cell 27 | */ 28 | case class VoxelSize(width: Double, height: Double, depth: Double) { 29 | def resolution: Double = math.pow(width * height * depth, 1.0/3.0) 30 | } 31 | 32 | /** 33 | * The companion object for the [[CellSize]] type. 34 | */ 35 | object VoxelSize { 36 | 37 | /** 38 | * Create a new [[VoxelSize]] from an extent, a number of columns, 39 | * a number of rows, and a number of layers. 40 | * 41 | * @param extent The extent, which provides an overall height and width 42 | * @param cols The number of columns 43 | * @param rows The number of rows 44 | * @param layers The number of layers 45 | * @return The VoxelSize 46 | */ 47 | def apply(extent: Extent3D, cols: Int, rows: Int, layers: Int): VoxelSize = 48 | VoxelSize(extent.width / cols, extent.height / rows, extent.depth / layers) 49 | 50 | /** 51 | * Create a new [[VoxelSize]] from an extent, a number of columns, 52 | * a number of rows, and a number of layers. 53 | * 54 | * @param extent The extent, which provides an overall height and width 55 | * @param dims The numbers of columns and rows as a tuple 56 | * @return The CellSize 57 | */ 58 | def apply(extent: Extent3D, dims: (Int, Int, Int)): VoxelSize = { 59 | val (cols, rows, layers) = dims 60 | apply(extent, cols, rows, layers) 61 | } 62 | 63 | def apply(cellSize: CellSize, zResolution: Double): VoxelSize = 64 | VoxelSize(cellSize.width, cellSize.height, zResolution) 65 | 66 | /** 67 | * Create a new [[CellSize]] from a string containing the width and 68 | * height separated by a comma. 
69 | * 70 | * @param s The string 71 | * @return The CellSize 72 | */ 73 | def fromString(s:String) = { 74 | val Array(width, height, depth) = s.split(",").map(_.toDouble) 75 | VoxelSize(width, height, depth) 76 | } 77 | } 78 | -------------------------------------------------------------------------------- /pointcloud/src/main/scala/geotrellis/pointcloud/raster/ept/EPTMetadata.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package geotrellis.pointcloud.raster.ept 18 | 19 | import geotrellis.proj4._ 20 | import geotrellis.raster._ 21 | import geotrellis.util.RangeReader 22 | 23 | import _root_.io.circe.parser._ 24 | import cats.instances.long._ 25 | import cats.instances.map._ 26 | import cats.syntax.semigroup._ 27 | import cats.syntax.either._ 28 | 29 | import java.net.URI 30 | 31 | case class EPTMetadata( 32 | name: SourceName, 33 | crs: CRS, 34 | cellType: CellType, 35 | gridExtent: GridExtent[Long], 36 | resolutions: List[CellSize], 37 | attributes: Map[String, String] 38 | ) extends RasterMetadata { 39 | val bandCount = 1 40 | def attributesForBand(i: Int): Map[String, String] = Map.empty 41 | } 42 | 43 | object EPTMetadata { 44 | def pointsInLevels(base: URI, key: String): Map[Int, Long] = { 45 | val rr = RangeReader(base.resolve(s"ept-hierarchy/$key.json").toString) 46 | val raw = new String(rr.readAll) 47 | val json = parse(raw).valueOr(throw _) 48 | val table = json.asObject.get.toList.toMap.mapValues(_.toString.toLong) 49 | 50 | val recurseKeys = table.filter(_._2 == -1).keys.toList 51 | val joined = (table -- recurseKeys).groupBy(_._1.split("-").head.toInt).mapValues(_.values.sum) 52 | val nested = recurseKeys.map(pointsInLevels(base, _)) 53 | 54 | nested.fold(joined)(_ combine _) 55 | } 56 | 57 | private def approxPointsPerTile(base: URI): Long = { 58 | val rr = RangeReader(base.resolve(s"ept-hierarchy/0-0-0-0.json").toString) 59 | val raw = new String(rr.readAll) 60 | val json = parse(raw).valueOr(throw _) 61 | val table = json.asObject.get.toList.toMap.mapValues(_.toString.toLong) 62 | 63 | val nontrivials = table.filterNot(_._2 == -1) 64 | val avgCnt = nontrivials.map(_._2).sum / nontrivials.size 65 | 66 | avgCnt 67 | } 68 | 69 | def apply(source: String, withHierarchy: Boolean = false): EPTMetadata = { 70 | val src = if (source.endsWith("/")) source else s"$source/" 71 | val raw = Raw(src) 72 | val uri = new URI(src) 73 | val (counts, maxDepth) = { 74 
| if(withHierarchy) { 75 | val cnts = pointsInLevels(uri, "0-0-0-0").toList.sorted 76 | cnts -> cnts.last._1 77 | } else { 78 | val appt = approxPointsPerTile(uri) 79 | val approxTileCount = raw.points / appt 80 | 81 | // Assume that geometry is "flat", tree is more quad-tree like 82 | val fullLevels = math.log(approxTileCount) / math.log(4) 83 | 84 | (Map.empty[Int, Long], (1.5 * fullLevels).toInt) 85 | } 86 | } 87 | 88 | // https://github.com/PDAL/PDAL/blob/2.1.0/io/EptReader.cpp#L293-L318 89 | val resolutions = (maxDepth to 0).by(-1).toList.map { l => 90 | CellSize((raw.extent.width / raw.span) / math.pow(2, l), (raw.extent.height / raw.span) / math.pow(2, l)) 91 | } 92 | 93 | EPTMetadata( 94 | StringName(src), 95 | raw.srs.toCRS(), 96 | DoubleCellType, 97 | GridExtent[Long](raw.extent, resolutions.head), 98 | resolutions, 99 | Map( 100 | "points" -> raw.points.toString, 101 | "pointsInLevels" -> counts.map(_._2).mkString(","), 102 | "minz" -> raw.boundsConforming(2).toString, 103 | "maxz" -> raw.boundsConforming(5).toString 104 | ) 105 | ) 106 | } 107 | } 108 | -------------------------------------------------------------------------------- /pointcloud/src/main/scala/geotrellis/pointcloud/raster/ept/EPTPath.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package geotrellis.pointcloud.raster.ept 18 | 19 | import geotrellis.raster.SourcePath 20 | 21 | import cats.syntax.option._ 22 | import io.lemonlabs.uri.Uri 23 | import io.lemonlabs.uri.encoding.PercentEncoder 24 | import io.lemonlabs.uri.encoding.PercentEncoder.PATH_CHARS_TO_ENCODE 25 | 26 | import java.net.MalformedURLException 27 | 28 | /** Represents a VALID path that points to an Entwine catalog to be read 29 | * 30 | * @param value Path to an Entwine catalog. 31 | * 32 | * @example "data/my-data.tiff" 33 | * @example "ept://data/my-data.tiff" 34 | * @example "ept+file://data/my-data.tiff" 35 | * @example "ept+s3://bucket/prefix/data.tif" 36 | * @example "ept+file:///tmp/data.tiff" 37 | * 38 | * @note Capitalization of the extension is not regarded. 39 | */ 40 | case class EPTPath(value: String) extends SourcePath { 41 | def ept: String = s"$value/ept.json" 42 | } 43 | 44 | object EPTPath { 45 | val PREFIX = "ept+" 46 | val PREFIX_TIN = "tin+" 47 | val PREFIX_IDW = "idw+" 48 | val SCHEME = "ept://" 49 | 50 | implicit def toEPTPath(path: String): EPTPath = parse(path) 51 | 52 | def parseOption(path: String, percentEncoder: PercentEncoder = PercentEncoder(PATH_CHARS_TO_ENCODE ++ Set('%', '?', '#'))): Option[EPTPath] = { 53 | val upath = percentEncoder.encode(path, "UTF-8") 54 | Uri.parseOption(upath.split("ept://").last).fold(Option.empty[EPTPath]) { uri => 55 | EPTPath(uri.schemeOption.fold(uri.toStringRaw) { scheme => 56 | uri.withScheme(scheme.split("\\+").last).toStringRaw 57 | }).some 58 | } 59 | } 60 | 61 | def parse(path: String, percentEncoder: PercentEncoder = PercentEncoder(PATH_CHARS_TO_ENCODE ++ Set('%', '?', '#'))): EPTPath = 62 | parseOption(path, percentEncoder).getOrElse(throw new MalformedURLException(s"Unable to parse EPTPath: $path")) 63 | } -------------------------------------------------------------------------------- /pointcloud/src/main/scala/geotrellis/pointcloud/raster/ept/EPTRasterSourceProvider.scala: 
--------------------------------------------------------------------------------
/*
 * Copyright 2020 Azavea
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package geotrellis.pointcloud.raster.ept

import geotrellis.raster.{RasterSource, RasterSourceProvider}

/** Wires EPT-backed raster sources into GeoTrellis' provider lookup.
  * The path prefix selects the interpolation flavor; TIN is the default fallback.
  */
class EPTRasterSourceProvider extends RasterSourceProvider {
  /** True for "tin+…", "ept+…", and "ept://…" paths (TIN is the default fallback). */
  def isTin(path: String): Boolean =
    path.nonEmpty && (path.startsWith(EPTPath.PREFIX_TIN) || path.startsWith(EPTPath.PREFIX) || path.startsWith(EPTPath.SCHEME))

  /** True for "idw+…" paths that also carry an EPT marker ("ept+" or "ept://"). */
  def isIdw(path: String): Boolean =
    path.nonEmpty && path.startsWith(EPTPath.PREFIX_IDW) && (path.contains(EPTPath.PREFIX) || path.contains(EPTPath.SCHEME))

  def canProcess(path: String): Boolean = isIdw(path) || isTin(path)

  def rasterSource(path: String): RasterSource =
    if (isIdw(path)) IDWRasterSource(path) else TINRasterSource(path)
}
--------------------------------------------------------------------------------
/pointcloud/src/main/scala/geotrellis/pointcloud/raster/ept/Field.scala:
--------------------------------------------------------------------------------
/*
 * Copyright 2020 Azavea
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package geotrellis.pointcloud.raster.ept 18 | 19 | import io.circe.generic.JsonCodec 20 | 21 | @JsonCodec 22 | case class Field(name: String, size: Int, `type`: String, offset: Option[Double], scale: Option[Double]) 23 | -------------------------------------------------------------------------------- /pointcloud/src/main/scala/geotrellis/pointcloud/raster/ept/Raw.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package geotrellis.pointcloud.raster.ept 18 | 19 | import geotrellis.util.RangeReader 20 | import geotrellis.vector.Extent 21 | 22 | import cats.syntax.either._ 23 | import io.circe.generic.JsonCodec 24 | import io.circe.parser.decode 25 | 26 | import java.net.URI 27 | 28 | @JsonCodec 29 | case class Raw( 30 | bounds: Seq[Double], 31 | boundsConforming: Seq[Double], 32 | dataType: String, 33 | hierarchyType: String, 34 | points: Long, 35 | schema: Seq[Field], 36 | span: Int, 37 | srs: SRS, 38 | version: String 39 | ) { 40 | def extent: Extent = { 41 | val Seq(xmin, ymin, _, xmax, ymax, _) = bounds 42 | Extent(xmin, ymin, xmax, ymax) 43 | } 44 | } 45 | 46 | object Raw { 47 | def apply(eptSource: String): Raw = { 48 | val rr = RangeReader(new URI(eptSource).resolve("ept.json").toString) 49 | val jsonString = new String(rr.readAll) 50 | decode[Raw](jsonString).valueOr(throw _) 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /pointcloud/src/main/scala/geotrellis/pointcloud/raster/ept/SRS.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package geotrellis.pointcloud.raster.ept 18 | 19 | import geotrellis.proj4.{CRS, LatLng} 20 | import io.circe.generic.JsonCodec 21 | 22 | import scala.util.Try 23 | 24 | @JsonCodec 25 | case class SRS(authority: Option[String], horizontal: Option[String], vertical: Option[String], wkt: Option[String]) { 26 | def toCRS(defaultCRS: CRS = LatLng): CRS = { 27 | val parsed: Option[CRS] = for { txt <- wkt; crs <- Try { CRS.fromWKT(txt) }.toOption.flatten } yield crs 28 | val fromCode = authority.filter(_.toLowerCase == "epsg").fold(defaultCRS) { _ => 29 | horizontal.map(epsg => CRS.fromEpsgCode(epsg.toInt)).getOrElse(defaultCRS) 30 | } 31 | parsed.getOrElse(fromCode) 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /pointcloud/src/main/scala/geotrellis/pointcloud/raster/package.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package geotrellis.pointcloud 18 | 19 | import geotrellis.vector.triangulation.StitchedDelaunay 20 | 21 | package object raster { 22 | implicit class withStitchedDelaunayRasterizeMethods(val self: StitchedDelaunay) extends rasterize.StitchedDelaunayRasterizeMethods 23 | } 24 | -------------------------------------------------------------------------------- /pointcloud/src/main/scala/geotrellis/pointcloud/raster/rasterize/StitchedDelaunayRasterizeMethods.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package geotrellis.pointcloud.raster.rasterize 18 | 19 | import geotrellis.raster.triangulation.DelaunayRasterizer 20 | import geotrellis.raster.{ArrayTile, CellType, DoubleConstantNoDataCellType, RasterExtent} 21 | import geotrellis.util.MethodExtensions 22 | import geotrellis.vector.triangulation.{DelaunayTriangulation, StitchedDelaunay} 23 | 24 | trait StitchedDelaunayRasterizeMethods extends MethodExtensions[StitchedDelaunay] { 25 | def rasterize(re: RasterExtent, cellType: CellType = DoubleConstantNoDataCellType)(center: DelaunayTriangulation) = { 26 | val tile = ArrayTile.empty(cellType, re.cols, re.rows) 27 | DelaunayRasterizer.rasterizeDelaunayTriangulation(center, re, tile) 28 | DelaunayRasterizer.rasterize( 29 | tile, 30 | re, 31 | self.fillTriangles, 32 | self.halfEdgeTable, 33 | self.pointSet 34 | ) 35 | tile 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /pointcloud/src/main/scala/geotrellis/pointcloud/raster/rasterize/points/IDWRasterizer.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package geotrellis.pointcloud.raster.rasterize.points 18 | 19 | import geotrellis.raster._ 20 | import geotrellis.raster.interpolation._ 21 | import geotrellis.vector.{Point, PointFeature} 22 | 23 | import _root_.io.pdal.{DimType, PointView} 24 | import spire.syntax.cfor._ 25 | 26 | object IDWRasterizer { 27 | def apply(pv: PointView, re: RasterExtent, radiusMultiplier: Double = 1.5, cellType: CellType = DoubleConstantNoDataCellType): Raster[Tile] = { 28 | val pc = pv.getPointCloud(Array(DimType.X, DimType.Y, DimType.Z)) 29 | val features = Array.ofDim[PointFeature[Double]](pc.length) 30 | cfor(0)(_ < pc.length, _ + 1) { i => 31 | features(i) = PointFeature[Double](Point(pc.getX(i), pc.getY(i)), pc.getZ(i)) 32 | } 33 | 34 | val CellSize(w, h) = re.cellSize 35 | val radius = radiusMultiplier * math.sqrt(w * w + h * h) 36 | 37 | features 38 | .toTraversable 39 | .inverseDistanceWeighted( 40 | re, 41 | InverseDistanceWeighted.Options( 42 | radius, 43 | radius, 44 | 0.0, // rotation 45 | 3.0, // weighting power 46 | 0.0, // smoothing factor 47 | re.cellSize.resolution / 2, //equal weight radius 48 | cellType 49 | )) 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /pointcloud/src/main/scala/geotrellis/pointcloud/raster/rasterize/triangles/TrianglesRasterizer.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package geotrellis.pointcloud.raster.rasterize.triangles 18 | 19 | import geotrellis.raster._ 20 | import geotrellis.vector._ 21 | 22 | import org.locationtech.jts.geom.{ Envelope => JtsEnvelope } 23 | 24 | import scala.collection.JavaConverters._ 25 | 26 | object TrianglesRasterizer { 27 | 28 | def apply( 29 | re: RasterExtent, 30 | sourceArray: Array[Double], 31 | triangles: Seq[Polygon], 32 | indexMap: Map[(Double, Double), Int] 33 | ): ArrayTile = { 34 | val tile = DoubleArrayTile.empty(re.cols, re.rows) 35 | apply(re, tile, sourceArray, triangles, indexMap) 36 | tile 37 | } 38 | 39 | def apply( 40 | re: RasterExtent, 41 | tile: MutableArrayTile, 42 | sourceArray: Array[Double], 43 | triangles: Seq[Polygon], 44 | indexMap: Map[(Double, Double), Int] 45 | ): Unit = { 46 | triangles 47 | .foreach({ triangle => 48 | renderTriangle(triangle, re, sourceArray, tile, indexMap) 49 | }) 50 | } 51 | 52 | def renderTriangle( 53 | triangle: Polygon, 54 | re: RasterExtent, 55 | sourceArray: Array[Double], 56 | tile: MutableArrayTile, 57 | indexMap: Map[(Double, Double), Int] 58 | ): Unit = { 59 | 60 | val Extent(xmin, ymin, xmax, ymax) = triangle.extent 61 | val xn = ((xmin - re.extent.xmin) / re.cellwidth).toInt 62 | val yn = ((ymin - re.extent.ymin) / re.cellheight).toInt 63 | val xStart = re.extent.xmin + (xn + 0.5) * re.cellwidth 64 | val yStart = re.extent.ymin + (yn + 0.5) * re.cellheight 65 | val cols = math.ceil((xmax - xmin) / re.cellwidth).toInt 66 | val rows = math.ceil((ymax - ymin) / re.cellheight).toInt 67 | 68 | var row = 0; while (row < rows) { 69 | var col = 0; while (col < cols) { 70 | val x = xStart + col * re.cellwidth 71 | val y = yStart + row * re.cellheight 72 | val screenCol = ((x - re.extent.xmin) / re.cellwidth).toInt 73 | val screenRow = ((re.extent.ymax -y) / re.cellheight).toInt 74 | if ( 75 | 
triangle.covers(Point(x,y)) && 76 | screenCol < re.cols && screenRow < re.rows && 77 | 0 <= screenCol && 0 <= screenRow 78 | ) { 79 | val result = { 80 | val verts = triangle.vertices; require(verts.length == 4) 81 | val x1 = verts(0).x 82 | val y1 = verts(0).y 83 | val x2 = verts(1).x 84 | val y2 = verts(1).y 85 | val x3 = verts(2).x 86 | val y3 = verts(2).y 87 | val index1 = indexMap.getOrElse((verts(0).x, verts(0).y), throw new Exception) 88 | val index2 = indexMap.getOrElse((verts(1).x, verts(1).y), throw new Exception) 89 | val index3 = indexMap.getOrElse((verts(2).x, verts(2).y), throw new Exception) 90 | 91 | val determinant = (y2-y3)*(x1-x3)+(x3-x2)*(y1-y3) 92 | val lambda1 = ((y2-y3)*(x-x3)+(x3-x2)*(y-y3)) / determinant 93 | val lambda2 = ((y3-y1)*(x-x3)+(x1-x3)*(y-y3)) / determinant 94 | val lambda3 = 1.0 - lambda1 - lambda2 95 | 96 | lambda1*sourceArray(index1) + lambda2*sourceArray(index2) + lambda3*sourceArray(index3) 97 | } 98 | 99 | tile.setDouble(screenCol, screenRow, result) 100 | } 101 | col += 1 102 | } 103 | row += 1 104 | } 105 | } 106 | 107 | } 108 | -------------------------------------------------------------------------------- /pointcloud/src/main/scala/geotrellis/pointcloud/spark/buffer/BufferUnionable.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package geotrellis.pointcloud.spark.buffer 18 | 19 | import geotrellis.layer._ 20 | 21 | import org.apache.spark.rdd.RDD 22 | 23 | import scala.reflect.ClassTag 24 | 25 | object BufferUnionable { 26 | 27 | /** 28 | * Given an RDD of (K, V) pairs, union each object with its 29 | * neighbors. The "neighbor" relationship is given by the keys. 30 | * 31 | * @tparam K The key type. 32 | * @tparam V The value type; must be unionable. 33 | * 34 | * @param rdd An RDD of K-V pairs. 35 | * @return An RDD of K-V pairs where each V has been unioned with its neighbors. 36 | */ 37 | def apply[ 38 | K: SpatialComponent, 39 | X <: { def union(other: Any): V }, 40 | V: (? => X) : ClassTag 41 | ](rdd: RDD[(K, V)]): RDD[(K, V)] = { 42 | rdd 43 | .flatMap({ case (key, data) => 44 | val SpatialKey(col, row) = key 45 | 46 | for (deltaX <- -1 to +1; deltaY <- -1 to +1) yield { 47 | if (deltaX == 0 && deltaY == 0) 48 | (SpatialKey(col + deltaX, row + deltaY), (key, data, true)) 49 | else 50 | (SpatialKey(col + deltaX, row + deltaY), (key, data, false)) 51 | } 52 | }) 53 | .groupByKey 54 | .filter({ case (_, seq) => seq.exists { case (_, _, center) => center } }) 55 | .map({ case (sortKey, seq) => 56 | val resultKey = seq.filter({ case (_, _, center) => center }).head._1 57 | val resultValue = seq.map({ case (_, data, _) => data }).reduce(_ union _) 58 | 59 | (resultKey, resultValue) 60 | }) 61 | } 62 | 63 | } 64 | -------------------------------------------------------------------------------- /pointcloud/src/main/scala/geotrellis/pointcloud/spark/datasource/DataSourceOptions.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package geotrellis.pointcloud.spark.datasource 18 | 19 | trait DataSourceOptions { 20 | final val PATH_PARAM = "path" 21 | final val PIPELINE_PARAM = "pipeline" 22 | } 23 | 24 | object DataSourceOptions extends DataSourceOptions -------------------------------------------------------------------------------- /pointcloud/src/main/scala/geotrellis/pointcloud/spark/datasource/DefaultSource.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package geotrellis.pointcloud.spark.datasource 18 | 19 | import geotrellis.pointcloud.spark.store.hadoop.HadoopPointCloudRDD.{Options => HadoopOptions} 20 | import io.pdal.pipeline._ 21 | 22 | import cats.syntax.either._ 23 | import io.circe._ 24 | import io.circe.parser._ 25 | 26 | import org.apache.spark.annotation.Experimental 27 | import org.apache.spark.sql._ 28 | import org.apache.spark.sql.sources._ 29 | 30 | /** 31 | * DataSource over a GeoTrellis layer store. 32 | */ 33 | @Experimental 34 | class DefaultSource extends DataSourceRegister with RelationProvider with DataSourceOptions { 35 | def shortName(): String = DefaultSource.SHORT_NAME 36 | 37 | /** 38 | * Create a GeoTrellis pointcloud data source. 39 | * @param sqlContext the active Spark SQL context the relation is built against 40 | * @param parameters required parameters are: 41 | * `path` - layer store URI (e.g. "s3://bucket/gt_layers"); 42 | * `pipeline` - optional PDAL pipeline JSON string; defaults to a plain local reader 43 | */ 44 | def createRelation(sqlContext: SQLContext, parameters: Map[String, String]): BaseRelation = { 45 | require(parameters.contains(PATH_PARAM), s"'$PATH_PARAM' parameter is required.") 46 | 47 | val path = parameters(PATH_PARAM) 48 | val pipeline = parameters.get(PIPELINE_PARAM).map { str => parse(str).valueOr(err => throw new IllegalArgumentException(s"'$PIPELINE_PARAM' is not valid JSON: ${err.message}", err)) /* fail fast instead of silently handing Json.Null to PDAL */ }.getOrElse(Read("local") ~ ENil: Json) 49 | 50 | new PointCloudRelation(sqlContext, path, HadoopOptions.DEFAULT.copy(pipeline = pipeline)) 51 | } 52 | } 53 | 54 | object DefaultSource { 55 | final val SHORT_NAME = "geotrellis-pointcloud" 56 | } 57 | -------------------------------------------------------------------------------- /pointcloud/src/main/scala/geotrellis/pointcloud/spark/datasource/PointCloudRelation.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package geotrellis.pointcloud.spark.datasource 18 | 19 | import geotrellis.pointcloud.spark.store.hadoop._ 20 | import geotrellis.pointcloud.spark.store.hadoop.HadoopPointCloudRDD.{Options => HadoopOptions} 21 | import geotrellis.pointcloud.util.Filesystem 22 | import geotrellis.proj4.CRS 23 | import geotrellis.store.hadoop.util.HdfsUtils 24 | import geotrellis.vector.Extent 25 | 26 | import cats.implicits._ 27 | import io.pdal._ 28 | import io.circe.syntax._ 29 | import org.apache.hadoop.fs.Path 30 | import org.apache.spark.SparkContext 31 | import org.apache.spark.rdd.RDD 32 | import org.apache.spark.sql.sources.{BaseRelation, TableScan} 33 | import org.apache.spark.sql.types._ 34 | import org.apache.spark.sql.{Row, SQLContext} 35 | 36 | import java.io.File 37 | 38 | import scala.collection.JavaConverters._ 39 | 40 | // This class has to be serializable since it is shipped over the network. 
41 | class PointCloudRelation( 42 | val sqlContext: SQLContext, 43 | path: String, 44 | options: HadoopOptions 45 | ) extends BaseRelation with TableScan/* with PrunedScan with PrunedFilteredScan*/ with Serializable { 46 | 47 | @transient implicit lazy val sc: SparkContext = sqlContext.sparkContext 48 | 49 | // TODO: switch between HadoopPointCloudRDD and S3PointcCloudRDD 50 | lazy val isS3: Boolean = path.startsWith("s3") 51 | 52 | override def schema: StructType = { 53 | lazy val (local, fixedPath) = 54 | if(path.startsWith("s3") || path.startsWith("hdfs")) { 55 | val tmpDir = Filesystem.createDirectory() 56 | val remotePath = new Path(path) 57 | // copy remote file into local tmp dir 58 | val localPath = new File(tmpDir, remotePath.getName) 59 | HdfsUtils.copyPath(remotePath, new Path(s"file:///${localPath.getAbsolutePath}"), sc.hadoopConfiguration) 60 | (true, localPath.toString) 61 | } else (false, path) 62 | 63 | val localPipeline = 64 | options.pipeline 65 | .hcursor 66 | .downField("pipeline").downArray 67 | .downField("filename").withFocus(_ => fixedPath.asJson) 68 | .top.fold(options.pipeline)(identity) 69 | 70 | val pl = Pipeline(localPipeline.noSpaces) 71 | if (pl.validate()) pl.execute() 72 | val pointCloud = try { 73 | pl.getPointViews().next().getPointCloud(0) 74 | } finally { 75 | pl.close() 76 | if(local) println(new File(fixedPath).delete) 77 | } 78 | 79 | val rdd = HadoopPointCloudRDD(new Path(path), options) 80 | 81 | val md: (Option[Extent], Option[CRS]) = 82 | rdd 83 | .map { case (header, _) => (header.projectedExtent3D.map(_.extent3d.toExtent), header.crs) } 84 | .reduce { case ((e1, c), (e2, _)) => ((e1, e2).mapN(_ combine _), c) } 85 | 86 | val metadata = new MetadataBuilder().putString("metadata", md.asJson.noSpaces).build 87 | 88 | pointCloud.deriveSchema(metadata) 89 | } 90 | 91 | override def buildScan(): RDD[Row] = { 92 | val rdd = HadoopPointCloudRDD(new Path(path), options) 93 | rdd.flatMap { _._2.flatMap { pc => 
pc.readAll.toList.map { k => Row(k: _*) } } } 94 | } 95 | } 96 | -------------------------------------------------------------------------------- /pointcloud/src/main/scala/geotrellis/pointcloud/spark/dem/Implicits.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package geotrellis.pointcloud.spark.dem 18 | 19 | import io.pdal._ 20 | import geotrellis.layer._ 21 | import geotrellis.util._ 22 | import org.apache.spark.rdd.RDD 23 | 24 | object Implicits extends Implicits 25 | 26 | trait Implicits { 27 | implicit class withPointCloudToDemMethods[M: GetComponent[*, LayoutDefinition]]( 28 | self: RDD[(SpatialKey, PointCloud)] with Metadata[M] 29 | ) extends PointCloudToDemMethods[M](self) 30 | 31 | implicit class withPointCloudDemMethods(val self: PointCloud) 32 | extends PointCloudDemMethods 33 | } 34 | -------------------------------------------------------------------------------- /pointcloud/src/main/scala/geotrellis/pointcloud/spark/dem/PointCloudDemMethods.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package geotrellis.pointcloud.spark.dem 18 | 19 | import io.pdal._ 20 | 21 | import geotrellis.raster._ 22 | import geotrellis.pointcloud.raster.rasterize.triangles.TrianglesRasterizer 23 | import geotrellis.util.MethodExtensions 24 | import geotrellis.vector._ 25 | import geotrellis.vector.triangulation.DelaunayTriangulation 26 | import geotrellis.vector.mesh.IndexedPointSet 27 | 28 | import org.locationtech.jts.geom.Coordinate 29 | 30 | trait PointCloudDemMethods extends MethodExtensions[PointCloud] { 31 | 32 | /** 33 | * Compute the union of this PointCloud and the other one. 
34 | */ 35 | def union(other: Any): PointCloud = { 36 | val otherCloud = other match { 37 | case other: PointCloud => other 38 | case _ => throw new IllegalArgumentException(s"Cannot union a PointCloud with ${other.getClass.getName}") /* was a bare `throw new Exception` with no message */ 39 | } 40 | 41 | require(self.dimTypes == otherCloud.dimTypes, "Cannot union point clouds with differing dimension types") 42 | 43 | PointCloud(self.bytes ++ otherCloud.bytes, self.dimTypes) 44 | } 45 | 46 | lazy val coords: Array[Coordinate] = 47 | (0 until self.length).map({ i => new Coordinate(self.getDouble(i, "X"), self.getDouble(i, "Y")) }).toArray 48 | lazy val xs = (0 until self.length).map({ i => self.getDouble(i, "X") }).toArray 49 | lazy val ys = (0 until self.length).map({ i => self.getDouble(i, "Y") }).toArray 50 | lazy val indexMap: Map[(Double, Double), Int] = xs.zip(ys).zipWithIndex.toMap 51 | lazy val delaunayTriangulation = DelaunayTriangulation(IndexedPointSet(coords)) 52 | lazy val indexToCoord = delaunayTriangulation.pointSet.getCoordinate(_) /* lazy: a strict val forced the expensive Delaunay triangulation every time this extension class was instantiated, even for calls (e.g. union) that never need it */ 53 | lazy val triangles: Seq[Polygon] = delaunayTriangulation.triangleMap.triangleVertices.toSeq.map { 54 | case (i, j, k) => Polygon(indexToCoord(i), indexToCoord(j), indexToCoord(k), indexToCoord(i)) 55 | } 56 | 57 | def toTile(re: RasterExtent, dimension: String): ArrayTile = { 58 | val sourceArray = (0 until self.length).map({ i => self.getDouble(i, dimension) }).toArray 59 | TrianglesRasterizer(re, sourceArray, triangles, indexMap) 60 | } 61 | 62 | } 63 | -------------------------------------------------------------------------------- /pointcloud/src/main/scala/geotrellis/pointcloud/spark/dem/PointCloudToDem.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package geotrellis.pointcloud.spark.dem 18 | 19 | import io.pdal._ 20 | import geotrellis.layer._ 21 | import geotrellis.raster._ 22 | import geotrellis.spark._ 23 | import geotrellis.util._ 24 | import geotrellis.vector._ 25 | 26 | import org.apache.spark.rdd.RDD 27 | 28 | object PointCloudToDem { 29 | def apply[M: GetComponent[*, LayoutDefinition]](rdd: RDD[(SpatialKey, PointCloud)] with Metadata[M], tileDimensions: (Int, Int), options: PointToGrid.Options): RDD[(SpatialKey, Tile)] with Metadata[LayoutDefinition] = 30 | apply[M](rdd, options) { e => RasterExtent(e, tileDimensions._1, tileDimensions._2) } 31 | 32 | def apply[M: GetComponent[*, LayoutDefinition]](rdd: RDD[(SpatialKey, PointCloud)] with Metadata[M], cellSize: CellSize, options: PointToGrid.Options): RDD[(SpatialKey, Tile)] with Metadata[LayoutDefinition] = 33 | apply[M](rdd, options) { e => RasterExtent(e, cellSize) } 34 | 35 | def apply[M: GetComponent[*, LayoutDefinition]](rdd: RDD[(SpatialKey, PointCloud)] with Metadata[M], options: PointToGrid.Options)(createRE: Extent => RasterExtent): RDD[(SpatialKey, Tile)] with Metadata[LayoutDefinition] = { 36 | val layoutDefinition = rdd.metadata.getComponent[LayoutDefinition] 37 | val mapTransform = layoutDefinition.mapTransform 38 | 39 | val result = 40 | rdd 41 | .collectNeighbors 42 | .mapPartitions({ partition => 43 | partition.map { case (key, neighbors) => 44 | val extent = mapTransform(key) 45 | val raster = 46 | PointToGrid.createRaster(neighbors.map(_._2._2), createRE(extent), options) 47 
| (key, raster.tile) 48 | } 49 | }, preservesPartitioning = true) 50 | 51 | ContextRDD(result, layoutDefinition) 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /pointcloud/src/main/scala/geotrellis/pointcloud/spark/dem/PointCloudToDemMethods.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package geotrellis.pointcloud.spark.dem 18 | 19 | import io.pdal._ 20 | import geotrellis.raster._ 21 | import geotrellis.layer._ 22 | import geotrellis.util._ 23 | 24 | import org.apache.spark.rdd.RDD 25 | 26 | abstract class PointCloudToDemMethods[M: GetComponent[*, LayoutDefinition]]( 27 | val self: RDD[(SpatialKey, PointCloud)] with Metadata[M] 28 | ) extends MethodExtensions[RDD[(SpatialKey, PointCloud)] with Metadata[M]] { 29 | def pointToGrid(cellSize: CellSize, options: PointToGrid.Options): RDD[(SpatialKey, Tile)] with Metadata[LayoutDefinition] = 30 | PointCloudToDem(self, cellSize, options) 31 | } 32 | -------------------------------------------------------------------------------- /pointcloud/src/main/scala/geotrellis/pointcloud/spark/package.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package geotrellis.pointcloud 18 | 19 | import geotrellis.pointcloud.vector.ProjectedExtent3D 20 | import geotrellis.layer._ 21 | import geotrellis.spark.tiling.TilerKeyMethods 22 | import geotrellis.util._ 23 | 24 | import org.apache.spark.rdd.RDD 25 | import org.locationtech.jts.geom.Coordinate 26 | 27 | package object spark extends dem.Implicits with tiling.Implicits with Serializable { 28 | type PointCloudLayerRDD[K] = RDD[(SpatialKey, Array[Coordinate])] with Metadata[TileLayerMetadata[K]] 29 | 30 | implicit class withProjectedExtent3DTilerKeyMethods[K: Component[*, ProjectedExtent3D]](val self: K) extends TilerKeyMethods[K, SpatialKey] { 31 | def extent = self.getComponent[ProjectedExtent3D].extent3d.toExtent 32 | def translate(spatialKey: SpatialKey) = spatialKey 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /pointcloud/src/main/scala/geotrellis/pointcloud/spark/store/PointCloudHeader.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package geotrellis.pointcloud.spark.store 18 | 19 | import geotrellis.pointcloud.vector.{Extent3D, ProjectedExtent3D} 20 | import geotrellis.proj4.CRS 21 | import geotrellis.vector.Extent 22 | 23 | import io.circe.parser._ 24 | import cats.syntax.either._ 25 | import io.pdal.pipeline.ReaderTypes 26 | 27 | trait PointCloudHeader { 28 | val metadata: String 29 | val schema: String 30 | 31 | def projectedExtent3D: Option[ProjectedExtent3D] = 32 | parse(metadata).right.flatMap(_.as[ProjectedExtent3D]).toOption 33 | 34 | def extent3D: Option[Extent3D] = projectedExtent3D.map(_.extent3d) 35 | def extent: Option[Extent] = projectedExtent3D.map(_.extent3d.toExtent) 36 | def crs: Option[CRS] = { 37 | val result = projectedExtent3D.map(_.crs) 38 | if(result.isEmpty) { 39 | parse(metadata).right.toOption.flatMap { json => 40 | val md = json.hcursor.downField("metadata") 41 | val driver = 42 | ReaderTypes 43 | .all.flatMap(s => md.downField(s.toString).focus) 44 | .headOption 45 | .map(_.hcursor) 46 | .getOrElse(throw new Exception(s"Unsupported reader driver: ${md.keys.getOrElse(Nil)}")) 47 | 48 | driver.downField("srs").downField("proj4").focus.map { str => 49 | CRS.fromString(str.noSpaces.replace("\"", "")) 50 | } 51 | } 52 | } 53 | else result 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /pointcloud/src/main/scala/geotrellis/pointcloud/spark/store/hadoop/HadoopPointCloudHeader.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package geotrellis.pointcloud.spark.store.hadoop 18 | 19 | import geotrellis.pointcloud.spark.store.PointCloudHeader 20 | import org.apache.hadoop.fs.Path 21 | 22 | case class HadoopPointCloudHeader(fileName: String, metadata: String, schema: String) extends PointCloudHeader { 23 | def fileNamePath: Path = new Path(fileName) 24 | } 25 | -------------------------------------------------------------------------------- /pointcloud/src/main/scala/geotrellis/pointcloud/spark/store/hadoop/HadoopPointCloudRDD.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package geotrellis.pointcloud.spark.store.hadoop 18 | 19 | import geotrellis.pointcloud.spark.store.hadoop.formats._ 20 | import geotrellis.store.hadoop._ 21 | import geotrellis.vector.Extent 22 | 23 | import io.circe.Json 24 | import io.pdal._ 25 | import io.pdal.pipeline._ 26 | import org.apache.hadoop.fs.Path 27 | import org.apache.spark.SparkContext 28 | import org.apache.spark.rdd.RDD 29 | 30 | /** 31 | * Allows for reading point data files using PDAL as RDD[(ProjectedPackedPointsBounds, PointCloud)]s through Hadoop FileSystem API. 32 | */ 33 | object HadoopPointCloudRDD { 34 | 35 | /** 36 | * This case class contains the various parameters one can set when reading RDDs from Hadoop using Spark. 37 | */ 38 | 39 | case class Options( 40 | filesExtensions: Seq[String] = PointCloudInputFormat.filesExtensions, 41 | pipeline: Json = Read("local") ~ ENil, 42 | tmpDir: Option[String] = None, 43 | filterExtent: Option[Extent] = None, 44 | dimTypes: Option[Iterable[String]] = None 45 | ) 46 | 47 | object Options { 48 | def DEFAULT = Options() 49 | } 50 | 51 | /** 52 | * Creates a RDD[(ProjectedPackedPointsBounds, PointCloud)] whose K depends on the type of the point data file that is going to be read in. 53 | * 54 | * @param path Hdfs point data files path. 55 | * @param options An instance of [[Options]] that contains any user defined or default settings. 
56 | */ 57 | def apply(path: Path, options: Options = Options.DEFAULT)(implicit sc: SparkContext): RDD[(HadoopPointCloudHeader, List[PointCloud])] = { 58 | val conf = sc.hadoopConfiguration.withInputDirectory(path, options.filesExtensions) 59 | 60 | options.tmpDir.foreach(PointCloudInputFormat.setTmpDir(conf, _)) 61 | options.dimTypes.foreach(PointCloudInputFormat.setDimTypes(conf, _)) 62 | PointCloudInputFormat.setPipeline(conf, options.pipeline) 63 | 64 | options.filterExtent match { 65 | case Some(filterExtent) => 66 | PointCloudInputFormat.setFilterExtent(conf, filterExtent) 67 | 68 | sc.newAPIHadoopRDD( 69 | conf, 70 | classOf[PointCloudInputFormat], 71 | classOf[HadoopPointCloudHeader], 72 | classOf[List[PointCloud]] 73 | ).filter { case (header, _) => 74 | header.extent3D.exists(_.toExtent.intersects(filterExtent)) /* drop headers without a parsable extent; consistent with S3PointCloudRDD */ 75 | } 76 | case None => 77 | sc.newAPIHadoopRDD( 78 | conf, 79 | classOf[PointCloudInputFormat], 80 | classOf[HadoopPointCloudHeader], 81 | classOf[List[PointCloud]] 82 | ) 83 | } 84 | } 85 | } 86 | -------------------------------------------------------------------------------- /pointcloud/src/main/scala/geotrellis/pointcloud/spark/store/hadoop/formats/FileStreamRecordReader.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2018 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License.
15 | */ 16 | 17 | package geotrellis.pointcloud.spark.store.hadoop.formats 18 | 19 | import org.apache.hadoop.fs._ 20 | import org.apache.hadoop.io.compress.CompressionCodecFactory 21 | import org.apache.hadoop.mapreduce._ 22 | import org.apache.hadoop.mapreduce.lib.input._ 23 | 24 | import java.io.InputStream 25 | 26 | class FileStreamRecordReader[K, V](read: InputStream => (K, V)) extends RecordReader[K, V] { 27 | private var tup: (K, V) = null 28 | private var hasNext: Boolean = true 29 | 30 | def initialize(split: InputSplit, context: TaskAttemptContext) = { 31 | val path = split.asInstanceOf[FileSplit].getPath() 32 | val conf = context.getConfiguration() 33 | val fs = path.getFileSystem(conf) 34 | 35 | val is: InputStream = { 36 | val factory = new CompressionCodecFactory(conf) 37 | val codec = factory.getCodec(path) 38 | 39 | if (codec == null) fs.open(path) 40 | else codec.createInputStream(fs.open(path)) 41 | } 42 | 43 | tup = read(is) 44 | } 45 | 46 | def close = {} 47 | def getCurrentKey = tup._1 48 | def getCurrentValue = { hasNext = false ; tup._2 } 49 | def getProgress = 1 50 | def nextKeyValue = hasNext 51 | } 52 | 53 | trait FileStreamInputFormat[K, V] extends FileInputFormat[K, V] { 54 | def read(is: InputStream, context: TaskAttemptContext): (K, V) 55 | 56 | override def isSplitable(context: JobContext, fileName: Path) = false 57 | 58 | override def createRecordReader(split: InputSplit, context: TaskAttemptContext): RecordReader[K, V] = 59 | new FileStreamRecordReader({ is => read(is, context) }) 60 | } 61 | 62 | -------------------------------------------------------------------------------- /pointcloud/src/main/scala/geotrellis/pointcloud/spark/store/package.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package geotrellis.pointcloud.spark 18 | 19 | import geotrellis.pointcloud.store.avro.Implicits 20 | 21 | package object store extends Implicits with Serializable 22 | -------------------------------------------------------------------------------- /pointcloud/src/main/scala/geotrellis/pointcloud/spark/store/s3/S3PointCloudHeader.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package geotrellis.pointcloud.spark.store.s3 18 | 19 | import geotrellis.pointcloud.spark.store.PointCloudHeader 20 | 21 | case class S3PointCloudHeader(key: String, metadata: String, schema: String) extends PointCloudHeader 22 | -------------------------------------------------------------------------------- /pointcloud/src/main/scala/geotrellis/pointcloud/spark/store/s3/S3PointCloudRDD.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package geotrellis.pointcloud.spark.store.s3 18 | 19 | import geotrellis.pointcloud.spark.store.hadoop.formats.PointCloudInputFormat 20 | import geotrellis.spark.store.s3._ 21 | import geotrellis.store.s3.S3ClientProducer 22 | import geotrellis.vector.Extent 23 | import io.circe._ 24 | import io.pdal._ 25 | import io.pdal.pipeline._ 26 | import org.apache.spark.SparkContext 27 | import org.apache.spark.rdd.RDD 28 | import software.amazon.awssdk.services.s3.S3Client 29 | 30 | /** 31 | * Allows for reading point data files using PDAL as RDD[(ProjectedPackedPointsBounds, PointCloud)]s through S3 API. 32 | */ 33 | object S3PointCloudRDD { 34 | /** 35 | * This case class contains the various parameters one can set when reading RDDs from Hadoop using Spark. 
36 | * @param filesExtensions Supported files extensions 37 | * @param numPartitions How many partitions Spark should create when it repartitions the data. 38 | * @param partitionBytes Desired partition size in bytes, at least one item per partition will be assigned 39 | * @param getClient A function to instantiate an S3Client. 40 | */ 41 | case class Options( 42 | filesExtensions: Seq[String] = PointCloudInputFormat.filesExtensions, 43 | pipeline: Json = Read("local") ~ ENil, 44 | numPartitions: Option[Int] = None, 45 | partitionBytes: Option[Long] = None, 46 | getClient: () => S3Client = S3ClientProducer.get, 47 | tmpDir: Option[String] = None, 48 | filterExtent: Option[Extent] = None, 49 | dimTypes: Option[Iterable[String]] = None 50 | ) 51 | 52 | object Options { 53 | def DEFAULT = Options() 54 | } 55 | 56 | /** 57 | * Creates a RDD[(ProjectedPackedPointsBounds, PointCloud)] whose K depends on the type of the point data file that is going to be read in. 58 | * 59 | * @param bucket Name of the bucket on S3 where the files are kept. 60 | * @param prefix Prefix of all of the keys on S3 that are to be read in. 61 | * @param options An instance of [[Options]] that contains any user defined or default settings. 
62 | */ 63 | def apply(bucket: String, prefix: String, options: Options = Options.DEFAULT)(implicit sc: SparkContext): RDD[(S3PointCloudHeader, List[PointCloud])] = { 64 | val conf = sc.hadoopConfiguration 65 | 66 | S3InputFormat.setBucket(conf, bucket) 67 | S3InputFormat.setPrefix(conf, prefix) 68 | S3InputFormat.setExtensions(conf, options.filesExtensions) 69 | S3InputFormat.setCreateS3Client(conf, options.getClient) 70 | options.numPartitions.foreach(S3InputFormat.setPartitionCount(conf, _)) 71 | options.partitionBytes.foreach(S3InputFormat.setPartitionBytes(conf, _)) 72 | 73 | options.tmpDir.foreach(PointCloudInputFormat.setTmpDir(conf, _)) 74 | options.dimTypes.foreach(PointCloudInputFormat.setDimTypes(conf, _)) 75 | PointCloudInputFormat.setPipeline(conf, options.pipeline) 76 | 77 | options.filterExtent match { 78 | case Some(filterExtent) => 79 | PointCloudInputFormat.setFilterExtent(conf, filterExtent) 80 | 81 | sc.newAPIHadoopRDD( 82 | conf, 83 | classOf[S3PointCloudInputFormat], 84 | classOf[S3PointCloudHeader], 85 | classOf[List[PointCloud]] 86 | ).filter { case (header, _) => header.extent3D.exists(_.toExtent.intersects(filterExtent)) } 87 | case None => 88 | sc.newAPIHadoopRDD( 89 | conf, 90 | classOf[S3PointCloudInputFormat], 91 | classOf[S3PointCloudHeader], 92 | classOf[List[PointCloud]] 93 | ) 94 | } 95 | } 96 | } 97 | -------------------------------------------------------------------------------- /pointcloud/src/main/scala/geotrellis/pointcloud/spark/store/s3/S3StreamRecordReader.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2018 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
/** Fetches the bytes of each S3 key one at a time via [S3Client.getObject],
  * exposing them as an [[InputStream]].
  * Subclasses must implement [[read]] to map an S3 object's content to a (K, V) record. */
abstract class S3StreamRecordReader[K, V](s3Client: S3Client) extends BaseS3RecordReader[K, V](s3Client: S3Client) {
  /** Issues the GET request and delegates decoding of the response stream to [[read]]. */
  def readObjectRequest(objectRequest: GetObjectRequest): (K, V) = {
    val objectStream = s3Client.getObject(objectRequest)
    read(objectRequest.key, objectStream)
  }

  /** Maps an S3 object's key and content stream to a (K, V) record. */
  def read(key: String, is: InputStream): (K, V)
}
/** A record reader that hands subclasses an s3:// URI for each object rather than its bytes,
  * letting the subclass decide how (or whether) to fetch the content. */
abstract class S3URIRecordReader[K, V](s3Client: S3Client) extends BaseS3RecordReader[K, V](s3Client: S3Client) {
  /** Builds the object's s3:// URI and delegates record construction to [[read]]. */
  def readObjectRequest(objectRequest: GetObjectRequest): (K, V) = {
    val key = objectRequest.key
    val uri = new URI(s"s3://${objectRequest.bucket}/$key")
    read(key, uri)
  }

  /** Maps an S3 object's key and URI to a (K, V) record. */
  def read(key: String, uri: URI): (K, V)
}
package geotrellis.pointcloud.spark.tiling

import io.pdal._
import org.apache.spark.rdd._

object Implicits extends Implicits

trait Implicits {
  /** Enriches RDD[PointCloud] with tiling operations such as tileToLayout. */
  implicit class withTilerMethods(self: RDD[PointCloud]) extends TilerMethods(self)
}
package geotrellis.pointcloud.spark.tiling

import io.pdal._
import geotrellis.layer._
import geotrellis.util.MethodExtensions

import org.apache.spark.rdd._

/** Extension methods for cutting an RDD of point clouds into a tiled layer. */
class TilerMethods(val self: RDD[PointCloud]) extends MethodExtensions[RDD[PointCloud]] {
  /** Cuts the point clouds into tiles keyed by [[SpatialKey]] according to the given layout. */
  def tileToLayout(layoutDefinition: LayoutDefinition): RDD[(SpatialKey, PointCloud)] with Metadata[LayoutDefinition] =
    CutPointCloud(self, layoutDefinition)
}
package geotrellis.pointcloud.store.avro

import geotrellis.pointcloud.store.avro.codecs.PointCloudCodecs

object Implicits extends Implicits

/** Brings the Avro codecs for point cloud types into implicit scope. */
trait Implicits extends PointCloudCodecs
object EitherMethods {
  /** Turns a sequence of Eithers into an Either of a sequence: Right of all values
    * when every element is Right, otherwise a Left (the one surviving the right fold).
    * Uses `.right` projections so the code also compiles on pre-2.12 (non-right-biased) Either. */
  def sequence[A, B](s: Seq[Either[A, B]]): Either[A, Seq[B]] = {
    val empty: Either[A, List[B]] = Right(Nil)
    s.foldRight(empty) { (elem, accumulated) =>
      accumulated.right.flatMap(tail => elem.right.map(head => head :: tail))
    }
  }
}
27 | * 28 | * @param root The root path where to create temporary directory 29 | * @param namePrefix prefix of the created dir 30 | * @param maxDirCreationAttempts max attempts to create tmp dir 31 | */ 32 | def createDirectory(root: String = System.getProperty("java.io.tmpdir"), namePrefix: String = "spark", maxDirCreationAttempts: Int = 10): File = { 33 | var attempts = 0 34 | val maxAttempts = maxDirCreationAttempts 35 | var dir: File = null 36 | while (dir == null) { 37 | attempts += 1 38 | if (attempts > maxAttempts) { 39 | throw new IOException("Failed to create a temp directory (under " + root + ") after " + 40 | maxAttempts + " attempts!") 41 | } 42 | try { 43 | dir = new File(root, namePrefix + "-" + UUID.randomUUID.toString) 44 | if (dir.exists() || !dir.mkdirs()) { 45 | dir = null 46 | } 47 | } catch { case e: SecurityException => dir = null; } 48 | } 49 | 50 | dir 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /pointcloud/src/main/scala/geotrellis/pointcloud/vector/Extent3D.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
package geotrellis.pointcloud.vector

import geotrellis.pointcloud.util.EitherMethods
import geotrellis.vector.{Extent, Point}

import _root_.io.pdal.pipeline.ReaderTypes
import _root_.io.circe.Decoder

/** An axis-aligned bounding box in three dimensions. */
case class Extent3D(xmin: Double, ymin: Double, zmin: Double, xmax: Double, ymax: Double, zmax: Double) {
  /** Projects this box onto the XY plane, dropping the Z range. */
  def toExtent = Extent(xmin, ymin, xmax, ymax)
  def width = xmax - xmin
  def height = ymax - ymin
  def depth = zmax - zmin

  /** Return the smallest extent that contains this extent and the provided extent. */
  def combine(other: Extent3D): Extent3D =
    Extent3D(
      if(xmin < other.xmin) xmin else other.xmin,
      if(ymin < other.ymin) ymin else other.ymin,
      if(zmin < other.zmin) zmin else other.zmin,
      if(xmax > other.xmax) xmax else other.xmax,
      if(ymax > other.ymax) ymax else other.ymax,
      if(zmax > other.zmax) zmax else other.zmax
    )

  /** Return the smallest extent that contains this extent and the provided extent. */
  def expandToInclude(other: Extent3D): Extent3D =
    combine(other)

  /** Return the smallest extent that contains this extent and the provided point. */
  def expandToInclude(p: Point): Extent3D = {
    val c = p.getCoordinate
    expandToInclude(c.getX, c.getY, c.getZ)
  }

  /** Return the smallest extent that contains this extent and the provided point. */
  def expandToInclude(x: Double, y: Double, z: Double): Extent3D =
    Extent3D(
      if(xmin < x) xmin else x,
      if(ymin < y) ymin else y,
      if(zmin < z) zmin else z,
      if(xmax > x) xmax else x,
      if(ymax > y) ymax else y,
      if(zmax > z) zmax else z
    )
}

object Extent3D {
  /** Decodes an Extent3D from PDAL pipeline metadata JSON: preferring the bounds
    * reported by the reader stage, falling back to a `filters.info` bbox. */
  implicit val extent3DDecoder: Decoder[Extent3D] = Decoder.instance { cursor =>
    val md = cursor.downField("metadata")

    // Cursor into the metadata of whichever supported PDAL reader produced this output.
    val driver =
      ReaderTypes
        .all.flatMap(s => md.downField(s.toString).focus)
        .headOption
        .map(_.hcursor)
        .getOrElse(throw new Exception(s"Unsupported reader driver: ${md.keys.getOrElse(Nil)}"))

    // Fallback bounds from a filters.info stage; lazy so the (throwing) lookup only
    // happens when the reader metadata did not carry usable bounds.
    lazy val info =
      md
        .downField("filters.info")
        .downField("bbox")
        .focus
        .map(_.hcursor)
        .getOrElse(throw new Exception(s"Unsupported reader driver: ${md.keys.getOrElse(Nil)}"))

    // Shared bounds decoder — the original duplicated this expression for both cursors.
    def decodeBounds(c: _root_.io.circe.HCursor): Decoder.Result[Extent3D] =
      EitherMethods.sequence(
        c.downField("minx").as[Double] ::
        c.downField("miny").as[Double] ::
        c.downField("minz").as[Double] ::
        c.downField("maxx").as[Double] ::
        c.downField("maxy").as[Double] ::
        c.downField("maxz").as[Double] :: Nil
      ).right.map { case List(xmin, ymin, zmin, xmax, ymax, zmax) =>
        Extent3D(xmin, ymin, zmin, xmax, ymax, zmax)
      }

    val result = decodeBounds(driver)

    if(result.isRight) result
    else decodeBounds(info)
  }
}
package geotrellis.pointcloud.vector

import geotrellis.proj4.CRS

import _root_.io.pdal.pipeline.ReaderTypes
import _root_.io.circe.Decoder

/** An [[Extent3D]] paired with the CRS its coordinates are expressed in. */
case class ProjectedExtent3D(extent3d: Extent3D, crs: CRS)

object ProjectedExtent3D {
  /** Decodes the extent and its proj4 CRS string from PDAL pipeline metadata JSON,
    * preferring the reader stage's srs and falling back to a `filters.info` srs. */
  implicit val projectedExtent3DDecoder: Decoder[ProjectedExtent3D] = Decoder.instance { cursor =>
    val metadata = cursor.downField("metadata")

    // Cursor into the metadata of whichever supported PDAL reader produced this output.
    val readerCursor =
      ReaderTypes
        .all.flatMap(s => metadata.downField(s.toString).focus)
        .headOption
        .map(_.hcursor)
        .getOrElse(throw new Exception(s"Unsupported reader driver: ${metadata.keys.getOrElse(Nil)}"))

    // Fallback SRS location produced by a filters.info stage; lazy so the (throwing)
    // lookup only runs when the reader metadata has no usable proj4 string.
    // NOTE(review): this cursor already descends into "srs", yet the fallback lookup
    // below descends into "srs" again — verify against real PDAL filters.info output.
    lazy val infoSrsCursor =
      metadata
        .downField("filters.info")
        .downField("srs")
        .focus
        .map(_.hcursor)
        .getOrElse(throw new Exception(s"Unsupported reader driver: ${metadata.keys.getOrElse(Nil)}"))

    val readerProj4 = readerCursor.downField("srs").downField("proj4").as[String]

    val proj4Result =
      if(readerProj4.isRight) readerProj4
      else infoSrsCursor.downField("srs").downField("proj4").as[String]

    val crs =
      CRS.fromString(proj4Result match {
        case Right(proj4) => proj4
        case Left(err) =>
          throw new Exception("Incorrect CRS metadata information, try to provide the input CRS").initCause(err)
      })

    cursor.value.as[Extent3D].right.map(ProjectedExtent3D(_, crs))
  }
}
package geotrellis.pointcloud.vector.triangulation

import geotrellis.vector.Point

import org.locationtech.jts.geom.Coordinate

/** Indexed access to a set of 3D points used as input to Delaunay triangulation. */
trait DelaunayPointSet {
  def length: Int
  def getX(i: Int): Double
  def getY(i: Int): Double
  def getZ(i: Int): Double
  def getCoordinate(i: Int): Coordinate = new Coordinate(getX(i), getY(i), getZ(i))
  def getPoint(i: Int): Point = Point(getCoordinate(i))
  def apply(i: Int): Coordinate = getCoordinate(i)

  /** Planar (XY) distance between points i1 and i2; the Z component is ignored. */
  def distance(i1: Int, i2: Int): Double = {
    val deltaX = getX(i1) - getX(i2)
    val deltaY = getY(i1) - getY(i2)

    math.sqrt((deltaX * deltaX) + (deltaY * deltaY))
  }
}

object DelaunayPointSet {

  /** Wraps an array of JTS coordinates, reusing the stored instances for getCoordinate. */
  def apply(points: Array[Coordinate]): DelaunayPointSet =
    new DelaunayPointSet {
      def length = points.length
      def getX(i: Int) = points(i).getX
      def getY(i: Int) = points(i).getY
      def getZ(i: Int) = points(i).getZ
      override def getCoordinate(i: Int) = points(i)
    }

  /** Wraps an index-keyed map of coordinates; keys are assumed to be 0 until size-1. */
  def apply(points: Map[Int, Coordinate]): DelaunayPointSet =
    apply(points, points.size)

  /** Wraps an arbitrary index-to-coordinate function of the given length. */
  def apply(points: Int => Coordinate, len: Int): DelaunayPointSet =
    new DelaunayPointSet {
      def length = len
      def getX(i: Int) = points(i).getX
      def getY(i: Int) = points(i).getY
      def getZ(i: Int) = points(i).getZ
      override def getCoordinate(i: Int) = points(i)
    }

  implicit def coordinateArrayToDelaunayPointSet(points: Array[Coordinate]): DelaunayPointSet =
    apply(points)
}
package geotrellis.pointcloud.vector.triangulation

import geotrellis.vector.{Extent, Point}

/** A lightweight 3D point; z defaults to 0.0 for planar use. */
case class LightPoint(x: Double, y: Double, z: Double = 0.0) {
  /** Rescales x and y into [0, 1] relative to the extent e; z is left unchanged. */
  def normalized(e: Extent): LightPoint =
    LightPoint((x - e.xmin) / e.width, (y - e.ymin) / e.height, z)

  /** Planar (XY) distance to other; the z component is ignored. */
  def distance(other: LightPoint): Double = {
    val deltaX = other.x - x
    val deltaY = other.y - y
    math.sqrt(deltaX * deltaX + deltaY * deltaY)
  }

  /** Drops z, producing a 2D geometry point. */
  def toPoint: Point = Point(x, y)
}
-------------------------------------------------------------------------------- /pointcloud/src/test/resources/las/files/1.2-with-color_1.las: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/las/files/1.2-with-color_1.las -------------------------------------------------------------------------------- /pointcloud/src/test/resources/las/files/1.2-with-color_2.las: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/las/files/1.2-with-color_2.las -------------------------------------------------------------------------------- /pointcloud/src/test/resources/las/files/1.2-with-color_3.las: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/las/files/1.2-with-color_3.las -------------------------------------------------------------------------------- /pointcloud/src/test/resources/las/files/1.2-with-color_4.las: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/las/files/1.2-with-color_4.las -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-build.json: -------------------------------------------------------------------------------- 1 | { 2 | "maxNodeSize": 65536, 3 | "minNodeSize": 16384, 4 | "software": "Entwine", 5 | "version": "2.1.0" 6 | } -------------------------------------------------------------------------------- 
/pointcloud/src/test/resources/red-rocks/ept-data/0-0-0-0.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/0-0-0-0.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/1-0-0-0.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/1-0-0-0.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/1-0-0-1.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/1-0-0-1.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/1-0-1-0.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/1-0-1-0.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/1-0-1-1.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/1-0-1-1.laz -------------------------------------------------------------------------------- 
/pointcloud/src/test/resources/red-rocks/ept-data/1-1-0-0.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/1-1-0-0.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/1-1-1-0.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/1-1-1-0.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/1-1-1-1.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/1-1-1-1.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/2-0-0-2.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/2-0-0-2.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/2-0-1-2.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/2-0-1-2.laz -------------------------------------------------------------------------------- 
/pointcloud/src/test/resources/red-rocks/ept-data/2-0-2-2.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/2-0-2-2.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/2-0-3-2.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/2-0-3-2.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/2-1-0-1.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/2-1-0-1.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/2-1-1-1.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/2-1-1-1.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/2-1-1-2.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/2-1-1-2.laz -------------------------------------------------------------------------------- 
/pointcloud/src/test/resources/red-rocks/ept-data/2-1-2-2.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/2-1-2-2.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/2-1-3-1.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/2-1-3-1.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/2-1-3-2.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/2-1-3-2.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/2-2-0-1.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/2-2-0-1.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/2-2-1-1.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/2-2-1-1.laz -------------------------------------------------------------------------------- 
/pointcloud/src/test/resources/red-rocks/ept-data/2-2-2-1.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/2-2-2-1.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/2-2-3-1.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/2-2-3-1.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/2-3-1-1.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/2-3-1-1.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/2-3-2-1.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/2-3-2-1.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/2-3-3-1.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/2-3-3-1.laz -------------------------------------------------------------------------------- 
/pointcloud/src/test/resources/red-rocks/ept-data/3-1-2-4.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/3-1-2-4.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/3-1-3-4.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/3-1-3-4.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/3-1-4-4.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/3-1-4-4.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/3-1-5-4.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/3-1-5-4.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/3-2-0-3.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/3-2-0-3.laz -------------------------------------------------------------------------------- 
/pointcloud/src/test/resources/red-rocks/ept-data/3-2-1-3.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/3-2-1-3.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/3-2-2-3.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/3-2-2-3.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/3-2-3-3.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/3-2-3-3.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/3-2-3-4.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/3-2-3-4.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/3-2-4-4.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/3-2-4-4.laz -------------------------------------------------------------------------------- 
/pointcloud/src/test/resources/red-rocks/ept-data/3-2-5-4.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/3-2-5-4.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/3-2-6-4.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/3-2-6-4.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/3-2-7-4.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/3-2-7-4.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/3-3-1-3.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/3-3-1-3.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/3-3-2-3.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/3-3-2-3.laz -------------------------------------------------------------------------------- 
/pointcloud/src/test/resources/red-rocks/ept-data/3-3-3-3.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/3-3-3-3.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/3-3-3-4.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/3-3-3-4.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/3-3-4-4.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/3-3-4-4.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/3-3-5-4.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/3-3-5-4.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/3-3-6-4.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/3-3-6-4.laz -------------------------------------------------------------------------------- 
/pointcloud/src/test/resources/red-rocks/ept-data/3-3-7-3.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/3-3-7-3.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/3-3-7-4.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/3-3-7-4.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/3-4-1-3.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/3-4-1-3.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/3-4-2-3.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/3-4-2-3.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/3-4-3-3.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/3-4-3-3.laz -------------------------------------------------------------------------------- 
/pointcloud/src/test/resources/red-rocks/ept-data/3-4-4-3.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/3-4-4-3.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/3-4-5-3.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/3-4-5-3.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/3-4-6-3.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/3-4-6-3.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/3-4-7-3.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/3-4-7-3.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/3-5-2-3.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/3-5-2-3.laz -------------------------------------------------------------------------------- 
/pointcloud/src/test/resources/red-rocks/ept-data/3-5-3-3.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/3-5-3-3.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/3-5-4-3.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/3-5-4-3.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/3-5-5-3.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/3-5-5-3.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/3-5-6-3.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/3-5-6-3.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/3-5-7-3.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/3-5-7-3.laz -------------------------------------------------------------------------------- 
/pointcloud/src/test/resources/red-rocks/ept-data/3-6-2-3.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/3-6-2-3.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/3-6-3-3.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/3-6-3-3.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/3-6-4-3.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/3-6-4-3.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/3-6-5-3.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/3-6-5-3.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/3-6-6-3.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/3-6-6-3.laz -------------------------------------------------------------------------------- 
/pointcloud/src/test/resources/red-rocks/ept-data/3-6-7-3.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/3-6-7-3.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/4-10-10-7.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-10-10-7.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/4-10-11-7.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-10-11-7.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/4-10-13-7.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-10-13-7.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/4-10-9-7.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-10-9-7.laz -------------------------------------------------------------------------------- 
/pointcloud/src/test/resources/red-rocks/ept-data/4-11-13-7.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-11-13-7.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/4-11-14-7.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-11-14-7.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/4-11-4-6.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-11-4-6.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/4-11-5-6.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-11-5-6.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/4-11-6-6.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-11-6-6.laz -------------------------------------------------------------------------------- 
/pointcloud/src/test/resources/red-rocks/ept-data/4-11-7-6.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-11-7-6.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/4-11-9-7.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-11-9-7.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/4-12-10-6.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-12-10-6.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/4-12-11-6.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-12-11-6.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/4-12-12-6.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-12-12-6.laz -------------------------------------------------------------------------------- 
/pointcloud/src/test/resources/red-rocks/ept-data/4-12-7-6.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-12-7-6.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/4-12-9-6.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-12-9-6.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/4-13-7-6.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-13-7-6.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/4-13-9-6.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-13-9-6.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/4-3-10-8.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-3-10-8.laz -------------------------------------------------------------------------------- 
/pointcloud/src/test/resources/red-rocks/ept-data/4-3-11-8.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-3-11-8.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/4-3-7-8.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-3-7-8.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/4-3-8-8.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-3-8-8.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/4-3-9-8.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-3-9-8.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/4-4-11-8.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-4-11-8.laz -------------------------------------------------------------------------------- 
/pointcloud/src/test/resources/red-rocks/ept-data/4-4-13-8.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-4-13-8.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/4-4-9-8.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-4-9-8.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/4-5-1-7.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-5-1-7.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/4-5-11-8.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-5-11-8.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/4-5-13-8.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-5-13-8.laz -------------------------------------------------------------------------------- 
/pointcloud/src/test/resources/red-rocks/ept-data/4-5-14-8.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-5-14-8.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/4-5-3-7.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-5-3-7.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/4-5-4-7.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-5-4-7.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/4-5-5-7.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-5-5-7.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/4-5-8-8.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-5-8-8.laz -------------------------------------------------------------------------------- 
/pointcloud/src/test/resources/red-rocks/ept-data/4-6-10-8.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-6-10-8.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/4-6-3-7.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-6-3-7.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/4-6-4-7.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-6-4-7.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/4-6-7-8.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-6-7-8.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/4-6-8-8.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-6-8-8.laz -------------------------------------------------------------------------------- 
/pointcloud/src/test/resources/red-rocks/ept-data/4-7-3-7.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-7-3-7.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/4-8-11-7.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-8-11-7.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/4-8-14-7.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-8-14-7.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/4-8-5-7.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-8-5-7.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/4-8-7-7.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-8-7-7.laz -------------------------------------------------------------------------------- 
/pointcloud/src/test/resources/red-rocks/ept-data/4-8-9-7.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-8-9-7.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/4-9-11-7.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-9-11-7.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/4-9-12-7.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-9-12-7.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/4-9-13-7.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-9-13-7.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/4-9-14-7.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-9-14-7.laz -------------------------------------------------------------------------------- 
/pointcloud/src/test/resources/red-rocks/ept-data/4-9-3-7.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-9-3-7.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/4-9-5-7.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-9-5-7.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/4-9-7-7.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-9-7-7.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-data/4-9-9-7.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/red-rocks/ept-data/4-9-9-7.laz -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-hierarchy/0-0-0-0.json: -------------------------------------------------------------------------------- 1 | { 2 | "0-0-0-0": 15431, 3 | "1-0-0-0": 10002, 4 | "1-0-0-1": 44176, 5 | "1-0-1-0": 63188, 6 | "1-0-1-1": 14736, 7 | "1-1-0-0": 10725, 8 | "1-1-1-0": 17574, 9 | "1-1-1-1": 24929, 10 | "2-0-0-2": 48523, 11 | "2-0-1-2": 9256, 12 | "2-0-2-2": 23083, 13 | "2-0-3-2": 65103, 
14 | "2-1-0-1": 36921, 15 | "2-1-1-1": 15887, 16 | "2-1-1-2": 45711, 17 | "2-1-2-2": 23385, 18 | "2-1-3-1": 48099, 19 | "2-1-3-2": 16341, 20 | "2-2-0-1": 18824, 21 | "2-2-1-1": 25585, 22 | "2-2-2-1": 27770, 23 | "2-2-3-1": 21715, 24 | "2-3-1-1": 24919, 25 | "2-3-2-1": 12455, 26 | "2-3-3-1": 6720, 27 | "3-1-2-4": 57659, 28 | "3-1-3-4": 60713, 29 | "3-1-4-4": 60257, 30 | "3-1-5-4": 57854, 31 | "3-2-0-3": 46288, 32 | "3-2-1-3": 57159, 33 | "3-2-2-3": 49443, 34 | "3-2-3-3": 50199, 35 | "3-2-3-4": 51662, 36 | "3-2-4-4": 60925, 37 | "3-2-5-4": 61355, 38 | "3-2-6-4": 62223, 39 | "3-2-7-4": 48144, 40 | "3-3-1-3": 61147, 41 | "3-3-2-3": 60170, 42 | "3-3-3-3": 25409, 43 | "3-3-3-4": 57088, 44 | "3-3-4-4": 49778, 45 | "3-3-5-4": 56021, 46 | "3-3-6-4": 59907, 47 | "3-3-7-3": 34433, 48 | "3-3-7-4": 62224, 49 | "3-4-1-3": 46766, 50 | "3-4-2-3": 62500, 51 | "3-4-3-3": 62082, 52 | "3-4-4-3": 61394, 53 | "3-4-5-3": 57264, 54 | "3-4-6-3": 47697, 55 | "3-4-7-3": 45721, 56 | "3-5-2-3": 62887, 57 | "3-5-3-3": 64110, 58 | "3-5-4-3": 63277, 59 | "3-5-5-3": 62590, 60 | "3-5-6-3": 62273, 61 | "3-5-7-3": 55888, 62 | "3-6-2-3": 57676, 63 | "3-6-3-3": 62210, 64 | "3-6-4-3": 62292, 65 | "3-6-5-3": 50664, 66 | "3-6-6-3": 58909, 67 | "3-6-7-3": 45434, 68 | "4-10-10-7": 19474, 69 | "4-10-11-7": 18673, 70 | "4-10-13-7": 20557, 71 | "4-10-9-7": 18577, 72 | "4-11-13-7": 18653, 73 | "4-11-14-7": 19969, 74 | "4-11-4-6": 17821, 75 | "4-11-5-6": 19023, 76 | "4-11-6-6": 20827, 77 | "4-11-7-6": 17872, 78 | "4-11-9-7": 19285, 79 | "4-12-10-6": 19860, 80 | "4-12-11-6": 19959, 81 | "4-12-12-6": 20467, 82 | "4-12-7-6": 19974, 83 | "4-12-9-6": 19379, 84 | "4-13-7-6": 20609, 85 | "4-13-9-6": 19578, 86 | "4-3-10-8": 20390, 87 | "4-3-11-8": 20098, 88 | "4-3-7-8": 19682, 89 | "4-3-8-8": 20041, 90 | "4-3-9-8": 20122, 91 | "4-4-11-8": 20899, 92 | "4-4-13-8": 20632, 93 | "4-4-9-8": 20675, 94 | "4-5-1-7": 20008, 95 | "4-5-11-8": 20994, 96 | "4-5-13-8": 21762, 97 | "4-5-14-8": 20882, 98 | "4-5-3-7": 20231, 99 | 
"4-5-4-7": 20566, 100 | "4-5-5-7": 19882, 101 | "4-5-8-8": 20321, 102 | "4-6-10-8": 18430, 103 | "4-6-3-7": 21479, 104 | "4-6-4-7": 20364, 105 | "4-6-7-8": 17583, 106 | "4-6-8-8": 20191, 107 | "4-7-3-7": 20653, 108 | "4-8-11-7": 16536, 109 | "4-8-14-7": 19883, 110 | "4-8-5-7": 19544, 111 | "4-8-7-7": 19523, 112 | "4-8-9-7": 18297, 113 | "4-9-11-7": 20466, 114 | "4-9-12-7": 20442, 115 | "4-9-13-7": 20345, 116 | "4-9-14-7": 21491, 117 | "4-9-3-7": 18967, 118 | "4-9-5-7": 20109, 119 | "4-9-7-7": 19752, 120 | "4-9-9-7": 19779 121 | } -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept-sources/manifest.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "bounds": [ 4 | 482060.5, 5 | 4390187.5, 6 | 1843.98, 7 | 482763.78, 8 | 4391071.0, 9 | 2029.41 10 | ], 11 | "inserted": true, 12 | "metadataPath": "red-rocks.json", 13 | "path": "https://data.entwine.io/red-rocks.laz", 14 | "points": 4004326 15 | } 16 | ] -------------------------------------------------------------------------------- /pointcloud/src/test/resources/red-rocks/ept.json: -------------------------------------------------------------------------------- 1 | { 2 | "bounds": [ 3 | 481969, 4 | 4390186, 5 | 1494, 6 | 482855, 7 | 4391072, 8 | 2380 9 | ], 10 | "boundsConforming": [ 11 | 482060.0, 12 | 4390187.0, 13 | 1843.0, 14 | 482764.0, 15 | 4391072.0, 16 | 2030.0 17 | ], 18 | "dataType": "laszip", 19 | "hierarchyType": "json", 20 | "points": 4004326, 21 | "schema": [ 22 | { 23 | "name": "X", 24 | "offset": 482412, 25 | "scale": 0.01, 26 | "size": 4, 27 | "type": "signed" 28 | }, 29 | { 30 | "name": "Y", 31 | "offset": 4390629, 32 | "scale": 0.01, 33 | "size": 4, 34 | "type": "signed" 35 | }, 36 | { 37 | "name": "Z", 38 | "offset": 1937, 39 | "scale": 0.01, 40 | "size": 4, 41 | "type": "signed" 42 | }, 43 | { 44 | "name": "Intensity", 45 | "size": 2, 46 | "type": "unsigned" 47 | }, 48 | { 
49 | "name": "ReturnNumber", 50 | "size": 1, 51 | "type": "unsigned" 52 | }, 53 | { 54 | "name": "NumberOfReturns", 55 | "size": 1, 56 | "type": "unsigned" 57 | }, 58 | { 59 | "name": "ScanDirectionFlag", 60 | "size": 1, 61 | "type": "unsigned" 62 | }, 63 | { 64 | "name": "EdgeOfFlightLine", 65 | "size": 1, 66 | "type": "unsigned" 67 | }, 68 | { 69 | "name": "Classification", 70 | "size": 1, 71 | "type": "unsigned" 72 | }, 73 | { 74 | "name": "ScanAngleRank", 75 | "size": 4, 76 | "type": "float" 77 | }, 78 | { 79 | "name": "UserData", 80 | "size": 1, 81 | "type": "unsigned" 82 | }, 83 | { 84 | "name": "PointSourceId", 85 | "size": 2, 86 | "type": "unsigned" 87 | }, 88 | { 89 | "name": "GpsTime", 90 | "size": 8, 91 | "type": "float" 92 | }, 93 | { 94 | "name": "Red", 95 | "size": 2, 96 | "type": "unsigned" 97 | }, 98 | { 99 | "name": "Green", 100 | "size": 2, 101 | "type": "unsigned" 102 | }, 103 | { 104 | "name": "Blue", 105 | "size": 2, 106 | "type": "unsigned" 107 | } 108 | ], 109 | "span": 128, 110 | "srs": { 111 | "authority": "EPSG", 112 | "horizontal": "26913", 113 | "wkt": "PROJCS[\"NAD83 / UTM zone 13N\",GEOGCS[\"NAD83\",DATUM[\"North_American_Datum_1983\",SPHEROID[\"GRS 1980\",6378137,298.257222101,AUTHORITY[\"EPSG\",\"7019\"]],TOWGS84[0,0,0,0,0,0,0],AUTHORITY[\"EPSG\",\"6269\"]],PRIMEM[\"Greenwich\",0,AUTHORITY[\"EPSG\",\"8901\"]],UNIT[\"degree\",0.0174532925199433,AUTHORITY[\"EPSG\",\"9122\"]],AUTHORITY[\"EPSG\",\"4269\"]],PROJECTION[\"Transverse_Mercator\"],PARAMETER[\"latitude_of_origin\",0],PARAMETER[\"central_meridian\",-105],PARAMETER[\"scale_factor\",0.9996],PARAMETER[\"false_easting\",500000],PARAMETER[\"false_northing\",0],UNIT[\"metre\",1,AUTHORITY[\"EPSG\",\"9001\"]],AXIS[\"Easting\",EAST],AXIS[\"Northing\",NORTH],AUTHORITY[\"EPSG\",\"26913\"]]" 114 | }, 115 | "version": "1.0.0" 116 | } -------------------------------------------------------------------------------- /pointcloud/src/test/resources/tiff/dem-rasterizer-bug.tiff: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/tiff/dem-rasterizer-bug.tiff -------------------------------------------------------------------------------- /pointcloud/src/test/resources/tiff/dem-reprojection-bug.tiff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geotrellis/geotrellis-pointcloud/01ed410c6e8fb916bbd0f6e03a3ba23f9fffae46/pointcloud/src/test/resources/tiff/dem-reprojection-bug.tiff -------------------------------------------------------------------------------- /pointcloud/src/test/scala/geotrellis/pointcloud/raster/ept/EPTMetadataSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package geotrellis.pointcloud.raster.ept 18 | 19 | import org.scalatest.funspec.AnyFunSpec 20 | 21 | class EPTMetadataSpec extends AnyFunSpec { 22 | val catalog: String = "src/test/resources/red-rocks" 23 | 24 | describe("EPTMetadata") { 25 | it("must have sorted resolutions") { 26 | val md = EPTMetadata(catalog) 27 | val rs = md.resolutions.map(_.resolution) 28 | val diffs = rs.zip(rs.drop(1)).map{ case (a,b) => b - a } 29 | 30 | assert(diffs.forall(_ > 0)) 31 | } 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /pointcloud/src/test/scala/geotrellis/pointcloud/raster/ept/EPTPathSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package geotrellis.pointcloud.raster.ept 18 | 19 | import org.scalatest.funspec.AnyFunSpec 20 | 21 | class EPTPathSpec extends AnyFunSpec { 22 | describe("EPTPathSpec") { 23 | it("relative path") { 24 | assert(EPTPath.parse("data/my-data.tiff").value == "data/my-data.tiff") 25 | } 26 | 27 | it("relative path with a scheme") { 28 | assert(EPTPath.parse("ept://data/my-data.tiff").value == "data/my-data.tiff") 29 | } 30 | 31 | it("relative path and localfs in a scheme") { 32 | assert(EPTPath.parse("ept+file://data/my-data.tiff").value == "file://data/my-data.tiff") 33 | } 34 | 35 | it("absolute path and s3 in a scheme") { 36 | assert(EPTPath.parse("ept+s3://data/my-data.tiff").value == "s3://data/my-data.tiff") 37 | } 38 | 39 | it("absolute path with a scheme") { 40 | assert(EPTPath.parse("ept+file:///tmp/data.tiff").value == "file:///tmp/data.tiff") 41 | } 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /pointcloud/src/test/scala/geotrellis/pointcloud/raster/ept/EPTRasterSourceProviderSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package geotrellis.pointcloud.raster.ept 18 | 19 | import geotrellis.raster.RasterSource 20 | import org.scalatest.funspec.AnyFunSpec 21 | 22 | class EPTRasterSourceProviderSpec extends AnyFunSpec { 23 | describe("EPTRasterSourceProvider") { 24 | val provider = new EPTRasterSourceProvider() 25 | 26 | it("should process a local prefixed string") { 27 | assert(provider.canProcess("ept+file:///tmp/path/to/random")) 28 | } 29 | 30 | it("should process an s3 prefixed string") { 31 | assert(provider.canProcess("ept+s3://bucket/key")) 32 | } 33 | 34 | it("should process a relative string") { 35 | assert(provider.canProcess("ept://tmp/path/to/random")) 36 | } 37 | 38 | it("should produce a TINRasterSource from a string") { 39 | assert(RasterSource("ept+file://dumping-ground/part-2/random").isInstanceOf[TINRasterSource]) 40 | } 41 | 42 | it("should produce a TINRasterSource from an extra string") { 43 | assert(RasterSource("tin+ept+file://dumping-ground/part-2/random").isInstanceOf[TINRasterSource]) 44 | } 45 | 46 | it("should produce a TINRasterSource from a relative string") { 47 | assert(RasterSource("ept://dumping-ground/part-2/random").isInstanceOf[TINRasterSource]) 48 | } 49 | 50 | it("should produce a IDWRasterSource from a string") { 51 | assert(RasterSource("idw+ept://dumping-ground/part-2/random").isInstanceOf[IDWRasterSource]) 52 | } 53 | 54 | it("should produce a IDWRasterSource from an s3 prefixed string") { 55 | assert(RasterSource("idw+ept+s3://dumping-ground/part-2/random").isInstanceOf[IDWRasterSource]) 56 | } 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /pointcloud/src/test/scala/geotrellis/pointcloud/raster/rasterize/triangles/TrianglesRasterizerSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except 
in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package geotrellis.pointcloud.raster.rasterize.triangles 18 | 19 | import geotrellis.raster._ 20 | import geotrellis.vector._ 21 | 22 | import org.scalatest.funspec.AnyFunSpec 23 | import org.scalatest.matchers.should.Matchers 24 | 25 | class TrianglesRasterizerSpec extends AnyFunSpec with Matchers { 26 | 27 | describe("TrianglesRasterizer") { 28 | 29 | val points = Array( 30 | Point(0, 2), Point(1, 2), Point(2, 2), 31 | Point(2, 1), Point(1, 1), Point(0, 1), 32 | Point(0, 0), Point(1, 0), Point(2, 0)) 33 | val zs = Array[Double](0, 1, 2, 3, 4, 5, 6, 7, 8) 34 | val triangles = Array( 35 | Polygon(points(1), points(4), points(5), points(1)), 36 | Polygon(points(1), points(2), points(4), points(1)), 37 | Polygon(points(4), points(2), points(3), points(4)), 38 | Polygon(points(6), points(5), points(4), points(6)), 39 | Polygon(points(6), points(4), points(7), points(6))) 40 | val re = RasterExtent( 41 | Extent(0, 0, 2, 2), 42 | 2, 2 43 | ) 44 | val indexMap = points.map({ point => (point.x, point.y) }).zipWithIndex.toMap 45 | 46 | it("should work when pixel falls on boundary of two triangles") { 47 | val tile = 48 | TrianglesRasterizer( 49 | re, 50 | (0 to 8).map({ i => i.toDouble }).toArray, 51 | triangles, 52 | indexMap) 53 | 54 | tile.getDouble(0, 1) should be ((6.0 + 4.0) / 2) 55 | } 56 | 57 | it("should work when pixel falls on boundary of only one triangle") { 58 | val tile = 59 | TrianglesRasterizer( 60 | re, 61 | (0 to 8).map({ i => i.toDouble }).toArray, 62 | triangles, 
63 | indexMap) 64 | 65 | tile.getDouble(0, 0) should be ((1.0 + 5.0) / 2) 66 | } 67 | 68 | it("should work when pixel falls in no triangles") { 69 | val tile = 70 | TrianglesRasterizer( 71 | re, 72 | (0 to 8).map({ i => i.toDouble }).toArray, 73 | triangles, 74 | indexMap) 75 | 76 | java.lang.Double.isNaN(tile.getDouble(1, 1)) should be (true) 77 | } 78 | 79 | it("should work when pixel falls in the interior of one triangle") { 80 | val tile = 81 | TrianglesRasterizer( 82 | RasterExtent(Extent(0.1, 0, 2, 2), 2, 2), 83 | (0 to 8).map({ i => i.toDouble }).toArray, 84 | triangles, 85 | indexMap) 86 | 87 | tile.getDouble(0, 0) should be (2.925) 88 | } 89 | 90 | } 91 | } 92 | -------------------------------------------------------------------------------- /pointcloud/src/test/scala/geotrellis/pointcloud/spark/PointCloudSpatialTestFiles.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package geotrellis.pointcloud.spark 18 | 19 | import geotrellis.pointcloud.spark.store.hadoop._ 20 | import geotrellis.proj4.CRS 21 | import geotrellis.raster.{DoubleConstantNoDataCellType, TileLayout} 22 | import geotrellis.spark.ContextRDD 23 | import geotrellis.layer._ 24 | import geotrellis.vector.Extent 25 | 26 | import spire.syntax.cfor.cfor 27 | import org.locationtech.jts.geom.Coordinate 28 | 29 | import scala.collection.mutable 30 | 31 | trait PointCloudSpatialTestFiles extends Serializable { self: PointCloudTestEnvironment => 32 | val extent = Extent(635609.85, 848889.7, 638992.55, 853545.43) 33 | val crs = CRS.fromEpsgCode(20255) 34 | val rdd = HadoopPointCloudRDD(lasPath).flatMap(_._2) 35 | val layoutDefinition = LayoutDefinition( 36 | extent, 37 | TileLayout(layoutCols = 5, layoutRows = 5, tileCols = 10, tileRows = 10)) 38 | val tiledWithLayout = rdd.tileToLayout(layoutDefinition) 39 | val gb = layoutDefinition.mapTransform(extent) 40 | 41 | val md = 42 | TileLayerMetadata[SpatialKey]( 43 | cellType = DoubleConstantNoDataCellType, 44 | layout = layoutDefinition, 45 | extent = extent, 46 | crs = crs, 47 | bounds = KeyBounds(gb) 48 | ) 49 | 50 | val pointCloudSample = ContextRDD(tiledWithLayout, md) 51 | 52 | val rddc = HadoopPointCloudRDD(lasPath).flatMap { case (_, pointClouds) => 53 | val extent = Extent(635609.85, 848889.7, 638992.55, 853545.43) 54 | val layoutDefinition = LayoutDefinition( 55 | extent, 56 | TileLayout(layoutCols = 5, layoutRows = 5, tileCols = 10, tileRows = 10)) 57 | val mapTransform = layoutDefinition.mapTransform 58 | 59 | var lastKey: SpatialKey = null 60 | val keysToPoints = mutable.Map[SpatialKey, mutable.ArrayBuffer[Coordinate]]() 61 | 62 | for (pointCloud <- pointClouds) { 63 | val len = pointCloud.length 64 | cfor(0)(_ < len, _ + 1) { i => 65 | val x = pointCloud.getX(i) 66 | val y = pointCloud.getY(i) 67 | val z = pointCloud.getZ(i) 68 | val p = new Coordinate(x, y, z) 69 | val key = mapTransform(x, y) 70 
| if (key == lastKey) { 71 | keysToPoints(lastKey) += p 72 | } else if (keysToPoints.contains(key)) { 73 | keysToPoints(key) += p 74 | lastKey = key 75 | } else { 76 | keysToPoints(key) = mutable.ArrayBuffer(p) 77 | lastKey = key 78 | } 79 | } 80 | } 81 | 82 | keysToPoints.map { case (k, v) => (k, v.toArray) } 83 | } 84 | .reduceByKey(_ ++ _).filter { _._2.length > 2 } 85 | 86 | val pointCloudSampleC = ContextRDD(rddc, md) 87 | 88 | } 89 | -------------------------------------------------------------------------------- /pointcloud/src/test/scala/geotrellis/pointcloud/spark/PointCloudTestEnvironment.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package geotrellis.pointcloud.spark 18 | 19 | import geotrellis.spark.testkit._ 20 | 21 | import org.apache.hadoop.fs.Path 22 | import org.scalatest.Suite 23 | 24 | import java.io.File 25 | 26 | trait PointCloudTestEnvironment extends TestEnvironment { self: Suite => 27 | val testResources = new File("src/test/resources") 28 | val lasPath = new Path(s"file://${testResources.getAbsolutePath}/las") 29 | val multipleLasPath = new Path(s"file://${testResources.getAbsolutePath}/las/files") 30 | 31 | def setS3Credentials: Unit = { 32 | try { 33 | val conf = ssc.sparkContext.hadoopConfiguration 34 | 35 | conf.set("fs.s3.impl", classOf[org.apache.hadoop.fs.s3a.S3AFileSystem].getName) 36 | conf.set("fs.s3a.aws.credentials.provider", classOf[com.amazonaws.auth.DefaultAWSCredentialsProviderChain].getName) 37 | conf.set("fs.s3a.endpoint", "s3.eu-west-2.amazonaws.com") 38 | } catch { 39 | case e: Throwable => println(e.getMessage) 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /pointcloud/src/test/scala/geotrellis/pointcloud/spark/buffer/BufferUnionableSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package geotrellis.pointcloud.spark.buffer 18 | 19 | import geotrellis.layer._ 20 | import geotrellis.util._ 21 | 22 | import geotrellis.spark.testkit._ 23 | 24 | import org.scalatest.funspec.AnyFunSpec 25 | 26 | object Stuff { 27 | case class UnionableThing(n: Int) { 28 | def union(other: Any): UnionableThing = { 29 | other match { 30 | case that: UnionableThing => UnionableThing(this.n * that.n) 31 | case _ => throw new Exception 32 | } 33 | } 34 | } 35 | } 36 | 37 | class BufferUnionableSpec extends AnyFunSpec with TestEnvironment { 38 | 39 | import Stuff.UnionableThing 40 | 41 | describe("General BufferTiles functionality") { 42 | it("should union neighbors, not union non-neighbors") { 43 | val key1 = SpatialKey(0,0) 44 | val key2 = SpatialKey(1,1) 45 | val key3 = SpatialKey(13, 33) 46 | val thing1 = UnionableThing(47) 47 | val thing2 = UnionableThing(53) 48 | val thing3 = UnionableThing(59) 49 | 50 | val rdd = sc.parallelize(List((key1, thing1), (key2, thing2), (key3, thing3))) 51 | val results = BufferUnionable(rdd).map({ case (k, thing) => k -> thing.n }).collect.toMap 52 | 53 | results(key1) should be (47 * 53) 54 | results(key2) should be (47 * 53) 55 | results(key3) should be (59) 56 | } 57 | } 58 | 59 | } 60 | -------------------------------------------------------------------------------- /pointcloud/src/test/scala/geotrellis/pointcloud/spark/dem/PointCloudDemSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package geotrellis.pointcloud.spark.dem 18 | 19 | import geotrellis.raster._ 20 | import geotrellis.pointcloud.spark.buffer._ 21 | import geotrellis.pointcloud.spark.store.hadoop.HadoopPointCloudRDD 22 | import geotrellis.pointcloud.spark.{PointCloudTestEnvironment, _} 23 | import geotrellis.layer._ 24 | import geotrellis.vector.Extent 25 | 26 | import org.scalatest.funspec.AnyFunSpec 27 | import org.scalatest.matchers.should.Matchers 28 | 29 | class PointCloudDemSpec extends AnyFunSpec with Matchers with PointCloudTestEnvironment { 30 | 31 | describe("PointCloud DEM support") { 32 | val min = { (a: Double, b: Double) => math.min(a, b) } 33 | val max = { (a: Double, b: Double) => math.max(a, b) } 34 | val rdd = HadoopPointCloudRDD(lasPath).flatMap(_._2) 35 | val cloud = rdd.first 36 | 37 | it("should be able to union two clouds") { 38 | val clouds = cloud.union(cloud) 39 | 40 | clouds.length should be (cloud.length * 2) 41 | } 42 | 43 | it("should be able to produce a tile") { 44 | val length = cloud.length 45 | val xs = (0 until length).map { i => cloud.getDouble(i, "X") } 46 | val ys = (0 until length).map { i => cloud.getDouble(i, "Y") } 47 | val xmin = xs.reduce(min) 48 | val xmax = xs.reduce(max) 49 | val ymin = ys.reduce(min) 50 | val ymax = ys.reduce(max) 51 | 52 | val re = RasterExtent(Extent(xmin, ymin, xmax, ymax), 10, 10) 53 | 54 | val tile = cloud.toTile(re, "Z") 55 | 56 | tile.getDouble(0, 0) should be < 420.51 // 435.50 57 | tile.getDouble(0, 0) should be > 420.50 // 435.49 58 | } 59 | 60 | 
it("should work with BufferUnionable") { 61 | val layoutDefinition = LayoutDefinition( 62 | Extent(635609.85, 848889.7, 638992.55, 853545.43), 63 | TileLayout(layoutCols = 5, layoutRows = 5, tileCols = 10, tileRows = 10)) 64 | val tiled = rdd.tileToLayout(layoutDefinition) 65 | val buffered = BufferUnionable(tiled) 66 | 67 | tiled.count should be (buffered.count) 68 | 69 | val result = tiled 70 | .zip(buffered) 71 | .map({ case ((_, a), (_, b)) => a.length < b.length }) 72 | .reduce(_ && _) 73 | 74 | result should be (true) 75 | } 76 | 77 | } 78 | } 79 | -------------------------------------------------------------------------------- /pointcloud/src/test/scala/geotrellis/pointcloud/spark/pyramid/PyramidSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package geotrellis.pointcloud.spark.pyramid 18 | 19 | import geotrellis.pointcloud.spark._ 20 | import geotrellis.proj4._ 21 | import geotrellis.raster._ 22 | import geotrellis.spark._ 23 | import geotrellis.layer._ 24 | import geotrellis.vector._ 25 | 26 | import org.locationtech.jts.geom.Coordinate 27 | 28 | import org.scalatest.funspec.AnyFunSpec 29 | import org.scalatest.matchers.should.Matchers 30 | 31 | class PyramidSpec extends AnyFunSpec with Matchers with PointCloudTestEnvironment with PointCloudSpatialTestFiles { 32 | describe("Pyramid") { 33 | it("should pyramid Bounds[SpatialKey]") { 34 | val md = TileLayerMetadata( 35 | ByteCellType, 36 | LayoutDefinition( 37 | Extent(-2.0037508342789244E7, -2.0037508342789244E7, 38 | 2.0037508342789244E7, 2.0037508342789244E7), 39 | TileLayout(8192,8192,256,256) 40 | ), 41 | Extent(-9634947.090382002, 4024185.376428919, 42 | -9358467.589532925, 4300664.877277998), 43 | WebMercator, 44 | KeyBounds(SpatialKey(2126,3216),SpatialKey(2182,3273)) 45 | ) 46 | 47 | val scheme = ZoomedLayoutScheme(WebMercator, 256) 48 | var rdd = ContextRDD(sc.emptyRDD[(SpatialKey, Array[Coordinate])], md) 49 | var zoom: Int = 13 50 | 51 | while (zoom > 0) { 52 | val (newZoom, newRDD) = Pyramid.up(rdd, scheme, zoom) 53 | val previousExtent = rdd.metadata.mapTransform(rdd.metadata.bounds.get.toGridBounds()) 54 | val nextExtent = newRDD.metadata.mapTransform(newRDD.metadata.bounds.get.toGridBounds()) 55 | nextExtent.contains(previousExtent) should be (true) 56 | zoom = newZoom 57 | rdd = newRDD 58 | } 59 | } 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /pointcloud/src/test/scala/geotrellis/pointcloud/spark/store/file/FileArrayCoordinateSpatialSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file 
except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package geotrellis.pointcloud.spark.store.file 18 | 19 | import geotrellis.pointcloud.spark._ 20 | import geotrellis.pointcloud.spark.store._ 21 | import geotrellis.layer._ 22 | import geotrellis.store._ 23 | import geotrellis.store.file._ 24 | import geotrellis.spark.store.file._ 25 | import geotrellis.store.index._ 26 | import geotrellis.spark.testkit.io._ 27 | import geotrellis.spark.testkit.testfiles.TestFiles 28 | 29 | import org.locationtech.jts.geom.Coordinate 30 | 31 | class FileArrayCoordinateSpatialSpec 32 | extends PersistenceSpec[SpatialKey, Array[Coordinate], TileLayerMetadata[SpatialKey]] 33 | with SpatialKeyIndexMethods 34 | with PointCloudTestEnvironment 35 | with TestFiles 36 | with PointCloudSpatialTestFiles { 37 | 38 | lazy val reader = FileLayerReader(outputLocalPath) 39 | lazy val creader = FileCollectionLayerReader(outputLocalPath) 40 | lazy val writer = FileLayerWriter(outputLocalPath) 41 | lazy val deleter = FileLayerDeleter(outputLocalPath) 42 | lazy val copier = FileLayerCopier(outputLocalPath) 43 | lazy val mover = FileLayerMover(outputLocalPath) 44 | lazy val reindexer = FileLayerReindexer(outputLocalPath) 45 | lazy val tiles = FileValueReader(outputLocalPath) 46 | lazy val sample = pointCloudSampleC 47 | 48 | describe("Filesystem layer names") { 49 | it("should not throw with bad characters in name") { 50 | val layer = sample 51 | val layerId = LayerId("Some!layer:%@~`{}id", 10) 52 | 53 | println(outputLocalPath) 54 | 
writer.write[SpatialKey, Array[Coordinate], TileLayerMetadata[SpatialKey]](layerId, layer, ZCurveKeyIndexMethod) 55 | val backin = reader.read[SpatialKey, Array[Coordinate], TileLayerMetadata[SpatialKey]](layerId) 56 | } 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /pointcloud/src/test/scala/geotrellis/pointcloud/spark/store/file/FilePointCloudSpatialSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package geotrellis.pointcloud.spark.store.file 18 | 19 | import io.pdal._ 20 | 21 | import geotrellis.pointcloud.spark._ 22 | import geotrellis.pointcloud.spark.store._ 23 | import geotrellis.layer._ 24 | import geotrellis.store._ 25 | import geotrellis.store.file._ 26 | import geotrellis.spark.store.file._ 27 | import geotrellis.store.index._ 28 | import geotrellis.spark.testkit.io._ 29 | import geotrellis.spark.testkit.testfiles.TestFiles 30 | 31 | class FilePointCloudSpatialSpec 32 | extends PersistenceSpec[SpatialKey, PointCloud, TileLayerMetadata[SpatialKey]] 33 | with SpatialKeyIndexMethods 34 | with PointCloudTestEnvironment 35 | with TestFiles 36 | with PointCloudSpatialTestFiles { 37 | 38 | lazy val reader = FileLayerReader(outputLocalPath) 39 | lazy val creader = FileCollectionLayerReader(outputLocalPath) 40 | lazy val writer = FileLayerWriter(outputLocalPath) 41 | lazy val deleter = FileLayerDeleter(outputLocalPath) 42 | lazy val copier = FileLayerCopier(outputLocalPath) 43 | lazy val mover = FileLayerMover(outputLocalPath) 44 | lazy val reindexer = FileLayerReindexer(outputLocalPath) 45 | lazy val tiles = FileValueReader(outputLocalPath) 46 | lazy val sample = pointCloudSample 47 | 48 | describe("Filesystem layer names") { 49 | it("should not throw with bad characters in name") { 50 | val layer = sample 51 | val layerId = LayerId("Some!layer:%@~`{}id", 10) 52 | 53 | println(outputLocalPath) 54 | writer.write[SpatialKey, PointCloud, TileLayerMetadata[SpatialKey]](layerId, layer, ZCurveKeyIndexMethod) 55 | val backin = reader.read[SpatialKey, PointCloud, TileLayerMetadata[SpatialKey]](layerId) 56 | } 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /pointcloud/src/test/scala/geotrellis/pointcloud/spark/store/hadoop/HadoopArrayCoordinateSpatialSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Azavea 3 | * 4 | * 
Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package geotrellis.pointcloud.spark.store.hadoop 18 | 19 | import geotrellis.pointcloud.spark._ 20 | import geotrellis.layer.{SpatialKey, TileLayerMetadata} 21 | import geotrellis.spark._ 22 | import geotrellis.store.{InvalidLayerIdError, LayerId} 23 | import geotrellis.store.hadoop._ 24 | import geotrellis.spark.store.hadoop._ 25 | import geotrellis.store.index._ 26 | import geotrellis.pointcloud.store.avro._ 27 | import geotrellis.spark.testkit.io._ 28 | import geotrellis.spark.testkit.testfiles.TestFiles 29 | 30 | import org.locationtech.jts.geom.Coordinate 31 | 32 | class HadoopArrayCoordinateSpatialSpec 33 | extends PersistenceSpec[SpatialKey, Array[Coordinate], TileLayerMetadata[SpatialKey]] 34 | with SpatialKeyIndexMethods 35 | with PointCloudTestEnvironment 36 | with TestFiles 37 | with PointCloudSpatialTestFiles { 38 | 39 | lazy val reader = HadoopLayerReader(outputLocal) 40 | lazy val creader = HadoopCollectionLayerReader(outputLocal) 41 | lazy val writer = HadoopLayerWriter(outputLocal) 42 | lazy val deleter = HadoopLayerDeleter(outputLocal) 43 | lazy val copier = HadoopLayerCopier(outputLocal) 44 | lazy val mover = HadoopLayerMover(outputLocal) 45 | lazy val reindexer = HadoopLayerReindexer(outputLocal) 46 | lazy val tiles = HadoopValueReader(outputLocal) 47 | lazy val sample = pointCloudSampleC 48 | 49 | describe("HDFS layer names") { 50 | it("should handle layer 
names with spaces") { 51 | val layer = sample 52 | val layerId = LayerId("Some layer", 10) 53 | 54 | writer.write[SpatialKey, Array[Coordinate], TileLayerMetadata[SpatialKey]](layerId, layer, ZCurveKeyIndexMethod) 55 | val backin = reader.read[SpatialKey, Array[Coordinate], TileLayerMetadata[SpatialKey]](layerId) 56 | } 57 | 58 | it("should fail gracefully with colon in name") { 59 | val layer = sample 60 | val layerId = LayerId("Some:layer", 10) 61 | 62 | intercept[InvalidLayerIdError] { 63 | writer.write[SpatialKey, Array[Coordinate], TileLayerMetadata[SpatialKey]](layerId, layer, ZCurveKeyIndexMethod) 64 | } 65 | } 66 | } 67 | } 68 | -------------------------------------------------------------------------------- /pointcloud/src/test/scala/geotrellis/pointcloud/spark/store/hadoop/HadoopPackedPointsRDDSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package geotrellis.pointcloud.spark.store.hadoop 18 | 19 | import geotrellis.pointcloud.spark.PointCloudTestEnvironment 20 | 21 | import spire.syntax.cfor._ 22 | 23 | import org.scalatest.funspec.AnyFunSpec 24 | import org.scalatest.matchers.should.Matchers 25 | 26 | class HadoopPackedPointsRDDSpec extends AnyFunSpec with Matchers with PointCloudTestEnvironment { 27 | describe("PackedPoints RDD reads") { 28 | it("should read LAS file as RDD using hadoop input format") { 29 | val source = HadoopPointCloudRDD(lasPath).flatMap(_._2) 30 | val pointsCount = source.mapPartitions { _.map { packedPoints => 31 | var acc = 0l 32 | cfor(0)(_ < packedPoints.length, _ + 1) { i => 33 | packedPoints.get(i) 34 | acc += 1 35 | } 36 | acc 37 | } }.reduce(_ + _) 38 | val sourceList = source.take(1).toList 39 | sourceList.map(_.length).head should be (1065) 40 | pointsCount should be (1065) 41 | } 42 | 43 | it("should read multiple LAS files as RDD using hadoop input format") { 44 | val source = HadoopPointCloudRDD(multipleLasPath).flatMap(_._2) 45 | val pointsCount = source.mapPartitions { _.map { packedPoints => 46 | var acc = 0l 47 | cfor(0)(_ < packedPoints.length, _ + 1) { i => 48 | packedPoints.get(i) 49 | acc += 1 50 | } 51 | acc 52 | } }.reduce(_ + _) 53 | val sourceList = source.take(1).toList 54 | sourceList.map(_.length).head should be (1065) 55 | pointsCount should be (4 * 1065) 56 | } 57 | 58 | it("should read correct crs") { 59 | val sourceHeader = HadoopPointCloudRDD(lasPath).take(1).head._1 60 | sourceHeader.crs.map(_.proj4jCrs.getName) should be (Some("lcc-CS")) 61 | } 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /pointcloud/src/test/scala/geotrellis/pointcloud/spark/store/hadoop/HadoopPointCloudSpatialSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the 
"License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package geotrellis.pointcloud.spark.store.hadoop 18 | 19 | import io.pdal._ 20 | 21 | import geotrellis.pointcloud.spark._ 22 | import geotrellis.pointcloud.spark.store._ 23 | import geotrellis.layer._ 24 | import geotrellis.store._ 25 | import geotrellis.store.hadoop._ 26 | import geotrellis.spark.store.hadoop._ 27 | import geotrellis.store.index._ 28 | import geotrellis.spark.testkit.io._ 29 | import geotrellis.spark.testkit.testfiles.TestFiles 30 | 31 | class HadoopPointCloudSpatialSpec 32 | extends PersistenceSpec[SpatialKey, PointCloud, TileLayerMetadata[SpatialKey]] 33 | with SpatialKeyIndexMethods 34 | with PointCloudTestEnvironment 35 | with TestFiles 36 | with PointCloudSpatialTestFiles { 37 | 38 | lazy val reader = HadoopLayerReader(outputLocal) 39 | lazy val creader = HadoopCollectionLayerReader(outputLocal) 40 | lazy val writer = HadoopLayerWriter(outputLocal) 41 | lazy val deleter = HadoopLayerDeleter(outputLocal) 42 | lazy val copier = HadoopLayerCopier(outputLocal) 43 | lazy val mover = HadoopLayerMover(outputLocal) 44 | lazy val reindexer = HadoopLayerReindexer(outputLocal) 45 | lazy val tiles = HadoopValueReader(outputLocal) 46 | lazy val sample = pointCloudSample 47 | 48 | describe("HDFS layer names") { 49 | it("should handle layer names with spaces") { 50 | val layer = sample 51 | val layerId = LayerId("Some layer", 10) 52 | 53 | writer.write[SpatialKey, PointCloud, TileLayerMetadata[SpatialKey]](layerId, 
layer, ZCurveKeyIndexMethod) 54 | val backin = reader.read[SpatialKey, PointCloud, TileLayerMetadata[SpatialKey]](layerId) 55 | } 56 | 57 | it("should fail gracefully with colon in name") { 58 | val layer = sample 59 | val layerId = LayerId("Some:layer", 10) 60 | 61 | intercept[InvalidLayerIdError] { 62 | writer.write[SpatialKey, PointCloud, TileLayerMetadata[SpatialKey]](layerId, layer, ZCurveKeyIndexMethod) 63 | } 64 | } 65 | } 66 | } 67 | -------------------------------------------------------------------------------- /pointcloud/src/test/scala/geotrellis/pointcloud/spark/store/s3/MockS3Client.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package geotrellis.pointcloud.spark.store.s3 18 | 19 | import software.amazon.awssdk.auth.credentials.{AwsBasicCredentials, StaticCredentialsProvider} 20 | import software.amazon.awssdk.services.s3.S3Client 21 | import software.amazon.awssdk.regions.Region 22 | 23 | import java.net.URI 24 | 25 | object MockS3Client extends Serializable { 26 | def apply(): S3Client = { 27 | val cred = AwsBasicCredentials.create("minio", "password") 28 | val credProvider = StaticCredentialsProvider.create(cred) 29 | S3Client.builder() 30 | .endpointOverride(new URI("http://localhost:9091")) 31 | .credentialsProvider(credProvider) 32 | .region(Region.US_EAST_1) 33 | .build() 34 | } 35 | 36 | @transient lazy val instance: S3Client = apply() 37 | } 38 | -------------------------------------------------------------------------------- /pointcloud/src/test/scala/geotrellis/pointcloud/spark/store/s3/S3ArrayCoordinateSpatialSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package geotrellis.pointcloud.spark.store.s3 18 | 19 | import geotrellis.pointcloud.spark._ 20 | import geotrellis.pointcloud.spark.store._ 21 | import geotrellis.layer._ 22 | import geotrellis.spark.store.GenericLayerReindexer 23 | import geotrellis.store._ 24 | import geotrellis.store.s3._ 25 | import geotrellis.spark.store.s3._ 26 | import geotrellis.spark.testkit.io._ 27 | import geotrellis.spark.testkit.testfiles.TestFiles 28 | 29 | import org.locationtech.jts.geom.Coordinate 30 | 31 | class S3ArrayCoordinateSpatialSpec 32 | extends PersistenceSpec[SpatialKey, Array[Coordinate], TileLayerMetadata[SpatialKey]] 33 | with SpatialKeyIndexMethods 34 | with PointCloudTestEnvironment 35 | with TestFiles 36 | with PointCloudSpatialTestFiles { 37 | 38 | lazy val bucket = "mock-bucket" 39 | lazy val prefix = "catalog" 40 | val client = MockS3Client.instance 41 | S3TestUtils.cleanBucket(client, bucket) 42 | registerAfterAll { () => 43 | S3TestUtils.cleanBucket(client, bucket) 44 | } 45 | 46 | // We need to register the mock client for SPI loaded classes 47 | S3ClientProducer.set(() => MockS3Client.instance) 48 | 49 | lazy val attributeStore = new S3AttributeStore(bucket, prefix, MockS3Client.instance) 50 | 51 | lazy val rddReader = new S3RDDReader(MockS3Client.instance) 52 | lazy val rddWriter = new S3RDDWriter(MockS3Client.instance) 53 | 54 | lazy val reader = new S3LayerReader(attributeStore, MockS3Client.instance) 55 | lazy val creader = new S3CollectionLayerReader(attributeStore) 56 | lazy val writer = new S3LayerWriter(attributeStore, bucket, prefix, identity, MockS3Client.instance) 57 | lazy val deleter = new S3LayerDeleter(attributeStore, MockS3Client.instance) 58 | lazy val copier = new S3LayerCopier(attributeStore, bucket, prefix, MockS3Client.instance) 59 | lazy val reindexer = GenericLayerReindexer(attributeStore, reader, writer, deleter, copier) 60 | lazy val mover = GenericLayerMover(copier, deleter) 61 | lazy val tiles = new 
S3ValueReader(attributeStore, MockS3Client.instance) 62 | lazy val sample = pointCloudSampleC 63 | } 64 | -------------------------------------------------------------------------------- /pointcloud/src/test/scala/geotrellis/pointcloud/spark/store/s3/S3PointCloudSpatialSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package geotrellis.pointcloud.spark.store.s3 18 | 19 | import io.pdal._ 20 | 21 | import geotrellis.pointcloud.spark._ 22 | import geotrellis.pointcloud.spark.store._ 23 | import geotrellis.layer._ 24 | import geotrellis.spark.store.GenericLayerReindexer 25 | import geotrellis.store._ 26 | import geotrellis.store.s3._ 27 | import geotrellis.spark.store.s3._ 28 | import geotrellis.spark.testkit.io._ 29 | import geotrellis.spark.testkit.testfiles.TestFiles 30 | 31 | class S3PointCloudSpatialSpec 32 | extends PersistenceSpec[SpatialKey, PointCloud, TileLayerMetadata[SpatialKey]] 33 | with SpatialKeyIndexMethods 34 | with PointCloudTestEnvironment 35 | with TestFiles 36 | with PointCloudSpatialTestFiles { 37 | 38 | lazy val bucket = "mock-bucket" 39 | lazy val prefix = "catalog" 40 | val client = MockS3Client.instance 41 | S3TestUtils.cleanBucket(client, bucket) 42 | registerAfterAll { () => 43 | S3TestUtils.cleanBucket(client, bucket) 44 | } 45 | 46 | // We need to register 
the mock client for SPI loaded classes 47 | S3ClientProducer.set(() => MockS3Client.instance) 48 | 49 | lazy val attributeStore = new S3AttributeStore(bucket, prefix, MockS3Client.instance) 50 | 51 | lazy val rddReader = new S3RDDReader(MockS3Client.instance) 52 | lazy val rddWriter = new S3RDDWriter(MockS3Client.instance) 53 | 54 | lazy val reader = new S3LayerReader(attributeStore, MockS3Client.instance) 55 | lazy val creader = new S3CollectionLayerReader(attributeStore) 56 | lazy val writer = new S3LayerWriter(attributeStore, bucket, prefix, identity, MockS3Client.instance) 57 | lazy val deleter = new S3LayerDeleter(attributeStore, MockS3Client.instance) 58 | lazy val copier = new S3LayerCopier(attributeStore, bucket, prefix, MockS3Client.instance) 59 | lazy val reindexer = GenericLayerReindexer(attributeStore, reader, writer, deleter, copier) 60 | lazy val mover = GenericLayerMover(copier, deleter) 61 | lazy val tiles = new S3ValueReader(attributeStore, MockS3Client.instance) 62 | lazy val sample = pointCloudSample 63 | } 64 | -------------------------------------------------------------------------------- /pointcloud/src/test/scala/geotrellis/pointcloud/spark/store/s3/S3TestUtils.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package geotrellis.pointcloud.spark.store.s3 18 | 19 | import software.amazon.awssdk.services.s3.S3Client 20 | import software.amazon.awssdk.services.s3.model._ 21 | 22 | import scala.collection.JavaConverters._ 23 | 24 | object S3TestUtils { 25 | def cleanBucket(client: S3Client, bucket: String) = { 26 | try { 27 | val listObjectsReq = 28 | ListObjectsV2Request.builder() 29 | .bucket(bucket) 30 | .build() 31 | val objIdentifiers = client.listObjectsV2Paginator(listObjectsReq) 32 | .contents 33 | .asScala 34 | .map { s3obj => ObjectIdentifier.builder.key(s3obj.key).build() } 35 | .toList 36 | val deleteDefinition = Delete.builder() 37 | .objects(objIdentifiers:_*) 38 | .build() 39 | val deleteReq = DeleteObjectsRequest.builder() 40 | .bucket(bucket) 41 | .delete(deleteDefinition) 42 | .build() 43 | client.deleteObjects(deleteReq) 44 | } catch { 45 | case nsb: NoSuchBucketException => 46 | val createBucketReq = 47 | CreateBucketRequest.builder() 48 | .bucket(bucket) 49 | .build() 50 | client.createBucket(createBucketReq) 51 | } 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /pointcloud/src/test/scala/geotrellis/pointcloud/spark/tiling/PointCloudTilingSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Azavea 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package geotrellis.pointcloud.spark.tiling 18 | 19 | import geotrellis.raster.TileLayout 20 | import Implicits._ 21 | import geotrellis.pointcloud.spark.PointCloudTestEnvironment 22 | import geotrellis.layer.LayoutDefinition 23 | import geotrellis.pointcloud.spark.store.hadoop.HadoopPointCloudRDD 24 | import geotrellis.vector.Extent 25 | 26 | import org.scalatest.funspec.AnyFunSpec 27 | import org.scalatest.matchers.should.Matchers 28 | 29 | class PointCloudTilingSpec extends AnyFunSpec with Matchers with PointCloudTestEnvironment { 30 | describe("PointCloud RDD tiling") { 31 | it("should tile RDD of packed points") { 32 | //Pipeline.loadNativeLibrary() 33 | val source = HadoopPointCloudRDD(lasPath).flatMap(_._2) 34 | val original = source.take(1).toList.head 35 | // that means there can be no more points per "tile" than tileCols * tileRows 36 | val ld = LayoutDefinition( 37 | Extent(635609.85, 848889.7, 638992.55, 853545.43), 38 | TileLayout(layoutCols = 5, layoutRows = 5, tileCols = 10, tileRows = 10) 39 | ) 40 | val tiled = source.tileToLayout(ld) 41 | tiled.map(_._2.length).reduce(_ + _) should be (original.length) 42 | tiled.count() should be (25) 43 | } 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /project/Dependencies.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2017 Azavea. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | import sbt._ 18 | 19 | object Version { 20 | val geotrellisPointCloud = "0.4.0" + Environment.versionSuffix 21 | val geotrellis = "3.5.0" 22 | val crossScala = List("2.12.10", "2.11.12") 23 | val scala = crossScala.head 24 | val pdal = "2.2.0" 25 | val scalaTest = "3.2.2" 26 | lazy val hadoop = Environment.hadoopVersion 27 | lazy val spark = Environment.sparkVersion 28 | } 29 | 30 | object Dependencies { 31 | val scalatest = "org.scalatest" %% "scalatest" % Version.scalaTest 32 | val sparkCore = "org.apache.spark" %% "spark-core" % Version.spark 33 | val sparkSQL = "org.apache.spark" %% "spark-sql" % Version.spark 34 | val hadoopClient = "org.apache.hadoop" % "hadoop-client" % Version.hadoop 35 | val hadoopAWS = "org.apache.hadoop" % "hadoop-aws" % Version.hadoop 36 | 37 | val pdalScala = "io.pdal" %% "pdal-scala" % Version.pdal 38 | val pdalNative = "io.pdal" % "pdal-native" % Version.pdal 39 | 40 | val geotrellisSparkTestkit = "org.locationtech.geotrellis" %% "geotrellis-spark-testkit" % Version.geotrellis 41 | val geotrellisSpark = "org.locationtech.geotrellis" %% "geotrellis-spark" % Version.geotrellis 42 | val geotrellisRaster = "org.locationtech.geotrellis" %% "geotrellis-raster" % Version.geotrellis 43 | val geotrellisS3 = "org.locationtech.geotrellis" %% "geotrellis-s3" % Version.geotrellis 44 | val geotrellisS3Spark = "org.locationtech.geotrellis" %% "geotrellis-s3-spark" % Version.geotrellis 45 | val geotrellisS3Testkit = "org.locationtech.geotrellis" %% "geotrellis-s3-testkit" % Version.geotrellis 46 | 47 | val all = Seq( 48 | pdalScala, 49 | pdalNative, 50 | geotrellisSpark % Provided, 51 | geotrellisRaster % Provided, 52 | geotrellisS3 % Provided, 53 | geotrellisS3Spark % Provided, 54 | geotrellisSparkTestkit % Test, 55 | sparkCore % Provided, 56 | sparkSQL % Provided, 57 | hadoopClient % Provided, 58 | hadoopAWS % Test, 59 
| scalatest % Test 60 | ) 61 | } 62 | -------------------------------------------------------------------------------- /project/Environment.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2017 Azavea. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | import scala.util.Properties 18 | 19 | object Environment { 20 | def either(environmentVariable: String, default: String): String = 21 | Properties.envOrElse(environmentVariable, default) 22 | 23 | lazy val hadoopVersion = either("SPARK_HADOOP_VERSION", "2.8.5") 24 | lazy val sparkVersion = either("SPARK_VERSION", "2.4.5") 25 | lazy val versionSuffix = either("VERSION_SUFFIX", "-SNAPSHOT") 26 | lazy val ldLibraryPath = either("LD_LIBRARY_PATH", "/usr/local/lib") 27 | } 28 | -------------------------------------------------------------------------------- /project/build.properties: -------------------------------------------------------------------------------- 1 | sbt.version=1.3.13 2 | -------------------------------------------------------------------------------- /project/plugins.sbt: -------------------------------------------------------------------------------- 1 | addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.10.0-RC1") 2 | addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.15.0") 3 | addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3") 4 | addSbtPlugin("com.timushev.sbt" % 
"sbt-updates" % "0.5.1")
addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.5.0")
addSbtPlugin("org.foundweekends" % "sbt-bintray" % "0.5.6")
addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.3.7")
--------------------------------------------------------------------------------
/scripts/docker/1.8.0/debian/spark/Dockerfile:
--------------------------------------------------------------------------------
FROM debian:sid
# MAINTAINER is deprecated; LABEL maintainer is the supported replacement.
LABEL maintainer="Grigory Pomadchin"

RUN set -ex && \
    apt update -y && \
    apt install \
    openjdk-8-jdk \
    ca-certificates-java -y

ENV JAVA_HOME /usr/lib/jvm/java-1.8.0-openjdk-amd64

# Install PDAL and Python deps https://tracker.debian.org/pkg/pdal
RUN apt-get -y install pdal libpdal-dev bash gcc g++ cmake
RUN apt-get -y install wget curl python-pip python3 python3-pip

# Install GDAL
ENV ROOTDIR /usr/local
ENV LD_LIBRARY_PATH /usr/local/lib
ENV GDAL_VERSION 2.4.0
ENV OPENJPEG_VERSION 2.3.0

# Load assets
WORKDIR $ROOTDIR/
RUN mkdir -p $ROOTDIR/src

RUN wget -qO- \
    http://download.osgeo.org/gdal/${GDAL_VERSION}/gdal-${GDAL_VERSION}.tar.gz | \
    tar -xzC $ROOTDIR/src/
RUN wget -qO- \
    https://github.com/uclouvain/openjpeg/archive/v${OPENJPEG_VERSION}.tar.gz | \
    tar -xzC $ROOTDIR/src/

RUN set -ex \
    # Runtime dependencies
    && deps=" \
    python-dev \
    python3-dev \
    python-numpy \
    python3-numpy \
    bash-completion \
    libspatialite-dev \
    libpq-dev \
    libcurl4-gnutls-dev \
    libproj-dev \
    libxml2-dev \
    libgeos-dev \
    libnetcdf-dev \
    libpoppler-dev \
    libhdf4-alt-dev \
    libhdf5-serial-dev \
    " \
    # Build dependencies
    && buildDeps=" \
    build-essential \
    cmake \
    swig \
    ant \
    pkg-config \
    "\
    && apt-get update && apt-get install -y $buildDeps $deps --no-install-recommends \
    # Compile and install OpenJPEG
    && cd src/openjpeg-${OPENJPEG_VERSION} \
    && mkdir build && cd build \
    && cmake .. -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=$ROOTDIR \
    && make -j3 && make -j3 install && make -j3 clean \
    && cd $ROOTDIR && rm -Rf src/openjpeg* \
    # Compile and install GDAL
    && cd src/gdal-${GDAL_VERSION} \
    && ./configure --with-python --with-spatialite --with-pg --with-curl --with-java \
    --with-poppler --with-openjpeg=$ROOTDIR \
    && make -j3 && make -j3 install && ldconfig \
    # Compile Python and Java bindings for GDAL
    && cd $ROOTDIR/src/gdal-${GDAL_VERSION}/swig/java && make -j3 && make -j3 install \
    && cd $ROOTDIR/src/gdal-${GDAL_VERSION}/swig/python \
    && python3 setup.py build \
    && python3 setup.py install \
    && cd $ROOTDIR && rm -Rf src/gdal* \
    # Remove build dependencies
    && apt-get purge -y --auto-remove $buildDeps \
    && rm -rf /var/lib/apt/lists/*

# Install Spark
ENV PYSPARK_PYTHON /usr/bin/python3
ENV PYSPARK_DRIVER_PYTHON /usr/bin/python3
ENV SPARK_HOME /opt/spark
ENV SPARK_CONF_DIR $SPARK_HOME/conf
ENV PATH $PATH:$SPARK_HOME/bin:$SPARK_HOME/sbin

RUN set -x \
    && mkdir -p $SPARK_HOME $SPARK_CONF_DIR \
    && curl -sS -# http://mirror.metrocast.net/apache/spark/spark-2.4.1/spark-2.4.1-bin-hadoop2.7.tgz \
    | tar -xz -C ${SPARK_HOME} --strip-components=1

COPY ./fs /

RUN mkdir -p /data/spark

VOLUME [ "/data/spark" ]

WORKDIR "${SPARK_HOME}"
--------------------------------------------------------------------------------
/scripts/docker/1.8.0/debian/spark/Dockerfile.mbio:
--------------------------------------------------------------------------------
FROM quay.io/geodocker/pdal-mbio:1.8.0
# MAINTAINER is deprecated; LABEL maintainer is the supported replacement.
LABEL maintainer="Grigory Pomadchin"

RUN set -ex && \
    apt update -y && \
    apt install \
    openjdk-8-jdk \
    ca-certificates-java -y

ENV JAVA_HOME
/usr/lib/jvm/java-1.8.0-openjdk-amd64 11 | 12 | # Install PDAL and Python deps https://tracker.debian.org/pkg/pdal 13 | RUN apt-get -y install bash gcc g++ cmake 14 | RUN apt-get -y install wget curl python-pip python3 python3-pip 15 | 16 | # Install GDAL 17 | ENV ROOTDIR /usr/local 18 | ENV LD_LIBRARY_PATH /usr/local/lib 19 | ENV GDAL_VERSION 2.4.0 20 | ENV OPENJPEG_VERSION 2.3.0 21 | 22 | # Load assets 23 | WORKDIR $ROOTDIR/ 24 | RUN mkdir -p $ROOTDIR/src 25 | 26 | RUN wget -qO- \ 27 | http://download.osgeo.org/gdal/${GDAL_VERSION}/gdal-${GDAL_VERSION}.tar.gz | \ 28 | tar -xzC $ROOTDIR/src/ 29 | RUN wget -qO- \ 30 | https://github.com/uclouvain/openjpeg/archive/v${OPENJPEG_VERSION}.tar.gz | \ 31 | tar -xzC $ROOTDIR/src/ 32 | 33 | RUN set -ex \ 34 | # Runtime dependencies 35 | && deps=" \ 36 | python-dev \ 37 | python3-dev \ 38 | python-numpy \ 39 | python3-numpy \ 40 | bash-completion \ 41 | libspatialite-dev \ 42 | libpq-dev \ 43 | libcurl4-gnutls-dev \ 44 | libproj-dev \ 45 | libxml2-dev \ 46 | libgeos-dev \ 47 | libnetcdf-dev \ 48 | libpoppler-dev \ 49 | libhdf4-alt-dev \ 50 | libhdf5-serial-dev \ 51 | " \ 52 | # Build dependencies 53 | && buildDeps=" \ 54 | build-essential \ 55 | cmake \ 56 | swig \ 57 | ant \ 58 | pkg-config \ 59 | "\ 60 | && apt-get update && apt-get install -y $buildDeps $deps --no-install-recommends \ 61 | # Compile and install OpenJPEG 62 | && cd src/openjpeg-${OPENJPEG_VERSION} \ 63 | && mkdir build && cd build \ 64 | && cmake .. 
-DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=$ROOTDIR \ 65 | && make -j3 && make -j3 install && make -j3 clean \ 66 | && cd $ROOTDIR && rm -Rf src/openjpeg* \ 67 | # Compile and install GDAL 68 | && cd src/gdal-${GDAL_VERSION} \ 69 | && ./configure --with-python --with-spatialite --with-pg --with-curl --with-java \ 70 | --with-poppler --with-openjpeg=$ROOTDIR \ 71 | && make -j3 && make -j3 install && ldconfig \ 72 | # Compile Python and Java bindings for GDAL 73 | && cd $ROOTDIR/src/gdal-${GDAL_VERSION}/swig/java && make -j3 && make -j3 install \ 74 | && cd $ROOTDIR/src/gdal-${GDAL_VERSION}/swig/python \ 75 | && python3 setup.py build \ 76 | && python3 setup.py install \ 77 | && cd $ROOTDIR && rm -Rf src/gdal* \ 78 | # Remove build dependencies 79 | && apt-get purge -y --auto-remove $buildDeps \ 80 | && rm -rf /var/lib/apt/lists/* 81 | 82 | # Install Spark 83 | ENV PYSPARK_PYTHON /usr/bin/python3 84 | ENV PYSPARK_DRIVER_PYTHON /usr/bin/python3 85 | ENV SPARK_HOME /opt/spark 86 | ENV SPARK_CONF_DIR $SPARK_HOME/conf 87 | ENV PATH $PATH:$SPARK_HOME/bin:$SPARK_HOME/sbin 88 | 89 | RUN set -x \ 90 | && mkdir -p $SPARK_HOME $SPARK_CONF_DIR \ 91 | && curl -sS -# http://mirror.metrocast.net/apache/spark/spark-2.4.1/spark-2.4.1-bin-hadoop2.7.tgz \ 92 | | tar -xz -C ${SPARK_HOME} --strip-components=1 93 | 94 | COPY ./fs / 95 | 96 | RUN mkdir -p /data/spark 97 | 98 | VOLUME [ "/data/spark" ] 99 | 100 | WORKDIR "${SPARK_HOME}" 101 | -------------------------------------------------------------------------------- /scripts/docker/1.8.0/debian/spark/fs/opt/spark/conf/fairscheduler.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | FAIR 5 | 1 6 | 2 7 | 8 | 9 | FIFO 10 | 2 11 | 3 12 | 13 | 14 | -------------------------------------------------------------------------------- /scripts/docker/1.8.0/debian/spark/fs/opt/spark/conf/log4j.properties: -------------------------------------------------------------------------------- 1 | 
# Set everything to be logged to the console 2 | log4j.rootCategory=INFO, console 3 | log4j.appender.console=org.apache.log4j.ConsoleAppender 4 | log4j.appender.console.target=System.err 5 | log4j.appender.console.layout=org.apache.log4j.PatternLayout 6 | log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n 7 | 8 | # Settings to quiet third party logs that are too verbose 9 | log4j.logger.org.eclipse.jetty=WARN 10 | log4j.logger.org.eclipse.jetty.util.component.AbstractLifeCycle=ERROR 11 | log4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=INFO 12 | log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO 13 | -------------------------------------------------------------------------------- /scripts/docker/1.8.0/debian/spark/fs/opt/spark/conf/spark-defaults.conf: -------------------------------------------------------------------------------- 1 | # spark.driver.cores 2 2 | # spark.driver.memory 8g 3 | # spark.executor.memory 8g 4 | # spark.local.dir /data/spark 5 | # spark.master spark://master.spark.gt 6 | 7 | # spark.master.ui.port 8180 8 | # spark.worker.ui.port 8181 9 | # spark.ui.port 4040 10 | # spark.history.ui.port 18080 11 | 12 | # spark.driver.port 40010 13 | # spark.executor.port 40020 14 | # spark.fileserver.port 40030 15 | # spark.broadcast.port 40040 16 | # spark.replClassServer.port 40050 17 | # spark.blockManager.port 40060 -------------------------------------------------------------------------------- /scripts/docker/1.8.0/debian/spark/fs/opt/spark/conf/spark-env.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # 4 | # Licensed to the Apache Software Foundation (ASF) under one or more 5 | # contributor license agreements. See the NOTICE file distributed with 6 | # this work for additional information regarding copyright ownership. 
7 | # The ASF licenses this file to You under the Apache License, Version 2.0 8 | # (the "License"); you may not use this file except in compliance with 9 | # the License. You may obtain a copy of the License at 10 | # 11 | # http://www.apache.org/licenses/LICENSE-2.0 12 | # 13 | # Unless required by applicable law or agreed to in writing, software 14 | # distributed under the License is distributed on an "AS IS" BASIS, 15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | # See the License for the specific language governing permissions and 17 | # limitations under the License. 18 | # 19 | 20 | # This file is sourced when running various Spark programs. 21 | # Copy it as spark-env.sh and edit that to configure Spark for your site. 22 | 23 | # Options read when launching programs locally with 24 | # ./bin/run-example or ./bin/spark-submit 25 | # - HADOOP_CONF_DIR, to point Spark towards Hadoop configuration files 26 | # - SPARK_LOCAL_IP, to set the IP address Spark binds to on this node 27 | # - SPARK_PUBLIC_DNS, to set the public dns name of the driver program 28 | # - SPARK_CLASSPATH, default classpath entries to append 29 | 30 | # Options read by executors and drivers running inside the cluster 31 | # - SPARK_LOCAL_IP, to set the IP address Spark binds to on this node 32 | # - SPARK_PUBLIC_DNS, to set the public DNS name of the driver program 33 | # - SPARK_CLASSPATH, default classpath entries to append 34 | # - SPARK_LOCAL_DIRS, storage directories to use on this node for shuffle and RDD data 35 | # - MESOS_NATIVE_JAVA_LIBRARY, to point to your libmesos.so if you use Mesos 36 | 37 | # Options read in YARN client mode 38 | # - HADOOP_CONF_DIR, to point Spark towards Hadoop configuration files 39 | # - SPARK_EXECUTOR_INSTANCES, Number of executors to start (Default: 2) 40 | # - SPARK_EXECUTOR_CORES, Number of cores for the executors (Default: 1). 41 | # - SPARK_EXECUTOR_MEMORY, Memory per Executor (e.g. 
1000M, 2G) (Default: 1G) 42 | # - SPARK_DRIVER_MEMORY, Memory for Driver (e.g. 1000M, 2G) (Default: 1G) 43 | 44 | # Options for the daemons used in the standalone deploy mode 45 | # - SPARK_MASTER_HOST, to bind the master to a different IP address or hostname 46 | # - SPARK_MASTER_PORT / SPARK_MASTER_WEBUI_PORT, to use non-default ports for the master 47 | # - SPARK_MASTER_OPTS, to set config properties only for the master (e.g. "-Dx=y") 48 | # - SPARK_WORKER_CORES, to set the number of cores to use on this machine 49 | # - SPARK_WORKER_MEMORY, to set how much total memory workers have to give executors (e.g. 1000m, 2g) 50 | # - SPARK_WORKER_PORT / SPARK_WORKER_WEBUI_PORT, to use non-default ports for the worker 51 | # - SPARK_WORKER_INSTANCES, to set the number of worker processes per node 52 | # - SPARK_WORKER_DIR, to set the working directory of worker processes 53 | SPARK_WORKER_DIR=/data/spark 54 | # - SPARK_WORKER_OPTS, to set config properties only for the worker (e.g. "-Dx=y") 55 | # - SPARK_DAEMON_MEMORY, to allocate to the master, worker and history server themselves (default: 1g). 56 | # - SPARK_HISTORY_OPTS, to set config properties only for the history server (e.g. "-Dx=y") 57 | # - SPARK_SHUFFLE_OPTS, to set config properties only for the external shuffle service (e.g. "-Dx=y") 58 | # - SPARK_DAEMON_JAVA_OPTS, to set config properties for all daemons (e.g. "-Dx=y") 59 | # - SPARK_PUBLIC_DNS, to set the public dns name of the master or workers 60 | 61 | # Generic options for the daemons used in the standalone deploy mode 62 | # - SPARK_CONF_DIR Alternate conf dir. (Default: ${SPARK_HOME}/conf) 63 | SPARK_CONF_DIR=/etc/spark 64 | # - SPARK_LOG_DIR Where log files are stored. (Default: ${SPARK_HOME}/logs) 65 | # - SPARK_PID_DIR Where the pid file is stored. (Default: /tmp) 66 | # - SPARK_IDENT_STRING A string representing this instance of spark. (Default: $USER) 67 | # - SPARK_NICENESS The scheduling priority for daemons. 
(Default: 0) 68 | --------------------------------------------------------------------------------