├── .gitignore
├── README.md
├── data
│   ├── geotrellis-landsat-tutorial
│   │   └── readme.txt
│   └── rast.tif
├── pom.xml
├── src
│   ├── main
│   │   ├── resources
│   │   │   └── log4j.properties
│   │   └── scala
│   │       ├── biggis
│   │       │   └── landuse
│   │       │       ├── api
│   │       │       │   └── package.scala
│   │       │       └── spark
│   │       │           └── examples
│   │       │               ├── ConvolveLayerExample.scala
│   │       │               ├── DeleteLayer.scala
│   │       │               ├── DummyGeotrellisExample.scala
│   │       │               ├── GeotiffTilingExample.scala
│   │       │               ├── GeotiffToPyramid.scala
│   │       │               ├── GettingStarted.scala
│   │       │               ├── LayerToGeotiff.scala
│   │       │               ├── LayerToPyramid.scala
│   │       │               ├── LayerUpdaterExample.scala
│   │       │               ├── ManyLayersToMultibandLayer.scala
│   │       │               ├── ManySingleBandLayersToMultibandLayer.scala
│   │       │               ├── MultibandExample.scala
│   │       │               ├── MultibandGeotiffTilingExample.scala
│   │       │               ├── MultibandGeotiffToLayerNoReproj.scala
│   │       │               ├── MultibandLayerToGeotiff.scala
│   │       │               ├── NDVILayerExample.scala
│   │       │               ├── NDVILayerWithCloudMaskExample.scala
│   │       │               ├── ServeLayerAsMap.scala
│   │       │               ├── ShapefileExample.scala
│   │       │               ├── ShapefilePolygonRasterizer.scala
│   │       │               ├── SpatialGetisOrd.scala
│   │       │               ├── TestClassifierSVM.scala
│   │       │               ├── TilePixelingExample.scala
│   │       │               ├── TilePixelingToCSVExample.scala
│   │       │               ├── UploadToHdfs.scala
│   │       │               ├── Utils.scala
│   │       │               ├── UtilsKafka.scala
│   │       │               ├── UtilsML.scala
│   │       │               ├── UtilsSVM.scala
│   │       │               ├── UtilsShape.scala
│   │       │               ├── WordCount.scala
│   │       │               ├── WorkflowExample.scala
│   │       │               └── ZoomResampleLayer.scala
│   │       ├── geotrellis
│   │       │   └── spark
│   │       │       ├── rasterize
│   │       │       │   └── RasterizeFeaturesRDD.scala
│   │       │       └── resample
│   │       │           ├── Implicits.scala
│   │       │           ├── LayerRDDZoomResampleMethods.scala
│   │       │           ├── ZoomResampleMultiband.scala
│   │       │           ├── ZoomResampleMultibandMethods.scala
│   │       │           └── ZoomResampleTEST.scala
│   │       └── org
│   │           └── apache
│   │               └── spark
│   │                   └── mllib
│   │                       └── classification
│   │                           ├── SVMMultiClass.scala
│   │                           └── impl
│   │                               └── GLMClassificationMultiClassOVAModel.scala
│   └── test
│       └── scala
│           └── samples
│               └── specs.scala
└── static
    ├── GettingStarted.html
    ├── css
    │   └── l.geosearch.css
    ├── images
    │   ├── alert.png
    │   ├── geosearch.png
    │   ├── layers-2x.png
    │   ├── layers.png
    │   ├── marker-icon-2x.png
    │   ├── marker-icon.png
    │   ├── marker-shadow.png
    │   ├── spinner.gif
    │   └── transparent.png
    ├── index.html
    ├── js
    │   ├── l.control.geosearch.js
    │   └── l.geosearch.provider.nominatim.js
    ├── leaflet.css
    └── leaflet.js
/.gitignore:
--------------------------------------------------------------------------------
1 | # use glob syntax.
2 | syntax: glob
3 | *.ser
4 | *.class
5 | *~
6 | *.bak
7 | #*.off
8 | *.old
9 |
10 | # eclipse conf file
11 | .settings
12 | .classpath
13 | .project
14 | .manager
15 | .scala_dependencies
16 |
17 | # idea
18 | .idea
19 | *.iml
20 |
21 | # building
22 | target
23 | build
24 | null
25 | tmp*
26 | temp*
27 | dist
28 | test-output
29 | build.log
30 |
31 | # other scm
32 | .svn
33 | .CVS
34 | .hg*
35 |
36 | # switch to regexp syntax.
37 | # syntax: regexp
38 | # ^\.pc/
39 |
40 | # build output not in target directory
41 | /.cache-main
42 | /.cache-tests
43 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # biggis-landuse
2 | Land use update detection based on Geotrellis and Spark
3 |
4 | # Quick and dirty usage example
5 |
6 | ``` sh
7 | # first, we compile everything and produce a fat jar
8 | # which contains all the dependencies
9 | mvn package
10 |
11 | # now we can run the example app
12 | java -cp target/biggis-landuse-0.0.8-SNAPSHOT.jar \
13 | biggis.landuse.spark.examples.GeotiffToPyramid \
14 | /path/to/raster.tif \
15 | new_layer_name \
16 | /path/to/catalog-dir
17 | ```
18 |
19 | # GettingStarted Example
20 | Code for this example is located in `src/main/scala/biggis/landuse/spark/examples/GettingStarted.scala` (see the end of this README for how to run it from the command line).
21 |
22 | ``` sh
23 | # based on https://github.com/geotrellis/geotrellis-landsat-tutorial
24 | # download the example data from geotrellis-landsat-tutorial
25 | # into data/geotrellis-landsat-tutorial
26 | wget http://landsat-pds.s3.amazonaws.com/L8/107/035/LC81070352015218LGN00/LC81070352015218LGN00_B3.TIF
27 | wget http://landsat-pds.s3.amazonaws.com/L8/107/035/LC81070352015218LGN00/LC81070352015218LGN00_B4.TIF
28 | wget http://landsat-pds.s3.amazonaws.com/L8/107/035/LC81070352015218LGN00/LC81070352015218LGN00_B5.TIF
29 | wget http://landsat-pds.s3.amazonaws.com/L8/107/035/LC81070352015218LGN00/LC81070352015218LGN00_BQA.TIF
30 | wget http://landsat-pds.s3.amazonaws.com/L8/107/035/LC81070352015218LGN00/LC81070352015218LGN00_MTL.txt
31 | ```
32 |
33 | # Using an IDE
34 | We strongly recommend using an IDE for Scala development,
35 | in particular IntelliJ IDEA, which has better support
36 | for Scala than Eclipse.
37 |
38 | For IDE builds, please select the *Maven Profile* **IDE** before running, to avoid the *provided* scope (which is only needed for cluster builds).
39 |
40 | [biggis-spark]: https://github.com/biggis-project/biggis-spark
41 |
42 | Since Geotrellis uses Apache Spark for processing, we need to set the `spark.master` property first (see the command-line example at the end of this README).
43 | - For local debugging, the easiest option is to set the JVM command-line argument `-Dspark.master=local[*]`.
44 | - Another option for local debugging, which is closer to a cluster setup, is to run Geotrellis in a Docker container as implemented in [biggis-spark]. In this case, use `-Dspark.master=spark://localhost:7077`.
45 | - A third option is to use a real cluster, which can run on the same Docker-based infrastructure from [biggis-spark].
46 |
47 | Geotrellis always works with a "catalog", which is basically a directory either in the local filesystem or in HDFS.
48 | You might want to use `target/geotrellis-catalog` during development. This way, the catalog will be deleted when running `mvn clean` and won't be committed to the git repository.
49 |
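50 | # Running the GettingStarted example from the command line
51 | The following is only a sketch (the catalog path and the local Spark master are just examples, adjust them to your setup).
52 | It assumes the Landsat files were downloaded into `data/geotrellis-landsat-tutorial` as described above.
53 |
54 | ``` sh
55 | # build the fat jar; with the default profile the Spark dependencies use
56 | # scope "provided", so for running with plain `java` you may need the IDE profile
57 | mvn package -P IDE
58 |
59 | # run the example against a local Spark master,
60 | # using target/geotrellis-catalog as the Geotrellis catalog
61 | java -Dspark.master=local[*] \
62 |      -cp target/biggis-landuse-0.0.8-SNAPSHOT.jar \
63 |      biggis.landuse.spark.examples.GettingStarted \
64 |      target/geotrellis-catalog
65 | ```
66 |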
--------------------------------------------------------------------------------
/data/geotrellis-landsat-tutorial/readme.txt:
--------------------------------------------------------------------------------
1 | #! /bin/bash
2 | # example data download, see: https://github.com/geotrellis/geotrellis-landsat-tutorial/blob/master/data/landsat/download-data.sh
3 |
4 | wget http://landsat-pds.s3.amazonaws.com/L8/107/035/LC81070352015218LGN00/LC81070352015218LGN00_B3.TIF
5 | wget http://landsat-pds.s3.amazonaws.com/L8/107/035/LC81070352015218LGN00/LC81070352015218LGN00_B4.TIF
6 | wget http://landsat-pds.s3.amazonaws.com/L8/107/035/LC81070352015218LGN00/LC81070352015218LGN00_B5.TIF
7 | wget http://landsat-pds.s3.amazonaws.com/L8/107/035/LC81070352015218LGN00/LC81070352015218LGN00_BQA.TIF
8 | wget http://landsat-pds.s3.amazonaws.com/L8/107/035/LC81070352015218LGN00/LC81070352015218LGN00_MTL.txt
9 |
10 | # https://s3.amazonaws.com/geotrellis-sample-datasets/landsat/LC80140322014139LGN00.tar.bz
11 | # tar xvfj LC80140322014139LGN00.tar.bz
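12 |
13 | # note: the GettingStarted example (biggis.landuse.spark.examples.GettingStarted) expects
14 | # the downloaded *.TIF files in this directory, i.e. in data/geotrellis-landsat-tutorial/
15 | # relative to the working directory from which the example is started (typically the project root)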
--------------------------------------------------------------------------------
/data/rast.tif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/biggis-project/biggis-landuse/d31ffc26eaab875618fee32bf220e7b870fd36be/data/rast.tif
--------------------------------------------------------------------------------
/pom.xml:
--------------------------------------------------------------------------------
1 |
3 | 4.0.0
4 | biggis
5 | biggis-landuse
6 | 0.0.8-SNAPSHOT
7 | ${project.artifactId}
8 | Land use update detection based on Geotrellis and Spark
9 | 2017
10 |
11 | BigGIS Project
12 | http://biggis-project.eu/
13 |
14 |
15 | Github Issue Tracker
16 | https://github.com/biggis-project/biggis-landuse/issues
17 |
18 | https://github.com/biggis-project/biggis-landuse
19 |
20 |
21 |
22 | My License
23 | http://....
24 | repo
25 |
26 |
27 |
28 |
29 | 1.6
30 | 1.6
31 | UTF-8
32 | 2.11.8
33 | 2.11
34 | 2.0.0
35 | 2.2.0
36 | 0.10.2.0
37 | provided
38 |
39 |
40 |
41 |
42 |
43 | IDE
44 |
45 | compile
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 | maven2-repository.dev.java.net
55 | Java.net repository
56 | http://download.java.net/maven/2
57 |
58 |
59 | osgeo
60 | Open Source Geospatial Foundation Repository
61 | http://download.osgeo.org/webdav/geotools/
62 |
63 |
64 |
65 | true
66 |
67 | opengeo
68 | OpenGeo Maven Repository
69 | http://repo.opengeo.org
70 |
71 |
72 |
73 | true
74 |
75 | repo.locationtech.org
76 | LocationTech Maven Repository
77 | https://repo.locationtech.org/content/repositories/releases/
78 |
79 |
80 |
81 |
82 |
83 |
84 |
89 |
90 |
91 | com.chuusai
92 | shapeless_${scala.compat.version}
93 | 2.3.2
94 |
95 |
96 |
97 |
98 | org.locationtech.geotrellis
99 | geotrellis-spark_${scala.compat.version}
100 | ${geotrellis.version}
101 |
102 |
103 |
104 | org.scala-lang
105 | scala-library
106 | ${scala.version}
107 |
108 |
109 |
110 | org.locationtech.geotrellis
111 | geotrellis-spark-etl_${scala.compat.version}
112 | ${geotrellis.version}
113 |
114 |
115 |
120 |
121 |
126 |
127 |
128 | org.locationtech.geotrellis
129 | geotrellis-vector_${scala.compat.version}
130 | ${geotrellis.version}
131 |
132 |
133 |
134 | org.locationtech.geotrellis
135 | geotrellis-shapefile_${scala.compat.version}
136 | ${geotrellis.version}
137 |
138 |
139 |
140 | org.locationtech.geotrellis
141 | geotrellis-proj4_${scala.compat.version}
142 | ${geotrellis.version}
143 |
144 |
145 |
150 |
151 |
156 |
157 |
162 |
163 |
164 |
165 |
170 |
171 |
172 | io.spray
173 | spray-routing-shapeless2_${scala.compat.version}
174 | 1.3.3
175 |
176 |
177 |
178 | io.spray
179 | spray-can_${scala.compat.version}
180 | 1.3.3
181 |
182 |
183 |
184 | com.typesafe.akka
185 | akka-actor_${scala.compat.version}
186 | 2.4.16
187 |
188 |
189 |
194 |
195 |
200 |
201 |
202 |
203 |
204 | org.apache.spark
205 | spark-core_${scala.compat.version}
206 | ${spark.version}
207 | ${scopes.provided}
208 |
209 |
210 |
211 | org.apache.spark
212 | spark-mllib_${scala.compat.version}
213 | ${spark.version}
214 | ${scopes.provided}
215 |
216 |
217 |
218 |
219 | org.apache.spark
220 | spark-sql_${scala.compat.version}
221 | ${spark.version}
222 | ${scopes.provided}
223 |
224 |
225 |
231 |
232 |
233 |
234 | org.apache.spark
235 | spark-streaming-kafka-0-8_${scala.compat.version}
236 | ${spark.version}
237 | ${scopes.provided}
238 |
239 |
240 |
241 |
242 | org.apache.spark
243 | spark-streaming-kafka-0-10_${scala.compat.version}
244 | ${spark.version}
245 | ${scopes.provided}
246 |
247 |
248 |
249 |
250 |
251 |
256 |
257 |
258 |
259 |
260 | org.json4s
261 | json4s-native_${scala.compat.version}
262 | 3.5.0
263 |
264 |
265 |
266 | org.json4s
267 | json4s-core_${scala.compat.version}
268 | 3.5.0
269 |
270 |
271 |
272 | org.json4s
273 | json4s-ext_${scala.compat.version}
274 | 3.5.0
275 |
276 |
277 |
278 |
279 |
280 | com.vividsolutions
281 | jts
282 | 1.13
283 |
284 |
285 |
286 |
287 |
292 |
293 |
298 |
299 |
304 |
305 |
306 |
307 |
308 |
313 |
314 |
315 |
316 |
317 | junit
318 | junit
319 | 4.11
320 | test
321 |
322 |
323 |
324 | org.specs2
325 | specs2-core_${scala.compat.version}
326 | 3.8.5
327 |
328 |
329 | org.specs2
330 | specs2-junit_${scala.compat.version}
331 | 3.8.5
332 |
333 |
334 |
335 |
336 | src/main/scala
337 | src/test/scala
338 |
339 |
340 |
341 | net.alchim31.maven
342 | scala-maven-plugin
343 | 3.2.0
344 |
345 |
346 |
347 | compile
348 | testCompile
349 |
350 |
351 |
352 | -dependencyfile
353 | ${project.build.directory}/.scala_dependencies
354 |
355 |
356 |
357 |
358 |
359 |
360 |
361 |
362 | org.spire-math
363 | kind-projector_2.11
364 | 0.9.6
365 |
366 |
367 |
368 |
369 |
370 | org.apache.maven.plugins
371 | maven-surefire-plugin
372 | 3.0.0-M1
373 |
374 | false
375 | true
376 |
377 |
378 |
379 | **/*Test.*
380 | **/*Suite.*
381 |
382 |
383 |
384 |
385 | org.apache.maven.plugins
386 | maven-shade-plugin
387 | 2.4
388 |
389 |
390 | package
391 |
392 | shade
393 |
394 |
395 | false
396 |
397 |
398 | *:*
399 |
400 | META-INF/*.SF
401 | META-INF/*.DSA
402 | META-INF/*.RSA
403 |
404 |
405 |
406 |
407 |
408 |
409 | reference.conf
410 |
411 |
412 |
413 |
414 |
415 |
416 |
417 |
418 |
419 |
420 |
--------------------------------------------------------------------------------
/src/main/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | ################################################################################
2 | # Licensed to the Apache Software Foundation (ASF) under one
3 | # or more contributor license agreements. See the NOTICE file
4 | # distributed with this work for additional information
5 | # regarding copyright ownership. The ASF licenses this file
6 | # to you under the Apache License, Version 2.0 (the
7 | # "License"); you may not use this file except in compliance
8 | # with the License. You may obtain a copy of the License at
9 | #
10 | # http://www.apache.org/licenses/LICENSE-2.0
11 | #
12 | # Unless required by applicable law or agreed to in writing, software
13 | # distributed under the License is distributed on an "AS IS" BASIS,
14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | # See the License for the specific language governing permissions and
16 | # limitations under the License.
17 | ################################################################################
18 |
19 | # log INFO messages and above to the console
20 | log4j.rootLogger=INFO, console
21 |
22 | log4j.appender.console=org.apache.log4j.ConsoleAppender
23 | log4j.appender.console.layout=org.apache.log4j.PatternLayout
24 | log4j.appender.console.layout.ConversionPattern=%d{HH:mm:ss} %-5p %-60c %x - %m%n
25 |
26 | # specific classes should only log ERROR messages
27 | log4j.logger.org.apache.flink.runtime.taskmanager.Task=ERROR, console
28 | log4j.additivity.org.apache.flink.runtime.taskmanager.Task$=false
29 |
30 | log4j.logger.org.apache.kafka.common.metrics.Metrics=ERROR, console
31 | log4j.additivity.org.apache.kafka.common.metrics.Metrics=false
32 |
33 |
--------------------------------------------------------------------------------
/src/main/scala/biggis/landuse/spark/examples/ConvolveLayerExample.scala:
--------------------------------------------------------------------------------
1 | package biggis.landuse.spark.examples
2 |
3 | import com.typesafe.scalalogging.LazyLogging
4 | import geotrellis.raster.Tile
5 | import geotrellis.raster.mapalgebra.focal.Kernel
6 | import geotrellis.raster.withTileMethods
7 | import geotrellis.spark.LayerId
8 | import geotrellis.spark.Metadata
9 | import geotrellis.spark.SpatialKey
10 | import geotrellis.spark.TileLayerMetadata
11 | import geotrellis.spark.io.SpatialKeyFormat
12 | import geotrellis.spark.io.hadoop.HadoopAttributeStore
13 | import geotrellis.spark.io.hadoop.HadoopLayerReader
14 | import geotrellis.spark.io.spatialKeyAvroFormat
15 | import geotrellis.spark.io.tileLayerMetadataFormat
16 | import geotrellis.spark.io.tileUnionCodec
17 | import org.apache.hadoop.fs.Path
18 | import org.apache.spark.SparkContext
19 | import org.apache.spark.SparkException
20 | import org.apache.spark.rdd.RDD
21 |
22 | /**
23 | * Created by Viliam Simko (viliam.simko@gmail.com)
24 | */
25 | object ConvolveLayerExample extends LazyLogging {
26 |
27 | def main(args: Array[String]): Unit = {
28 | try {
29 | val Array(layerName, circleKernelRadius, catalogPath) = args
30 | implicit val sc = Utils.initSparkAutoContext
31 | ConvolveLayerExample(layerName, circleKernelRadius.toInt)(catalogPath, sc)
32 | sc.stop()
33 | } catch {
34 | case _: MatchError => println("Run as: layerName circleKernelRadius /path/to/catalog")
35 | case e: SparkException => logger error e.getMessage + ". Try to set JVM parameter: -Dspark.master=local[*]"
36 | }
37 | }
38 |
39 | def apply(layerName: String, circleKernelRadius: Int)(implicit catalogPath: String, sc: SparkContext): Unit = {
40 | logger info s"Running convolution of layer '$layerName' in catalog '$catalogPath'"
41 | logger info s"Using circular kernel of radius $circleKernelRadius"
42 |
43 | //implicit val sc = Utils.initSparkContext
44 |
45 | // Create the attributes store that will tell us information about our catalog.
46 | val catalogPathHdfs = new Path(catalogPath)
47 | val attributeStore = HadoopAttributeStore(catalogPathHdfs)
48 | val layerReader = HadoopLayerReader(attributeStore)
49 |
50 | val zoomsOfLayer = attributeStore.layerIds filter (_.name == layerName)
51 | if (zoomsOfLayer.isEmpty) {
52 | logger error s"Layer '$layerName' not found in the catalog '$catalogPath'"
53 | return
54 | }
55 |
56 | val srcLayerId = zoomsOfLayer.sortBy(_.zoom).last
57 | logger debug s"The following layerId will be used: $srcLayerId"
58 |
59 | val queryResult: RDD[(SpatialKey, Tile)] with Metadata[TileLayerMetadata[SpatialKey]] =
60 | layerReader.read[SpatialKey, Tile, TileLayerMetadata[SpatialKey]](srcLayerId)
61 |
62 | val focalKernel = Kernel.circle(circleKernelRadius, queryResult.metadata.cellwidth, circleKernelRadius)
63 | logger info s"extent of focalKernel is ${focalKernel.extent}"
64 |
65 | // here, the convolution takes place
66 | // see also https://media.readthedocs.org/pdf/geotrellis/stable/geotrellis.pdf
67 | val convolvedLayerRdd = queryResult.withContext { rdd =>
68 | rdd
69 | .bufferTiles(focalKernel.extent)
70 | .mapValues { v =>
71 | v.tile.focalMean(focalKernel, Some(v.targetArea))
72 | }
73 | }
74 |
75 | // this will be the new convoluted layer
76 | val convolvedLayerId = LayerId(srcLayerId.name + "_conv", srcLayerId.zoom)
77 |
78 | biggis.landuse.api.deleteLayerFromCatalog(convolvedLayerId)
79 | biggis.landuse.api.writeRddToLayer(convolvedLayerRdd, convolvedLayerId)
80 |
81 | //sc.stop()
82 | logger info "done."
83 | }
84 | }
85 |
--------------------------------------------------------------------------------
/src/main/scala/biggis/landuse/spark/examples/DeleteLayer.scala:
--------------------------------------------------------------------------------
1 | package biggis.landuse.spark.examples
2 |
3 | import com.typesafe.scalalogging.LazyLogging
4 | import org.apache.spark.{SparkContext, SparkException}
5 |
6 | object DeleteLayer extends App with LazyLogging {
7 | try {
8 | //val Array(catalogPath, layerName) = args
9 | val Array(layerName, catalogPath) = args
10 | implicit val sc = Utils.initSparkAutoContext
11 | DeleteLayer(layerName)(catalogPath, sc)
12 | sc.stop()
13 | } catch {
14 | //case _: MatchError => println("Run as: /path/to/catalog layerName")
15 | case _: MatchError => println("Run as: layerName /path/to/catalog")
16 | case e: SparkException => logger error e.getMessage + ". Try to set JVM parameter: -Dspark.master=local[*]"
17 | }
18 |
19 | def apply(layerName: String)(implicit catalogPath: String, sc: SparkContext): Unit = {
20 | logger info s"Deleting layer $layerName including all zoom levels in catalog $catalogPath ..."
21 |
22 | biggis.landuse.api.deleteLayerFromCatalog(layerName)(catalogPath, sc)
23 |
24 | logger info "done"
25 | }
26 |
27 | }
--------------------------------------------------------------------------------
/src/main/scala/biggis/landuse/spark/examples/DummyGeotrellisExample.scala:
--------------------------------------------------------------------------------
1 | package biggis.landuse.spark.examples
2 |
3 | import geotrellis.spark.LayerId
4 | import geotrellis.spark.io.hadoop.HadoopLayerReader
5 | import geotrellis.spark.io.hadoop.HadoopLayerWriter
6 | import org.apache.hadoop.fs.Path
7 | import org.apache.spark.SparkConf
8 | import org.apache.spark.SparkContext
9 |
10 | object DummyGeotrellisExample extends App {
11 |
12 | val catalogPath = "testCatalog" // CONFIG
13 | val layerName = "testLayer" // INPUT
14 |
15 | // implicit val sc = Utils.initSparkAutoContext
16 |
17 | val sparkConf = new SparkConf()
18 | sparkConf.setAppName("Geotrellis Example")
19 | sparkConf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
20 | sparkConf.set("spark.kryo.registrator", "geotrellis.spark.io.kryo.KryoRegistrator")
21 | sparkConf.setMaster("local[*]")
22 | implicit val sc = new SparkContext(sparkConf)
23 |
24 | val layerId = LayerId(layerName, 0)
25 |
26 | val writer = HadoopLayerWriter(new Path(catalogPath))
27 |
28 | // write some metadata to layer
29 | import spray.json.DefaultJsonProtocol._
30 | val attributeStore = writer.attributeStore
31 | attributeStore.write(layerId, "metadata", "some content")
32 |
33 | // check that we created the layer
34 | val isThere = writer.attributeStore.layerExists(layerId) // OUTPUT
35 |
36 |
37 | sc.stop()
38 |
39 | if (isThere)
40 | println("the layer is there")
41 | else
42 | println("not there")
43 | }
44 |
--------------------------------------------------------------------------------
/src/main/scala/biggis/landuse/spark/examples/GeotiffTilingExample.scala:
--------------------------------------------------------------------------------
1 | package biggis.landuse.spark.examples
2 |
3 | import com.typesafe.scalalogging.LazyLogging
4 | import geotrellis.proj4.WebMercator
5 | import geotrellis.raster.withTileMethods
6 | import geotrellis.spark.LayerId
7 | import geotrellis.spark.TileLayerMetadata
8 | import geotrellis.spark.TileLayerRDD
9 | import geotrellis.spark.io.hadoop.HadoopSparkContextMethodsWrapper
10 | import geotrellis.spark.tiling.FloatingLayoutScheme
11 | import geotrellis.spark.tiling.ZoomedLayoutScheme
12 | import geotrellis.spark.withProjectedExtentTilerKeyMethods
13 | import geotrellis.spark.withTileRDDReprojectMethods
14 | import geotrellis.spark.withTilerMethods
15 | import org.apache.spark.SparkContext
16 | import org.apache.spark.SparkException
17 |
18 |
19 | /**
20 | * Within this example:
21 | * - Geotiff raster file is opened as a Spark RDD
22 | * - the raster is reprojected to WebMercator
23 | * - the raster is tiled into a grid
24 | * - all tiles are stored as a layer in geotrellis catalog
25 | * - histogram data are stored as an attribute in the catalog (into zoom level 0)
26 | */
27 | object GeotiffTilingExample extends LazyLogging {
28 |
29 | /**
30 | * Run as: /path/to/raster.tif some_layer /path/to/some/dir
31 | */
32 | def main(args: Array[String]): Unit = {
33 | try {
34 | val Array(inputPath, layerName, catalogPath) = args
35 | implicit val sc = Utils.initSparkAutoContext
36 | GeotiffTilingExample(inputPath, layerName)(catalogPath, sc)
37 | sc.stop()
38 | } catch {
39 | case _: MatchError => println("Run as: inputPath layerName /path/to/catalog")
40 | case e: SparkException => logger error e.getMessage + ". Try to set JVM parameter: -Dspark.master=local[*]"
41 | }
42 | }
43 |
44 | def apply(inputPath: String, layerName: String)(implicit catalogPath: String, sc: SparkContext) {
45 |
46 | logger info s"Loading geotiff '$inputPath' into '$layerName' in catalog '$catalogPath' ... "
47 |
48 | logger debug "Opening geotiff as RDD"
49 | val inputRdd = sc.hadoopGeoTiffRDD(inputPath)
50 | val (_, myRasterMetaData) = TileLayerMetadata.fromRDD(inputRdd, FloatingLayoutScheme(Utils.TILE_SIZE))
51 |
52 | val tiled = inputRdd
53 | .tileToLayout(myRasterMetaData.cellType, myRasterMetaData.layout, Utils.RESAMPLING_METHOD)
54 | .repartition(Utils.RDD_PARTITIONS)
55 |
56 | val layoutScheme = ZoomedLayoutScheme(WebMercator, tileSize = Utils.TILE_SIZE)
57 |
58 | logger debug "Reprojecting to WebMercator"
59 | val (zoom, reprojected) =
60 | TileLayerRDD(tiled, myRasterMetaData).reproject(WebMercator, layoutScheme, Utils.RESAMPLING_METHOD)
61 |
62 | biggis.landuse.api.writeRddToLayer(reprojected, LayerId(layerName, zoom))
63 |
64 | //sc.stop()
65 | logger info "done."
66 | }
67 | }
68 |
--------------------------------------------------------------------------------
/src/main/scala/biggis/landuse/spark/examples/GeotiffToPyramid.scala:
--------------------------------------------------------------------------------
1 | package biggis.landuse.spark.examples
2 |
3 | import com.typesafe.scalalogging.LazyLogging
4 | import geotrellis.proj4.WebMercator
5 | import geotrellis.raster.resample.Bilinear
6 | import geotrellis.raster.withTileMethods
7 | import geotrellis.spark.io.hadoop.{HadoopAttributeStore, HadoopLayerDeleter, HadoopLayerWriter, HadoopSparkContextMethodsWrapper}
8 | import geotrellis.spark.io.index.ZCurveKeyIndexMethod
9 | import geotrellis.spark.io.index.ZCurveKeyIndexMethod.spatialKeyIndexMethod
10 | import geotrellis.spark.io.{SpatialKeyFormat, spatialKeyAvroFormat, tileLayerMetadataFormat, tileUnionCodec}
11 | import geotrellis.spark.pyramid.Pyramid
12 | import geotrellis.spark.tiling.{FloatingLayoutScheme, ZoomedLayoutScheme}
13 | import geotrellis.spark.{LayerId, TileLayerMetadata, TileLayerRDD, withProjectedExtentTilerKeyMethods, withTileRDDReprojectMethods, withTilerMethods}
14 | import org.apache.hadoop.fs.Path
15 | import org.apache.spark.SparkException
16 |
17 | /**
18 | * This code is now redundant because we can use:
19 | * GeotiffTilingExample + LayerToPyramid
20 | */
21 | object GeotiffToPyramid extends LazyLogging {
22 |
23 | def main(args: Array[String]): Unit = {
24 | try {
25 | val Array(inputPath, layerName, catalogPath) = args
26 | GeotiffToPyramid(inputPath, layerName)(catalogPath)
27 | } catch {
28 | case _: MatchError => println("Run as: inputPath layerName /path/to/catalog")
29 | case e: SparkException => logger error e.getMessage + ". Try to set JVM parameter: -Dspark.master=local[*]"
30 | }
31 | }
32 |
33 | def apply(inputPath: String, layerName: String)(implicit catalogPath: String) {
34 |
35 | logger debug s"Building the pyramid '$layerName' from geotiff '$inputPath' ... "
36 |
37 | implicit val sc = Utils.initSparkContext
38 |
39 | val inputRdd = sc.hadoopGeoTiffRDD(inputPath)
40 | val (_, myRasterMetaData) = TileLayerMetadata.fromRDD(inputRdd, FloatingLayoutScheme(Utils.TILE_SIZE))
41 |
42 | val tiled = inputRdd
43 | .tileToLayout(myRasterMetaData.cellType, myRasterMetaData.layout, Bilinear)
44 | .repartition(Utils.RDD_PARTITIONS)
45 |
46 | val layoutScheme = ZoomedLayoutScheme(WebMercator, tileSize = Utils.TILE_SIZE)
47 | val (zoom, reprojected) = TileLayerRDD(tiled, myRasterMetaData)
48 | .reproject(WebMercator, layoutScheme, Utils.RESAMPLING_METHOD)
49 |
50 | // Create the attributes store that will tell us information about our catalog.
51 | val catalogPathHdfs = new Path(catalogPath)
52 | val attributeStore = HadoopAttributeStore( catalogPathHdfs )
53 |
54 | // Create the writer that we will use to store the tiles in the local catalog.
55 | val writer = HadoopLayerWriter(catalogPathHdfs, attributeStore)
56 |
57 | // Pyramiding up the zoom levels, write our tiles out to the local file system.
58 | Pyramid.upLevels(reprojected, layoutScheme, zoom) { (rdd, z) =>
59 | val layerId = LayerId(layerName, z)
60 |
61 | // If the layer exists already, delete it before writing
62 | if (attributeStore.layerExists(layerId)) {
63 | logger debug s"Layer $layerId already exists, deleting ..."
64 | HadoopLayerDeleter(attributeStore).delete(layerId)
65 | }
66 |
67 | logger debug s"Writing $layerId tiles using space filling curve"
68 | writer.write(layerId, rdd, ZCurveKeyIndexMethod)
69 | }
70 |
71 | Utils.writeHistogram(attributeStore, layerName, reprojected.histogram)
72 |
73 | sc.stop()
74 | logger debug s"Pyramid '$layerName' is ready in catalog '$catalogPath'"
75 | }
76 |
77 | }
--------------------------------------------------------------------------------
/src/main/scala/biggis/landuse/spark/examples/GettingStarted.scala:
--------------------------------------------------------------------------------
1 | package biggis.landuse.spark.examples
2 |
3 | import com.typesafe.scalalogging.LazyLogging
4 | import org.apache.spark.{SparkContext, SparkException}
5 |
6 | /**
7 | * Created by ak on 03.04.2017.
8 | *
9 | * see: https://github.com/geotrellis/geotrellis-landsat-tutorial
10 | *
11 | * biggis-landuse/
12 | * data/geotrellis-landsat-tutorial
13 | */
14 | object GettingStarted extends LazyLogging {
15 | def main(args: Array[String]): Unit = {
16 | try {
17 | val Array(catalogPath) = args
18 | //implicit val catalogPath = "target/geotrellis-catalog/"
19 | implicit val sc = Utils.initSparkAutoContext
20 | GettingStarted()(catalogPath, sc)
21 | sc.stop()
22 | }
23 | catch {
24 | case _: MatchError => println("Run as: /path/to/catalog")
25 | case e: SparkException => logger error e.getMessage + ". Try to set JVM parameter: -Dspark.master=local[*]"
26 | }
27 | }
28 |
29 | def apply()(implicit catalogPath: String, sc: SparkContext): Unit = {
30 | /**
31 | * see: https://github.com/geotrellis/geotrellis-landsat-tutorial
32 | *
33 | * download data, see: data/geotrellis-landsat-tutorial/readme.txt
34 | *
35 | * see: https://en.wikipedia.org/wiki/Landsat_8
36 | * Band 3 - Green 0.525 – 0.600 µm 30 m 1826 W/(m²µm)
37 | * Band 4 - Red 0.630 – 0.680 µm 30 m 1574 W/(m²µm)
38 | * Band 5 - Near Infrared 0.845 – 0.885 µm 30 m 955 W/(m²µm)
39 | *
40 | * To Debug in IDE, please set VM options to
41 | * -Dspark.master=local[*]
42 | * and program arguments to
43 | * target/geotrellis-catalog
44 | */
45 | val path = "data/geotrellis-landsat-tutorial/"
46 |
47 | def bandPath(b: String) = s"LC81070352015218LGN00_${b}.TIF"
48 | val (layer_L8_B3_green, layer_L8_B4_red, layer_L8_B5_nir, layer_L8_BQA_clouds) =
49 | ("layer_L8_B3_green", "layer_L8_B4_red", "layer_L8_B5_nir" ,"layer_L8_BQA_clouds")
50 | val (file_L8_B3_green, file_L8_B4_red, file_L8_B5_nir, file_L8_BQA_clouds) =
51 | ( bandPath("B3"), bandPath("B4"), bandPath("B5"), bandPath("BQA"))
52 |
53 | // see: geotrellis-landsat-tutorial/src/main/scala/tutorial/IngestImage.scala
54 | // https://github.com/geotrellis/geotrellis-landsat-tutorial/blob/master/src/main/scala/tutorial/IngestImage.scala
55 | // replaced by GeotiffTilingExample
56 | //GeotiffTilingExample( path + file_L8_B3_green, layer_L8_B3_green)
57 | GeotiffTilingExample( path + file_L8_B4_red, layer_L8_B4_red)
58 | GeotiffTilingExample( path + file_L8_B5_nir, layer_L8_B5_nir)
59 | GeotiffTilingExample( path + file_L8_BQA_clouds, layer_L8_BQA_clouds)
60 |
61 | val layer_NDVI = "layer_NDVI"
62 | //NDVILayerExample( layer_L8_B5_nir, layer_L8_B4_red, layer_NDVI)
63 | NDVILayerWithCloudMaskExample( layer_L8_B5_nir, layer_L8_B4_red, layer_L8_BQA_clouds, layer_NDVI)
64 |
65 | /*
66 | * Attention: LayerToGeotiff export notes (stitching disabled by default)
67 | * create directory "data/geotrellis-landsat-tutorial/test" first for tiles export
68 | * */
69 | // Save NDVI To File
70 | //LayerToGeotiff(layer_NDVI, path + "test/" + bandPath("NDVI")) // ToDo: change tiling, too many tiles if disabled stitching (default)
71 | //LayerToGeotiff(layer_NDVI, path + bandPath("NDVI_stitched"), useStitching = true) // ToDo: fix stitching, file too big for memory
72 |
73 | // Serve Layer via Leaflet static/GettingStarted.html
74 | LayerToPyramid(catalogPath, layer_NDVI)
75 | ServeLayerAsMap(catalogPath, layer_NDVI, NDVILayerExample.colorMap)
76 | }
77 | }
78 |
--------------------------------------------------------------------------------
/src/main/scala/biggis/landuse/spark/examples/LayerToGeotiff.scala:
--------------------------------------------------------------------------------
1 | package biggis.landuse.spark.examples
2 |
3 | import com.typesafe.scalalogging.LazyLogging
4 | import geotrellis.raster.Tile
5 | import geotrellis.raster._
6 | import geotrellis.raster.io.geotiff._
7 | import geotrellis.raster.withTileMethods
8 | import geotrellis.raster.{io => _}
9 | import geotrellis.spark.Metadata
10 | import geotrellis.spark.SpatialKey
11 | import geotrellis.spark.TileLayerMetadata
12 | import geotrellis.spark._
13 | import geotrellis.spark.io.SpatialKeyFormat
14 | import geotrellis.spark.io.hadoop._
15 | import geotrellis.spark.io.hadoop.HadoopAttributeStore
16 | import geotrellis.spark.io.hadoop.HadoopLayerReader
17 | import geotrellis.spark.io.spatialKeyAvroFormat
18 | import geotrellis.spark.io.tileLayerMetadataFormat
19 | import geotrellis.spark.io.tileUnionCodec
20 | import geotrellis.spark.{io => _}
21 | import geotrellis.util._
22 | import geotrellis.vector.Extent
23 | import org.apache.hadoop.fs.{FileSystem, Path}
24 | import org.apache.hadoop.conf.Configuration
25 | import org.apache.spark.SparkContext
26 | import org.apache.spark.SparkException
27 | import org.apache.spark.rdd.RDD
28 |
29 | // https://github.com/geotrellis/geotrellis/blob/master/docs/spark/spark-examples.md
30 |
31 | object LayerToGeotiff extends LazyLogging {
32 | def main(args: Array[String]): Unit = {
33 | try {
34 | implicit val sc = Utils.initSparkAutoContext
35 | val Array(layerName, outputPath, catalogPath) = args
36 | LayerToGeotiff(layerName, outputPath)(catalogPath, sc)
37 | sc.stop()
38 | logger debug "Spark context stopped"
39 | } catch {
40 | case _: MatchError => println("Run as: layerName outputPath /path/to/catalog")
41 | case e: SparkException => logger error e.getMessage + ". Try to set JVM parameter: -Dspark.master=local[*]"
42 | }
43 | }
44 |
45 | def apply(layerName: String, outputPath: String, useStitching: Boolean = false)(implicit catalogPath: String, sc: SparkContext): Unit = {
46 | logger info s"Writing layer '$layerName' in catalog '$catalogPath' to '$outputPath'"
47 |
48 | val catalogPathHdfs = new Path(catalogPath)
49 | val attributeStore = HadoopAttributeStore(catalogPathHdfs)
50 | val layerReader = HadoopLayerReader(attributeStore)
51 |
52 | val zoomsOfLayer = attributeStore.layerIds filter (_.name == layerName)
53 | if (zoomsOfLayer.isEmpty) {
54 | logger info s"Layer '$layerName' not found in the catalog '$catalogPath'"
55 | return
56 | }
57 |
58 | val srcLayerId = zoomsOfLayer.sortBy(_.zoom).last
59 | logger debug s"The following layerId will be used: $srcLayerId"
60 |
61 | val inputRdd: RDD[(SpatialKey, Tile)] with Metadata[TileLayerMetadata[SpatialKey]] = layerReader
62 | .read[SpatialKey, Tile, TileLayerMetadata[SpatialKey]](srcLayerId)
63 |
64 | val metadata = inputRdd.metadata
65 |
66 | val crs = metadata.crs
67 |
68 | // Hadoop Config is accessible from SparkContext
69 | implicit val conf: Configuration = sc.hadoopConfiguration
70 | val serConf = new SerializableConfiguration(conf)
71 | //implicit val fs: FileSystem = FileSystem.get(conf);
72 |
73 | // ToDo: replace both "stitch" and "256x256 tiles" by "intelligent" tile size (as many as necessary, as few as possible)
74 | if (useStitching) { //Attn: stitched version may exceed max Memory, has georeference issues with WebMercator
75 | // one single GeoTiff, but attention
76 | val tiled: RDD[(SpatialKey, Tile)] = inputRdd.distinct()
77 |
78 | val tile: Tile = tiled.stitch()
79 |
80 | //val datum = crs.proj4jCrs.getDatum()
81 | //val epsg = crs.epsgCode.get
82 | //val param = crs.proj4jCrs.getParameters()
83 | //val proj = crs.proj4jCrs.getProjection()
84 | //val falseEasting = proj.getFalseEasting()
85 | if (crs.epsgCode.get == 3857) { //"WebMercator"
86 | val raster: Raster[Tile] = tile.reproject(metadata.extent, metadata.crs, metadata.crs)
87 | GeoTiff(raster, crs).write(outputPath)
88 | //val tileextent: Extent = metadata.extent
89 | //GeoTiff(tile, tileextent, crs).write(outputPath)
90 | } else {
91 | val layoutextent: Extent = metadata.layoutExtent
92 | GeoTiff(tile, layoutextent, crs).write(outputPath) //for UTM32
93 | }
94 | } else {
95 | // many GeoTiff tiles
96 | val outputRdd: RDD[(SpatialKey, Tile)] = inputRdd
97 | //.tileToLayout(metadata.cellType, metadata.layout, Utils.RESAMPLING_METHOD)
98 | //.repartition(Utils.RDD_PARTITIONS)
99 |
100 | val useSerializedHadoopConfig = true
101 | if(useSerializedHadoopConfig){
102 | // ToDo: test Spark Cluster version
103 | outputRdd.foreachPartition { partition =>
104 | partition.foreach { tuple =>
105 | val (key, tile) = tuple
106 | val (col, row) = (key.col, key.row)
107 | val tileextent: Extent = metadata.layout.mapTransform(key)
108 | val filename = new Path(outputPath + "_" + col + "_" + row + ".tif")
109 | logger info s" writing: '${filename.toString}'"
110 | GeoTiff(tile, tileextent, crs)
111 | .write(filename, serConf.value)
112 | }
113 | }
114 | } else {
115 | // only for local debugging - do not use in cloud // ToDo: delete after testing
116 | outputRdd.foreach(mbtile => {
117 | val (key, tile) = mbtile
118 | val (col, row) = (key.col, key.row)
119 | val tileextent: Extent = metadata.layout.mapTransform(key)
120 | //val filename = new Path(outputPath + "_" + col + "_" + row + ".tif")
121 | //logger info s" writing: '${filename.toString}'"
122 | GeoTiff(tile, tileextent, crs)
123 | //.write(filename.toString) //.write(filename, serConf.value)
124 | .write(outputPath + "_" + col + "_" + row + ".tif")
125 | }
126 | )
127 | }
128 | }
129 |
130 | //sc.stop()
131 | //logger debug "Spark context stopped"
132 |
133 | logger info "done."
134 | }
135 | }
136 |
--------------------------------------------------------------------------------
/src/main/scala/biggis/landuse/spark/examples/LayerToPyramid.scala:
--------------------------------------------------------------------------------
1 | package biggis.landuse.spark.examples
2 |
3 | import com.typesafe.scalalogging.LazyLogging
4 | import geotrellis.proj4.WebMercator
5 | import geotrellis.raster.Tile
6 | import geotrellis.spark.io.hadoop.{HadoopAttributeStore, HadoopLayerDeleter, HadoopLayerReader, HadoopLayerWriter}
7 | import geotrellis.spark.io.index.ZCurveKeyIndexMethod
8 | import geotrellis.spark.io.{SpatialKeyFormat, spatialKeyAvroFormat, tileLayerMetadataFormat, tileUnionCodec}
9 | import geotrellis.spark.pyramid.Pyramid
10 | import geotrellis.spark.tiling.{FloatingLayoutScheme, ZoomedLayoutScheme}
11 | import geotrellis.spark.{LayerId, Metadata, SpatialKey, TileLayerMetadata}
12 | import org.apache.hadoop.fs.Path
13 | import org.apache.spark.{SparkContext, SparkException}
14 | import org.apache.spark.rdd.RDD
15 |
16 | object LayerToPyramid extends LazyLogging {
17 |
18 | def main(args: Array[String]): Unit = {
19 | try {
20 | val Array(catalogPath, layerName) = args
21 | implicit val sc = Utils.initSparkAutoContext
22 | LayerToPyramid(catalogPath, layerName)
23 | sc.stop()
24 | } catch {
25 | case _: MatchError => println("Run as: /path/to/catalog layerName")
26 | case e: SparkException => logger error e.getMessage + ". Try to set JVM parameter: -Dspark.master=local[*]"
27 | }
28 | }
29 |
30 | def apply(catalogPath: String, layerName: String)(implicit sc: SparkContext): Unit = {
31 | logger debug s"Building the pyramid from '$layerName' in catalog $catalogPath ..."
32 |
33 | //implicit val sc = Utils.initSparkContext
34 |
35 | // Create the attributes store that will tell us information about our catalog.
36 | val catalogPathHdfs = new Path(catalogPath)
37 | val attributeStore = HadoopAttributeStore(catalogPathHdfs)
38 | val layerReader = HadoopLayerReader(attributeStore)
39 |
40 | val zoomsOfLayer = attributeStore.layerIds filter (_.name == layerName)
41 | if (zoomsOfLayer.isEmpty) {
42 | logger info s"Layer '$layerName' not found in the catalog '$catalogPath'"
43 | return
44 | }
45 |
46 | val srcLayerId = zoomsOfLayer.sortBy(_.zoom).last
47 | logger debug s"The following layerId will be used: $srcLayerId"
48 |
49 | val inputRdd: RDD[(SpatialKey, Tile)] with Metadata[TileLayerMetadata[SpatialKey]] = layerReader
50 | .read[SpatialKey, Tile, TileLayerMetadata[SpatialKey]](srcLayerId)
51 |
52 | // TODO: figure out how to extract the layoutScheme from the inputRdd
53 | val layoutScheme = ZoomedLayoutScheme(WebMercator, tileSize = Utils.TILE_SIZE)
54 |
55 | // Create the writer that we will use to store the tiles in the local catalog.
56 | val writer = HadoopLayerWriter(catalogPathHdfs, attributeStore)
57 |
58 | // Pyramiding up the zoom levels, write our tiles out to the local file system.
59 | Pyramid.upLevels(inputRdd, layoutScheme, srcLayerId.zoom) { (rdd, z) =>
60 | val layerId = LayerId(layerName, z)
61 | if (!attributeStore.layerExists(layerId)) {
62 | logger info s"Writing $layerId tiles using space filling curve"
63 | writer.write(layerId, rdd, ZCurveKeyIndexMethod)
64 | }
65 | }
66 |
67 | // writing the histogram is only needed if we create a new layer
68 | // Utils.writeHistogram(attributeStore, layerName + "_p", inputRdd.histogram)
69 |
70 | //sc.stop()
71 | //logger debug "Spark context stopped"
72 | logger debug s"Pyramid '$layerName' is ready in catalog '$catalogPath'"
73 | }
74 |
75 | }
--------------------------------------------------------------------------------
/src/main/scala/biggis/landuse/spark/examples/LayerUpdaterExample.scala:
--------------------------------------------------------------------------------
1 | package biggis.landuse.spark.examples
2 |
3 | import com.typesafe.scalalogging.LazyLogging
4 | import org.apache.spark.{SparkContext, SparkException}
5 | import biggis.landuse.api._
6 | import geotrellis.spark.LayerId
7 |
8 | object LayerUpdaterExample extends LazyLogging { //extends App with LazyLogging
9 | def main(args: Array[String]): Unit = {
10 | try {
11 | val Array(layerNameToUpdate, layerNameNew, catalogPath) = args
12 | implicit val sc : SparkContext = Utils.initSparkAutoContext // only for debugging - needs rework
13 | LayerUpdaterExample(layerNameToUpdate, layerNameNew)(catalogPath,sc)
14 | sc.stop()
15 | logger debug "Spark context stopped"
16 | } catch {
17 | case _: MatchError => println("Run as: layerNameToUpdate layerNameNew /path/to/catalog")
18 | case e: SparkException => logger error e.getMessage + ". Try to set JVM parameter: -Dspark.master=local[*]"
19 | }
20 | }
21 |
22 | def apply(layerNameToUpdate: String, layerNameNew: String)(implicit catalogPath: String, sc: SparkContext): Unit = {
23 |
24 | logger info s"Updating '$layerNameToUpdate' from '$layerNameNew' inside '$catalogPath'"
25 |
26 | /*
27 | //val updater = HadoopLayerUpdater(hdfsPath)
28 | val writer = HadoopLayerWriter(hdfsPath)
29 | val reader = HadoopLayerReader(hdfsPath)
30 |
31 | // Pyramiding up the zoom levels, write our tiles out to the local file system.
32 | Pyramid.upLevels(reprojected, layoutScheme, zoom, CubicConvolution) { (rdd, zoomlevel) =>
33 | val layerId = LayerId("layer_sat", zoomlevel)
34 | val existing = reader.read[SpatialKey, Tile, TileLayerMetadata[SpatialKey]](layerId)
35 |
36 | // If the layer exists already, update it
37 | if(attributeStore.layerExists(layerId)) {
38 | //updater.update(layerId, existing.merge(rdd)) //deprecated
39 | writer.overwrite(layerId, existing.merge(rdd)) //better use new writer.overwrite
40 | } else {
41 | writer.write(layerId, rdd, ZCurveKeyIndexMethod)
42 | }
43 | */
44 |
45 | // ToDo: Delete after testing - Only for testing create layerToUpdate from "layer_sat1" for testing
46 | /*
47 | val tempLayerId = getMaxZoomLevel("layer_sat1").get
48 | val rddTemp : SpatialMultibandRDD = readRddFromLayer(getMaxZoomLevel("layer_sat1").get)
49 | writeRddToLayer(rddTemp,LayerId(layerNameToUpdate,tempLayerId.zoom))
50 | */
51 |
52 | // Update layerNameToUpdate with layerNameNew
53 | mergeRddIntoLayer(readRddFromLayer(getMaxZoomLevel(layerNameNew).get) : SpatialMultibandRDD,getMaxZoomLevel(layerNameToUpdate).get)
54 | }
55 |
56 | }
57 |
--------------------------------------------------------------------------------
/src/main/scala/biggis/landuse/spark/examples/ManyLayersToMultibandLayer.scala:
--------------------------------------------------------------------------------
1 | package biggis.landuse.spark.examples
2 |
3 | import com.typesafe.scalalogging.LazyLogging
4 | import geotrellis.raster.{ArrayMultibandTile, DoubleConstantNoDataCellType, MultibandTile, Tile}
5 | import geotrellis.spark.io.hadoop.{HadoopAttributeStore, HadoopLayerDeleter, HadoopLayerReader, HadoopLayerWriter}
6 | import geotrellis.spark.io.index.ZCurveKeyIndexMethod
7 | import geotrellis.spark.io.index.ZCurveKeyIndexMethod.spatialKeyIndexMethod
8 | import geotrellis.spark.io.{LayerHeader, SpatialKeyFormat, spatialKeyAvroFormat, tileLayerMetadataFormat, tileUnionCodec}
9 | import geotrellis.spark.{LayerId, Metadata, SpatialKey, TileLayerMetadata}
10 | import org.apache.hadoop.fs.Path
11 | import org.apache.spark.{SparkContext, SparkException}
12 | import org.apache.spark.rdd.RDD
13 | import biggis.landuse.api
14 | import biggis.landuse.api.SpatialMultibandRDD
15 | import org.apache.spark.sql.catalog.Catalog
16 |
17 | /**
18 | * Created by vlx on 1/19/17.
19 | */
20 | object ManyLayersToMultibandLayer extends LazyLogging { //extends App with LazyLogging
21 | def main(args: Array[String]): Unit = {
22 | try {
23 | val (layerNameArray,Array(layerStackNameOut, catalogPath)) = (args.take(args.length - 2),args.drop(args.length - 2))
24 | if(args.length == 4){
25 | val Array(layerName1, layerName2, layerStackNameOut, catalogPath) = args
26 | implicit val sc : SparkContext = Utils.initSparkAutoContext // only for debugging - needs rework
27 | ManyLayersToMultibandLayer(layerName1, layerName2, layerStackNameOut)(catalogPath, sc)
28 | sc.stop()
29 | } else if(args.length > 4){
30 | //val layerNameArray = args.take(1 + args.size - 2)
31 | //val Array(layerNameOut, catalogPath) = args.drop(1 + args.size - 2)
32 | implicit val sc : SparkContext = Utils.initSparkAutoContext // only for debugging - needs rework
33 | ManyLayersToMultibandLayer( layerNameArray, layerStackNameOut)(catalogPath, sc)
34 | sc.stop()
35 | }
36 | logger debug "Spark context stopped"
37 | } catch {
38 | case _: MatchError => println("Run as: inputLayerName1 inputLayerName2 [inputLayerName3 ...] layerStackNameOut /path/to/catalog")
39 | case e: SparkException => logger error e.getMessage + ". Try to set JVM parameter: -Dspark.master=local[*]"
40 | }
41 | }
42 |
43 | def apply(layerName1: String, layerName2: String, layerNameOut: String)(implicit catalogPath: String, sc: SparkContext) {
44 | //val layerName1 = "morning2"
45 | //val layerName2 = "morning2_conv"
46 | //val layerNameOut = "mblayer"
47 | //val catalogPath = "target/geotrellis-catalog"
48 |
49 | logger info s"Combining '$layerName1' and '$layerName2' into $layerNameOut inside '$catalogPath'"
50 |
51 | //implicit val sc = Utils.initLocalSparkContext
52 |
53 | // Create the attributes store that will tell us information about our catalog.
54 | val catalogPathHdfs = new Path(catalogPath)
55 | implicit val attributeStore : HadoopAttributeStore = HadoopAttributeStore(catalogPathHdfs)
56 | val layerReader = HadoopLayerReader(attributeStore)
57 |
58 | //val commonZoom = Math.max(findFinestZoom(layerName1), findFinestZoom(layerName2))
59 | val commonZoom = findFinestZoom(List(layerName1,layerName2))
60 | logger info s"using zoom level $commonZoom"
61 |
62 | val layerId1 = findLayerIdByNameAndZoom(layerName1, commonZoom)
63 | val layerId2 = findLayerIdByNameAndZoom(layerName2, commonZoom)
64 |
65 | println(s"$layerId1, $layerId2")
66 |
67 | /*
68 | val tiles1: RDD[(SpatialKey, MultibandTile)] with Metadata[TileLayerMetadata[SpatialKey]] =
69 | //layerReader.read[SpatialKey, MultibandTile, TileLayerMetadata[SpatialKey]](layerId1)
70 | layerReaderMB(layerId1)(layerReader)
71 |
72 | val tiles2: RDD[(SpatialKey, MultibandTile)] with Metadata[TileLayerMetadata[SpatialKey]] =
73 | //layerReader.read[SpatialKey, MultibandTile, TileLayerMetadata[SpatialKey]](layerId2)
74 | layerReaderMB(layerId2)(layerReader)
75 | */
76 | val tiles1 : SpatialMultibandRDD = biggis.landuse.api.readRddFromLayer(layerId1)
77 | val tiles2 : SpatialMultibandRDD = biggis.landuse.api.readRddFromLayer(layerId2)
78 |
79 | val outTiles: RDD[(SpatialKey, MultibandTile)] with Metadata[TileLayerMetadata[SpatialKey]] =
80 | stack2MBlayers(tiles1,tiles2)
81 |
82 | /*
83 | // Create the writer that we will use to store the tiles in the local catalog.
84 | val writer = HadoopLayerWriter(catalogPathHdfs, attributeStore)
85 | val layerIdOut = LayerId(layerNameOut, commonZoom)
86 |
87 | // If the layer exists already, delete it out before writing
88 | if (attributeStore.layerExists(layerIdOut)) {
89 | logger debug s"Layer $layerIdOut already exists, deleting ..."
90 | HadoopLayerDeleter(attributeStore).delete(layerIdOut)
91 | }
92 |
93 | logger debug "Writing reprojected tiles using space filling curve"
94 | writer.write(layerIdOut, outTiles, ZCurveKeyIndexMethod)
95 | */
96 |
97 | biggis.landuse.api.deleteLayerFromCatalog(layerNameOut, commonZoom)
98 | biggis.landuse.api.writeRddToLayer( outTiles, (layerNameOut, commonZoom))
99 |
100 | //sc.stop()
101 | logger info "done."
102 |
103 | }
104 |
105 | def apply(layerNames: Array[String], layerNameOut: String)(implicit catalogPath: String, sc: SparkContext) {
106 | val layerNamesAll : String = layerNames.map(layerName => "[" + layerName + "]").mkString
107 | logger info s"Combining '$layerNamesAll' into '$layerNameOut' inside '$catalogPath'"
108 |
109 | /*
110 | // Create the attributes store that will tell us information about our catalog.
111 | val catalogPathHdfs = new Path(catalogPath)
112 | implicit val attributeStore = HadoopAttributeStore(catalogPathHdfs)
113 | */
114 | implicit val attributeStore : HadoopAttributeStore = biggis.landuse.api.catalogToStore(catalogPath)
115 | implicit val layerReader : HadoopLayerReader = HadoopLayerReader(attributeStore)
116 |
117 | implicit val commonZoom : Int = findFinestZoom(layerNames) //Math.max(findFinestZoom(layerName1), findFinestZoom(layerName2)
118 | logger info s"using zoom level $commonZoom"
119 |
120 | val outTiles: RDD[(SpatialKey, MultibandTile)] with Metadata[TileLayerMetadata[SpatialKey]] = createLayerStack(layerNames)
121 |
122 | /*
123 | // Create the writer that we will use to store the tiles in the local catalog.
124 | val writer = HadoopLayerWriter(catalogPathHdfs, attributeStore)
125 | val layerIdOut = LayerId(layerNameOut, commonZoom)
126 |
127 | // If the layer exists already, delete it out before writing
128 | if (attributeStore.layerExists(layerIdOut)) {
129 | logger debug s"Layer $layerIdOut already exists, deleting ..."
130 | HadoopLayerDeleter(attributeStore).delete(layerIdOut)
131 | }
132 |
133 | logger debug "Writing reprojected tiles using space filling curve"
134 | writer.write(layerIdOut, outTiles, ZCurveKeyIndexMethod)
135 | */
136 |
137 | biggis.landuse.api.deleteLayerFromCatalog(layerNameOut, commonZoom)
138 | biggis.landuse.api.writeRddToLayer( outTiles, (layerNameOut, commonZoom))
139 |
140 | //sc.stop()
141 | logger info "done."
142 |
143 | }
144 |
145 | def findFinestZoom(layerName: String)(implicit attributeStore: HadoopAttributeStore): Int = {
146 | val zoomsOfLayer = attributeStore.layerIds filter (_.name == layerName)
147 | if (zoomsOfLayer.isEmpty){
148 | logger info s"Layer not found: $layerName"
149 | throw new RuntimeException(s"Layer not found : $layerName")
150 | }
151 | zoomsOfLayer.maxBy(_.zoom).zoom //.sortBy(_.zoom).last.zoom
152 | }
153 |
154 | def findLayerIdByNameAndZoom(layerName: String, zoom: Int)(implicit attributeStore: HadoopAttributeStore): LayerId = {
155 | val zoomsOfLayer = attributeStore.layerIds filter (_.name == layerName)
156 | zoomsOfLayer.filter(_.zoom == zoom).head
157 | }
158 |
159 | /*
160 | def layerReaderMB(layerId: LayerId)(implicit layerReader: HadoopLayerReader) : RDD[(SpatialKey, MultibandTile)] with Metadata[TileLayerMetadata[SpatialKey]] = {
161 | try {
162 | //val schema = layerReader.attributeStore.readSchema(layerId)
163 | //val meta = layerReader.attributeStore.readMetadata(layerId)
164 | //val attributes = layerReader.attributeStore.availableAttributes(layerId)
165 | val header = layerReader.attributeStore.readHeader[LayerHeader](layerId)
166 | assert(header.keyClass == "geotrellis.spark.SpatialKey")
167 | //assert(header.valueClass == "geotrellis.raster.Tile")
168 | if (header.valueClass == "geotrellis.raster.Tile"){
169 | layerReader.read[SpatialKey, Tile, TileLayerMetadata[SpatialKey]](layerId)
170 | .withContext { rdd =>
171 | rdd.map { case (spatialKey, tile) => (spatialKey, ArrayMultibandTile(tile)) }
172 | }
173 | }
174 | else {
175 | assert(header.valueClass == "geotrellis.raster.MultibandTile")
176 | layerReader.read[SpatialKey, MultibandTile, TileLayerMetadata[SpatialKey]](layerId)
177 | }
178 | }
179 | catch { case _: Throwable => null }
180 | }
181 | */
182 |
183 | def findFinestZoom(layerNames: Iterable[String])(implicit attributeStore: HadoopAttributeStore) : Int = {
184 | var commonZoom: Int = 0
185 | layerNames.foreach( layerName => { commonZoom = Math.max(commonZoom, findFinestZoom(layerName))})
186 | commonZoom
187 | }
188 |
189 | def getLayerId(layerName: String)(implicit attributeStore: HadoopAttributeStore, commonZoom: Int): LayerId ={
190 | findLayerIdByNameAndZoom(layerName,commonZoom)
191 | }
192 |
193 | def stack2MBlayers(tiles1:RDD[(SpatialKey, MultibandTile)] with Metadata[TileLayerMetadata[SpatialKey]], tiles2: RDD[(SpatialKey, MultibandTile)] with Metadata[TileLayerMetadata[SpatialKey]])(implicit tilesize: (Int,Int) = (256,256)): RDD[(SpatialKey, MultibandTile)] with Metadata[TileLayerMetadata[SpatialKey]] ={
194 | val tilesmerged: RDD[(SpatialKey, MultibandTile)] with Metadata[TileLayerMetadata[SpatialKey]] =
195 | tiles1.withContext { rdd =>
196 | rdd.join(tiles2).map { case (spatialKey, (mbtile1, mbtile2)) =>
197 |
198 | val tilebands1 = mbtile1.bands.toArray.map ( band =>
199 | band.crop(tilesize._1, tilesize._2).convert(DoubleConstantNoDataCellType))
200 | val tilebands2 = mbtile2.bands.toArray.map ( band =>
201 | band.crop(tilesize._1, tilesize._2).convert(DoubleConstantNoDataCellType))
202 |
203 | val mbtile = ArrayMultibandTile( tilebands1 ++ tilebands2)
204 |
205 | (spatialKey, mbtile)
206 | }
207 | }
208 | tilesmerged
209 | }
210 |
211 | /* *
212 | * Read layer from catalog into RDD
213 | * @param layerId layerName and zoom level
214 | * @param bandNumber Optional: select specific band number from layer (only applies to reading MultibandTile as Tile, ignored otherwise), defaults to 0 (first band) if None
215 | * @param catalogPath Geotrellis catalog
216 | * @param sc SparkContext
217 | * @return RDD[(K, V)] with Metadata[M] representing a layer of tiles
218 | * /
219 | def readRddFromLayerT[T]
220 | (layerId: LayerId, bandNumber : Option[Int] = None : Option[Int])
221 | (implicit catalogPath: String, sc: SparkContext, ttag : TypeTag[T]): T = {
222 | if(ttag.tpe =:= typeOf[SpatialRDD]) readRddFromLayer[SpatialKey, Tile, TileLayerMetadata[SpatialKey]](layerId).asInstanceOf[T]
223 | else if(ttag.tpe =:= typeOf[SpatialMultibandRDD]) readRddFromLayer[SpatialKey, MultibandTile, TileLayerMetadata[SpatialKey]](layerId).asInstanceOf[T]
224 | else if(ttag.tpe =:= typeOf[SpaceTimeMultibandRDD]) readRddFromLayer[SpaceTimeKey, MultibandTile, TileLayerMetadata[SpaceTimeKey]](layerId).asInstanceOf[T]
225 | else if(ttag.tpe =:= typeOf[SpaceTimeRDD]) readRddFromLayer[SpaceTimeKey, Tile, TileLayerMetadata[SpaceTimeKey]](layerId).asInstanceOf[T]
226 | else {
227 | throw new RuntimeException("we did not expect any other type than SpatialRDD, SpaceTimeRDD, SpatialMultibandRDD, SpaceTimeMultibandRDD")
228 | sc.emptyRDD[(T, T)].asInstanceOf[T]
229 | }
230 | }
231 | */
232 |
233 | def createLayerStack(layerNames: Array[String])(implicit commonZoom: Int, catalogPath: String/*attributeStore: HadoopAttributeStore, layerReader: HadoopLayerReader*/, sc: SparkContext): RDD[(SpatialKey, MultibandTile)] with Metadata[TileLayerMetadata[SpatialKey]] = {
234 | var tilesmerged : RDD[(SpatialKey, MultibandTile)] with Metadata[TileLayerMetadata[SpatialKey]] = null
235 | layerNames.foreach( layerName => {
236 | logger info s"Reading Layer $layerName"
237 | if (biggis.landuse.api.layerExists(LayerId(layerName, commonZoom))/*attributeStore.layerExists(layerName,commonZoom)*/) {
238 | //var tiles: RDD[(SpatialKey, MultibandTile)] with Metadata[TileLayerMetadata[SpatialKey]] = layerReaderMB(getLayerId(layerName))(layerReader)
239 | //val tiles: SpatialMultibandRDD = biggis.landuse.api.readRddFromLayerT[SpatialMultibandRDD]((layerName, commonZoom))
240 | val tiles: SpatialMultibandRDD = biggis.landuse.api.readRddFromLayer[SpatialKey, MultibandTile, TileLayerMetadata[SpatialKey]]((layerName, commonZoom))
241 | if (tilesmerged == null) {
242 | tilesmerged = tiles
243 | } else {
244 | if(tilesmerged.metadata.crs != tiles.metadata.crs){
245 | logger info s"Mismatch crs: ${tilesmerged.metadata.crs.proj4jCrs.getDatum.getName} != ${tiles.metadata.crs.proj4jCrs.getDatum.getName} "
246 | val (zoomlevel, tilesreproj) : (Int, SpatialMultibandRDD) = tiles.reproject(destCrs = tilesmerged.metadata.crs, layoutDefinition = tilesmerged.metadata.layout)
247 | logger info s"Reproject crs: ${tiles.metadata.crs.proj4jCrs.getDatum.getName} to ${tilesmerged.metadata.crs.proj4jCrs.getDatum.getName} "
248 | tilesmerged = stack2MBlayers(tilesmerged, tilesreproj)
249 | }
250 | else
251 | tilesmerged = stack2MBlayers(tilesmerged, tiles)
252 | }
253 | }
254 | else {
255 | logger info s"Layer not found: $layerName"
256 | throw new RuntimeException(s"Layer not found : $layerName")
257 | }
258 | })
259 | tilesmerged
260 | }
261 | }
262 |
--------------------------------------------------------------------------------
/src/main/scala/biggis/landuse/spark/examples/ManySingleBandLayersToMultibandLayer.scala:
--------------------------------------------------------------------------------
1 | package biggis.landuse.spark.examples
2 |
3 | import biggis.landuse.spark.examples.MultibandGeotiffTilingExample.logger
4 | import com.typesafe.scalalogging.LazyLogging
5 | import geotrellis.raster.{ArrayMultibandTile, DoubleConstantNoDataCellType, IntConstantNoDataCellType, MultibandTile, Tile}
6 | import geotrellis.spark.{LayerId, Metadata, SpatialKey, TileLayerMetadata}
7 | import geotrellis.spark.io.hadoop.{HadoopAttributeStore, HadoopLayerDeleter, HadoopLayerReader, HadoopLayerWriter}
8 | import geotrellis.spark.io.index.ZCurveKeyIndexMethod
9 | import org.apache.hadoop.fs.Path
10 | import org.apache.spark.rdd.RDD
11 | import geotrellis.spark.io.{SpatialKeyFormat, spatialKeyAvroFormat, tileLayerMetadataFormat, tileUnionCodec}
12 | import geotrellis.spark.{LayerId, Metadata, SpatialKey, TileLayerMetadata}
13 | import geotrellis.spark.io.index.ZCurveKeyIndexMethod
14 | import geotrellis.spark.io.index.ZCurveKeyIndexMethod.spatialKeyIndexMethod
15 |
16 | /**
17 | * Created by vlx on 1/19/17.
18 | */
19 | object ManySingleBandLayersToMultibandLayer extends App with LazyLogging {
20 |
21 | val layerName1 = "morning2"
22 | val layerName2 = "morning2_conv"
23 | val layerNameOut = "mblayer"
24 | val catalogPath = "target/geotrellis-catalog"
25 |
26 | logger info s"Combining '$layerName1' and '$layerName2' into $layerNameOut inside '$catalogPath'"
27 |
28 | implicit val sc = Utils.initLocalSparkContext
29 |
30 | // Create the attributes store that will tell us information about our catalog.
31 | val catalogPathHdfs = new Path(catalogPath)
32 | val attributeStore = HadoopAttributeStore( catalogPathHdfs )
33 | val layerReader = HadoopLayerReader(attributeStore)
34 |
35 | val commonZoom = Math.max(findFinestZoom(layerName1), findFinestZoom(layerName2))
36 |
37 | val layerId1 = findLayerIdByNameAndZoom(layerName1, commonZoom)
38 | val layerId2 = findLayerIdByNameAndZoom(layerName2, commonZoom)
39 |
40 | println(s"$layerId1, $layerId2")
41 |
42 | val tiles1:RDD[(SpatialKey, Tile)] with Metadata[TileLayerMetadata[SpatialKey]] =
43 | layerReader.read[SpatialKey, Tile, TileLayerMetadata[SpatialKey]](layerId1)
44 |
45 | val tiles2:RDD[(SpatialKey, Tile)] with Metadata[TileLayerMetadata[SpatialKey]] =
46 | layerReader.read[SpatialKey, Tile, TileLayerMetadata[SpatialKey]](layerId2)
47 |
48 | val outTiles:RDD[(SpatialKey, MultibandTile)] with Metadata[TileLayerMetadata[SpatialKey]] =
49 | tiles1.withContext { rdd =>
50 | rdd.join(tiles2).map { case (spatialKey, (tile1, tile2)) =>
51 | val mbtile = ArrayMultibandTile(
52 | tile2.crop(256, 256).convert(DoubleConstantNoDataCellType),
53 | tile1.crop(256, 256).convert(DoubleConstantNoDataCellType)
54 | ).convert(DoubleConstantNoDataCellType)
55 |
56 | (spatialKey, mbtile)
57 | }
58 | }
59 |
60 | // Create the writer that we will use to store the tiles in the local catalog.
61 | val writer = HadoopLayerWriter(catalogPathHdfs, attributeStore)
62 | val layerIdOut = LayerId(layerNameOut, commonZoom)
63 |
64 |   // If the layer exists already, delete it before writing
65 | if (attributeStore.layerExists(layerIdOut)) {
66 | logger debug s"Layer $layerIdOut already exists, deleting ..."
67 | HadoopLayerDeleter(attributeStore).delete(layerIdOut)
68 | }
69 |
70 |   logger debug "Writing combined multiband tiles using space filling curve"
71 | writer.write(layerIdOut, outTiles, ZCurveKeyIndexMethod)
72 |
73 | sc.stop()
74 | logger info "done."
75 |
76 | def findFinestZoom(layerName:String):Int = {
77 | val zoomsOfLayer = attributeStore.layerIds filter (_.name == layerName)
78 | zoomsOfLayer.sortBy(_.zoom).last.zoom
79 | }
80 |
81 | def findLayerIdByNameAndZoom(layerName:String, zoom:Int):LayerId = {
82 | val zoomsOfLayer = attributeStore.layerIds filter (_.name == layerName)
83 | zoomsOfLayer.filter(_.zoom == zoom).head
84 | }
85 | }
86 |
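`findFinestZoom` and `findLayerIdByNameAndZoom` above scan `attributeStore.layerIds` for the highest zoom of each layer and then pick the concrete `LayerId` at the shared zoom (note that `.head` throws if a layer was never ingested at that zoom). A small self-contained sketch of the same selection logic over a hypothetical list of (name, zoom) pairs, without a catalog:

``` scala
// Stand-in for attributeStore.layerIds: a plain list of (name, zoom) pairs.
case class SimpleLayerId(name: String, zoom: Int)

object ZoomSelectionSketch extends App {

  val layerIds = Seq(
    SimpleLayerId("morning2", 11),
    SimpleLayerId("morning2", 13),
    SimpleLayerId("morning2_conv", 13))

  // highest zoom level available for a layer name
  def findFinestZoom(layerName: String): Int =
    layerIds.filter(_.name == layerName).map(_.zoom).max

  // the concrete id of a layer at a given zoom, if it exists
  def findLayerIdByNameAndZoom(layerName: String, zoom: Int): Option[SimpleLayerId] =
    layerIds.find(id => id.name == layerName && id.zoom == zoom)

  val commonZoom = math.max(findFinestZoom("morning2"), findFinestZoom("morning2_conv"))
  println(s"commonZoom = $commonZoom")
  println(findLayerIdByNameAndZoom("morning2", commonZoom))      // Some(SimpleLayerId(morning2,13))
  println(findLayerIdByNameAndZoom("morning2_conv", commonZoom)) // Some(SimpleLayerId(morning2_conv,13))
}
```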
--------------------------------------------------------------------------------
/src/main/scala/biggis/landuse/spark/examples/MultibandExample.scala:
--------------------------------------------------------------------------------
1 | package biggis.landuse.spark.examples
2 |
3 | import com.typesafe.scalalogging.StrictLogging
4 | import geotrellis.proj4.WebMercator
5 | import geotrellis.raster.io.HistogramDoubleFormat
6 | import geotrellis.raster.io.geotiff.{GeoTiff, MultibandGeoTiff, reader}
7 | import geotrellis.raster.io.geotiff.reader.GeoTiffReader
8 | import geotrellis.raster.resample.Bilinear
9 | import geotrellis.raster.{MultibandTile, MultibandRaster, Raster, Tile, withTileMethods}
10 | import geotrellis.spark.io.file.{FileAttributeStore, FileLayerManager, FileLayerWriter}
11 | import geotrellis.spark.io.hadoop.{HadoopAttributeStore, HadoopLayerDeleter, HadoopLayerReader, HadoopLayerWriter, HadoopSparkContextMethodsWrapper}
12 | import geotrellis.spark.io.index.ZCurveKeyIndexMethod
13 | import geotrellis.spark.io.index.ZCurveKeyIndexMethod.spatialKeyIndexMethod
14 | import geotrellis.spark.io.{SpatialKeyFormat, spatialKeyAvroFormat, tileLayerMetadataFormat, tileUnionCodec}
15 | import geotrellis.spark.pyramid.Pyramid
16 | import geotrellis.spark.tiling.{FloatingLayoutScheme, ZoomedLayoutScheme}
17 | import geotrellis.spark.{LayerId, Metadata, MultibandTileLayerRDD, SpatialKey, TileLayerMetadata, TileLayerRDD, withProjectedExtentTilerKeyMethods, withStatsTileRDDMethods, withTileRDDReprojectMethods, withTilerMethods}
18 | import geotrellis.vector.ProjectedExtent
19 | import org.apache.hadoop.fs.Path
20 | import org.apache.spark.rdd.RDD
21 | import org.apache.spark.{SparkConf, SparkContext}
22 | import geotrellis.spark.etl.hadoop
23 |
24 | /**
25 | * Created by ak on 20.10.2016.
26 | */
27 | @deprecated("do not use, only for debugging", "always")
28 | object MultibandExample extends StrictLogging{
29 |
30 | def main(args: Array[String]): Unit = {
31 | try {
32 | val Array(inputPath, outputPath, layerName, catalogPath) = args
33 | MultibandExample(inputPath, outputPath, layerName)(catalogPath)
34 | } catch {
35 | case _: MatchError => println("Run as: inputPath outputPath layerName /path/to/catalog")
36 | }
37 | }
38 |
39 | def apply(inputPath: String, outputPath: String, layerName: String)(implicit catalogPath: String) {
40 |
41 | logger debug s"Building the pyramid '$layerName' from geotiff '$inputPath' ... "
42 |
43 | implicit val sc = Utils.initSparkContext
44 |
45 | // Multiband Read
46 | logger debug "Opening geotiff as RDD"
47 | val inputRdd = sc.hadoopMultibandGeoTiffRDD(inputPath)
48 | val (_, myRasterMetaData) = TileLayerMetadata.fromRDD(inputRdd, FloatingLayoutScheme(Utils.TILE_SIZE))
49 |
50 | val tiled = inputRdd
51 | .tileToLayout(myRasterMetaData.cellType, myRasterMetaData.layout, Utils.RESAMPLING_METHOD)
52 | .repartition(Utils.RDD_PARTITIONS)
53 |
54 | val layoutScheme = ZoomedLayoutScheme(WebMercator, tileSize = Utils.TILE_SIZE)
55 |
56 | logger debug "Reprojecting to WebMercator"
57 | val (zoom, reprojected) =
58 | MultibandTileLayerRDD(tiled, myRasterMetaData).reproject(WebMercator, layoutScheme, Utils.RESAMPLING_METHOD)
59 |
60 | // Create the attributes store that will tell us information about our catalog.
61 | val catalogPathHdfs = new Path(catalogPath)
62 | val attributeStore = HadoopAttributeStore(catalogPathHdfs)
63 |
64 | // Create the writer that we will use to store the tiles in the local catalog.
65 | val writer = HadoopLayerWriter(catalogPathHdfs, attributeStore)
66 | val layerId = LayerId(layerName, zoom)
67 |
68 |     // If the layer exists already, delete it before writing
69 | if (attributeStore.layerExists(layerId)) {
70 | logger debug s"Layer $layerId already exists, deleting ..."
71 | HadoopLayerDeleter(attributeStore).delete(layerId)
72 | }
73 |
74 | logger debug "Writing reprojected tiles using space filling curve"
75 | writer.write(layerId, reprojected, ZCurveKeyIndexMethod)
76 |
77 | //Utils.writeHistogram(attributeStore, layerName, reprojected.histogram)
78 |
79 | // Multiband Write
80 | val layerReader = HadoopLayerReader(attributeStore)
81 |
82 | val zoomsOfLayer = attributeStore.layerIds filter (_.name == layerName)
83 | if(zoomsOfLayer.isEmpty) {
84 | logger info s"Layer '$layerName' not found in the catalog '$catalogPath'"
85 | return
86 | }
87 |
88 | val srcLayerId = zoomsOfLayer.sortBy(_.zoom).last
89 | logger debug s"The following layerId will be used: $srcLayerId"
90 |
91 | val outputRdd:RDD[(SpatialKey, MultibandTile)] with Metadata[TileLayerMetadata[SpatialKey]] = layerReader
92 | .read[SpatialKey, MultibandTile, TileLayerMetadata[SpatialKey]](srcLayerId)
93 |
94 | val metadata = outputRdd.metadata
95 |
96 | val tiled_out: RDD[(SpatialKey, MultibandTile)] = outputRdd.distinct()
97 |
98 | val tile: MultibandTile = tiled_out.stitch()
99 |
100 | val crs = metadata.crs
101 |
102 | MultibandGeoTiff(tile, metadata.extent, crs).write(outputPath)
103 |
104 | sc.stop()
105 |
106 | }
107 |
108 | }
--------------------------------------------------------------------------------
/src/main/scala/biggis/landuse/spark/examples/MultibandGeotiffTilingExample.scala:
--------------------------------------------------------------------------------
1 | package biggis.landuse.spark.examples
2 |
3 | import com.typesafe.scalalogging.LazyLogging
4 | import geotrellis.proj4.WebMercator
5 | import geotrellis.spark.io.hadoop.HadoopAttributeStore
6 | import geotrellis.spark.io.hadoop.HadoopLayerDeleter
7 | import geotrellis.spark.io.hadoop.HadoopLayerWriter
8 | import geotrellis.spark.io.hadoop.HadoopSparkContextMethodsWrapper
9 | import geotrellis.spark.io.index.ZCurveKeyIndexMethod
10 | import geotrellis.spark.io.index.ZCurveKeyIndexMethod.spatialKeyIndexMethod
11 | import geotrellis.spark.io.SpatialKeyFormat
12 | import geotrellis.spark.io.spatialKeyAvroFormat
13 | import geotrellis.spark.io.tileLayerMetadataFormat
14 | import geotrellis.spark.tiling.FloatingLayoutScheme
15 | import geotrellis.spark.tiling.ZoomedLayoutScheme
16 | import geotrellis.spark.LayerId
17 | import geotrellis.spark.MultibandTileLayerRDD
18 | import geotrellis.spark.TileLayerMetadata
19 | import geotrellis.spark.withProjectedExtentTilerKeyMethods
20 | import geotrellis.spark.withTileRDDReprojectMethods
21 | import geotrellis.spark.withTilerMethods
22 | import org.apache.hadoop.fs.Path
23 | import org.apache.spark.SparkContext
24 | import org.apache.spark.SparkException
25 |
26 |
27 | /**
28 | * Within this example:
29 | * - Geotiff raster file is opened as a Spark RDD
30 | * - the raster is reprojected to WebMercator
31 | * - the raster is tiled into a grid
32 | * - all tiles are stored as a layer in geotrellis catalog
33 | * - histogram data are stored as an attribute in the catalog (into zoom level 0)
34 | */
35 | object MultibandGeotiffTilingExample extends LazyLogging {
36 |
37 | /**
38 | * Run as: /path/to/raster.tif some_layer /path/to/some/dir
39 | */
40 | def main(args: Array[String]): Unit = {
41 | try {
42 | val Array(inputPath, layerName, catalogPath) = args
43 | implicit val sc : SparkContext = Utils.initSparkAutoContext
44 | MultibandGeotiffTilingExample(inputPath, layerName)(catalogPath, sc)
45 | sc.stop()
46 | } catch {
47 | case _: MatchError => println("Run as: inputPath layerName /path/to/catalog")
48 |       case e: SparkException => logger error e.getMessage + ". Try to set JVM parameter: -Dspark.master=local[*]"
49 | }
50 | }
51 |
52 | def apply(inputPath: String, layerName: String)(implicit catalogPath: String, sc: SparkContext) {
53 |
54 | logger info s"Loading geotiff '$inputPath' into '$layerName' in catalog '$catalogPath' ... "
55 |
56 | //implicit val sc = Utils.initSparkContext
57 |
58 | logger debug "Opening geotiff as RDD"
59 | val inputRdd = sc.hadoopMultibandGeoTiffRDD(inputPath)
60 | val (_, myRasterMetaData) = TileLayerMetadata.fromRDD(inputRdd, FloatingLayoutScheme(Utils.TILE_SIZE))
61 |
62 | val tiled = inputRdd
63 | .tileToLayout(myRasterMetaData.cellType, myRasterMetaData.layout, Utils.RESAMPLING_METHOD)
64 | .repartition(Utils.RDD_PARTITIONS)
65 |
66 | val layoutScheme = ZoomedLayoutScheme(WebMercator, tileSize = Utils.TILE_SIZE)
67 |
68 | logger debug "Reprojecting to WebMercator"
69 | val (zoom, reprojected) =
70 | MultibandTileLayerRDD(tiled, myRasterMetaData).reproject(WebMercator, layoutScheme, Utils.RESAMPLING_METHOD)
71 |
72 | biggis.landuse.api.writeRddToLayer(reprojected, LayerId(layerName, zoom))
73 | /*
74 | // Create the attributes store that will tell us information about our catalog.
75 | val catalogPathHdfs = new Path(catalogPath)
76 | val attributeStore = HadoopAttributeStore(catalogPathHdfs)
77 |
78 | // Create the writer that we will use to store the tiles in the local catalog.
79 | val writer = HadoopLayerWriter(catalogPathHdfs, attributeStore)
80 | val layerId = LayerId(layerName, zoom)
81 |
82 |     // If the layer exists already, delete it before writing
83 | if (attributeStore.layerExists(layerId)) {
84 | logger debug s"Layer $layerId already exists, deleting ..."
85 | HadoopLayerDeleter(attributeStore).delete(layerId)
86 | }
87 |
88 | logger debug "Writing reprojected tiles using space filling curve"
89 | writer.write(layerId, reprojected, ZCurveKeyIndexMethod)
90 |
91 | //Utils.writeHistogram(attributeStore, layerName, reprojected.histogram)
92 | */
93 |
94 | //sc.stop()
95 | logger info "done."
96 | }
97 | }
98 |
--------------------------------------------------------------------------------
/src/main/scala/biggis/landuse/spark/examples/MultibandGeotiffToLayerNoReproj.scala:
--------------------------------------------------------------------------------
1 | package biggis.landuse.spark.examples
2 |
3 | import com.typesafe.scalalogging.LazyLogging
4 | import geotrellis.proj4.WebMercator
5 | import geotrellis.spark.io.hadoop.{HadoopAttributeStore, HadoopLayerDeleter, HadoopLayerWriter, HadoopSparkContextMethodsWrapper}
6 | import geotrellis.spark.io.index.ZCurveKeyIndexMethod
7 | import geotrellis.spark.io.index.ZCurveKeyIndexMethod.spatialKeyIndexMethod
8 | import geotrellis.spark.io.{SpatialKeyFormat, spatialKeyAvroFormat, tileLayerMetadataFormat, tileUnionCodec}
9 | import geotrellis.spark.tiling.{FloatingLayoutScheme, ZoomedLayoutScheme}
10 | import geotrellis.spark.{LayerId, MultibandTileLayerRDD, TileLayerMetadata, TileLayerRDD, withProjectedExtentTilerKeyMethods, withTileRDDReprojectMethods, withTilerMethods}
11 | import geotrellis.util.annotations.experimental
12 | import org.apache.hadoop.fs.Path
13 | import org.apache.spark.{SparkContext, SparkException}
14 |
15 |
16 | /**
17 | * Within this example:
18 | * - Geotiff raster file is opened as a Spark RDD
19 | * - the raster is reprojected to WebMercator (optionally, otherwise use_original_crs)
20 | * - the raster is tiled into a grid
21 | * - all tiles are stored as a layer in geotrellis catalog
22 | * - histogram data are stored as an attribute in the catalog (into zoom level 0)
23 | */
24 | @experimental //@deprecated("for debugging only (keeps original projection - no WebMercator)", "always")
25 | object MultibandGeotiffToLayerNoReproj extends LazyLogging {
26 |
27 | /**
28 | * Run as: /path/to/raster.tif some_layer /path/to/some/dir
29 | */
30 | def main(args: Array[String]): Unit = {
31 | try {
32 | val Array(inputPath, layerName, catalogPath) = args
33 | implicit val sc : SparkContext = Utils.initSparkAutoContext // do not use - only for dirty debugging
34 | MultibandGeotiffToLayerNoReproj(inputPath, layerName)(catalogPath, sc)
35 | sc.stop()
36 | logger debug "Spark context stopped"
37 | } catch {
38 | case _: MatchError => println("Run as: inputPath layerName /path/to/catalog")
39 |       case e: SparkException => logger error e.getMessage + ". Try to set JVM parameter: -Dspark.master=local[*]"
40 | }
41 | }
42 |
43 | def apply(inputPath: String, layerName: String)(implicit catalogPath: String, sc: SparkContext) {
44 |
45 | logger info s"Loading geotiff '$inputPath' into '$layerName' in catalog '$catalogPath' ... "
46 |
47 | //implicit val sc = Utils.initSparkContext
48 |
49 | logger debug "Opening geotiff as RDD"
50 | val inputRdd = sc.hadoopMultibandGeoTiffRDD(inputPath)
51 | val (_, myRasterMetaData) = TileLayerMetadata.fromRDD(inputRdd, FloatingLayoutScheme(Utils.TILE_SIZE))
52 |
53 | val myRESAMPLING_METHOD = geotrellis.raster.resample.NearestNeighbor //Utils.RESAMPLING_METHOD
54 |
55 | val tiled = inputRdd
56 | .tileToLayout(myRasterMetaData.cellType, myRasterMetaData.layout, myRESAMPLING_METHOD)
57 | .repartition(Utils.RDD_PARTITIONS)
58 |
59 | val use_original_crs = true //false //
60 |
61 | val crs = {
62 | if(use_original_crs) myRasterMetaData.crs
63 | else WebMercator
64 | }
65 |
66 | val layoutScheme = {
67 | if(use_original_crs) FloatingLayoutScheme(tileSize = Utils.TILE_SIZE)
68 | else ZoomedLayoutScheme(crs, tileSize = Utils.TILE_SIZE)
69 | }
70 |
71 | //logger debug "Reprojecting to myRasterMetaData.crs"
72 | val (zoom, reprojected) =
73 | MultibandTileLayerRDD(tiled, myRasterMetaData).reproject(crs, layoutScheme, Utils.RESAMPLING_METHOD)
74 |
75 | biggis.landuse.api.writeRddToLayer(reprojected, LayerId(layerName, zoom))
76 | /*
77 | // Create the attributes store that will tell us information about our catalog.
78 | val catalogPathHdfs = new Path(catalogPath)
79 | val attributeStore = HadoopAttributeStore(catalogPathHdfs)
80 |
81 | // Create the writer that we will use to store the tiles in the local catalog.
82 | val writer = HadoopLayerWriter(catalogPathHdfs, attributeStore)
83 | val layerId = LayerId(layerName, zoom)
84 |
85 |     // If the layer exists already, delete it before writing
86 | if (attributeStore.layerExists(layerId)) {
87 | logger debug s"Layer $layerId already exists, deleting ..."
88 | HadoopLayerDeleter(attributeStore).delete(layerId)
89 | }
90 |
91 | logger debug "Writing reprojected tiles using space filling curve"
92 | writer.write(layerId, reprojected, ZCurveKeyIndexMethod)
93 |
94 | //Utils.writeHistogram(attributeStore, layerName, reprojected.histogram)
95 | */
96 |
97 | //sc.stop()
98 | logger info "done."
99 | }
100 | }
101 |
--------------------------------------------------------------------------------
/src/main/scala/biggis/landuse/spark/examples/MultibandLayerToGeotiff.scala:
--------------------------------------------------------------------------------
1 | package biggis.landuse.spark.examples
2 | import com.typesafe.scalalogging.LazyLogging
3 | import geotrellis.raster.{Tile, withTileMethods}
4 | import geotrellis.spark.{LayerId, Metadata, SpatialKey, TileLayerMetadata}
5 | import geotrellis.spark.io.file.{FileAttributeStore, FileLayerManager, FileLayerReader, FileLayerWriter}
6 | import geotrellis.spark.io.{LayerHeader, SpatialKeyFormat, spatialKeyAvroFormat, tileLayerMetadataFormat, tileUnionCodec}
7 | import geotrellis.util._
8 | import org.apache.spark.rdd.RDD
9 | import org.apache.spark.{SparkConf, SparkContext}
10 | import geotrellis.raster.{io => _, _}
11 | import geotrellis.raster.io.geotiff.{MultibandGeoTiff, _}
12 | import geotrellis.spark.io.hadoop._
13 | import geotrellis.spark.io.hadoop.{HadoopAttributeStore, HadoopLayerReader}
14 | import geotrellis.spark.stitch._
15 | import geotrellis.spark.tiling.{FloatingLayoutScheme, LayoutDefinition, ZoomedLayoutScheme}
16 | import geotrellis.spark.{io => _, _}
17 | import geotrellis.vector.Extent
18 | import org.apache.hadoop.fs.{FileSystem, Path}
19 | import org.apache.hadoop.conf.Configuration
20 | import biggis.landuse.api.SpatialMultibandRDD
21 | import geotrellis.util.annotations.experimental
22 | import org.apache.spark.sql.SparkSession
23 |
24 | // https://github.com/geotrellis/geotrellis/blob/master/docs/spark/spark-examples.md
25 |
26 | @experimental //@deprecated("for debugging only (attention: writes many tiles, not single file)", "always")
27 | object MultibandLayerToGeotiff extends LazyLogging{
28 | def main(args: Array[String]): Unit = {
29 | try {
30 | //val Array(layerName, outputPath, catalogPath) = args
31 | val (layerNameArray,Array(outputPath, catalogPath)) = (args.take(args.length - 2),args.drop(args.length - 2))
32 | val (layerName: String, zoomLevel: Int) =
33 |         if (layerNameArray.length == 2) (layerNameArray(0), layerNameArray(1).toInt)
34 |         else (layerNameArray(0), -1)
35 | //implicit val sc : SparkContext = Utils.initSparkAutoContext // do not use - only for dirty debugging
36 | val sparkSession: SparkSession = biggis.landuse.api.initSparkSession
37 | implicit val sc : SparkContext = sparkSession.sparkContext
38 | MultibandLayerToGeotiff(layerName, outputPath)(catalogPath, sc, zoomLevel)
39 | sc.stop()
40 | logger debug "Spark context stopped"
41 | } catch {
42 | case _: MatchError => println("Run as: layerName outputPath /path/to/catalog")
43 | }
44 | }
45 |
46 | def apply(layerName: String, outputPath: String, useStitching: Boolean = false)(implicit catalogPath: String, sc: SparkContext, zoomLevel: Int = -1): Unit = {
47 | logger info s"Writing layer '$layerName' in catalog '$catalogPath' to '$outputPath'"
48 |
49 | //implicit val sc = Utils.initSparkContext
50 |
51 | val catalogPathHdfs = new Path(catalogPath)
52 | val attributeStore = HadoopAttributeStore(catalogPathHdfs)
53 | val layerReader = HadoopLayerReader(attributeStore)
54 |
55 | val zoomsOfLayer = attributeStore.layerIds filter (_.name == layerName)
56 | if(zoomsOfLayer.isEmpty) {
57 | logger info s"Layer '$layerName' not found in the catalog '$catalogPath'"
58 | return
59 | }
60 |
61 | val srcLayerId =
62 | if(zoomLevel < 0) zoomsOfLayer.maxBy(_.zoom) //.sortBy(_.zoom).last
63 | else {
64 | val zoomLevels = zoomsOfLayer.filter(_.zoom == zoomLevel)
65 | if (zoomLevels.lengthCompare(1) == 0) zoomLevels.last // if(zoomLevels.length == 1)
66 | else {
67 | logger info s"Layer '$layerName' with zoom '$zoomLevel' not found in the catalog '$catalogPath'"
68 | return
69 | }
70 | }
71 |
72 | //val srcLayerId = zoomsOfLayer.sortBy(_.zoom).last
73 | logger debug s"The following layerId will be used: $srcLayerId"
74 |
75 | val inputRdd : SpatialMultibandRDD = biggis.landuse.api.readRddFromLayer(srcLayerId)
76 | /*
77 | // ToDo: check if RDD is Tile or MultibandTile
78 | val (srcLayerMetadata, srcLayerSchema) =
79 | try {
80 | (
81 | layerReader.attributeStore
82 | .readMetadata(id = srcLayerId) ,
83 | layerReader.attributeStore
84 | //.readSchema(id = srcLayerId) //.getFields()
85 | .readAll(layerName)
86 | )
87 | } catch {
88 | case _: Throwable =>
89 | }
90 | */
91 | /*
92 | val header = layerReader.attributeStore.readHeader[LayerHeader](srcLayerId)
93 | //assert(header.keyClass == "geotrellis.spark.SpatialKey")
94 | //assert(header.valueClass == "geotrellis.raster.Tile")
95 |
96 | val inputRdd:RDD[(SpatialKey, MultibandTile)] with Metadata[TileLayerMetadata[SpatialKey]] =
97 | if(header.valueClass == "geotrellis.raster.Tile") {
98 | layerReader.read[SpatialKey, Tile, TileLayerMetadata[SpatialKey]](srcLayerId)
99 | .withContext { rdd =>
100 | rdd.map { case (spatialKey, tile) => (spatialKey, ArrayMultibandTile(tile)) }
101 | }
102 | }
103 | else {
104 | assert(header.valueClass == "geotrellis.raster.MultibandTile")
105 | layerReader.read[SpatialKey, MultibandTile, TileLayerMetadata[SpatialKey]](srcLayerId)
106 | }
107 | */
108 |
109 |
110 | val metadata = inputRdd.metadata
111 | val crs = metadata.crs
112 |
113 | // test re-tile
114 | /*
115 | val myTILE_SIZE = 1024 //256 //Utils.TILE_SIZE
116 | val myRDD_PARTITIONS = 32 //32 //Utils.RDD_PARTITIONS
117 | val myRESAMPLING_METHOD = geotrellis.raster.resample.NearestNeighbor //Bilinear //Utils.RESAMPLING_METHOD
118 |
119 | val layout = {
120 | //val layoutScheme = FloatingLayoutScheme(tileSize = myTILE_SIZE) // Utils.TILE_SIZE
121 | //val layoutScheme = ZoomedLayoutScheme(crs, tileSize = myTILE_SIZE) // Utils.TILE_SIZE
122 | val layoutTile = TileLayout(metadata.layout.layoutCols, metadata.layout.layoutRows, myTILE_SIZE, myTILE_SIZE )
123 | LayoutDefinition(extent = metadata.extent, layoutTile)
124 | }
125 |
126 | val myMetadata = TileLayerMetadata(
127 | metadata.cellType,
128 | layout, //metadata.layout,
129 | metadata.extent,
130 | metadata.crs,
131 | metadata.bounds)
132 | // */
133 |
134 | // Hadoop Config is accessible from SparkContext
135 | implicit val conf: Configuration = sc.hadoopConfiguration
136 | val serConf = new SerializableConfiguration(conf)
137 | //implicit val fs: FileSystem = FileSystem.get(conf);
138 |
139 | if(useStitching){
140 | // one single GeoTiff, but attention
141 | val tiled: RDD[(SpatialKey, MultibandTile)] = inputRdd
142 | val tile: MultibandTile = tiled.distinct().stitch()
143 | if( crs.epsgCode.get==3857){ //"WebMercator"
144 | val raster: Raster[MultibandTile] = tile.reproject(metadata.extent, metadata.crs, metadata.crs)
145 | MultibandGeoTiff(raster.tile, raster.extent, crs).write(outputPath)
146 | } else {
147 | val layoutextent: Extent = metadata.layoutExtent
148 | MultibandGeoTiff(tile, layoutextent, crs).write(outputPath) //for UTM32
149 | }
150 | } else {
151 | // many GeoTiff tiles
152 | // ToDo: replace "256x256 tiles" by "intelligent" tile size (as many as necessary, as few as possible)
153 | val outputRdd: RDD[(SpatialKey, MultibandTile)] = inputRdd
154 | //.tileToLayout(metadata.cellType, metadata.layout, Utils.RESAMPLING_METHOD)
155 | //.repartition(Utils.RDD_PARTITIONS)
156 | //.repartition(myRDD_PARTITIONS)
157 | //.tileToLayout(myMetadata.cellType, myMetadata.layout, myRESAMPLING_METHOD)
158 |
159 | val useSerializedHadoopConfig = true
160 | if (useSerializedHadoopConfig) {
161 | // ToDo: test Spark Cluster version
162 | outputRdd.foreachPartition { partition =>
163 | partition.foreach { tuple =>
164 | val (key, tile) = tuple
165 | val (col, row) = (key.col, key.row)
166 | val tileextent: Extent = metadata.layout.mapTransform(key)
167 | val filename = new Path(outputPath + "_" + col + "_" + row + ".tif")
168 | logger info s" writing: '${filename.toString}'"
169 | MultibandGeoTiff(tile, tileextent, crs)
170 | .write(filename, serConf.value)
171 | }
172 | }
173 | } else {
174 | // only for local debugging - do not use in cloud // ToDo: delete after testing
175 | outputRdd.foreach(mbtile => {
176 | val (key, tile) = mbtile
177 | val (col, row) = (key.col, key.row)
178 | val tileextent: Extent = metadata.layout.mapTransform(key)
179 | //val filename = new Path(outputPath + "_" + col + "_" + row + ".tif")
180 | //logger info s" writing: '${filename.toString}'"
181 | MultibandGeoTiff(tile, tileextent, crs)
182 | //.write(filename.toString) //.write(filename, serConf.value)
183 | .write(outputPath + "_" + col + "_" + row + ".tif")
184 | }
185 | )
186 | }
187 | }
188 |
189 | ////val raster: Raster[MultibandTile] = tile.reproject(metadata.extent, metadata.crs, metadata.crs)
190 | //MultibandGeoTiff(tile, metadata.extent, crs).write(outputPath)
191 |
192 | //sc.stop()
193 | //logger debug "Spark context stopped"
194 |
195 | logger info "done."
196 | }
197 | }
198 |
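The command line convention in `main` above is `layerName [zoomLevel] outputPath catalogPath`: the last two arguments are fixed, everything before them is the layer name with an optional zoom level. A small sketch of that split, independent of Spark and GeoTrellis:

``` scala
// Parses "layerName [zoomLevel] outputPath catalogPath"; zoom defaults to -1
// (meaning "use the finest zoom found in the catalog").
object ArgsParsingSketch extends App {

  def parse(args: Array[String]): (String, Int, String, String) = {
    val (layerArgs, Array(outputPath, catalogPath)) =
      (args.take(args.length - 2), args.drop(args.length - 2))
    val (layerName, zoomLevel) =
      if (layerArgs.length == 2) (layerArgs(0), layerArgs(1).toInt)
      else (layerArgs(0), -1)
    (layerName, zoomLevel, outputPath, catalogPath)
  }

  println(parse(Array("mblayer", "13", "out.tif", "target/geotrellis-catalog")))
  println(parse(Array("mblayer", "out.tif", "target/geotrellis-catalog")))
}
```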
--------------------------------------------------------------------------------
/src/main/scala/biggis/landuse/spark/examples/NDVILayerExample.scala:
--------------------------------------------------------------------------------
1 | package biggis.landuse.spark.examples
2 |
3 | import com.typesafe.scalalogging.LazyLogging
4 | import geotrellis.raster.{DoubleConstantNoDataCellType, NODATA, Tile, isData}
5 | import geotrellis.raster.render.ColorMap
6 | import geotrellis.spark.io.hadoop.{HadoopAttributeStore, HadoopLayerDeleter, HadoopLayerReader, HadoopLayerWriter}
7 | import geotrellis.spark.io.index.ZCurveKeyIndexMethod
8 | import geotrellis.spark.io.index.ZCurveKeyIndexMethod.spatialKeyIndexMethod
9 | import geotrellis.spark.io.{SpatialKeyFormat, spatialKeyAvroFormat, tileLayerMetadataFormat, tileUnionCodec}
10 | import geotrellis.spark.{LayerId, Metadata, SpatialKey, TileLayerMetadata}
11 | import org.apache.hadoop.fs.Path
12 | import org.apache.spark.{SparkContext, SparkException}
13 | import org.apache.spark.rdd.RDD
14 |
15 | object NDVILayerExample extends LazyLogging {
16 |
17 | // from geotrellis/geotrellis-landsat-tutorial -> src/main/resources/application.conf
18 | // tutorial.ndviColormap = "0:ffffe5ff;0.1:f7fcb9ff;0.2:d9f0a3ff;0.3:addd8eff;0.4:78c679ff;0.5:41ab5dff;0.6:238443ff;0.7:006837ff;1:004529ff"
19 | var colorMap : ColorMap = ColorMap.fromStringDouble(s"0:ffffe5ff;0.1:f7fcb9ff;0.2:d9f0a3ff;0.3:addd8eff;0.4:78c679ff;0.5:41ab5dff;0.6:238443ff;0.7:006837ff;1:004529ff").get
20 |
21 | def main(args: Array[String]): Unit = {
22 | try {
23 | val Array(layerNIR, layerRed, layerNDVI, catalogPath) = args
24 | implicit val sc = Utils.initSparkAutoContext
25 | NDVILayerExample(layerNIR, layerRed, layerNDVI)(catalogPath, sc)
26 | sc.stop()
27 | } catch {
28 | case _: MatchError => println("Run as: layerNIR layerRed layerNDVI /path/to/catalog")
29 |       case e: SparkException => logger error e.getMessage + ". Try to set JVM parameter: -Dspark.master=local[*]"
30 | }
31 | }
32 |
33 | def apply(layerNIR: String, layerRed: String, layerNDVI: String)(implicit catalogPath: String, sc: SparkContext): Unit = {
34 | logger info s"Running ndvi calc of layers '$layerNIR' - '$layerRed' in catalog '$catalogPath'"
35 |
36 | // Create the attributes store that will tell us information about our catalog.
37 | val catalogPathHdfs = new Path(catalogPath)
38 | implicit val attributeStore = HadoopAttributeStore( catalogPathHdfs )
39 | val layerReader = HadoopLayerReader(attributeStore)
40 |
41 | // see: geotrellis-landsat-tutorial/src/main/scala/tutorial/IngestImage.scala
42 | // https://github.com/geotrellis/geotrellis-landsat-tutorial/blob/master/src/main/scala/tutorial/IngestImage.scala
43 | // replaced by GeotiffTilingExample -> Hadoop Layer
44 | val commonZoom = Math.max(findFinestZoom(layerNIR), findFinestZoom(layerRed))
45 | val layerIdNIR = findLayerIdByNameAndZoom(layerNIR, commonZoom)
46 | val layerIdRed = findLayerIdByNameAndZoom(layerRed, commonZoom)
47 | println(s"$layerIdNIR, $layerIdRed")
48 | val tilesNIR: RDD[(SpatialKey, Tile)] with Metadata[TileLayerMetadata[SpatialKey]] =
49 | layerReader.read[SpatialKey, Tile, TileLayerMetadata[SpatialKey]](layerIdNIR)
50 | val tilesRed: RDD[(SpatialKey, Tile)] with Metadata[TileLayerMetadata[SpatialKey]] =
51 | layerReader.read[SpatialKey, Tile, TileLayerMetadata[SpatialKey]](layerIdRed)
52 |
53 | // see: geotrellis-landsat-tutorial/src/main/scala/tutorial/Calculations.scala
54 | // https://github.com/geotrellis/geotrellis-landsat-tutorial/blob/master/src/main/scala/tutorial/Calculations.scala
55 | def ndvi (r: Double, ir: Double) : Double = {
56 | if (isData(r) && isData(ir)) {
57 | (ir - r) / (ir + r)
58 | } else {
59 | Double.NaN
60 | }
61 | }
62 |
63 | // here, the calculation takes place
64 | val ndviRdd: RDD[(SpatialKey, Tile)] with Metadata[TileLayerMetadata[SpatialKey]] =
65 | tilesNIR.withContext { rdd => rdd
66 | .join(tilesRed)
67 | .map { case (spatialKey, (tileNIR, tileRed)) =>
68 | val tileNIRFloat = tileNIR.convert(DoubleConstantNoDataCellType)
69 | val tileRedFloat = tileRed.convert(DoubleConstantNoDataCellType)
70 | val tile = tileRedFloat.combineDouble(tileNIRFloat) {
71 | (r: Double, ir: Double) => ndvi(r,ir)
72 | }
73 | (spatialKey, tile)
74 | }
75 | }
76 |
77 | // this will be the new ndvi layer
78 | val layerIdNDVI = LayerId(layerNDVI, commonZoom)
79 |
80 | // automatically deleting existing layer
81 | if (attributeStore.layerExists(layerIdNDVI)) {
82 | logger debug s"Layer $layerIdNDVI already exists, deleting ..."
83 | HadoopLayerDeleter(attributeStore).delete(layerIdNDVI)
84 | }
85 |
86 |     logger info s"Writing NDVI layer '${layerIdNDVI}'"
87 | val writer = HadoopLayerWriter(catalogPathHdfs, attributeStore)
88 | writer.write(layerIdNDVI, ndviRdd, ZCurveKeyIndexMethod)
89 |
90 | Utils.writeHistogram(attributeStore, layerNDVI, ndviRdd.histogram)
91 |
92 | logger info "done."
93 | }
94 |
95 | def findFinestZoom(layerName: String)(implicit attributeStore: HadoopAttributeStore): Int = {
96 | val zoomsOfLayer = attributeStore.layerIds filter (_.name == layerName)
97 | zoomsOfLayer.sortBy(_.zoom).last.zoom
98 | }
99 |
100 | def findLayerIdByNameAndZoom(layerName: String, zoom: Int)(implicit attributeStore: HadoopAttributeStore): LayerId = {
101 | val zoomsOfLayer = attributeStore.layerIds filter (_.name == layerName)
102 | zoomsOfLayer.filter(_.zoom == zoom).head
103 | }
104 | }
105 |
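The `ndvi` helper above is the usual normalised difference `(ir - r) / (ir + r)`, yielding values in [-1, 1] and NaN for missing input. The same formula on plain Double samples, without the `isData`/NODATA machinery (the reflectance values below are made up for illustration):

``` scala
// NDVI = (NIR - Red) / (NIR + Red); NaN marks missing data.
object NdviSketch extends App {

  def ndvi(r: Double, ir: Double): Double =
    if (!r.isNaN && !ir.isNaN) (ir - r) / (ir + r) else Double.NaN

  println(ndvi(0.2, 0.6))        // dense vegetation -> 0.5
  println(ndvi(0.4, 0.45))       // bare soil -> ~0.06
  println(ndvi(Double.NaN, 0.6)) // missing red band -> NaN
}
```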
--------------------------------------------------------------------------------
/src/main/scala/biggis/landuse/spark/examples/NDVILayerWithCloudMaskExample.scala:
--------------------------------------------------------------------------------
1 | package biggis.landuse.spark.examples
2 |
3 | import com.typesafe.scalalogging.LazyLogging
4 | import geotrellis.raster.{DoubleConstantNoDataCellType, NODATA, Tile, isData}
5 | import geotrellis.spark.io.hadoop.{HadoopAttributeStore, HadoopLayerDeleter, HadoopLayerReader, HadoopLayerWriter}
6 | import geotrellis.spark.io.index.ZCurveKeyIndexMethod
7 | import geotrellis.spark.io.index.ZCurveKeyIndexMethod.spatialKeyIndexMethod
8 | import geotrellis.spark.io.{SpatialKeyFormat, spatialKeyAvroFormat, tileLayerMetadataFormat, tileUnionCodec}
9 | import geotrellis.spark.{LayerId, Metadata, SpatialKey, TileLayerMetadata}
10 | import org.apache.hadoop.fs.Path
11 | import org.apache.spark.{SparkContext, SparkException}
12 | import org.apache.spark.rdd.RDD
13 |
14 | object NDVILayerWithCloudMaskExample extends LazyLogging {
15 |
16 | def main(args: Array[String]): Unit = {
17 | try {
18 | val Array(layerNIR, layerRed, layerClouds, layerNDVI, catalogPath) = args
19 | implicit val sc = Utils.initSparkAutoContext
20 | NDVILayerWithCloudMaskExample(layerNIR, layerRed, layerClouds, layerNDVI)(catalogPath, sc)
21 | sc.stop()
22 | } catch {
23 | case _: MatchError => println("Run as: layerNIR layerRed layerCloud layerNDVI /path/to/catalog")
24 |       case e: SparkException => logger error e.getMessage + ". Try to set JVM parameter: -Dspark.master=local[*]"
25 | }
26 | }
27 |
28 | def apply(layerNIR: String, layerRed: String, layerClouds: String, layerNDVI: String)(implicit catalogPath: String, sc: SparkContext): Unit = {
29 | logger info s"Running ndvi calc of layers '$layerNIR' - '$layerRed' in catalog '$catalogPath'"
30 |
31 |
32 | // Create the attributes store that will tell us information about our catalog.
33 | val catalogPathHdfs = new Path(catalogPath)
34 | implicit val attributeStore = HadoopAttributeStore( catalogPathHdfs )
35 | val layerReader = HadoopLayerReader(attributeStore)
36 |
37 | // see: geotrellis-landsat-tutorial/src/main/scala/tutorial/IngestImage.scala
38 | // https://github.com/geotrellis/geotrellis-landsat-tutorial/blob/master/src/main/scala/tutorial/IngestImage.scala
39 | // replaced by GeotiffTilingExample -> Hadoop Layer
40 | val commonZoom = Math.max(Math.max(findFinestZoom(layerNIR), findFinestZoom(layerRed)), findFinestZoom(layerClouds))
41 | val layerIdNIR = findLayerIdByNameAndZoom(layerNIR, commonZoom)
42 | val layerIdRed = findLayerIdByNameAndZoom(layerRed, commonZoom)
43 | val layerIdClouds = findLayerIdByNameAndZoom(layerClouds, commonZoom)
44 | println(s"$layerIdNIR, $layerIdRed")
45 | val tilesNIR: RDD[(SpatialKey, Tile)] with Metadata[TileLayerMetadata[SpatialKey]] =
46 | layerReader.read[SpatialKey, Tile, TileLayerMetadata[SpatialKey]](layerIdNIR)
47 | val tilesRed: RDD[(SpatialKey, Tile)] with Metadata[TileLayerMetadata[SpatialKey]] =
48 | layerReader.read[SpatialKey, Tile, TileLayerMetadata[SpatialKey]](layerIdRed)
49 | val tilesClouds: RDD[(SpatialKey, Tile)] with Metadata[TileLayerMetadata[SpatialKey]] =
50 | layerReader.read[SpatialKey, Tile, TileLayerMetadata[SpatialKey]](layerIdClouds)
51 |
52 | // see: geotrellis-landsat-tutorial/src/main/scala/tutorial/MaskBandsRandGandNIR.scala
53 | // https://github.com/geotrellis/geotrellis-landsat-tutorial/blob/master/src/main/scala/tutorial/MaskBandsRandGandNIR.scala
54 | def maskClouds(tile: Tile)(implicit qaTile: Tile): Tile =
55 | tile.combine(qaTile) { (v: Int, qa: Int) =>
56 | val isCloud = qa & 0x8000
57 | val isCirrus = qa & 0x2000
58 | if(isCloud > 0 || isCirrus > 0) { NODATA }
59 | else { v }
60 | }
61 |
62 | // see: geotrellis-landsat-tutorial/src/main/scala/tutorial/Calculations.scala
63 | // https://github.com/geotrellis/geotrellis-landsat-tutorial/blob/master/src/main/scala/tutorial/Calculations.scala
64 | def ndvi (r: Double, ir: Double) : Double = {
65 | if (isData(r) && isData(ir)) {
66 | (ir - r) / (ir + r)
67 | } else {
68 | Double.NaN
69 | }
70 | }
71 |
72 | // here, the calculation takes place
73 | val ndviRdd: RDD[(SpatialKey, Tile)] with Metadata[TileLayerMetadata[SpatialKey]] =
74 | tilesNIR.withContext { rdd => rdd
75 | .join(tilesRed)
76 | .join(tilesClouds)
77 | .map { case (spatialKey, ((tileNIR, tileRed), tileClouds)) =>
78 | implicit val qaTile = tileClouds
79 | val tileNIRMasked = maskClouds(tileNIR).convert(DoubleConstantNoDataCellType)
80 | val tileRedMasked = maskClouds(tileRed).convert(DoubleConstantNoDataCellType)
81 | val tile = tileRedMasked.combineDouble(tileNIRMasked) {
82 | (r: Double, ir: Double) => ndvi(r,ir)
83 | }
84 | (spatialKey, tile)
85 | }
86 | }
87 |
88 | // this will be the new ndvi layer
89 | val layerIdNDVI = LayerId(layerNDVI, commonZoom)
90 |
91 | // automatically deleting existing layer
92 | if (attributeStore.layerExists(layerIdNDVI)) {
93 | logger debug s"Layer $layerIdNDVI already exists, deleting ..."
94 | HadoopLayerDeleter(attributeStore).delete(layerIdNDVI)
95 | }
96 |
97 |     logger info s"Writing NDVI layer '${layerIdNDVI}'"
98 | val writer = HadoopLayerWriter(catalogPathHdfs, attributeStore)
99 | writer.write(layerIdNDVI, ndviRdd, ZCurveKeyIndexMethod)
100 |
101 | Utils.writeHistogram(attributeStore, layerNDVI, ndviRdd.histogram)
102 |
103 | logger info "done."
104 | }
105 |
106 | def findFinestZoom(layerName: String)(implicit attributeStore: HadoopAttributeStore): Int = {
107 | val zoomsOfLayer = attributeStore.layerIds filter (_.name == layerName)
108 | zoomsOfLayer.sortBy(_.zoom).last.zoom
109 | }
110 |
111 | def findLayerIdByNameAndZoom(layerName: String, zoom: Int)(implicit attributeStore: HadoopAttributeStore): LayerId = {
112 | val zoomsOfLayer = attributeStore.layerIds filter (_.name == layerName)
113 | zoomsOfLayer.filter(_.zoom == zoom).head
114 | }
115 | }
116 |
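`maskClouds` above tests two bits of the QA band, 0x8000 (cloud) and 0x2000 (cirrus), and replaces any flagged pixel with NODATA before the NDVI calculation. The per-pixel rule in isolation, with `Int.MinValue` as a stand-in for the GeoTrellis NODATA constant:

``` scala
// QA bitmask test: cloud bit 0x8000, cirrus bit 0x2000.
object CloudMaskSketch extends App {

  val NoData = Int.MinValue // stand-in for geotrellis.raster.NODATA

  def maskValue(v: Int, qa: Int): Int =
    if ((qa & 0x8000) > 0 || (qa & 0x2000) > 0) NoData else v

  println(maskValue(123, 0x0000)) // clear pixel  -> 123
  println(maskValue(123, 0x8000)) // cloudy pixel -> NoData
  println(maskValue(123, 0x2000)) // cirrus pixel -> NoData
}
```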
--------------------------------------------------------------------------------
/src/main/scala/biggis/landuse/spark/examples/ServeLayerAsMap.scala:
--------------------------------------------------------------------------------
1 | package biggis.landuse.spark.examples
2 |
3 | import akka.actor._
4 | import akka.io.IO
5 | import com.typesafe.scalalogging.LazyLogging
6 | import geotrellis.raster._
7 | import geotrellis.raster.histogram.Histogram
8 | import geotrellis.raster.io.HistogramDoubleFormat
9 | import geotrellis.raster.render._
10 | import geotrellis.spark._
11 | import geotrellis.spark.io._
12 | import geotrellis.spark.io.hadoop.HadoopValueReader
13 | import org.apache.hadoop.fs.Path
14 | import org.apache.spark.SparkContext
15 | import org.apache.spark.SparkException
16 | import spray.can.Http
17 | import spray.http.MediaTypes
18 | import spray.httpx.marshalling.ToResponseMarshallable.isMarshallable
19 | import spray.routing.Directive.pimpApply
20 | import spray.routing.HttpService
21 |
22 | import scala.concurrent._
23 |
24 | object ServeLayerAsMap extends LazyLogging {
25 |
26 | // filled from command line
27 | var fileValueReader: HadoopValueReader = null
28 | var layerNameServed: String = null
29 | var colorMap: ColorMap = null
30 | // end of filled from command line
31 |
32 | // the reader is used from the akka actor class
33 | def reader(layerId: LayerId) = fileValueReader.reader[SpatialKey, Tile](layerId)
34 |
35 | // DEBUG: val args = Array("target/geotrellis-catalog", "morning2")
36 | def main(args: Array[String]): Unit = {
37 | try {
38 | val Array(catalogPath, layerName) = args
39 |
40 | logger info "setting variables from commandline"
41 |
42 | //layerNameServed = layerName // TODO // moved to apply(...) using init()
43 |
44 | implicit val sc = Utils.initSparkAutoContext
45 |
46 | // catalog reader // moved to apply(...) using init()
47 | //fileValueReader = HadoopValueReader(new Path(catalogPath))
48 |
49 | // read quantile breaks from attribute store // moved to apply(...) using initHeatMap(...)
50 | //val layerId = LayerId(layerName, 0)
51 | //val hist = fileValueReader.attributeStore.read[Histogram[Double]](layerId, "histogramData")
52 | //colorMap = ColorRamps.HeatmapBlueToYellowToRedSpectrum.toColorMap(hist.quantileBreaks(10))
53 |
54 | ServeLayerAsMap(catalogPath, layerName)
55 | } catch {
56 | case _: MatchError => println("Run as: [/path/to/catalog] [layerName]")
57 |       case e: SparkException => logger error e.getMessage + ". Try to set JVM parameter: -Dspark.master=local[*]"
58 | }
59 | }
60 |
61 | def apply(catalogPath: String, layerNameServed: String, colorMap: ColorMap = null)(implicit sc: SparkContext): Unit = {
62 | // init catalog reader
63 | init(catalogPath, layerNameServed)
64 | // init ColorMap
65 | ServeLayerAsMap.colorMap = if (colorMap == null) initHeatmap(layerNameServed) else colorMap
66 |
67 | logger info s"Serving layer='$layerNameServed' from catalog='$catalogPath'"
68 |
69 | implicit val system = akka.actor.ActorSystem("biggis-actor-system")
70 |
71 | // create and start our service actor
72 | val service = system.actorOf(Props(classOf[ServeLayerAsMapActor]), "tile-server")
73 |
74 |     // start a new HTTP server on port 18080 with our service actor as the handler
75 | IO(Http) ! Http.Bind(service, "localhost", 18080)
76 | println("Now open the file 'static/index.html' in your browser.")
77 | println("The HTML code uses leaflet javascript library which communicates with our tile-serving backend.")
78 | }
79 |
80 | // init catalog reader
81 | def init(catalogPath: String, layerNameServed: String)(implicit sc: SparkContext): Unit = {
82 | ServeLayerAsMap.layerNameServed = layerNameServed
83 | // catalog reader
84 | ServeLayerAsMap.fileValueReader = HadoopValueReader(new Path(catalogPath))
85 | }
86 |
87 | // init ColorMap
88 | def initHeatmap(layerNameServed: String): ColorMap = {
89 | // read quantile breaks from attribute store
90 | val layerId = LayerId(layerNameServed, 0)
91 | val hist = fileValueReader.attributeStore.read[Histogram[Double]](layerId, "histogramData")
92 | colorMap = ColorRamps.HeatmapBlueToYellowToRedSpectrum.toColorMap(hist.quantileBreaks(10))
93 | colorMap
94 | }
95 | }
96 |
97 | class ServeLayerAsMapActor extends Actor with HttpService {
98 |
99 | import scala.concurrent.ExecutionContext.Implicits.global
100 |
101 | def actorRefFactory = context
102 |
103 | def receive = runRoute(root)
104 |
105 | def root =
106 | pathPrefix(IntNumber / IntNumber / IntNumber) { (zoom, x, y) =>
107 | respondWithMediaType(MediaTypes.`image/png`) {
108 | complete {
109 | Future {
110 | try {
111 | val tile: Tile = ServeLayerAsMap.reader(LayerId(ServeLayerAsMap.layerNameServed, zoom)).read(x, y)
112 | val png = tile.renderPng(ServeLayerAsMap.colorMap)
113 | Some(png.bytes)
114 | } catch {
115 | //// https://github.com/locationtech/geotrellis/commit/69ee528d99e4d126bd7dbf464ce7805fe4fe33d9
116 | case _: ValueNotFoundError => None // TileNotFoundError in Geotrellis 0.10.3
117 | //case _: TileNotFoundError => None
118 | case _: UnsupportedOperationException => None
119 | }
120 | }
121 | }
122 | }
123 | }
124 | }
125 |
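`initHeatmap` above turns the stored histogram into ten quantile breaks and maps them onto a heat-map colour ramp. A rough sketch of that binning idea with hard-coded breaks and hypothetical colours; GeoTrellis's `ColorMap` offers more options, this only mirrors the "value falls into the first break it does not exceed" rule:

``` scala
// Quantile-break colour binning: each value gets the colour of the first
// break it does not exceed; values above the last break get the last colour.
object ColorMapSketch extends App {

  val breaks = Array(0.1, 0.25, 0.5, 0.75, 1.0) // e.g. from hist.quantileBreaks(5)
  val ramp   = Array("#0000ff", "#00ffff", "#ffff00", "#ff8800", "#ff0000") // hypothetical colours

  def colorFor(v: Double): String = {
    val idx = breaks.indexWhere(v <= _)
    if (idx < 0) ramp.last else ramp(idx)
  }

  Seq(0.05, 0.3, 0.9).foreach(v => println(s"$v -> ${colorFor(v)}"))
}
```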
--------------------------------------------------------------------------------
/src/main/scala/biggis/landuse/spark/examples/ShapefileExample.scala:
--------------------------------------------------------------------------------
1 | package biggis.landuse.spark.examples
2 |
3 | import com.typesafe.scalalogging.LazyLogging
4 | import com.vividsolutions.jts.{geom => jts}
5 | import geotrellis.shapefile.ShapeFileReader
6 | import org.apache.spark.SparkException
7 |
8 | /*
9 | * Needs additional dependencies from external repositories
10 | * see: http://stackoverflow.com/questions/16225573/why-cant-i-resolve-the-dependencies-for-geotools-maven-quickstart
11 | */
12 | @deprecated("old version will be deleted after pending pull request from geotrellis", "always")
13 | object ShapefileExample extends LazyLogging {
14 | def main(args: Array[String]): Unit = {
15 | try {
16 | val Array(shapeName, catalogPath) = args
17 | ShapefileExample(shapeName)(catalogPath)
18 | } catch {
19 | case _: MatchError => println("Run as: shapeName /path/to/catalog")
20 |       case e: SparkException => logger error e.getMessage + ". Try to set JVM parameter: -Dspark.master=local[*]"
21 | }
22 | }
23 |
24 | def apply(shapeName: String)(implicit catalogPath: String): Unit = {
25 | logger info s"Running Shapefile import '$shapeName' in catalog '$catalogPath'"
26 |
27 | implicit val sc = Utils.initSparkContext
28 |
29 | val shp = ShapeFileReader.readSimpleFeatures(shapeName)
30 | for(ft <- shp) yield{
31 | val ID : String = ft.getID
32 | val geom = ft.getDefaultGeometry
33 | val attribs = ft.getAttributes
34 | attribs.remove(0)
35 | println(ID)
36 | println(geom)
37 | println(attribs)
38 | }
39 | //ToDo: Get Shapefile Context (JTS Geometry Format) into Spark Context
40 |
41 | //https://gitter.im/geotrellis/geotrellis/archives/2015/05/18
42 | //ShapeFile.readPointFeatures[Int](path, dataField)
43 | //geotrellis.raster.VectorToRaster(pointsFromShapeFile, kernel, rasterExtent)
44 | /*
45 |
46 | // https://gist.github.com/echeipesh/26b50b235fd812f39098
47 | val mps: Seq[MultiPolygon[Int]] =
48 | for (ft <- shp) yield {
49 | val geom = ft.getAttribute(0).asInstanceOf[jts.MultiPolygon]
50 | val props: Map[String, Object] =
51 | ft.getProperties.asScala.drop(1).map { p =>
52 | (p.getName.toString, ft.getAttribute(p.getName))
53 | }.toMap
54 | val data = props("WorkingAge").asInstanceOf[Long].toInt
55 | new MultiPolygon(geom, data)
56 | }
57 | */
58 |
59 | /*
60 | val extent = {
61 | val env = mps
62 | .map(_.geom.getEnvelope())
63 | .reduce(_ union _)
64 | .getEnvelopeInternal()
65 | .asInstanceOf[jts.Envelope]
66 |
67 | Extent(env.getMinX, env.getMinY, env.getMaxX, env.getMaxY)
68 | }
69 | val re = RasterExtent(extent, 255, 255)
70 | val rd = RasterData.emptyByType(TypeInt, 255, 255).mutable
71 | println(mps)
72 | for { mp <- mps; poly <- mp.flatten } {
73 | Rasterizer.foreachCellByFeature[Polygon, Int](poly, re) {
74 | new Callback[Polygon, Int] {
75 | def apply(col: Int, row: Int, g: Polygon[Int]) =
76 | rd.set(col, row, g.data)
77 | }
78 | }
79 | }
80 |
81 | val rater = Raster(rd, re)
82 | */
83 | }
84 | }
85 |
--------------------------------------------------------------------------------
/src/main/scala/biggis/landuse/spark/examples/ShapefilePolygonRasterizer.scala:
--------------------------------------------------------------------------------
1 | package biggis.landuse.spark.examples
2 |
3 | import geotrellis.proj4.WebMercator
4 | import geotrellis.raster.Tile
5 | import geotrellis.raster._
6 | import geotrellis.raster.rasterize.Rasterizer.Options
7 | import geotrellis.spark.Bounds
8 | import geotrellis.spark.LayerId
9 | import geotrellis.spark.Metadata
10 | import geotrellis.spark.SpatialKey
11 | import geotrellis.spark.TileLayerMetadata
12 | import geotrellis.spark.TileLayerRDD
13 | import geotrellis.spark.io.hadoop.HadoopAttributeStore
14 | import geotrellis.spark.io.hadoop.HadoopLayerWriter
15 | import geotrellis.spark.rasterize.RasterizeFeaturesRDD
16 | import geotrellis.spark.tiling.LayoutDefinition
17 | import geotrellis.util.LazyLogging
18 | import org.apache.hadoop.fs.Path
19 | import org.apache.spark.SparkContext
20 | import org.apache.spark.SparkException
21 | import org.apache.spark.rdd.RDD
22 |
23 | /**
24 | * Created by ak on 21.06.2017.
25 | */
26 | object ShapefilePolygonRasterizer extends LazyLogging {
27 | def main(args: Array[String]): Unit = {
28 | try {
29 | val Array(shapefilePath, attribName, layerName, catalogPath) = args
30 | implicit val sc: SparkContext = Utils.initSparkAutoContext
31 | ShapefilePolygonRasterizer(shapefilePath, attribName, layerName)(catalogPath, sc)
32 | sc.stop()
33 | } catch {
34 | case _: MatchError => println("Run as: shapefilePath attribName layerName /path/to/catalog")
35 |       case e: SparkException => logger error e.getMessage + ". Try to set JVM parameter: -Dspark.master=local[*]"
36 | }
37 | }
38 |
39 | def apply(shapefileName: String, attribName: String, layerName: String)(implicit catalogPath: String, sc: SparkContext): Unit = {
40 | logger info s"Running rasterizer for layer '$layerName' in catalog '$catalogPath'"
41 |
42 | val multipolygons = UtilsShape.readShapefileMultiPolygonDoubleAttribute(shapefileName, attribName)
43 | val multipolygons_extent = UtilsShape.getExtent(multipolygons)
44 |
45 | // Create the attributes store that will tell us information about our catalog.
46 | val catalogPathHdfs = new Path(catalogPath)
47 | val attributeStore = HadoopAttributeStore(catalogPathHdfs)
48 |
49 | // Create the writer that we will use to store the tiles in the local catalog.
50 | val layerWriter = HadoopLayerWriter(catalogPathHdfs, attributeStore)
51 |
52 | val cellSize = CellSize(100, 100) // TODO cell size should be a parameter
53 | val myLayout = LayoutDefinition(GridExtent(multipolygons_extent, cellSize), Utils.TILE_SIZE, Utils.TILE_SIZE)
54 | val cellType = IntConstantNoDataCellType
55 | val crs = WebMercator
56 | val bounds = Bounds[SpatialKey](
57 | SpatialKey(0, 0),
58 | SpatialKey(myLayout.layoutCols, myLayout.layoutRows)
59 | )
60 |
61 | val myTileLayerMetadata = TileLayerMetadata(cellType, myLayout,
62 | multipolygons_extent,
63 | crs,
64 | bounds)
65 |
66 | val zoom = 15 // TODO how to automatically/manually select the right zoom level
67 | // val srcLayerId = zoomsOfLayer.sortBy(_.zoom).last
68 |
69 | val multipolygonsRdd = sc.parallelize(multipolygons)
70 | val rasterizedRDD: RDD[(SpatialKey, Tile)] with Metadata[LayoutDefinition] = RasterizeFeaturesRDD
71 | .fromFeature(
72 | multipolygonsRdd,
73 | IntConstantNoDataCellType,
74 | myLayout,
75 | Options.DEFAULT)
76 |
77 | val rasterizedRDDwithContext: RDD[(SpatialKey, Tile)] with Metadata[TileLayerMetadata[SpatialKey]] =
78 | TileLayerRDD(rasterizedRDD, myTileLayerMetadata)
79 |
80 | val dstLayerId = LayerId(layerName, zoom)
81 | logger debug s"The following layerId will be used for writing rastertized shapefile: $dstLayerId"
82 |
83 | biggis.landuse.api.deleteLayerFromCatalog(dstLayerId)
84 | biggis.landuse.api.writeRddToLayer(rasterizedRDDwithContext, dstLayerId)
85 |
86 | logger info "shapefile rasterization finished"
87 | }
88 | }
89 |
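The `LayoutDefinition` above derives its tile grid from the shapefile extent, the chosen `CellSize(100, 100)` and the tile size: roughly, cells per axis come from extent divided by cell size, and tiles per axis from cells divided by tile size. A back-of-the-envelope sketch of that derivation with a hypothetical extent; GeoTrellis's own computation handles edge cases this ignores:

``` scala
// Rough tile-grid arithmetic behind LayoutDefinition(GridExtent(extent, cellSize), tileSize).
object TileGridSketch extends App {

  val (xmin, ymin, xmax, ymax) = (0.0, 0.0, 100000.0, 50000.0) // hypothetical extent in metres
  val cellSize = 100.0 // metres per pixel, as in the rasterizer above
  val tileSize = 256   // pixels per tile edge

  val cols = math.ceil((xmax - xmin) / cellSize).toInt
  val rows = math.ceil((ymax - ymin) / cellSize).toInt
  val layoutCols = math.ceil(cols.toDouble / tileSize).toInt
  val layoutRows = math.ceil(rows.toDouble / tileSize).toInt

  println(s"raster: ${cols}x${rows} cells, layout: ${layoutCols}x${layoutRows} tiles")
}
```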
--------------------------------------------------------------------------------
/src/main/scala/biggis/landuse/spark/examples/SpatialGetisOrd.scala:
--------------------------------------------------------------------------------
1 | package biggis.landuse.spark.examples
2 |
3 | import biggis.landuse.api.SpatialRDD
4 | import biggis.landuse.api.catalogToStore
5 | import com.typesafe.scalalogging.LazyLogging
6 | import geotrellis.raster.Tile
7 | import geotrellis.raster.mapalgebra.focal.Kernel
8 | import geotrellis.raster.summary.Statistics
9 | import geotrellis.raster.withTileMethods
10 | import geotrellis.spark.LayerId
11 | import geotrellis.spark.Metadata
12 | import geotrellis.spark.SpatialKey
13 | import geotrellis.spark.TileLayerMetadata
14 | import geotrellis.spark.io.SpatialKeyFormat
15 | import geotrellis.spark.io.hadoop.HadoopLayerReader
16 | import geotrellis.spark.io.spatialKeyAvroFormat
17 | import geotrellis.spark.io.tileLayerMetadataFormat
18 | import geotrellis.spark.io.tileUnionCodec
19 | import org.apache.spark.rdd.RDD
20 |
21 | /**
22 | * Created by Viliam Simko (viliam.simko@gmail.com)
23 | */
24 | object SpatialGetisOrd extends App with LazyLogging {
25 |
26 | val layerName = "morning2"
27 | val circleKernelRadius = 7
28 |
29 | implicit val catalogPath = "target/geotrellis-catalog"
30 | implicit val sc = Utils.initSparkAutoContext
31 |
32 | val maybeLayerId = biggis.landuse.api.getMaxZoomLevel(layerName)
33 |
34 | maybeLayerId match {
35 | case None => logger error s"Layer '$layerName' not found in the catalog '$catalogPath'"
36 | case Some(layerId) => {
37 | logger debug s"The following layerId will be used: $layerId"
38 |
39 | val layerReader = HadoopLayerReader(catalogPath)
40 |
41 | val queryResult: SpatialRDD =
42 | layerReader.read[SpatialKey, Tile, TileLayerMetadata[SpatialKey]](layerId)
43 |
44 | val weightMatrix = Kernel.circle(circleKernelRadius, queryResult.metadata.cellwidth, circleKernelRadius)
45 | logger info s"extent of weightMatrix is ${weightMatrix.extent}"
46 |
47 | val stats = queryResult.histogram.statistics
48 | require(stats.nonEmpty)
49 |
50 | val Statistics(_, globMean, _, _, globStdev, _, _) = stats.get
51 | val numPixels = queryResult.histogram.totalCount
52 | logger info s"GLOBAL MEAN: ${globMean}"
53 | logger info s"GLOBAL STDEV: ${globStdev}"
54 | logger info s"GLOBAL NUMPX: ${numPixels}"
55 |
56 | val outRdd = getisord(queryResult, weightMatrix, globMean, globStdev, numPixels )
57 |
58 | // this will be the new convoluted layer
59 | val convolvedLayerId = LayerId(layerId.name + "_gstar", layerId.zoom)
60 |
61 | biggis.landuse.api.deleteLayerFromCatalog(convolvedLayerId)
62 | biggis.landuse.api.writeRddToLayer(outRdd, convolvedLayerId)
63 | }
64 | }
65 |
66 | def getisord(rdd: SpatialRDD, weightMatrix: Kernel,
67 | globalMean:Double, globalStdev:Double, numPixels:Long): SpatialRDD = {
68 |
69 | val wcells = weightMatrix.tile.toArrayDouble
70 | val sumW = wcells.sum
71 | val sumW2 = wcells.map(x => x*x).sum
72 | val A = globalMean * sumW
73 | val B = globalStdev * Math.sqrt( (numPixels * sumW2 - sumW*sumW) / (numPixels - 1) )
74 |
75 | rdd.withContext {
76 | _.bufferTiles(weightMatrix.extent)
77 | .mapValues { tileWithCtx =>
78 | tileWithCtx.tile
79 | .focalSum(weightMatrix, Some(tileWithCtx.targetArea))
80 | .mapDouble { x => (x - A) / B }
81 | }
82 | }
83 | }
84 | }
85 |
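`getisord` above standardises a focal (weighted) sum: with W the sum of kernel weights and W2 the sum of squared weights, it computes A = mean * W and B = stdev * sqrt((n * W2 - W^2) / (n - 1)) and returns (focalSum - A) / B per pixel. A worked example of that arithmetic on a toy neighbourhood, outside Spark:

``` scala
// Getis-Ord G* for one pixel neighbourhood, mirroring A and B in getisord above.
object GetisOrdSketch extends App {

  val values = Array(1.0, 2.0, 2.0, 3.0, 10.0, 11.0, 12.0, 2.0, 1.0) // toy "raster"
  val n = values.length
  val mean = values.sum / n
  val stdev = math.sqrt(values.map(v => (v - mean) * (v - mean)).sum / n)

  val weights = Array(1.0, 1.0, 1.0)          // kernel weights (circle kernel stand-in)
  val neighborhood = Array(10.0, 11.0, 12.0)  // values under the kernel
  val sumW  = weights.sum
  val sumW2 = weights.map(w => w * w).sum
  val focalSum = neighborhood.zip(weights).map { case (v, w) => v * w }.sum

  val a = mean * sumW
  val b = stdev * math.sqrt((n * sumW2 - sumW * sumW) / (n - 1))
  println(s"G* = ${(focalSum - a) / b}") // ~2.8, a clear local hot spot
}
```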
--------------------------------------------------------------------------------
/src/main/scala/biggis/landuse/spark/examples/TestClassifierSVM.scala:
--------------------------------------------------------------------------------
1 | package biggis.landuse.spark.examples
2 |
3 |
4 | import com.typesafe.scalalogging.StrictLogging
5 | import geotrellis.util.annotations.experimental
6 | import org.apache.spark.{SparkContext, SparkException}
7 | import org.apache.spark.mllib.classification.{SVMModel, SVMMultiClassOVAModel, SVMWithSGD}
8 | import org.apache.spark.mllib.evaluation.{BinaryClassificationMetrics, MulticlassMetrics}
9 | import org.apache.spark.mllib.util.MLUtils
10 |
11 | //https://github.com/Bekbolatov/spark
12 | import org.apache.spark.mllib.classification.{SVMMultiClassOVAModel, SVMMultiClassOVAWithSGD}
13 |
14 | @experimental // ToDo: replace SVMMultiClassOVAModel (experimental implementation based on Spark 1.6.2) by OneVsRest with LinearSVC (needs Spark 2.2, not implemented in Spark 2.1)
15 | object TestClassifierSVM extends StrictLogging {
16 | /**
17 | * Run as: /path/to/sample_libsvm_data.txt /path/to/myModel
18 | * You can download the dataset from:
19 | * - https://raw.githubusercontent.com/apache/spark/master/data/mllib/sample_libsvm_data.txt
20 | */
21 | def main(args: Array[String]): Unit = {
22 | try {
23 | val Array(trainingName, modelPath) = args
24 | implicit val sc : SparkContext = Utils.initSparkAutoContext
25 | TestClassifierSVM(trainingName)(modelPath, sc)
26 | sc.stop()
27 | } catch {
28 | case _: MatchError => println("Run as: /path/to/sample_libsvm_data.txt /path/to/myModel")
29 |       case e: SparkException => logger error e.getMessage + ". Try to set JVM parameter: -Dspark.master=local[*]"
30 | }
31 | }
32 |
33 | def apply(trainingName: String)(implicit modelPath: String, sc: SparkContext): Unit = {
34 | logger info s"(SVM) Classifying layer $trainingName in $modelPath ..."
35 | //ClassifierSVM
36 |
37 | // Load training data in LIBSVM format.
38 | val data = MLUtils.loadLibSVMFile(sc, trainingName)
39 |
40 | // Split data into training (60%) and test (40%).
41 | val splits = data.randomSplit(Array(0.6, 0.4), seed = 11L)
42 | val training = splits(0).cache()
43 | val test = splits(1)
44 |
45 | // Run training algorithm to build the model
46 | val numIterations = 100
47 | //val model = SVMWithSGD.train(training, numIterations)
48 | val model = SVMMultiClassOVAWithSGD.train(training, numIterations)
49 |
50 | // Clear the default threshold.
51 | //model.clearThreshold()
52 |
53 | // Compute raw scores on the test set.
54 | val scoreAndLabels = test.map { point =>
55 | val score = model.predict(point.features)
56 | (score, point.label)
57 | }
58 |
59 | // Get evaluation metrics.
60 | //val metrics = new BinaryClassificationMetrics(scoreAndLabels)
61 | //val auROC = metrics.areaUnderROC()
62 |
63 | //logger info "Area under ROC = " + auROC
64 |
65 | val metrics = new MulticlassMetrics(scoreAndLabels)
66 | val accuracy = metrics.accuracy // MulticlassMetrics.precision is deprecated; accuracy is the overall metric here
67 |
68 | logger info "Accuracy = " + accuracy
69 |
70 | // If the model exists already, delete it before writing
71 | // http://stackoverflow.com/questions/27033823/how-to-overwrite-the-output-directory-in-spark
72 | val hdfs = org.apache.hadoop.fs.FileSystem.get(sc.hadoopConfiguration)
73 | if(hdfs.exists(new org.apache.hadoop.fs.Path(modelPath))){
74 | try { hdfs.delete(new org.apache.hadoop.fs.Path(modelPath), true)} catch { case _ : Throwable => }
75 | }
76 | // Save and load model
77 | model.save(sc, modelPath)
78 | //val sameModel = SVMModel.load(sc, modelPath)
79 | val sameModel = SVMMultiClassOVAModel.load(sc, modelPath)
80 |
81 | //ClassifierSVM
82 | logger info "done"
83 | }
84 | }
85 |
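A minimal invocation sketch (not part of the repository): the training file and model path below are hypothetical placeholders, and the Spark master is expected via `-Dspark.master=local[*]`.

```scala
import biggis.landuse.spark.examples.{TestClassifierSVM, Utils}
import org.apache.spark.SparkContext

object TestClassifierSVMDemo {
  def main(args: Array[String]): Unit = {
    implicit val sc: SparkContext = Utils.initSparkAutoContext
    // train, evaluate and persist the one-vs-all SVM model
    TestClassifierSVM("data/sample_libsvm_data.txt")("target/svmModel", sc)
    sc.stop()
  }
}
```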
--------------------------------------------------------------------------------
/src/main/scala/biggis/landuse/spark/examples/TilePixelingExample.scala:
--------------------------------------------------------------------------------
1 | package biggis.landuse.spark.examples
2 |
3 | import com.typesafe.scalalogging.LazyLogging
4 | import geotrellis.raster.DoubleArrayTile
5 | import geotrellis.raster.MultibandTile
6 | import geotrellis.raster.Tile
7 | import geotrellis.spark.io.hadoop.HadoopAttributeStore
8 | import geotrellis.spark.io.hadoop.HadoopLayerDeleter
9 | import geotrellis.spark.io.hadoop.HadoopLayerReader
10 | import geotrellis.spark.io.hadoop.HadoopLayerWriter
11 | import geotrellis.spark.io.index.ZCurveKeyIndexMethod
12 | import geotrellis.spark.io.index.ZCurveKeyIndexMethod.spatialKeyIndexMethod
13 | import geotrellis.spark.io.SpatialKeyFormat
14 | import geotrellis.spark.io.spatialKeyAvroFormat
15 | import geotrellis.spark.io.tileLayerMetadataFormat
16 | import geotrellis.spark.io.tileUnionCodec
17 | import geotrellis.spark.LayerId
18 | import geotrellis.spark.Metadata
19 | import geotrellis.spark.SpatialKey
20 | import geotrellis.spark.TileLayerMetadata
21 | import org.apache.hadoop.fs.Path
22 | import org.apache.spark.mllib.regression.LabeledPoint
23 | import org.apache.spark.rdd.RDD
24 | import org.apache.spark.rdd.RDD._
25 | import org.apache.spark.SparkContext
26 | import org.apache.spark.SparkException
27 |
28 | object TilePixelingExample extends LazyLogging {
29 |
30 | def main(args: Array[String]): Unit = {
31 | try {
32 | val Array(layerNameIn, layerNameOut, catalogPath) = args
33 | implicit val sc = Utils.initSparkAutoContext
34 | TilePixelingExample(layerNameIn, layerNameOut)(catalogPath, sc)
35 | sc.stop()
36 | } catch {
37 | case _: MatchError => println("Run as: layerNameIn layerNameOut /path/to/catalog")
38 | case e: SparkException => logger error e.getMessage + ". Try to set JVM parameter: -Dspark.master=local[*]"
39 | }
40 | }
41 |
42 | def apply(layerNameIn: String, layerNameOut: String)(implicit catalogPath: String, sc: SparkContext): Unit = {
43 | logger info s"Running pixeling of layer '$layerNameIn' in catalog '$catalogPath'"
44 |
45 | // Create the attributes store that will tell us information about our catalog.
46 | val catalogPathHdfs = new Path(catalogPath)
47 | val attributeStore = HadoopAttributeStore(catalogPathHdfs)
48 | val layerReader = HadoopLayerReader(attributeStore)
49 |
50 | val zoomsOfLayer = attributeStore.layerIds filter (_.name == layerNameIn)
51 | if (zoomsOfLayer.isEmpty) {
52 | logger info s"Layer '$layerNameIn' not found in the catalog '$catalogPath'"
53 | return
54 | }
55 |
56 | val srcLayerId = zoomsOfLayer.sortBy(_.zoom).last
57 | logger debug s"The following layerId will be used: $srcLayerId"
58 |
59 | //val queryResult: RDD[(SpatialKey, Tile)] with Metadata[TileLayerMetadata[SpatialKey]] = layerReader
60 | // .read[SpatialKey, Tile, TileLayerMetadata[SpatialKey]](srcLayerId)
61 |
62 | //For image layers we need multiband
63 | val queryResult: RDD[(SpatialKey, MultibandTile)] with Metadata[TileLayerMetadata[SpatialKey]] = layerReader
64 | .read[SpatialKey, MultibandTile, TileLayerMetadata[SpatialKey]](srcLayerId)
65 |
66 | // MultibandTile with Label => Pixel Samples with Label
67 | val samples: RDD[(SpatialKey, (Int, Int, LabeledPoint))] with Metadata[TileLayerMetadata[SpatialKey]] =
68 | queryResult.withContext { rdd =>
69 | rdd.flatMapValues(mbtile =>
70 | UtilsML.MultibandTile2LabeledPixelSamples(mbtile, classBandNo = 0)
71 | )
72 | }
73 |
74 | // ToDo: Spark Streaming write to Kafka queue
75 | // see: https://spark.apache.org/docs/1.6.2/streaming-kafka-integration.html
76 | /* // e.g.
77 | import org.apache.spark.streaming.kafka._
78 | val kafkaStream = KafkaUtils.createStream(streamingContext,
79 | [ZK quorum], [consumer group id], [per-topic number of Kafka partitions to consume])
80 | */
81 |
82 | /* // Use UtilsKafka & UtilsSVM to create Kafka Stream from samples
83 | val (brokers, topic) = ("localhost:9092","neu")
84 | UtilsKafka.initKafka(topic)(brokers)
85 | val messages : Array[String] = UtilsSVM.toKafkaString(samples, UtilsSVM.Delimiter(";")).toLocalIterator.toArray
86 | val nmsg : Int = messages.length
87 | logger debug s"messages $nmsg"
88 | for ( imsg <- 0 until nmsg ){
89 | val str = messages(imsg)
90 | UtilsKafka.send(str)
91 | }
92 | */
93 |
94 | // ToDo: Spark Streaming read from Kafka queue
95 |
96 | // Label (ClassId) of Pixel Samples => Tile
97 | val outTiles: RDD[(SpatialKey, Tile)] with Metadata[TileLayerMetadata[SpatialKey]] =
98 | samples.withContext { rdd =>
99 | rdd.groupByKey().map { case (spatialKey, listOfPixels) =>
100 | val arr = Array.ofDim[Double](256 * 256)
101 | listOfPixels.foreach { case (x, y, lp) =>
102 | arr(x + y * 256) = lp.label
103 | }
104 |
105 | (spatialKey, DoubleArrayTile(arr, 256, 256))
106 | }
107 | }
108 |
109 | // Create the writer that we will use to store the tiles in the local catalog.
110 | val writer = HadoopLayerWriter(catalogPathHdfs, attributeStore)
111 | val layerIdOut = LayerId(layerNameOut, srcLayerId.zoom)
112 |
113 | // If the layer exists already, delete it before writing
114 | if (attributeStore.layerExists(layerIdOut)) {
115 | logger debug s"Layer $layerIdOut already exists, deleting ..."
116 | HadoopLayerDeleter(attributeStore).delete(layerIdOut)
117 | }
118 |
119 | logger debug "Writing reprojected tiles using space filling curve"
120 | writer.write(layerIdOut, outTiles, ZCurveKeyIndexMethod)
121 |
122 | //sc.stop() //moved to main
123 | logger info "done."
124 | }
125 | }
126 |
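A hypothetical invocation sketch; the layer names and catalog path are placeholders (a labeled multiband layer such as the one produced by the workflow example below would be a typical input).

```scala
import biggis.landuse.spark.examples.{TilePixelingExample, Utils}
import org.apache.spark.SparkContext

object TilePixelingDemo {
  def main(args: Array[String]): Unit = {
    implicit val sc: SparkContext = Utils.initSparkAutoContext
    // turns each multiband tile into labeled pixel samples and re-assembles the labels as a new layer
    TilePixelingExample("layer_label_sat", "layer_result")("target/geotrellis-catalog", sc)
    sc.stop()
  }
}
```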
--------------------------------------------------------------------------------
/src/main/scala/biggis/landuse/spark/examples/TilePixelingToCSVExample.scala:
--------------------------------------------------------------------------------
1 | package biggis.landuse.spark.examples
2 |
3 | import com.typesafe.scalalogging._
4 | //import com.typesafe.scalalogging.slf4j._
5 | import geotrellis.raster.io.HistogramDoubleFormat
6 | import geotrellis.raster.mapalgebra.focal.Kernel
7 | import geotrellis.raster.{DoubleArrayTile, MultibandTile, Tile, withTileMethods}
8 | import geotrellis.spark.io.hadoop.{HadoopAttributeStore, HadoopLayerDeleter, HadoopLayerReader, HadoopLayerWriter}
9 | import geotrellis.spark.io.index.ZCurveKeyIndexMethod
10 | import geotrellis.spark.io.index.ZCurveKeyIndexMethod.spatialKeyIndexMethod
11 | import geotrellis.spark.io.{SpatialKeyFormat, spatialKeyAvroFormat, tileLayerMetadataFormat, tileUnionCodec}
12 | import geotrellis.spark.{LayerId, Metadata, SpatialKey, TileLayerMetadata}
13 | import org.apache.hadoop.fs.Path
14 | import org.apache.spark.{SparkContext, SparkException}
15 | import org.apache.spark.mllib.feature.Normalizer
16 | import org.apache.spark.mllib.linalg.Vectors
17 | import org.apache.spark.mllib.regression.LabeledPoint
18 | import org.apache.spark.rdd.RDD
19 | import org.apache.spark.rdd.RDD._
20 | import org.apache.spark.rdd._
21 | import org.apache.spark.streaming.{Seconds, StreamingContext}
22 |
23 | object TilePixelingToCSVExample extends LazyLogging {
24 |
25 | def main(args: Array[String]): Unit = {
26 | try {
27 | val Array(layerName, catalogPath, fileNameCSV) = args
28 | implicit val sc = Utils.initSparkAutoContext
29 | TilePixelingToCSVExample(layerName, fileNameCSV)(catalogPath, sc)
30 | sc.stop()
31 | } catch {
32 | case _: MatchError => println("Run as: layerName /path/to/catalog /path/to/output.csv")
33 | case e: SparkException => logger error e.getMessage + ". Try to set JVM parameter: -Dspark.master=local[*]"
34 | }
35 | }
36 |
37 | def apply(layerName: String, fileNameCSV: String)(implicit catalogPath: String, sc: SparkContext): Unit = {
38 | logger info s"Running convolution of layer '$layerName' in catalog '$catalogPath'"
39 |
40 | //implicit val sc = Utils.initSparkContext //moved to main
41 |
42 | // Create the attributes store that will tell us information about our catalog.
43 | val catalogPathHdfs = new Path(catalogPath)
44 | val attributeStore = HadoopAttributeStore(catalogPathHdfs)
45 | val layerReader = HadoopLayerReader(attributeStore)
46 |
47 | val zoomsOfLayer = attributeStore.layerIds filter (_.name == layerName)
48 | if (zoomsOfLayer.isEmpty) {
49 | logger info s"Layer '$layerName' not found in the catalog '$catalogPath'"
50 | return
51 | }
52 |
53 | val srcLayerId = zoomsOfLayer.sortBy(_.zoom).last
54 | logger debug s"The following layerId will be used: $srcLayerId"
55 |
56 | //val queryResult: RDD[(SpatialKey, Tile)] with Metadata[TileLayerMetadata[SpatialKey]] = layerReader
57 | // .read[SpatialKey, Tile, TileLayerMetadata[SpatialKey]](srcLayerId)
58 |
59 | //For image layers we need multiband
60 | val queryResult: RDD[(SpatialKey, MultibandTile)] with Metadata[TileLayerMetadata[SpatialKey]] = layerReader
61 | .read[SpatialKey, MultibandTile, TileLayerMetadata[SpatialKey]](srcLayerId)
62 |
63 | // MultibandTile with Label => Pixel Samples with Label
64 | val samples: RDD[(SpatialKey, (Int, Int, LabeledPoint))] with Metadata[TileLayerMetadata[SpatialKey]] =
65 | queryResult.withContext { rdd =>
66 | rdd.flatMapValues(mbtile =>
67 | UtilsML.MultibandTile2LabeledPixelSamples(mbtile, classBandNo = 0)
68 | )
69 | }
70 |
71 | // ToDo: Spark Streaming write to Kafka queue
72 | UtilsSVM.SaveAsCSVFileWithKey(samples, fileNameCSV)
73 |
74 | /*
75 | // ToDo: Spark Streaming read from Kafka queue
76 |
77 | // Label (ClassId) of Pixel Samples => Tile
78 | val outTiles: RDD[(SpatialKey, Tile)] with Metadata[TileLayerMetadata[SpatialKey]] =
79 | samples.withContext { rdd =>
80 | rdd.groupByKey().map { case (spatialKey, listOfPixels) =>
81 | val arr = Array.ofDim[Double](256 * 256)
82 | listOfPixels.foreach { case (x, y, lp) =>
83 | arr(x + y * 256) = lp.label
84 | }
85 |
86 | (spatialKey, DoubleArrayTile(arr, 256, 256))
87 | }
88 | }
89 |
90 | // Create the writer that we will use to store the tiles in the local catalog.
91 | val writer = HadoopLayerWriter(catalogPathHdfs, attributeStore)
92 | val layerIdOut = LayerId("TODO_outlayer", srcLayerId.zoom )// TODO:srcLayerId.zoom
93 |
94 | // If the layer exists already, delete it before writing
95 | if (attributeStore.layerExists(layerIdOut)) {
96 | logger debug s"Layer $layerIdOut already exists, deleting ..."
97 | HadoopLayerDeleter(attributeStore).delete(layerIdOut)
98 | }
99 |
100 | logger debug "Writing reprojected tiles using space filling curve"
101 | writer.write(layerIdOut, outTiles, ZCurveKeyIndexMethod)
102 | */
103 | //sc.stop() //moved to main
104 | logger info "done."
105 | }
106 | }
107 |
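An analogous sketch for the CSV variant; again the layer name, catalog and output path are placeholders.

```scala
import biggis.landuse.spark.examples.{TilePixelingToCSVExample, Utils}
import org.apache.spark.SparkContext

object TilePixelingToCSVDemo {
  def main(args: Array[String]): Unit = {
    implicit val sc: SparkContext = Utils.initSparkAutoContext
    // writes one labeled pixel sample per row, keyed by the tile's SpatialKey
    TilePixelingToCSVExample("layer_label_sat", "target/labeled_pixels.csv")("target/geotrellis-catalog", sc)
    sc.stop()
  }
}
```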
--------------------------------------------------------------------------------
/src/main/scala/biggis/landuse/spark/examples/UploadToHdfs.scala:
--------------------------------------------------------------------------------
1 | package biggis.landuse.spark.examples
2 |
3 | import java.net.UnknownHostException
4 |
5 | import com.typesafe.scalalogging.LazyLogging
6 | import org.apache.hadoop.conf.Configuration
7 | import org.apache.hadoop.fs.FileSystem
8 | import org.apache.hadoop.fs.Path
9 |
10 | object UploadToHdfs extends App with LazyLogging {
11 |
12 | try {
13 | val Array(localPath, hdfsPath, hdfsUrl) = args
14 | UploadToHdfs(localPath, hdfsPath, hdfsUrl)
15 | } catch {
16 | case _: MatchError => println("Run as: localPath hdfsPath hdfsUrl")
17 | case e: IllegalArgumentException => {
18 | e.getCause match {
19 | case _ : UnknownHostException => logger error s"Unknown HDFS host, try hdfs://localhost:8020"
20 | case _ => logger error e.getMessage
21 | }
22 | }
23 | }
24 |
25 | def apply(localPath: String, hdfsPath: String, hdfsUrl: String): Unit = {
26 | logger info s"Uploading local file $localPath to hdfs $hdfsPath ..."
27 |
28 | val conf = new Configuration() {
29 | set("fs.default.name", hdfsUrl)
30 | }
31 | val fileSystem = FileSystem.get(conf)
32 | fileSystem.copyFromLocalFile(new Path(localPath), new Path(hdfsPath))
33 |
34 | logger info "done"
35 | }
36 |
37 | }
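A usage sketch, callable from any `main` method or the REPL; the local file comes from the repository's `data` folder, while the HDFS target path and namenode URL are assumptions to be adjusted per cluster.

```scala
import biggis.landuse.spark.examples.UploadToHdfs

// copies data/rast.tif into HDFS; adjust the namenode URL to your cluster
UploadToHdfs("data/rast.tif", "/landuse-demo/rast.tif", "hdfs://localhost:8020")
```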
--------------------------------------------------------------------------------
/src/main/scala/biggis/landuse/spark/examples/Utils.scala:
--------------------------------------------------------------------------------
1 | package biggis.landuse.spark.examples
2 |
3 | import java.lang.management.ManagementFactory
4 |
5 | import com.typesafe.scalalogging.LazyLogging
6 | import geotrellis.raster.histogram.Histogram
7 | import geotrellis.raster.io.HistogramDoubleFormat
8 | import geotrellis.raster.resample.{Bilinear, CubicConvolution, NearestNeighbor, ResampleMethod}
9 | import geotrellis.spark.LayerId
10 | import geotrellis.spark.io.AttributeStore
11 | import org.apache.spark.SparkConf
12 | import org.apache.spark.SparkContext
13 |
14 | import scala.collection.JavaConverters._
15 |
16 | /**
17 | * Created by Viliam Simko on 2016-11-04
18 | */
19 | object Utils extends LazyLogging {
20 |
21 | val TILE_SIZE = 256
22 | val RDD_PARTITIONS = 256 //32
23 | val RESAMPLING_METHOD: ResampleMethod = NearestNeighbor //Bilinear //CubicConvolution
24 |
25 | @deprecated("replace by implicit def biggis.landuse.api.sessionToContext(spark: SparkSession): SparkContext = { spark.sparkContext }", "Oct 2018")
26 | def initSparkAutoContext: SparkContext = {
27 | logger info s"initSparkAutoContext "
28 | val args: List[String] = ManagementFactory.getRuntimeMXBean.getInputArguments.asScala.toList
29 |
30 | args.find(_ == "-Dspark.master=local[*]") match {
31 |
32 | case Some(_) =>
33 | logger info s"calling initSparkContext"
34 | initSparkContext
35 |
36 | case None => initSparkClusterContext
37 | }
38 | }
39 |
40 | @deprecated("do not use, only for dirty debugging", "Sep 2017")
41 | def initLocalSparkContext: SparkContext = {
42 | val sparkConf = new SparkConf()
43 | sparkConf.setAppName("Geotrellis Example")
44 | sparkConf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
45 | sparkConf.set("spark.kryo.registrator", "geotrellis.spark.io.kryo.KryoRegistrator")
46 | sparkConf.setMaster("local[*]")
47 |
48 | new SparkContext(sparkConf)
49 | }
50 |
51 | @deprecated("replace by implicit def biggis.landuse.api.sessionToContext(spark: SparkSession): SparkContext = { spark.sparkContext }", "Oct 2018")
52 | def initSparkContext: SparkContext = {
53 | val sparkConf = new SparkConf()
54 | sparkConf.setAppName("Geotrellis Example")
55 | sparkConf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
56 | sparkConf.set("spark.kryo.registrator", "geotrellis.spark.io.kryo.KryoRegistrator")
57 |
58 | // We also need to set the spark master.
59 | // instead of hardcoding it using sparkConf.setMaster("local[*]")
60 | // we can use the JVM parameter: -Dspark.master=local[*]
61 | // sparkConf.setMaster("local[*]")
62 |
63 | new SparkContext(sparkConf)
64 | }
65 |
66 | @deprecated("replace by implicit def biggis.landuse.api.sessionToContext(spark: SparkSession): SparkContext = { spark.sparkContext }", "Oct 2018")
67 | def initSparkClusterContext: SparkContext = {
68 |
69 | val sparkConf = new SparkConf()
70 | sparkConf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
71 |
72 | //TODO: get rid of the hardcoded JAR - replace by spark: SparkSession -> spark.sparkContext
73 | sparkConf.setJars(Seq("hdfs:///jobs/landuse-example/biggis-landuse-0.0.8-SNAPSHOT.jar"))
74 |
75 | // implicit def biggis.landuse.api.sessionToContext(spark: SparkSession): SparkContext = { spark.sparkContext }
76 |
77 | // We also need to set the spark master.
78 | // instead of hardcoding it using sparkConf.setMaster("local[*]")
79 | // we can use the JVM parameter: -Dspark.master=local[*]
80 | // sparkConf.setMaster("local[*]")
81 |
82 | new SparkContext(sparkConf)
83 | }
84 |
85 | def writeHistogram(attributeStore: AttributeStore, layerName: String, histogram: Histogram[Double]): Unit = {
86 | logger debug s"Writing histogram of layer '$layerName' to attribute store as 'histogramData' for zoom level 0"
87 | attributeStore.write(
88 | LayerId(layerName, 0), "histogramData", histogram)
89 | }
90 |
91 | }
92 |
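A small sketch of `writeHistogram`, to be placed inside a `main`; the catalog path and layer name are placeholders, and the tiny tile only serves to produce a `Histogram[Double]`.

```scala
import biggis.landuse.spark.examples.Utils
import geotrellis.raster.DoubleArrayTile
import geotrellis.raster.histogram.Histogram
import geotrellis.spark.io.hadoop.HadoopAttributeStore
import org.apache.hadoop.fs.Path
import org.apache.spark.SparkContext

implicit val sc: SparkContext = Utils.initSparkContext // run with -Dspark.master=local[*]
val attributeStore = HadoopAttributeStore(new Path("target/geotrellis-catalog"))
val histogram: Histogram[Double] = DoubleArrayTile(Array(1.0, 2.0, 2.0, 3.0), 2, 2).histogramDouble
Utils.writeHistogram(attributeStore, "my_layer", histogram) // stored as 'histogramData' at zoom 0
sc.stop()
```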
--------------------------------------------------------------------------------
/src/main/scala/biggis/landuse/spark/examples/UtilsKafka.scala:
--------------------------------------------------------------------------------
1 | package biggis.landuse.spark.examples
2 |
3 | import java.util.HashMap
4 |
5 | import com.typesafe.scalalogging.LazyLogging
6 | import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
7 |
8 | /**
9 | * Created by ak on 12.05.2017.
10 | */
11 | object UtilsKafka extends LazyLogging {
12 |
13 | case class Topic(topic: String)
14 |
15 | var producer: KafkaProducer[String, String] = null
16 | var topic: Topic = null
17 |
18 | def initKafka(topic: String)(implicit brokers: String = "localhost:9092"): Unit = {
19 | this.topic = Topic(topic)
20 | initKafkaProducer(brokers)
21 | }
22 |
23 | def initKafkaProducer(brokers: String = "localhost:9092"): Unit = {
24 | val props = new HashMap[String, Object]()
25 | props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokers)
26 | props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")
27 | props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")
28 | producer = new KafkaProducer[String, String](props)
29 | }
30 |
31 | def send(str: String)(implicit topic: Topic = this.topic): Unit = {
32 | val message = new ProducerRecord[String, String](topic.topic, null, str)
33 | producer.send(message)
34 | }
35 | }
36 |
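A usage sketch mirroring the commented-out block in TilePixelingExample; the broker address, topic name and message payload are placeholders, and a running Kafka broker is assumed.

```scala
import biggis.landuse.spark.examples.UtilsKafka

// create a producer for topic "neu" on a local broker, then push one message per pixel sample
UtilsKafka.initKafka("neu")("localhost:9092")
UtilsKafka.send("key;x;y;label;feature1;feature2")
```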
--------------------------------------------------------------------------------
/src/main/scala/biggis/landuse/spark/examples/UtilsML.scala:
--------------------------------------------------------------------------------
1 | package biggis.landuse.spark.examples
2 |
3 | import geotrellis.raster.MultibandTile
4 | import org.apache.hadoop.fs.{FileUtil, Path}
5 | import org.apache.spark.SparkContext
6 | import org.apache.spark.mllib.linalg.Vectors
7 | import org.apache.spark.mllib.regression.LabeledPoint
8 | import org.apache.spark.mllib.util.MLUtils
9 | import org.apache.spark.rdd.RDD
10 |
11 | /**
12 | * Created by ak on 01.12.2016.
13 | */
14 | object UtilsML extends UtilsML
15 |
16 | trait UtilsML {
17 | def MultibandTile2PixelSamples(tile: MultibandTile): Iterable[(Int, Int, List[Double])] = {
18 |
19 | val xy = for (x <- 0 until tile.cols;
20 | y <- 0 until tile.rows) yield (x, y)
21 |
22 | xy.map { case (x, y) =>
23 | val features = for (b <- 0 until tile.bandCount) yield tile.band(b).getDouble(x, y)
24 | (x, y, features.toList)
25 | }
26 | }
27 |
28 | def MultibandTile2LabeledPixelSamples(tile: MultibandTile,
29 | classBandNo: Int): Iterable[(Int, Int, LabeledPoint)] = {
30 |
31 | MultibandTile2PixelSamples(tile).map { case (x, y, features) =>
32 | val label = if (classBandNo >= 0) features(classBandNo) else Double.NaN
33 | val featuresWithoutLabel = features.take(classBandNo) ::: features.drop(classBandNo + 1)
34 | val featuresMllib = Vectors.dense(featuresWithoutLabel.toArray).compressed
35 | (x, y, LabeledPoint(label, featuresMllib))
36 | }
37 | //
38 | // val xy = for (x <- 0 until tile.cols;
39 | // y <- 0 until tile.rows) yield (x, y)
40 | //
41 | // val results = xy.map { case (x, y) =>
42 | // val label = tile.band(classBandNo).getDouble(x, y)
43 | // val features = for (b <- 0 until tile.bandCount if b != classBandNo) yield tile.band(b).getDouble(x, y)
44 | // (x, y, label, features)
45 | // }
46 | //
47 | // // adjusting the output for MLLIB
48 | // results.map { case (x, y, label, features) =>
49 | // val featuresMllib = Vectors.dense(features.toArray).compressed
50 | // (x, y, LabeledPoint(label, featuresMllib))
51 | // }
52 | }
53 |
54 | case class pathContents(dir: String, filename: String, suffix: String, dirbase: String, filebase: String, filetype: String, dir_hierarchy: Iterable[String])
55 |
56 | def ParsePath(path: String): pathContents = {
57 | val regexp = "((.*)[\\\\/])?(([^\\\\/]*?)(\\.([^.]*))?)$".r
58 | val regexp(dir, dirbase, filename, filebase, suffix, filetype) = path
59 | val hierarchy = dirbase.split("\\/").toIterable
60 | pathContents(dir, filename, suffix, dirbase, filebase, filetype, hierarchy)
61 | }
62 |
63 | def DeleteFile(fileName: String)(implicit sc: SparkContext): Unit = {
64 | try {
65 | val hdfs = org.apache.hadoop.fs.FileSystem.get(sc.hadoopConfiguration)
66 | if (hdfs.exists(new org.apache.hadoop.fs.Path(fileName))) {
67 | try {
68 | hdfs.delete(new org.apache.hadoop.fs.Path(fileName), true)
69 | } catch {
70 | case _: Throwable =>
71 | }
72 | }
73 | }
74 | catch {
75 | case _: Throwable =>
76 | }
77 | }
78 |
79 | def SaveAsLibSVMFile(data: RDD[LabeledPoint], trainingName: String)(implicit removeZeroLabel: Boolean = false): Unit = {
80 | try {
81 |
82 | implicit val sc: SparkContext = data.sparkContext
83 |
84 | val hdfs = org.apache.hadoop.fs.FileSystem.get(sc.hadoopConfiguration)
85 | val trainingPath = ParsePath(trainingName)
86 | val first_dir = trainingPath.dir_hierarchy.toArray.apply(1)
87 | val use_single_file_export = trainingPath.filetype == "txt"
88 | if (use_single_file_export) {
89 | val trainingNameTemp = trainingName + "_temp"
90 | DeleteFile(trainingNameTemp)
91 | if (removeZeroLabel) {
92 | val data_w_o_nodata = data.filter(_.label > 0)
93 | MLUtils.saveAsLibSVMFile(data_w_o_nodata, trainingNameTemp)
94 | } else
95 | MLUtils.saveAsLibSVMFile(data, trainingNameTemp)
96 | DeleteFile(trainingName)
97 | FileUtil.copyMerge(hdfs, new Path(trainingNameTemp), hdfs, new Path(trainingName), true, sc.hadoopConfiguration, null)
98 | DeleteFile(trainingNameTemp)
99 | }
100 | else {
101 | DeleteFile(trainingName)
102 | if (removeZeroLabel) {
103 | val data_w_o_nodata = data.filter(_.label > 0)
104 | MLUtils.saveAsLibSVMFile(data_w_o_nodata, trainingName)
105 | } else
106 | MLUtils.saveAsLibSVMFile(data, trainingName)
107 | }
108 | }
109 | catch {
110 | case _: Throwable =>
111 | }
112 | }
113 |
114 | }
115 |
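A self-contained sketch of `MultibandTile2LabeledPixelSamples` on a 2x2 two-band tile; the band layout (band 0 carries the class label, band 1 a single feature) is an assumption for illustration.

```scala
import biggis.landuse.spark.examples.UtilsML
import geotrellis.raster.{ArrayMultibandTile, DoubleArrayTile, Tile}

val labelBand: Tile   = DoubleArrayTile(Array(1.0, 2.0, 1.0, 2.0), 2, 2) // class ids
val featureBand: Tile = DoubleArrayTile(Array(0.1, 0.2, 0.3, 0.4), 2, 2) // one feature band
val mbtile = ArrayMultibandTile(Array(labelBand, featureBand))

// yields one (x, y, LabeledPoint(label, features without the label band)) triple per pixel
UtilsML.MultibandTile2LabeledPixelSamples(mbtile, classBandNo = 0)
  .foreach { case (x, y, lp) => println(s"($x,$y) label=${lp.label} features=${lp.features}") }
```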
--------------------------------------------------------------------------------
/src/main/scala/biggis/landuse/spark/examples/UtilsShape.scala:
--------------------------------------------------------------------------------
1 | package biggis.landuse.spark.examples
2 |
3 | import geotrellis.util.LazyLogging
4 | import geotrellis.shapefile.ShapeFileReader
5 | import geotrellis.shapefile.ShapeFileReader.SimpleFeatureWrapper
6 | import geotrellis.vector.{Extent, Feature, MultiPolygon}
7 | import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}
8 |
9 | import scala.collection.JavaConverters._
10 | import com.vividsolutions.jts.{geom => jts}
11 | import geotrellis.proj4.CRS
12 | import geotrellis.vector.io.json.JsonFeatureCollection
13 | import geotrellis.vector.io.json._
14 | import geotrellis.vector.io._
15 | import org.apache.hadoop.conf.Configuration
16 | import org.apache.hadoop.fs.{FSDataInputStream, Path}
17 | import org.apache.spark.SparkContext
18 | import spray.json._
19 | import spray.json.DefaultJsonProtocol
20 |
21 | /**
22 | * Created by ak on 22.06.2017.
23 | */
24 | object UtilsShape extends LazyLogging{
25 |
26 | def readShapefileMultiPolygonLongAttribute(shapefileName: String, attribName: String)(implicit targetcrs : Option[CRS] = None, sc: SparkContext): List[Feature[MultiPolygon,Long]] = {
27 | if (shapefileName.contains(".shp")) {
28 | ShapeFileReader.readSimpleFeatures(shapefileName)
29 | .filter { feat =>
30 | "MultiPolygon" != feat.getFeatureType.getGeometryDescriptor.getType.getName.toString
31 | }
32 | .map { feat =>
33 | Feature(MultiPolygon.jts2MultiPolygon(feat.geom[jts.MultiPolygon].get), feat.attribute(attribName))
34 | }
35 | .toList
36 | }
37 | else if(shapefileName.contains(".geojson")){
38 | readGeoJSONMultiPolygonLongAttribute(shapefileName, attribName)
39 | } else {
40 | List[Feature[MultiPolygon,Long]]()
41 | }
42 | }
43 | def readGeoJSONMultiPolygonLongAttribute(geojsonName: String, attribName: String)(implicit targetcrs : Option[CRS] = None, sc: SparkContext): List[Feature[MultiPolygon,Long]] = {
44 | if(geojsonName.contains(".geojson")){
45 | val collection = fromFileHdfs[WithCrs[JsonFeatureCollection]](geojsonName)//GeoJson.fromFile[WithCrs[JsonFeatureCollection]](geojsonName) //Source.fromFile(geojsonName, "UTF-8").mkString.parseGeoJson[WithCrs[JsonFeatureCollection]]
46 |
47 | case class Landcover(landcover: Long)
48 | object UtilsShapeJsonProtocol extends DefaultJsonProtocol {
49 | implicit val landcoverValue = jsonFormat(Landcover, attribName) //"bedeckung")
50 | }
51 | import UtilsShapeJsonProtocol._
52 | //val poly = collection.obj.getAllPolygons()
53 | val jsoncrs = collection.crs
54 | val crsoption = jsoncrs.toCRS
55 | val pattern = ("^" + "(.*)" + "\\(" + "(.*)" + "(EPSG\\:)" + "(?:\\:)" + "([0-9]*)" + "\\)" + "$").r
56 | val pattern(crstype, urn_ogc, epsgtype, epsgcode) = jsoncrs.toString
57 | val epsg = epsgtype + epsgcode //"EPSG:32632"
58 | val crsepsg = CRS.fromName(epsg)
59 | val crs : CRS = crsoption.getOrElse(CRS.fromName(epsg))
60 | logger info s"CRS: ${crs.toString()}"
61 |
62 | val collectionFeatures = collection.obj.getAllPolygonFeatures[Landcover]
63 | .map { feat =>
64 | val geom = MultiPolygon(Seq(feat.geom))
65 | val data : Long = feat.data.landcover
66 | Feature( if(targetcrs.nonEmpty) geom.reproject(crs,targetcrs.get) else geom, data) //Feature(geom, data)
67 | }
68 | collectionFeatures.toList
69 | }
70 | else {
71 | List[Feature[MultiPolygon,Long]]()
72 | }
73 | }
74 | def fromFileHdfs[T: JsonReader](path: String)(implicit sc: SparkContext) = {
75 | val src = openHdfs(new Path(path)) //val src = scala.io.Source.fromFile(path)
76 | val txt =
77 | try {
78 | scala.io.Source.fromInputStream(src).mkString
79 | } finally {
80 | src.close
81 | }
82 | GeoJson.parse[T](txt)
83 | }
84 | def openHdfs(path: Path)(implicit sc: SparkContext): FSDataInputStream = {
85 | val conf: Configuration = sc.hadoopConfiguration
86 | val fs = path.getFileSystem(conf)
87 | val valid = fs.exists(path)
88 | val isFile = fs.isFile(path)
89 | val isDir = fs.isDirectory(path)
90 | val src = if(isDir){
91 | // Workaround for HDFS ingest issue: a directory with the same name as the file is created around each ingested file
92 | val status = fs.listStatus(path) //val status = fs.getStatus(pathHdfs)
93 | val filelist = status.map( file => file.getPath ) //fs.listFiles(pathHdfs,false)
94 | val file = if(filelist.length == 1) Some(fs.open(filelist(0))) else None
95 | if(file.nonEmpty)
96 | file.get // Open file in hdfs (contained in dir with same name)
97 | else
98 | fs.open(path) // Unhandled - will cause exception
99 | } else{
100 | fs.open(path) // Open file in hdfs
101 | }
102 | src
103 | }
104 |
105 | def readShapefileMultiPolygonDoubleAttribute(shapefileName: String, attribName: String)(implicit targetcrs : Option[CRS] = None, sc: SparkContext): List[Feature[MultiPolygon,Double]] = {
106 | readShapefileMultiPolygonLongAttribute(shapefileName, attribName).map{ feature => Feature(feature.geom,feature.data.toDouble) }
107 | }
108 | def getExtent(mps : List[Feature[MultiPolygon,Any]]) : Extent = {
109 | mps
110 | .map{ feature => feature.geom.envelope }
111 | .reduce( (a,b) =>
112 | Extent(
113 | Math.min(a.xmin, b.xmin),
114 | Math.min(a.ymin, b.ymin),
115 | Math.max(a.xmax, b.xmax),
116 | Math.max(a.ymax, b.ymax)
117 | )
118 | ) }
119 | }
120 |
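A usage sketch, to be placed inside a `main`; the shapefile path and attribute name are hypothetical, and the implicit parameters (target CRS and SparkContext) are passed explicitly here.

```scala
import biggis.landuse.spark.examples.{Utils, UtilsShape}
import geotrellis.proj4.WebMercator
import org.apache.spark.SparkContext

implicit val sc: SparkContext = Utils.initSparkAutoContext
// read MultiPolygon features and their numeric class attribute, reprojected to WebMercator
val features = UtilsShape.readShapefileMultiPolygonLongAttribute(
  "data/landcover.shp", "landcover")(Some(WebMercator), sc)
println(s"read ${features.size} features")
sc.stop()
```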
--------------------------------------------------------------------------------
/src/main/scala/biggis/landuse/spark/examples/WordCount.scala:
--------------------------------------------------------------------------------
1 | package biggis.landuse.spark.examples
2 |
3 | import com.typesafe.scalalogging.LazyLogging
4 | import org.apache.spark.{SparkContext, SparkException}
5 |
6 | object WordCount extends LazyLogging {
7 | def main(args: Array[String]): Unit = {
8 | try {
9 | val Array(input, output) = args
10 | implicit val sc : SparkContext = Utils.initSparkAutoContext
11 | WordCount()(input, output, sc)
12 | sc.stop()
13 | }
14 | catch {
15 | case _: MatchError => println("Run as: /path/to/input /path/to/output")
16 | case e: SparkException => logger error e.getMessage
17 | }
18 | }
19 |
20 | def apply()(implicit input: String, output: String, sc: SparkContext): Unit = {
21 |
22 | val textFile = sc.textFile(input)
23 | val counts = textFile.flatMap(line => line.split(" "))
24 | .map(word => (word, 1))
25 | .reduceByKey(_ + _)
26 | counts.saveAsTextFile(output)
27 | }
28 | }
--------------------------------------------------------------------------------
/src/main/scala/biggis/landuse/spark/examples/WorkflowExample.scala:
--------------------------------------------------------------------------------
1 | package biggis.landuse.spark.examples
2 |
3 | import com.typesafe.scalalogging.StrictLogging
4 | import org.apache.spark.SparkContext
5 | import org.apache.spark.SparkException
6 |
7 | /**
8 | * Created by ak on 15.02.2017.
9 | */
10 | object WorkflowExample extends StrictLogging {
11 | def main(args: Array[String]): Unit = {
12 | try {
13 | val Array(projectPath, catalogPath) = args
14 | //val projectPath = "hdfs:///landuse-demo/landuse/"
15 | //implicit val catalogPath = "target/geotrellis-catalog/"
16 | implicit val sc = Utils.initSparkAutoContext
17 | WorkflowExample(projectPath)(catalogPath, sc)
18 | sc.stop()
19 | }
20 | catch {
21 | case _: MatchError => println("Run as: /path/to/project /path/to/catalog")
22 | case e: SparkException => logger error e.getMessage + ". Try to set JVM parameter: -Dspark.master=local[*] -Dspark.app.name=Geotrellis"
23 | }
24 | }
25 |
26 | def apply(projectPath: String)(implicit catalogPath: String, sc: SparkContext): Unit = {
27 | // ToDo: generally replace SpatialKey by SpaceTimeKey, handle timestamp metadata
28 |
29 | // Settings (for Debugging)
30 | val useDebugLayerExport = false //for debugging only
31 | val useLayerstackExport = false
32 | val useResultExport = false
33 | val useCleanup = true
34 | val useWebMercator = true // if false: skip WebMercator reprojection and keep the original resolution for csv export
35 | val useLeaflet = false
36 |
37 | // ToDo: configure local paths
38 | /*
39 | val projectdir = "data/workflowexample/"
40 |
41 | val inputdir = projectdir + "in/"
42 | val outputdir = projectdir + "out/"
43 |
44 | val input_label = inputdir + "labels.tif"
45 | //val input_dop = inputdir + "dop.tif"
46 | val input_sat = inputdir + "S2_2016-05-08.tif"
47 | //val input_sat = inputdir + "S2_2016-07-18.tif"
48 | //val input_sat = inputdir + "S2_2016-09-15.tif"
49 |
50 | val output_result = outputdir + "result/result.tif"
51 |
52 | val output_labeled_layerstack = outputdir + "layerstack/labeled_layerstack.tif"
53 |
54 | //test csv export
55 | val fileNameCSV = outputdir + "labeled_sat" + "_withkey" + ".csv"
56 | // */
57 |
58 | // ToDo: configure local paths (example bw)
59 | //*
60 | //val projectdir = "hdfs:///landuse-demo/landuse/"
61 | val projectdir = projectPath
62 | /*
63 | val tile_id = "3431_5378"
64 | val inputdir = projectdir + tile_id + "/"
65 | val outputdir = projectdir + "out/"
66 |
67 | val input_label = inputdir + "bedeckung_" + tile_id + "_epsg32632_2m.tif"
68 | val input_dop = inputdir + tile_id + "_epsg32632_2m.tif"
69 | //val input_sat = input_dop
70 | val input_sat = inputdir + "32_UMU_2016_5_5_0_S2_10m_2B_3G_4R_8NIR_" + tile_id + "_2m.tif"
71 | //val input_sat = inputdir + "32_UMU_2016_6_24_1_S2_10m_2B_3G_4R_8NIR_" + tile_id + "_2m.tif"
72 | //val input_sat = inputdir + "32_UMU_2016_8_13_0_S2_10m_2B_3G_4R_8NIR_" + tile_id + "_2m.tif"
73 | //val input_sat = inputdir + "32_UMU_2016_8_23_0_S2_10m_2B_3G_4R_8NIR_" + tile_id + "_2m.tif"
74 |
75 | val output_result = outputdir + "result.tif"
76 | val output_labeled_layerstack = outputdir + "labeled_layerstack.tif"
77 |
78 | //val fileNameCSV = catalogPath + "/" + labeled_layerstack + "_withkey" + ".csv"
79 | //val fileNameCSV = outputdir + tile_id + "_2m" + ".csv"
80 | //val fileNameCSV = outputdir + "dop_" + tile_id + "_2m" + ".csv"
81 | val fileNameCSV = outputdir + "32_UMU_2016_5_5_0_S2_10m_2B_3G_4R_8NIR_" + tile_id + "_2m" + ".csv"
82 | //val fileNameCSV = outputdir + "32_UMU_2016_6_24_1_S2_10m_2B_3G_4R_8NIR_" + tile_id + "_2m" + ".csv"
83 | //val fileNameCSV = outputdir + "32_UMU_2016_8_13_0_S2_10m_2B_3G_4R_8NIR_" + tile_id + "_2m" + ".csv"
84 | //val fileNameCSV = outputdir + "32_UMU_2016_8_23_0_S2_10m_2B_3G_4R_8NIR_" + tile_id + "_2m" + ".csv"
85 | // */
86 | //*
87 | //val tile_id = "3431_5378"
88 | val inputdir = projectdir + "/"
89 | val outputdir = projectdir + "out/"
90 |
91 | val input_label = inputdir + "label"
92 | val input_dop = inputdir + "dop"
93 | val input_sat = input_dop
94 | //val input_sat = inputdir + "sat_1"
95 | //val input_sat = inputdir + "sat_2"
96 | //val input_sat = inputdir + "sat_3"
97 | //val input_sat = inputdir + "sat_4"
98 |
99 | val output_result = outputdir + "result.tif"
100 | val output_labeled_layerstack = outputdir + "labeled_layerstack.tif"
101 |
102 | //val fileNameCSV = catalogPath + "/" + labeled_layerstack + "_withkey" + ".csv"
103 | //val fileNameCSV = outputdir + tile_id + "_2m" + ".csv"
104 | val fileNameCSV =
105 | if (useWebMercator) {
106 | outputdir + "dop__prio1_epsg3857_zoom17" + ".csv"
107 | //outputdir + "32_UMU_2016_5_5_0_S2_10m_2B_3G_4R_8NIR_epsg3857_zoom17" + ".csv"
108 | //outputdir + "32_UMU_2016_6_24_1_S2_10m_2B_3G_4R_8NIR_epsg3857_zoom17" + ".csv"
109 | //outputdir + "32_UMU_2016_8_13_0_S2_10m_2B_3G_4R_8NIR_epsg3857_zoom17" + ".csv"
110 | //outputdir + "32_UMU_2016_8_23_0_S2_10m_2B_3G_4R_8NIR_epsg3857_zoom17" + ".csv"
111 | }
112 | else {
113 | outputdir + "dop__prio1__2m" + ".csv"
114 | //outputdir + "32_UMU_2016_5_5_0_S2_10m_2B_3G_4R_8NIR_prio1_2m" + ".csv"
115 | //outputdir + "32_UMU_2016_6_24_1_S2_10m_2B_3G_4R_8NIR_prio1_2m" + ".csv"
116 | //voutputdir + "32_UMU_2016_8_13_0_S2_10m_2B_3G_4R_8NIR_prio1_2m" + ".csv"
117 | //outputdir + "32_UMU_2016_8_23_0_S2_10m_2B_3G_4R_8NIR_prio1_2m" + ".csv"
118 | }
119 | // */
120 |
121 | val (layer_label, layer_sat) =
122 | ("layer_label", "layer_sat")
123 | if (useWebMercator) {
124 | MultibandGeotiffTilingExample(input_label, layer_label)
125 | MultibandGeotiffTilingExample(input_sat, layer_sat)
126 | } else { //Debugging (w/o WebMercator, uses original crs)
127 | MultibandGeotiffToLayerNoReproj(input_label, layer_label)
128 | MultibandGeotiffToLayerNoReproj(input_sat, layer_sat)
129 | }
130 |
131 | val labeled_layerstack = {
132 | val layer_label_sat = "layer_label_sat"
133 | ManyLayersToMultibandLayer(layer_label, layer_sat, layer_label_sat)
134 | layer_label_sat
135 | }
136 |
137 | if (useDebugLayerExport) {
138 | val output_labeled_layerstack = outputdir + "debugging/" + "labeled_layerstack.tif"
139 | MultibandLayerToGeotiff(layer_label, output_labeled_layerstack + ".label.tif")
140 | MultibandLayerToGeotiff(layer_sat, output_labeled_layerstack + ".layer.tif")
141 | }
142 |
143 | if (useCleanup) {
144 | biggis.landuse.api.deleteLayerFromCatalog(layer_label)
145 | biggis.landuse.api.deleteLayerFromCatalog(layer_sat)
146 | }
147 |
148 | if (useLayerstackExport) {
149 | MultibandLayerToGeotiff(labeled_layerstack, output_labeled_layerstack)
150 | }
151 |
152 | val layer_result = "layer_result"
153 | //TilePixelingExample(labeled_layerstack, layer_result)
154 | // ToDo: Send Pixel Stream to Kafka
155 | //TilePixelingToKafkaExample(labeled_layerstack)
156 | //val fileNameCSV = catalogPath + "/" + labeled_layerstack + "_libsvm_csv"
157 | //val fileNameCSV = catalogPath + "/" + labeled_layerstack + "_withkey" + ".csv"
158 | TilePixelingToCSVExample(labeled_layerstack, fileNameCSV)
159 | // ToDo: Receive Result from Kafka
160 | //ReadTileFromKafkaExample(layer_result)
161 | //ReadTileFromCSVExample(layer_result, fileNameCSV)
162 | // ToDo: store Result RDD als Hadoop Layer layer_result
163 |
164 | // Export Result to GeoTiff
165 | if (useResultExport) {
166 | LayerToGeotiff(layer_result, output_result)
167 | }
168 |
169 | // Visualize Result
170 | if (useLeaflet && useWebMercator) {
171 | LayerToPyramid(catalogPath, layer_result)
172 | ServeLayerAsMap(catalogPath, layer_result)
173 | }
174 |
175 | }
176 |
177 | }
178 |
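A minimal invocation sketch with hypothetical project and catalog paths (the commented-out defaults in `main` use similar values); it expects `<projectPath>/label` and `<projectPath>/dop` as ingestable GeoTiff inputs.

```scala
import biggis.landuse.spark.examples.{Utils, WorkflowExample}
import org.apache.spark.SparkContext

object WorkflowDemo {
  def main(args: Array[String]): Unit = {
    implicit val sc: SparkContext = Utils.initSparkAutoContext
    WorkflowExample("hdfs:///landuse-demo/landuse")("target/geotrellis-catalog/", sc)
    sc.stop()
  }
}
```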
--------------------------------------------------------------------------------
/src/main/scala/biggis/landuse/spark/examples/ZoomResampleLayer.scala:
--------------------------------------------------------------------------------
1 | package biggis.landuse.spark.examples
2 |
3 | import biggis.landuse.api.SpatialMultibandRDD
4 | import com.typesafe.scalalogging.LazyLogging
5 | import geotrellis.raster.resample.NearestNeighbor
6 | import geotrellis.spark.pyramid.Pyramid
7 | import geotrellis.spark.tiling.ZoomedLayoutScheme
8 | import geotrellis.spark.{LayerId, MultibandTileLayerRDD, TileLayerRDD, resample, _}
9 | import org.apache.spark.SparkContext
10 | import org.apache.spark.SparkException
11 |
12 | object ZoomResampleLayer extends LazyLogging {
13 | /**
14 | * Run as: layerNameIn zoomIn layerNameOut zoomOut /path/to/catalog
15 | */
16 | def main(args: Array[String]): Unit = {
17 | try {
18 | val Array(layerNameIn, zoomIn, layerNameOut, zoomOut, catalogPath) = args
19 | implicit val sc : SparkContext = Utils.initSparkAutoContext
20 | ZoomResampleLayer(layerNameIn, zoomIn, layerNameOut, zoomOut)(catalogPath, sc)
21 | sc.stop()
22 | } catch {
23 | case _: MatchError => println("Run as: layerNameIn zoomIn layerNameOut zoomOut /path/to/catalog")
24 | case e: SparkException => logger error e.getMessage + ". Try to set JVM parameter: -Dspark.master=local[*]"
25 | }
26 | }
27 |
28 | def apply(layerNameIn: String, zoomIn: String, layerNameOut: String, zoomOut: String)(implicit catalogPath: String, sc: SparkContext) {
29 |
30 | logger info s"Resampling layer '$layerNameIn' with '$zoomOut' into '$layerNameOut' with '$zoomOut' in catalog '$catalogPath' ... "
31 |
32 | val inputRdd : SpatialMultibandRDD = biggis.landuse.api.readRddFromLayer((layerNameIn, zoomIn.toInt))
33 |
34 | val outputRdd : SpatialMultibandRDD = //inputRdd//.asInstanceOf[MultibandTileLayerRDD[SpatialKey]].resampleToZoom(zoomIn.toInt, zoomOut.toInt)
35 | if(zoomOut.toInt > zoomIn.toInt) resampleLayerToZoom(inputRdd, zoomIn.toInt, zoomOut.toInt) // UpSample (DownLevel) using ZoomResample
36 | else if(zoomOut.toInt < zoomIn.toInt) Pyramid.up( inputRdd, ZoomedLayoutScheme(inputRdd.metadata.crs), zoomOut.toInt)._2 // DownSample (UpLevel) using Pyramid.up
37 | else inputRdd
38 |
39 | biggis.landuse.api.writeRddToLayer(outputRdd, LayerId(layerNameOut, zoomOut.toInt))
40 |
41 | logger info "done."
42 | }
43 |
44 | implicit def resampleLayerToZoom(rdd: MultibandTileLayerRDD[SpatialKey], zoomLevelIn: Int, zoomLevelOut: Int)(implicit sc: SparkContext): MultibandTileLayerRDD[SpatialKey] = {
45 | //geotrellis.spark.resample.Implicits.withZoomResampleMultibandMethods(rdd).resampleToZoom(zoomLevelIn, zoomLevelOut)
46 | //geotrellis.spark.resample.Implicits.withLayerRDDZoomResampleMethods(rdd).resampleToZoom(zoomLevelIn, zoomLevelOut)
47 | //geotrellis.spark.resample.Implicits.withLayerRDDZoomResampleMethods(rdd).resampleToZoom(zoomLevelIn, zoomLevelOut, targetGridBounds = None, method = NearestNeighbor)
48 | geotrellis.spark.resample.Implicits.withLayerRDDZoomResampleMethods(rdd).resampleToZoom(zoomLevelIn, zoomLevelOut, targetGridBounds = None, method = Utils.RESAMPLING_METHOD)
49 | }
50 |
51 | }
--------------------------------------------------------------------------------
/src/main/scala/geotrellis/spark/rasterize/RasterizeFeaturesRDD.scala:
--------------------------------------------------------------------------------
1 | package geotrellis.spark.rasterize
2 |
3 | import geotrellis.raster._
4 | import geotrellis.raster.rasterize._
5 | import geotrellis.spark._
6 | import geotrellis.spark.tiling._
7 | import geotrellis.vector._
8 | import org.apache.spark.rdd._
9 | import org.apache.spark.{HashPartitioner, Partitioner}
10 |
11 | object RasterizeFeaturesRDD {
12 |
13 | /**
14 | * Rasterize an RDD of Geometry objects into a tiled raster RDD.
15 | * Cells not intersecting any geometry will be left as NODATA.
16 | * Values will be converted to the type matching the specified [[CellType]].
17 | *
18 | * @param features Cell values for cells intersecting a feature consisting of Feature(geometry,value)
19 | * @param layout Raster layer layout for the result of rasterization
20 | * @param cellType [[CellType]] for creating raster tiles
21 | * @param options Rasterizer options for cell intersection rules
22 | * @param partitioner Partitioner for result RDD
23 | */
24 | def fromFeature[G <: Geometry](
25 | features: RDD[Feature[G, Double]],
26 | cellType: CellType,
27 | layout: LayoutDefinition,
28 | options: Rasterizer.Options = Rasterizer.Options.DEFAULT,
29 | partitioner: Option[Partitioner] = None
30 | ): RDD[(SpatialKey, Tile)] with Metadata[LayoutDefinition] = {
31 | val layoutRasterExtent = RasterExtent(layout.extent, layout.layoutCols, layout.layoutRows)
32 | val layoutRasterizerOptions = Rasterizer.Options(includePartial=true, sampleType=PixelIsArea)
33 |
34 | /** Key geometry by spatial keys of intersecting tiles */
35 | def keyGeom(feature: Feature[Geometry, Double]): Iterator[(SpatialKey, (Feature[Geometry, Double], SpatialKey))] = {
36 | var keySet = Set.empty[SpatialKey]
37 | feature.geom.foreach(layoutRasterExtent, layoutRasterizerOptions){ (col, row) =>
38 | keySet = keySet + SpatialKey(col, row)
39 | }
40 | keySet.toIterator.map { key => (key, (feature, key)) }
41 | }
42 |
43 | // key the geometry to intersecting tiles so it can be rasterized in the map-side combine
44 | val keyed: RDD[(SpatialKey, (Feature[Geometry, Double], SpatialKey))] =
45 | features.flatMap { feature => keyGeom(feature) }
46 |
47 | val createTile = (tup: (Feature[Geometry, Double], SpatialKey)) => {
48 | val (feature, key) = tup
49 | val tile = ArrayTile.empty(cellType, layout.tileCols, layout.tileRows)
50 | val re = RasterExtent(layout.mapTransform(key), layout.tileCols, layout.tileRows)
51 | feature.geom.foreach(re, options){ tile.setDouble(_, _, feature.data) }
52 | tile: MutableArrayTile
53 | }
54 |
55 | val updateTile = (tile: MutableArrayTile, tup: (Feature[Geometry, Double], SpatialKey)) => {
56 | val (feature, key) = tup
57 | val re = RasterExtent(layout.mapTransform(key), layout.tileCols, layout.tileRows)
58 | feature.geom.foreach(re, options){ tile.setDouble(_, _, feature.data) }
59 | tile: MutableArrayTile
60 | }
61 |
62 | val mergeTiles = (t1: MutableArrayTile, t2: MutableArrayTile) => {
63 | t1.merge(t2).mutable
64 | }
65 |
66 | val tiles = keyed.combineByKeyWithClassTag[MutableArrayTile](
67 | createCombiner = createTile,
68 | mergeValue = updateTile,
69 | mergeCombiners = mergeTiles,
70 | partitioner.getOrElse(new HashPartitioner(features.getNumPartitions))
71 | )
72 |
73 | ContextRDD(tiles.asInstanceOf[RDD[(SpatialKey, Tile)]], layout)
74 | }
75 | }
76 |
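A sketch of `fromFeature` with a single polygon feature over a synthetic layout; the extent, tile layout and burn value are made up for illustration, and an implicit `SparkContext` named `sc` is assumed to be in scope.

```scala
import geotrellis.raster.{DoubleConstantNoDataCellType, TileLayout}
import geotrellis.spark.rasterize.RasterizeFeaturesRDD
import geotrellis.spark.tiling.LayoutDefinition
import geotrellis.vector.{Extent, Feature}

// a 4x4 grid of 256x256 tiles covering a synthetic extent
val layout = LayoutDefinition(Extent(0, 0, 1024, 1024), TileLayout(4, 4, 256, 256))

// one polygon feature carrying the cell value 7.0
val features = sc.parallelize(Seq(Feature(Extent(100, 100, 400, 400).toPolygon, 7.0)))

// cells covered by the polygon get 7.0, everything else stays NODATA
val rasterized = RasterizeFeaturesRDD.fromFeature(features, DoubleConstantNoDataCellType, layout)
```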
--------------------------------------------------------------------------------
/src/main/scala/geotrellis/spark/resample/Implicits.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2016 Azavea
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package geotrellis.spark.resample
18 |
19 | import geotrellis.raster._
20 | import geotrellis.raster.resample._
21 | import geotrellis.spark._
22 | import geotrellis.spark.tiling._
23 | import geotrellis.util._
24 | import geotrellis.vector._
25 |
26 | import org.apache.spark.rdd._
27 |
28 | object Implicits extends Implicits
29 |
30 | trait Implicits {
31 | @deprecated ("replaced by withLayerRDDZoomResampleMethods")
32 | implicit class withZoomResampleMultibandMethods[K: SpatialComponent](self: MultibandTileLayerRDD[K]) extends ZoomResampleMultibandMethods[K](self)
33 | implicit class withLayerRDDZoomResampleMethods[
34 | K: SpatialComponent,
35 | V <: CellGrid: (? => TileResampleMethods[V])
36 | ](self: RDD[(K, V)] with Metadata[TileLayerMetadata[K]]) extends LayerRDDZoomResampleMethods[K, V](self)
37 | }
38 |
--------------------------------------------------------------------------------
/src/main/scala/geotrellis/spark/resample/LayerRDDZoomResampleMethods.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2016 Azavea
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package geotrellis.spark.resample
18 |
19 | import geotrellis.raster._
20 | import geotrellis.raster.resample._
21 | import geotrellis.spark._
22 | import geotrellis.spark.tiling.{ZoomedLayoutScheme, LayoutDefinition}
23 | import geotrellis.util._
24 | import geotrellis.vector.Extent
25 |
26 | import org.apache.spark.rdd._
27 |
28 | abstract class LayerRDDZoomResampleMethods[
29 | K: SpatialComponent,
30 | V <: CellGrid: (? => TileResampleMethods[V])
31 | ](val self: RDD[(K, V)] with Metadata[TileLayerMetadata[K]]) extends MethodExtensions[RDD[(K, V)] with Metadata[TileLayerMetadata[K]]] {
32 | def resampleToZoom(
33 | sourceZoom: Int,
34 | targetZoom: Int
35 | ): RDD[(K, V)] with Metadata[TileLayerMetadata[K]] =
36 | resampleToZoom(sourceZoom, targetZoom, None, NearestNeighbor)
37 |
38 | def resampleToZoom(
39 | sourceZoom: Int,
40 | targetZoom: Int,
41 | method: ResampleMethod
42 | ): RDD[(K, V)] with Metadata[TileLayerMetadata[K]] =
43 | resampleToZoom(sourceZoom, targetZoom, None, method)
44 |
45 | def resampleToZoom(
46 | sourceZoom: Int,
47 | targetZoom: Int,
48 | targetGridBounds: GridBounds
49 | ): RDD[(K, V)] with Metadata[TileLayerMetadata[K]] =
50 | resampleToZoom(sourceZoom, targetZoom, Some(targetGridBounds), NearestNeighbor)
51 |
52 | def resampleToZoom(
53 | sourceZoom: Int,
54 | targetZoom: Int,
55 | targetGridBounds: GridBounds,
56 | method: ResampleMethod
57 | ): RDD[(K, V)] with Metadata[TileLayerMetadata[K]] =
58 | resampleToZoom(sourceZoom, targetZoom, Some(targetGridBounds), method)
59 |
60 | def resampleToZoom(
61 | sourceZoom: Int,
62 | targetZoom: Int,
63 | targetExtent: Extent
64 | ): RDD[(K, V)] with Metadata[TileLayerMetadata[K]] =
65 | resampleToZoom(sourceZoom, targetZoom, targetExtent, NearestNeighbor)
66 |
67 | def resampleToZoom(
68 | sourceZoom: Int,
69 | targetZoom: Int,
70 | targetExtent: Extent,
71 | method: ResampleMethod
72 | ): RDD[(K, V)] with Metadata[TileLayerMetadata[K]] = {
73 | val layout = ZoomedLayoutScheme.layoutForZoom(targetZoom, self.metadata.layout.extent, self.metadata.layout.tileLayout.tileCols)
74 | val targetGridBounds = layout.mapTransform(targetExtent)
75 | resampleToZoom(sourceZoom, targetZoom, Some(targetGridBounds), method)
76 | }
77 |
78 | def resampleToZoom(
79 | sourceZoom: Int,
80 | targetZoom: Int ,
81 | targetGridBounds: Option[GridBounds] = None,
82 | method: ResampleMethod = NearestNeighbor
83 | ): RDD[(K, V)] with Metadata[TileLayerMetadata[K]] =
84 | ZoomResampleTEST(self, sourceZoom, targetZoom, targetGridBounds, method)
85 | }
86 |
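A usage sketch of the extension method, mirroring how ZoomResampleLayer above calls it; the catalog path, layer name and zoom levels are placeholders.

```scala
import biggis.landuse.api.SpatialMultibandRDD
import biggis.landuse.spark.examples.Utils
import geotrellis.spark.resample.Implicits._
import org.apache.spark.SparkContext

implicit val catalogPath: String = "target/geotrellis-catalog"
implicit val sc: SparkContext = Utils.initSparkAutoContext

// read a multiband layer at zoom 12 and upsample it to zoom 14 (NearestNeighbor by default)
val layer: SpatialMultibandRDD = biggis.landuse.api.readRddFromLayer(("layer_sat", 12))
val finer = layer.resampleToZoom(12, 14)
```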
--------------------------------------------------------------------------------
/src/main/scala/geotrellis/spark/resample/ZoomResampleMultiband.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2016 Azavea
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package geotrellis.spark.resample
18 |
19 | import geotrellis.raster._
20 | import geotrellis.raster.resample._
21 | import geotrellis.spark._
22 | import geotrellis.spark.tiling._
23 | import geotrellis.util._
24 | import geotrellis.vector.Extent
25 |
26 | import org.apache.spark.rdd.RDD
27 |
28 | @deprecated ("replaced by ZoomResample")
29 | object ZoomResampleMultiband {
30 | private def gridBoundsAtZoom(sourceZoom: Int, spatialKey: SpatialKey, targetZoom: Int): GridBounds = {
31 | val SpatialKey(col, row) = spatialKey
32 | val zoomDiff = targetZoom - sourceZoom
33 | val factor = math.pow(2, zoomDiff).toInt
34 | val (minCol, minRow) = (col * factor, row * factor)
35 | val (maxCol, maxRow) = (((col + 1) * factor) - 1, ((row + 1) * factor) - 1)
36 | GridBounds(minCol, minRow, maxCol, maxRow)
37 | }
38 |
39 | private def boundsAtZoom[K: SpatialComponent](sourceZoom: Int, bounds: Bounds[K], targetZoom: Int): Bounds[K] =
40 | bounds match {
41 | case KeyBounds(minKey, maxKey) =>
42 | val min = {
43 | val gb = gridBoundsAtZoom(sourceZoom, minKey.getComponent[SpatialKey], targetZoom)
44 | minKey.setComponent(SpatialKey(gb.colMin, gb.rowMin))
45 | }
46 |
47 | val max = {
48 | val gb = gridBoundsAtZoom(sourceZoom, maxKey.getComponent[SpatialKey], targetZoom)
49 | maxKey.setComponent(SpatialKey(gb.colMax, gb.rowMax))
50 | }
51 | KeyBounds(min, max)
52 | case EmptyBounds =>
53 | EmptyBounds
54 | }
55 |
56 | /** Resamples a tile layer from a lower zoom level to a higher zoom level.
57 | * The levels are based on the ZoomedLayoutScheme.
58 | *
59 | * @param rdd The RDD to be resampled.
60 | * @param sourceZoom The zoom level of the rdd.
61 | * @param targetZoom The zoom level we want to resample to.
62 | * @param targetGridBounds Optionally, a grid bounds in the target zoom level we want to filter by.
63 | * @param method The resample method to use for resampling.
64 | */
65 | def apply[K: SpatialComponent](
66 | rdd: MultibandTileLayerRDD[K],
67 | sourceZoom: Int,
68 | targetZoom: Int,
69 | targetGridBounds: Option[GridBounds] = None,
70 | method: ResampleMethod = NearestNeighbor
71 | ): MultibandTileLayerRDD[K] = {
72 | require(sourceZoom < targetZoom, "This resample call requires that the target zoom level be greater than the source zoom level")
73 | val tileSize = rdd.metadata.layout.tileLayout.tileCols
74 | val targetLayoutDefinition =
75 | ZoomedLayoutScheme.layoutForZoom(targetZoom, rdd.metadata.layout.extent, tileSize)
76 | val targetMapTransform = targetLayoutDefinition.mapTransform
77 | val sourceMapTransform = rdd.metadata.mapTransform
78 | val (resampledRdd: RDD[(K, MultibandTile)], md) =
79 | targetGridBounds match {
80 | case Some(tgb) =>
81 | val resampleKeyBounds: KeyBounds[K] =
82 | boundsAtZoom(sourceZoom, rdd.metadata.bounds, targetZoom).get
83 |
84 | resampleKeyBounds.toGridBounds.intersection(tgb) match {
85 | case Some(resampleGridBounds) =>
86 | val resampled: RDD[(K, MultibandTile)] = rdd.flatMap { case (key, tile) =>
87 | val gbaz: Option[GridBounds] =
88 | gridBoundsAtZoom(sourceZoom, key.getComponent[SpatialKey], targetZoom)
89 | .intersection(resampleGridBounds)
90 |
91 | gbaz.map { gb =>
92 | gb.coordsIter
93 | .map { case (col, row) =>
94 | val sourceExtent = sourceMapTransform.keyToExtent(key.getComponent[SpatialKey])
95 | val targetExtent = targetMapTransform.keyToExtent(col, row)
96 | val resampled = tile.resample(
97 | sourceExtent,
98 | RasterExtent(targetExtent, tileSize, tileSize),
99 | method
100 | )
101 |
102 | (key.setComponent(SpatialKey(col, row)), resampled)
103 | }
104 | }.getOrElse(Iterator.empty)
105 | }
106 |
107 | val extent: Extent =
108 | targetMapTransform(resampleGridBounds).intersection(rdd.metadata.extent).get
109 |
110 | val md = rdd.metadata.copy(
111 | layout = targetLayoutDefinition,
112 | bounds = resampleKeyBounds.setSpatialBounds(resampleGridBounds),
113 | extent = extent
114 | )
115 |
116 | (resampled, md)
117 |
118 | case None =>
119 | val md = rdd.metadata.copy(
120 | layout = targetLayoutDefinition,
121 | bounds = EmptyBounds: Bounds[K]
122 | )
123 |
124 | (rdd.sparkContext.emptyRDD[(K, MultibandTile)], md)
125 |
126 | }
127 | case None =>
128 | val resampled: RDD[(K, MultibandTile)] =
129 | rdd
130 | .flatMap { case (key, tile) =>
131 | gridBoundsAtZoom(sourceZoom, key.getComponent[SpatialKey], targetZoom)
132 | .coordsIter
133 | .map { case (col, row) =>
134 | val sourceExtent = sourceMapTransform.keyToExtent(key.getComponent[SpatialKey])
135 | val targetExtent = targetMapTransform.keyToExtent(col, row)
136 | val resampled =
137 | tile.resample(sourceExtent, RasterExtent(targetExtent, tileSize, tileSize), method)
138 | (key.setComponent(SpatialKey(col, row)), resampled)
139 | }
140 | }
141 |
142 | val md = rdd.metadata.copy(
143 | layout = targetLayoutDefinition,
144 | bounds = boundsAtZoom(sourceZoom, rdd.metadata.bounds, targetZoom)
145 | )
146 |
147 | (resampled, md)
148 | }
149 |
150 |
151 | ContextRDD(resampledRdd, md)
152 | }
153 | }
154 |
--------------------------------------------------------------------------------
/src/main/scala/geotrellis/spark/resample/ZoomResampleMultibandMethods.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2016 Azavea
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package geotrellis.spark.resample
18 |
19 | import geotrellis.raster._
20 | import geotrellis.raster.resample._
21 | import geotrellis.spark._
22 | import geotrellis.spark.tiling.ZoomedLayoutScheme
23 | import geotrellis.util.MethodExtensions
24 | import geotrellis.vector.Extent
25 |
26 | @deprecated ("replaced by LayerRDDZoomResampleMethods")
27 | abstract class ZoomResampleMultibandMethods[K: SpatialComponent](val self: MultibandTileLayerRDD[K]) extends MethodExtensions[MultibandTileLayerRDD[K]] {
28 | def resampleToZoom(
29 | sourceZoom: Int,
30 | targetZoom: Int
31 | ): MultibandTileLayerRDD[K] =
32 | resampleToZoom(sourceZoom, targetZoom, None, NearestNeighbor)
33 |
34 | def resampleToZoom(
35 | sourceZoom: Int,
36 | targetZoom: Int,
37 | method: ResampleMethod
38 | ): MultibandTileLayerRDD[K] =
39 | resampleToZoom(sourceZoom, targetZoom, None, method)
40 |
41 | def resampleToZoom(
42 | sourceZoom: Int,
43 | targetZoom: Int,
44 | targetGridBounds: GridBounds
45 | ): MultibandTileLayerRDD[K] =
46 | resampleToZoom(sourceZoom, targetZoom, Some(targetGridBounds), NearestNeighbor)
47 |
48 | def resampleToZoom(
49 | sourceZoom: Int,
50 | targetZoom: Int,
51 | targetGridBounds: GridBounds,
52 | method: ResampleMethod
53 | ): MultibandTileLayerRDD[K] =
54 | resampleToZoom(sourceZoom, targetZoom, Some(targetGridBounds), method)
55 |
56 | def resampleToZoom(
57 | sourceZoom: Int,
58 | targetZoom: Int,
59 | targetExtent: Extent
60 | ): MultibandTileLayerRDD[K] =
61 | resampleToZoom(sourceZoom, targetZoom, targetExtent, NearestNeighbor)
62 |
63 | def resampleToZoom(
64 | sourceZoom: Int,
65 | targetZoom: Int,
66 | targetExtent: Extent,
67 | method: ResampleMethod
68 | ): MultibandTileLayerRDD[K] = {
69 | val layout = ZoomedLayoutScheme.layoutForZoom(targetZoom, self.metadata.layout.extent, self.metadata.layout.tileLayout.tileCols)
70 | val targetGridBounds = layout.mapTransform(targetExtent)
71 | resampleToZoom(sourceZoom, targetZoom, Some(targetGridBounds), method)
72 | }
73 |
74 | def resampleToZoom(
75 | sourceZoom: Int,
76 | targetZoom: Int ,
77 | targetGridBounds: Option[GridBounds] = None,
78 | method: ResampleMethod = NearestNeighbor
79 | ): MultibandTileLayerRDD[K] =
80 | ZoomResampleMultiband(self, sourceZoom, targetZoom, targetGridBounds, method)
81 | }
82 |
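
A minimal usage sketch for the overloads above (editorial note, not part of the repository): it assumes the implicit conversion defined in Implicits.scala brings `resampleToZoom` into scope for a `MultibandTileLayerRDD[SpatialKey]`, and that `layer` was ingested at zoom 10; `layer`, `roi` and the zoom levels are illustrative.

```scala
import geotrellis.raster.resample.Bilinear
import geotrellis.spark._
import geotrellis.spark.resample._
import geotrellis.vector.Extent

// Upsample a multiband layer from zoom 10 to zoom 12 using bilinear resampling.
def upsample(layer: MultibandTileLayerRDD[SpatialKey]): MultibandTileLayerRDD[SpatialKey] =
  layer.resampleToZoom(10, 12, Bilinear)

// Same, but only producing the target-zoom tiles that intersect a region of interest.
def upsampleRoi(layer: MultibandTileLayerRDD[SpatialKey], roi: Extent): MultibandTileLayerRDD[SpatialKey] =
  layer.resampleToZoom(10, 12, roi, Bilinear)
```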
--------------------------------------------------------------------------------
/src/main/scala/geotrellis/spark/resample/ZoomResampleTEST.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2016 Azavea
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package geotrellis.spark.resample
18 |
19 | import geotrellis.raster._
20 | import geotrellis.raster.resample._
21 | import geotrellis.spark._
22 | import geotrellis.spark.tiling._
23 | import geotrellis.util._
24 | import geotrellis.vector.Extent
25 |
26 | import org.apache.spark.rdd.RDD
27 |
28 | object ZoomResampleTEST {
29 | private def gridBoundsAtZoom(sourceZoom: Int, spatialKey: SpatialKey, targetZoom: Int): GridBounds = {
30 | val SpatialKey(col, row) = spatialKey
31 | val zoomDiff = targetZoom - sourceZoom
32 | val factor = math.pow(2, zoomDiff).toInt
33 | val (minCol, minRow) = (col * factor, row * factor)
34 | val (maxCol, maxRow) = (((col + 1) * factor) - 1, ((row + 1) * factor) - 1)
35 | GridBounds(minCol, minRow, maxCol, maxRow)
36 | }
37 |
38 | private def boundsAtZoom[K: SpatialComponent](sourceZoom: Int, bounds: Bounds[K], targetZoom: Int): Bounds[K] =
39 | bounds match {
40 | case KeyBounds(minKey, maxKey) =>
41 | val min = {
42 | val gb = gridBoundsAtZoom(sourceZoom, minKey.getComponent[SpatialKey], targetZoom)
43 | minKey.setComponent(SpatialKey(gb.colMin, gb.rowMin))
44 | }
45 |
46 | val max = {
47 | val gb = gridBoundsAtZoom(sourceZoom, maxKey.getComponent[SpatialKey], targetZoom)
48 | maxKey.setComponent(SpatialKey(gb.colMax, gb.rowMax))
49 | }
50 | KeyBounds(min, max)
51 | case EmptyBounds =>
52 | EmptyBounds
53 | }
54 |
55 | /** Resamples a tile layer from a lower zoom level to a higher zoom level.
56 | * The levels are based on the ZoomedLayoutScheme.
57 | *
58 | * @param rdd The RDD to be resampled.
59 | * @param sourceZoom The zoom level of the rdd.
60 | * @param targetZoom The zoom level we want to resample to.
61 | * @param targetGridBounds Optionally, a grid bounds in the target zoom level we want to filter by.
62 | * @param method The resample method to use for resampling.
63 | */
64 | def apply[
65 | K: SpatialComponent,
66 | V <: CellGrid: (? => TileResampleMethods[V])
67 | ](
68 | rdd: RDD[(K, V)] with Metadata[TileLayerMetadata[K]],
69 | sourceZoom: Int,
70 | targetZoom: Int,
71 | targetGridBounds: Option[GridBounds] = None,
72 | method: ResampleMethod = NearestNeighbor
73 | ): RDD[(K, V)] with Metadata[TileLayerMetadata[K]] = {
74 | require(sourceZoom < targetZoom, "This resample call requires that the target zoom level be greater than the source zoom level")
75 | val tileSize = rdd.metadata.layout.tileLayout.tileCols
76 | val targetLayoutDefinition =
77 | ZoomedLayoutScheme.layoutForZoom(targetZoom, rdd.metadata.layout.extent, tileSize)
78 | val targetMapTransform = targetLayoutDefinition.mapTransform
79 | val sourceMapTransform = rdd.metadata.mapTransform
80 | val (resampledRdd: RDD[(K, V)], md) =
81 | targetGridBounds match {
82 | case Some(tgb) =>
83 | val resampleKeyBounds: KeyBounds[K] =
84 | boundsAtZoom(sourceZoom, rdd.metadata.bounds, targetZoom).get
85 |
86 | resampleKeyBounds.toGridBounds.intersection(tgb) match {
87 | case Some(resampleGridBounds) => {
88 | val resampled: RDD[(K, V)] = rdd.flatMap { case (key, tile) =>
89 | val gbaz: Option[GridBounds] =
90 | gridBoundsAtZoom(sourceZoom, key.getComponent[SpatialKey], targetZoom)
91 | .intersection(resampleGridBounds)
92 |
93 | gbaz.map { gb =>
94 | gb.coordsIter
95 | .map { case (col, row) =>
96 | val sourceExtent = sourceMapTransform.keyToExtent(key.getComponent[SpatialKey])
97 | val targetExtent = targetMapTransform.keyToExtent(col, row)
98 | val resampled = tile.resample(
99 | sourceExtent,
100 | RasterExtent(targetExtent, tileSize, tileSize),
101 | method
102 | )
103 |
104 | (key.setComponent(SpatialKey(col, row)), resampled)
105 | }
106 | }.getOrElse(Iterator.empty)
107 | }
108 |
109 | val extent: Extent =
110 | targetMapTransform(resampleGridBounds).intersection(rdd.metadata.extent).get
111 |
112 | val md = rdd.metadata.copy(
113 | layout = targetLayoutDefinition,
114 | bounds = resampleKeyBounds.setSpatialBounds(resampleGridBounds),
115 | extent = extent
116 | )
117 |
118 | (resampled, md)
119 | }
120 | case None => {
121 | val md = rdd.metadata.copy(
122 | layout = targetLayoutDefinition,
123 | bounds = (EmptyBounds: Bounds[K])
124 | )
125 |
126 | (rdd.sparkContext.emptyRDD[(K, V)], md)
127 | }
128 | }
129 | case None => {
130 | val resampled: RDD[(K, V)] =
131 | rdd
132 | .flatMap { case (key, tile) =>
133 | gridBoundsAtZoom(sourceZoom, key.getComponent[SpatialKey], targetZoom)
134 | .coordsIter
135 | .map { case (col, row) =>
136 | val sourceExtent = sourceMapTransform.keyToExtent(key.getComponent[SpatialKey])
137 | val targetExtent = targetMapTransform.keyToExtent(col, row)
138 | val resampled =
139 | tile.resample(sourceExtent, RasterExtent(targetExtent, tileSize, tileSize), method)
140 | (key.setComponent(SpatialKey(col, row)), resampled)
141 | }
142 | }
143 |
144 | val md = rdd.metadata.copy(
145 | layout = targetLayoutDefinition,
146 | bounds = boundsAtZoom(sourceZoom, rdd.metadata.bounds, targetZoom)
147 | )
148 |
149 | (resampled, md)
150 | }
151 | }
152 |
153 | ContextRDD(resampledRdd, md)
154 | }
155 | }
156 |
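
The zoom-splitting arithmetic in `gridBoundsAtZoom` above is the heart of the resample: each source key expands into a square block of `2^(targetZoom - sourceZoom)` target keys per axis. A small worked example (values chosen for illustration only):

```scala
// Mirrors the arithmetic of the private gridBoundsAtZoom method above.
val sourceZoom = 10
val targetZoom = 12
val factor     = math.pow(2, targetZoom - sourceZoom).toInt            // 4
val (col, row) = (3, 5)                                                 // source SpatialKey at zoom 10
val (minCol, minRow) = (col * factor, row * factor)                     // (12, 20)
val (maxCol, maxRow) = ((col + 1) * factor - 1, (row + 1) * factor - 1) // (15, 23)
// => GridBounds(12, 20, 15, 23): a 4x4 block of zoom-12 keys covering the same area.
```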
--------------------------------------------------------------------------------
/src/main/scala/org/apache/spark/mllib/classification/SVMMultiClass.scala:
--------------------------------------------------------------------------------
1 | package org.apache.spark.mllib.classification
2 |
3 | import org.apache.spark.mllib.linalg.Vector
4 | import org.apache.spark.mllib.regression.LabeledPoint
5 | import org.apache.spark.rdd.RDD
6 | import org.apache.spark.SparkContext
7 | import java.io.{ObjectInputStream, ObjectOutputStream}
8 | import org.apache.spark.mllib.util.{DataValidators, Loader, Saveable}
9 |
10 | import org.apache.spark.mllib.classification.impl.GLMClassificationMultiClassOVAModel
11 |
12 | class SVMMultiClassOVAModel(classModels: Array[SVMModel], classIds : Array[Int] = Array.empty) extends ClassificationModel with Serializable with Saveable {
13 |
14 | private def zipIndices(classModels: Array[SVMModel])(classIds : Array[Int]): Array[(SVMModel, Int)] = {
15 | if(classIds.nonEmpty) classModels.zip(classIds) else classModels.zipWithIndex
16 | }
17 | val classModelsWithId : Array[(SVMModel, Int)] = zipIndices(classModels)(classIds)
18 |
19 | /**
20 | * Predict values for the given data set using the model trained.
21 | *
22 | * @param testData RDD representing data points to be predicted
23 | * @return an RDD[Double] where each entry contains the corresponding prediction
24 | */
25 | override def predict(testData: RDD[Vector]): RDD[Double] = {
26 | val localClassModelsWithId = classModelsWithId
27 | val bcClassModels = testData.context.broadcast(localClassModelsWithId)
28 | testData.mapPartitions { iter =>
29 | val w = bcClassModels.value
30 | iter.map(v => predictPoint(v, w))
31 | }
32 | }
33 |
34 | /**
35 | * Predict values for a single data point using the model trained.
36 | *
37 | * @param testData array representing a single data point
38 | * @return predicted category from the trained model
39 | */
40 | override def predict(testData: Vector): Double = predictPoint(testData, classModelsWithId)
41 |
42 | def predictPoint(testData: Vector, models: Array[(SVMModel, Int)]): Double =
43 | models
44 | .map { case (classModel, classNumber) => (classModel.predict(testData), classNumber)}
45 | .maxBy { case (score, classNumber) => score}
46 | ._2
47 |
48 | override protected def formatVersion: String = "1.0"
49 |
50 | override def save(sc: SparkContext, path: String): Unit = {
51 | GLMClassificationMultiClassOVAModel.SaveLoadV1_0.save(sc, path, this.getClass.getName, classModelsWithId)
52 | }
53 | }
54 |
55 | object SVMMultiClassOVAModel /*extends Loader[SVMMultiClassOVAModel]*/{
56 |
57 | def load(sc: SparkContext, path: String): SVMMultiClassOVAModel = {
58 | val (loadedClassName, version, metadata) = Loader.loadMetadata(sc, path)
59 | val classNameV1_0 = "org.apache.spark.mllib.classification.SVMMultiClassOVAModel"
60 | (loadedClassName, version) match {
61 | case (className, "1.0") if className == classNameV1_0 =>
62 | val data = GLMClassificationMultiClassOVAModel.SaveLoadV1_0.loadData(sc, path, this.getClass.getName)
63 | val dataModels = data.classModelsWithId.map( item => item._1 )
64 | val dataIndices = data.classModelsWithId.map( item => item._2 )
65 | val model = new SVMMultiClassOVAModel(dataModels, dataIndices)
66 | model
67 | case _ => throw new Exception(
68 | s"SVMMultiClassOVAModel.load did not recognize model with (className, format version):" +
69 | s"($loadedClassName, $version). Supported:\n" +
70 | s" ($classNameV1_0, 1.0)")
71 | }
72 | }
73 | }
74 |
75 |
76 | object SVMMultiClassOVAWithSGD {
77 |
78 | /**
79 | * Train a Multiclass SVM model given an RDD of (label, features) pairs,
80 | * using One-vs-Rest method - create one SVMModel per class with SVMWithSGD.
81 | *
82 | * @param input RDD of (label, array of features) pairs.
83 | * @param numIterations Number of iterations of gradient descent to run.
84 | * @param stepSize Step size to be used for each iteration of gradient descent.
85 | * @param regParam Regularization parameter.
86 | * @param miniBatchFraction Fraction of data to be used per iteration.
87 | */
88 | def train(
89 | input: RDD[LabeledPoint],
90 | numIterations: Int,
91 | stepSize: Double,
92 | regParam: Double,
93 | miniBatchFraction: Double): SVMMultiClassOVAModel = {
94 |
95 | val numClasses = input.map(_.label).max().toInt
96 |
97 | val classModels = (0 until numClasses).map { classId =>
98 |
99 | val inputProjection = input.map { case LabeledPoint(label, features) =>
100 | LabeledPoint(if (label == classId) 1.0 else 0.0, features)}.cache()
101 | val model = SVMWithSGD.train(inputProjection, numIterations, stepSize, regParam, miniBatchFraction)
102 | inputProjection.unpersist(false)
103 |
104 | model.clearThreshold()
105 | model
106 |
107 | }.toArray
108 |
109 | new SVMMultiClassOVAModel(classModels)
110 |
111 | }
112 |
113 | /**
114 | * Train a Multiclass SVM model given an RDD of (label, features) pairs,
115 | * using One-vs-Rest method - create one SVMModel per class with SVMWithSGD.
116 | *
117 | * @param input RDD of (label, array of features) pairs.
118 | * @param stepSize Step size to be used for each iteration of Gradient Descent.
119 | * @param regParam Regularization parameter.
120 | * @param numIterations Number of iterations of gradient descent to run.
121 |    * @return an SVMMultiClassOVAModel containing the trained per-class SVM models (weights and intercepts).
122 | */
123 | def train(input: RDD[LabeledPoint], numIterations: Int, stepSize: Double, regParam: Double): SVMMultiClassOVAModel =
124 | train(input, numIterations, stepSize, regParam, 1.0)
125 |
126 | /**
127 | * Train a Multiclass SVM model given an RDD of (label, features) pairs,
128 | * using One-vs-Rest method - create one SVMModel per class with SVMWithSGD.
129 | *
130 | * @param input RDD of (label, array of features) pairs.
131 | * @param numIterations Number of iterations of gradient descent to run.
132 |    * @return an SVMMultiClassOVAModel containing the trained per-class SVM models (weights and intercepts).
133 | */
134 | def train(input: RDD[LabeledPoint], numIterations: Int): SVMMultiClassOVAModel = train(input, numIterations, 1.0, 0.01, 1.0)
135 |
136 | }
137 |
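
A hedged end-to-end sketch of how `SVMMultiClassOVAWithSGD` and `SVMMultiClassOVAModel` fit together (editorial example, not taken from this repository; the SparkContext `sc`, the toy labels/features and the output path are assumptions). Labels are expected to be small non-negative class ids, matching the training loop above.

```scala
import org.apache.spark.SparkContext
import org.apache.spark.mllib.classification.{SVMMultiClassOVAModel, SVMMultiClassOVAWithSGD}
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.regression.LabeledPoint

def trainPredictSave(sc: SparkContext): Unit = {
  // Toy training set: the label is the class id, features are 2-dimensional.
  val training = sc.parallelize(Seq(
    LabeledPoint(0.0, Vectors.dense(0.0, 1.0)),
    LabeledPoint(1.0, Vectors.dense(1.0, 0.0)),
    LabeledPoint(2.0, Vectors.dense(1.0, 1.0))
  ))

  // One-vs-rest training with 100 SGD iterations per binary SVM.
  val model = SVMMultiClassOVAWithSGD.train(training, 100)

  // Batch prediction returns an RDD[Double] of class ids.
  model.predict(training.map(_.features)).collect().foreach(println)

  // Persist and reload (hypothetical path).
  model.save(sc, "target/svm-multiclass-model")
  val reloaded = SVMMultiClassOVAModel.load(sc, "target/svm-multiclass-model")
}
```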
--------------------------------------------------------------------------------
/src/main/scala/org/apache/spark/mllib/classification/impl/GLMClassificationMultiClassOVAModel.scala:
--------------------------------------------------------------------------------
1 | package org.apache.spark.mllib.classification.impl
2 |
3 | import org.apache.spark.SparkContext
4 | import org.apache.spark.mllib.classification.SVMModel
5 | import org.apache.spark.mllib.util.{DataValidators, Loader, Saveable}
6 | import org.apache.spark.sql.{Row, SQLContext}
7 | import org.json4s.JsonDSL._
8 | import org.json4s.jackson.JsonMethods._
9 |
10 | import scala.collection.mutable
11 |
12 | private[classification] object GLMClassificationMultiClassOVAModel {
13 | object SaveLoadV1_0 {
14 | def thisFormatVersion: String = "1.0"
15 | case class Data(classModelsWithId: Array[(SVMModel, Int)])
16 | case class MetaDataId(classId: Array[Int])
17 | def save( sc: SparkContext, path: String, modelClass: String, dataModels: Array[(SVMModel, Int)]): Unit = {
18 | val numFeatures: Int = dataModels.reduceLeft ((x, y) => if (x._1.weights.size > y._1.weights.size) x else y)._1.weights.size
19 | val numClasses: Int = dataModels.length
20 | val sqlContext = SQLContext.getOrCreate (sc)
21 | import sqlContext.implicits._
22 | val metadata = compact (render (
23 | ("class" -> modelClass) ~ ("version" -> thisFormatVersion) ~
24 | ("numFeatures" -> numFeatures) ~ ("numClasses" -> numClasses) ) )
25 | sc.parallelize (Seq(metadata), 1).saveAsTextFile (Loader.metadataPath(path) )
26 | val metaDataId = MetaDataId( dataModels.map( model => model._2 ) )
27 | sc.parallelize (Seq (metaDataId), 1).toDF ().write.parquet (Loader.dataPath (path))
28 | for( modelno <- dataModels.indices) {
29 | val model = dataModels(modelno)._1
30 | val modelid = dataModels(modelno)._2
31 | model.save(sc, path + "/class/" + modelid )
32 | }
33 | }
34 | def loadData(sc: SparkContext, path: String, modelClass: String): Data = {
35 | val dataPath = Loader.dataPath(path)
36 | val sqlContext = SQLContext.getOrCreate(sc)
37 | val dataRDD = sqlContext.read.parquet(dataPath)
38 | val dataArray = dataRDD.select("classId").take(1)
39 | assert(dataArray.length == 1, s"Unable to load $modelClass data from: $dataPath")
40 | val data = dataArray(0)
41 | assert(data.size == 1, s"Unable to load $modelClass data from: $dataPath")
42 | val classId = data match {case Row (classId: mutable.WrappedArray[Int]) => classId }
43 | val numClasses = classId.length
44 | val dataModels : Array[(SVMModel, Int)] = Array.ofDim[(SVMModel, Int)](numClasses)
45 | for( modelno <- classId.indices) {
46 | val modelid = classId(modelno)
47 | val model = SVMModel.load(sc, path + "/class/" + modelid)
48 |         dataModels(modelno) = (model, modelid)
49 | }
50 | Data(dataModels)
51 | }
52 | }
53 | }
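
For orientation, the on-disk layout written by `SaveLoadV1_0.save` above, as inferred from the code (the `metadata`/`data` subdirectories come from the standard mllib `Loader` helpers; treat the exact names as an assumption):

```scala
// <path>/metadata/          JSON: class, version, numFeatures, numClasses
// <path>/data/              Parquet: MetaDataId(classId: Array[Int])
// <path>/class/<classId>/   one SVMModel per class, written via SVMModel.save
// The per-class path is built the same way on save and on load:
def classModelPath(path: String, classId: Int): String = path + "/class/" + classId
```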
--------------------------------------------------------------------------------
/src/test/scala/samples/specs.scala:
--------------------------------------------------------------------------------
1 | package samples
2 |
3 | import org.junit.runner.RunWith
4 | import org.specs2.mutable._
5 | import org.specs2.runner._
6 |
7 |
8 | /**
9 | * Sample specification.
10 | *
11 | * This specification can be executed with: scala -cp ${package}.SpecsTest
12 | * Or using maven: mvn test
13 | *
14 | * For more information on how to write or run specifications, please visit:
15 | * http://etorreborre.github.com/specs2/guide/org.specs2.guide.Runners.html
16 | *
17 | */
18 | @RunWith(classOf[JUnitRunner])
19 | class MySpecTest extends Specification {
20 | "The 'Hello world' string" should {
21 | "contain 11 characters" in {
22 | "Hello world" must have size(11)
23 | }
24 | "start with 'Hello'" in {
25 | "Hello world" must startWith("Hello")
26 | }
27 | "end with 'world'" in {
28 | "Hello world" must endWith("world")
29 | }
30 | }
31 | }
32 |
--------------------------------------------------------------------------------
/static/GettingStarted.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
11 |
12 |
13 |
14 |
24 |
25 |
26 |
--------------------------------------------------------------------------------
/static/css/l.geosearch.css:
--------------------------------------------------------------------------------
1 | .displayNone {
2 | display: none;
3 | }
4 |
5 | .leaflet-control-geosearch {
6 | position: relative;
7 | }
8 |
9 | .leaflet-control-geosearch a {
10 | -webkit-border-radius: 4px;
11 | border-radius: 4px;
12 | border-bottom: none;
13 | }
14 |
15 | .leaflet-control-geosearch a.glass {
16 | background-image: url(../images/geosearch.png);
17 | background-size: 100% 100%;
18 | }
19 |
20 | .leaflet-control-geosearch a.spinner {
21 | background-image: url(../images/spinner.gif);
22 | background-position: 50% 50%;
23 | }
24 |
25 | .leaflet-control-geosearch a.alert {
26 | background-image: url(../images/alert.png);
27 | background-size: 64% 64%;
28 | }
29 |
30 | .leaflet-control-geosearch a:hover {
31 | border-bottom: none;
32 | }
33 |
34 | .leaflet-control-geosearch form {
35 | position: absolute;
36 | top: 0;
37 | left: 22px;
38 | box-shadow: 0 1px 7px rgba(0, 0, 0, 0.65);
39 | -webkit-border-radius: 4px;
40 | border-radius: 0px 4px 4px 0px;
41 | z-index: -1;
42 | background: #FFF;
43 | height: 26px;
44 | padding: 0 6px 0 6px;
45 | }
46 |
47 | .leaflet-control-geosearch form input {
48 | width: 200px;
49 | border: none;
50 | outline: none;
51 | margin: 0;
52 | padding: 0;
53 | font-size: 12px;
54 | margin-top: 5px;
55 | }
56 |
57 | .leaflet-control-geosearch .message {
58 | position: absolute;
59 | top: 26px;
60 | left: 0px;
61 | width: 226px;
62 | color: #FFF;
63 | background: rgb(40, 40, 40);
64 | padding: 4px 0 4px 8px;
65 | }
66 |
--------------------------------------------------------------------------------
/static/images/alert.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/biggis-project/biggis-landuse/d31ffc26eaab875618fee32bf220e7b870fd36be/static/images/alert.png
--------------------------------------------------------------------------------
/static/images/geosearch.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/biggis-project/biggis-landuse/d31ffc26eaab875618fee32bf220e7b870fd36be/static/images/geosearch.png
--------------------------------------------------------------------------------
/static/images/layers-2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/biggis-project/biggis-landuse/d31ffc26eaab875618fee32bf220e7b870fd36be/static/images/layers-2x.png
--------------------------------------------------------------------------------
/static/images/layers.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/biggis-project/biggis-landuse/d31ffc26eaab875618fee32bf220e7b870fd36be/static/images/layers.png
--------------------------------------------------------------------------------
/static/images/marker-icon-2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/biggis-project/biggis-landuse/d31ffc26eaab875618fee32bf220e7b870fd36be/static/images/marker-icon-2x.png
--------------------------------------------------------------------------------
/static/images/marker-icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/biggis-project/biggis-landuse/d31ffc26eaab875618fee32bf220e7b870fd36be/static/images/marker-icon.png
--------------------------------------------------------------------------------
/static/images/marker-shadow.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/biggis-project/biggis-landuse/d31ffc26eaab875618fee32bf220e7b870fd36be/static/images/marker-shadow.png
--------------------------------------------------------------------------------
/static/images/spinner.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/biggis-project/biggis-landuse/d31ffc26eaab875618fee32bf220e7b870fd36be/static/images/spinner.gif
--------------------------------------------------------------------------------
/static/images/transparent.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/biggis-project/biggis-landuse/d31ffc26eaab875618fee32bf220e7b870fd36be/static/images/transparent.png
--------------------------------------------------------------------------------
/static/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
11 |
12 |
13 |
14 |
23 |
24 |
25 |
--------------------------------------------------------------------------------
/static/js/l.control.geosearch.js:
--------------------------------------------------------------------------------
1 | /*
2 | * L.Control.GeoSearch - search for an address and zoom to its location
3 | * https://github.com/smeijer/leaflet.control.geosearch
4 | */
5 |
6 | L.GeoSearch = {};
7 | L.GeoSearch.Provider = {};
8 |
9 | L.GeoSearch.Result = function (x, y, label) {
10 | this.X = x;
11 | this.Y = y;
12 | this.Label = label;
13 | };
14 |
15 | L.Control.GeoSearch = L.Control.extend({
16 | options: {
17 | position: 'topleft'
18 | },
19 |
20 | initialize: function (options) {
21 | this._config = {};
22 | L.Util.extend(this.options, options);
23 | this.setConfig(options);
24 | },
25 |
26 | setConfig: function (options) {
27 | this._config = {
28 | 'provider': options.provider,
29 | 'searchLabel': options.searchLabel || 'Enter address',
30 | 'notFoundMessage' : options.notFoundMessage || 'Sorry, that address could not be found.',
31 | 'zoomLevel': options.zoomLevel || 17,
32 | 'showMarker': typeof options.showMarker !== 'undefined' ? options.showMarker : true
33 | };
34 | },
35 |
36 | resetLink: function(extraClass) {
37 | var link = this._container.querySelector('a');
38 | link.className = 'leaflet-bar-part leaflet-bar-part-single' + ' ' + extraClass;
39 | },
40 |
41 | onAdd: function (map) {
42 |
43 | // create the container
44 | this._container = L.DomUtil.create('div', 'leaflet-bar leaflet-control leaflet-control-geosearch');
45 |
46 | // create the link - this will contain one of the icons
47 | var link = L.DomUtil.create('a', '', this._container);
48 | link.href = '#';
49 | link.title = this._config.searchLabel;
50 |
51 | // set the link's icon to magnifying glass
52 | this.resetLink('glass');
53 |
54 | var displayNoneClass = 'displayNone';
55 |
56 | // create the form that will contain the input
57 | var form = L.DomUtil.create('form', displayNoneClass, this._container);
58 |
59 | // create the input, and set its placeholder ("Enter address") text
60 | var input = L.DomUtil.create('input', null, form);
61 | input.placeholder = 'Enter address';
62 |
63 | // create the error message div
64 | var message = L.DomUtil.create('div', 'leaflet-bar message displayNone', this._container);
65 |
66 | L.DomEvent
67 | .on(link, 'click', L.DomEvent.stopPropagation)
68 | .on(link, 'click', L.DomEvent.preventDefault)
69 | .on(link, 'click', function() {
70 |
71 | if (L.DomUtil.hasClass(form, displayNoneClass)) {
72 | L.DomUtil.removeClass(form, 'displayNone'); // unhide form
73 | input.focus();
74 | } else {
75 | L.DomUtil.addClass(form, 'displayNone'); // hide form
76 | }
77 |
78 | })
79 | .on(link, 'dblclick', L.DomEvent.stopPropagation);
80 |
81 | L.DomEvent
82 | .on(input, 'keypress', this.onKeyPress, this)
83 | .on(input, 'keyup', this.onKeyUp, this)
84 | .on(input, 'input', this.onInput, this);
85 |
86 | return this._container;
87 | },
88 |
89 | geosearch: function (qry) {
90 | try {
91 | var provider = this._config.provider;
92 |
93 | if(typeof provider.GetLocations == 'function') {
94 | var results = provider.GetLocations(qry, this._map, function(err, results) {
95 | if (err) {
96 | return this._printError(err);
97 | }
98 |
99 | this._processResults(results);
100 | }.bind(this));
101 | }
102 | else {
103 | var url = provider.GetServiceUrl(qry);
104 |
105 | $.getJSON(url, function (data) {
106 | try {
107 | var results = provider.ParseJSON(data);
108 | this._processResults(results);
109 | }
110 | catch (error) {
111 | this._printError(error);
112 | }
113 | }.bind(this));
114 | }
115 | }
116 | catch (error) {
117 | this._printError(error);
118 | }
119 | },
120 |
121 | _processResults: function(results) {
122 | if (results.length === 0)
123 | throw this._config.notFoundMessage;
124 |
125 | this.cancelSearch();
126 | this._showLocation(results[0]);
127 | },
128 |
129 | _showLocation: function (location) {
130 | if (this._config.showMarker) {
131 | if (typeof this._positionMarker === 'undefined')
132 | this._positionMarker = L.marker([location.Y, location.X]).addTo(this._map);
133 | else
134 | this._positionMarker.setLatLng([location.Y, location.X]);
135 | }
136 |
137 | // this._map.setView([location.Y, location.X], this._config.zoomLevel, false);
138 | },
139 |
140 | _isShowingError: false,
141 |
142 | _printError: function(error) {
143 | var message = this._container.querySelector('.message');
144 | message.innerHTML = error;
145 | L.DomUtil.removeClass(message, 'displayNone');
146 |
147 | // show alert icon
148 | this.resetLink('alert');
149 |
150 | this._isShowingError = true;
151 | },
152 |
153 | cancelSearch: function() {
154 | var form = this._container.querySelector('form');
155 | L.DomUtil.addClass(form, 'displayNone'); // hide form
156 |
157 | var input = form.querySelector('input');
158 | input.value = ''; // clear form
159 |
160 | // show glass icon
161 | this.resetLink('glass');
162 |
163 | var message = this._container.querySelector('.message');
164 | L.DomUtil.addClass(message, 'displayNone'); // hide message
165 | },
166 |
167 | startSearch: function() {
168 | // show spinner icon
169 | this.resetLink('spinner');
170 |
171 | var input = this._container.querySelector('input');
172 | this.geosearch(input.value);
173 | },
174 |
175 | onInput: function() {
176 | if (this._isShowingError) {
177 | // show glass icon
178 | this.resetLink('glass');
179 |
180 | var message = this._container.querySelector('.message');
181 | L.DomUtil.addClass(message, 'displayNone'); // hide message
182 |
183 | this._isShowingError = false;
184 | }
185 | },
186 |
187 | onKeyPress: function (e) {
188 | var enterKey = 13;
189 |
190 | if (e.keyCode === enterKey) {
191 | L.DomEvent.preventDefault(e); // prevent default form submission
192 |
193 | this.startSearch();
194 | }
195 | },
196 |
197 | onKeyUp: function (e) {
198 | var escapeKey = 27;
199 |
200 | if (e.keyCode === escapeKey) {
201 | this.cancelSearch();
202 | }
203 | }
204 | });
205 |
--------------------------------------------------------------------------------
/static/js/l.geosearch.provider.nominatim.js:
--------------------------------------------------------------------------------
1 | /**
2 |  * L.Control.GeoSearch - search for an address and zoom to its location
3 | * L.GeoSearch.Provider.OpenStreetMap uses openstreetmap geocoding service
4 | * https://github.com/smeijer/leaflet.control.geosearch
5 | */
6 |
7 | L.GeoSearch.Provider.Nominatim = L.Class.extend({
8 | options: {
9 |
10 | },
11 |
12 | initialize: function(options) {
13 | options = L.Util.setOptions(this, options);
14 | },
15 |
16 | GetLocations: function(query, map, callback) {
17 | callback = callback || function() {};
18 |
19 | var url = this.GetServiceUrl(query);
20 |
21 | $.getJSON(url, function (data) {
22 | var results;
23 |
24 | try {
25 | results = this.ParseJSON(data);
26 | } catch (err) {
27 | return callback(err);
28 | }
29 |
30 | if (data.length > 0) {
31 | var bbox = data[0].boundingbox,
32 | viewport = [
33 | [bbox[0], bbox[2]],
34 | [bbox[1], bbox[3]]
35 | ];
36 |
37 | map.fitBounds(viewport, {
38 | maxZoom: 15
39 | });
40 | }
41 |
42 | return callback(null, results);
43 | }.bind(this));
44 | },
45 |
46 | GetServiceUrl: function (qry) {
47 | var parameters = L.Util.extend({
48 | q: qry,
49 | format: 'json'
50 | }, this.options);
51 |
52 | return 'http://nominatim.openstreetmap.org/search'
53 | + L.Util.getParamString(parameters);
54 | },
55 |
56 | ParseJSON: function (data) {
57 | if (data.length == 0)
58 | return [];
59 |
60 | var results = [];
61 | for (var i = 0; i < data.length; i++)
62 | results.push(new L.GeoSearch.Result(
63 | data[i].lon,
64 | data[i].lat,
65 | data[i].display_name
66 | ));
67 |
68 | return results;
69 | }
70 | });
71 |
--------------------------------------------------------------------------------
/static/leaflet.css:
--------------------------------------------------------------------------------
1 | /* required styles */
2 |
3 | .leaflet-map-pane,
4 | .leaflet-tile,
5 | .leaflet-marker-icon,
6 | .leaflet-marker-shadow,
7 | .leaflet-tile-pane,
8 | .leaflet-tile-container,
9 | .leaflet-overlay-pane,
10 | .leaflet-shadow-pane,
11 | .leaflet-marker-pane,
12 | .leaflet-popup-pane,
13 | .leaflet-overlay-pane svg,
14 | .leaflet-zoom-box,
15 | .leaflet-image-layer,
16 | .leaflet-layer {
17 | position: absolute;
18 | left: 0;
19 | top: 0;
20 | }
21 | .leaflet-container {
22 | overflow: hidden;
23 | -ms-touch-action: none;
24 | touch-action: none;
25 | }
26 | .leaflet-tile,
27 | .leaflet-marker-icon,
28 | .leaflet-marker-shadow {
29 | -webkit-user-select: none;
30 | -moz-user-select: none;
31 | user-select: none;
32 | -webkit-user-drag: none;
33 | }
34 | .leaflet-marker-icon,
35 | .leaflet-marker-shadow {
36 | display: block;
37 | }
38 | /* map is broken in FF if you have max-width: 100% on tiles */
39 | .leaflet-container img {
40 | max-width: none !important;
41 | }
42 | /* stupid Android 2 doesn't understand "max-width: none" properly */
43 | .leaflet-container img.leaflet-image-layer {
44 | max-width: 15000px !important;
45 | }
46 | .leaflet-tile {
47 | filter: inherit;
48 | visibility: hidden;
49 | }
50 | .leaflet-tile-loaded {
51 | visibility: inherit;
52 | }
53 | .leaflet-zoom-box {
54 | width: 0;
55 | height: 0;
56 | }
57 | /* workaround for https://bugzilla.mozilla.org/show_bug.cgi?id=888319 */
58 | .leaflet-overlay-pane svg {
59 | -moz-user-select: none;
60 | }
61 |
62 | .leaflet-tile-pane { z-index: 2; }
63 | .leaflet-objects-pane { z-index: 3; }
64 | .leaflet-overlay-pane { z-index: 4; }
65 | .leaflet-shadow-pane { z-index: 5; }
66 | .leaflet-marker-pane { z-index: 6; }
67 | .leaflet-popup-pane { z-index: 7; }
68 |
69 | .leaflet-vml-shape {
70 | width: 1px;
71 | height: 1px;
72 | }
73 | .lvml {
74 | behavior: url(#default#VML);
75 | display: inline-block;
76 | position: absolute;
77 | }
78 |
79 |
80 | /* control positioning */
81 |
82 | .leaflet-control {
83 | position: relative;
84 | z-index: 7;
85 | pointer-events: auto;
86 | }
87 | .leaflet-top,
88 | .leaflet-bottom {
89 | position: absolute;
90 | z-index: 1000;
91 | pointer-events: none;
92 | }
93 | .leaflet-top {
94 | top: 0;
95 | }
96 | .leaflet-right {
97 | right: 0;
98 | }
99 | .leaflet-bottom {
100 | bottom: 0;
101 | }
102 | .leaflet-left {
103 | left: 0;
104 | }
105 | .leaflet-control {
106 | float: left;
107 | clear: both;
108 | }
109 | .leaflet-right .leaflet-control {
110 | float: right;
111 | }
112 | .leaflet-top .leaflet-control {
113 | margin-top: 10px;
114 | }
115 | .leaflet-bottom .leaflet-control {
116 | margin-bottom: 10px;
117 | }
118 | .leaflet-left .leaflet-control {
119 | margin-left: 10px;
120 | }
121 | .leaflet-right .leaflet-control {
122 | margin-right: 10px;
123 | }
124 |
125 |
126 | /* zoom and fade animations */
127 |
128 | .leaflet-fade-anim .leaflet-tile,
129 | .leaflet-fade-anim .leaflet-popup {
130 | opacity: 0;
131 | -webkit-transition: opacity 0.2s linear;
132 | -moz-transition: opacity 0.2s linear;
133 | -o-transition: opacity 0.2s linear;
134 | transition: opacity 0.2s linear;
135 | }
136 | .leaflet-fade-anim .leaflet-tile-loaded,
137 | .leaflet-fade-anim .leaflet-map-pane .leaflet-popup {
138 | opacity: 1;
139 | }
140 |
141 | .leaflet-zoom-anim .leaflet-zoom-animated {
142 | -webkit-transition: -webkit-transform 0.25s cubic-bezier(0,0,0.25,1);
143 | -moz-transition: -moz-transform 0.25s cubic-bezier(0,0,0.25,1);
144 | -o-transition: -o-transform 0.25s cubic-bezier(0,0,0.25,1);
145 | transition: transform 0.25s cubic-bezier(0,0,0.25,1);
146 | }
147 | .leaflet-zoom-anim .leaflet-tile,
148 | .leaflet-pan-anim .leaflet-tile,
149 | .leaflet-touching .leaflet-zoom-animated {
150 | -webkit-transition: none;
151 | -moz-transition: none;
152 | -o-transition: none;
153 | transition: none;
154 | }
155 |
156 | .leaflet-zoom-anim .leaflet-zoom-hide {
157 | visibility: hidden;
158 | }
159 |
160 |
161 | /* cursors */
162 |
163 | .leaflet-clickable {
164 | cursor: pointer;
165 | }
166 | .leaflet-container {
167 | cursor: -webkit-grab;
168 | cursor: -moz-grab;
169 | }
170 | .leaflet-popup-pane,
171 | .leaflet-control {
172 | cursor: auto;
173 | }
174 | .leaflet-dragging .leaflet-container,
175 | .leaflet-dragging .leaflet-clickable {
176 | cursor: move;
177 | cursor: -webkit-grabbing;
178 | cursor: -moz-grabbing;
179 | }
180 |
181 |
182 | /* visual tweaks */
183 |
184 | .leaflet-container {
185 | background: #ddd;
186 | outline: 0;
187 | }
188 | .leaflet-container a {
189 | color: #0078A8;
190 | }
191 | .leaflet-container a.leaflet-active {
192 | outline: 2px solid orange;
193 | }
194 | .leaflet-zoom-box {
195 | border: 2px dotted #38f;
196 | background: rgba(255,255,255,0.5);
197 | }
198 |
199 |
200 | /* general typography */
201 | .leaflet-container {
202 | font: 12px/1.5 "Helvetica Neue", Arial, Helvetica, sans-serif;
203 | }
204 |
205 |
206 | /* general toolbar styles */
207 |
208 | .leaflet-bar {
209 | box-shadow: 0 1px 5px rgba(0,0,0,0.65);
210 | border-radius: 4px;
211 | }
212 | .leaflet-bar a,
213 | .leaflet-bar a:hover {
214 | background-color: #fff;
215 | border-bottom: 1px solid #ccc;
216 | width: 26px;
217 | height: 26px;
218 | line-height: 26px;
219 | display: block;
220 | text-align: center;
221 | text-decoration: none;
222 | color: black;
223 | }
224 | .leaflet-bar a,
225 | .leaflet-control-layers-toggle {
226 | background-position: 50% 50%;
227 | background-repeat: no-repeat;
228 | display: block;
229 | }
230 | .leaflet-bar a:hover {
231 | background-color: #f4f4f4;
232 | }
233 | .leaflet-bar a:first-child {
234 | border-top-left-radius: 4px;
235 | border-top-right-radius: 4px;
236 | }
237 | .leaflet-bar a:last-child {
238 | border-bottom-left-radius: 4px;
239 | border-bottom-right-radius: 4px;
240 | border-bottom: none;
241 | }
242 | .leaflet-bar a.leaflet-disabled {
243 | cursor: default;
244 | background-color: #f4f4f4;
245 | color: #bbb;
246 | }
247 |
248 | .leaflet-touch .leaflet-bar a {
249 | width: 30px;
250 | height: 30px;
251 | line-height: 30px;
252 | }
253 |
254 |
255 | /* zoom control */
256 |
257 | .leaflet-control-zoom-in,
258 | .leaflet-control-zoom-out {
259 | font: bold 18px 'Lucida Console', Monaco, monospace;
260 | text-indent: 1px;
261 | }
262 | .leaflet-control-zoom-out {
263 | font-size: 20px;
264 | }
265 |
266 | .leaflet-touch .leaflet-control-zoom-in {
267 | font-size: 22px;
268 | }
269 | .leaflet-touch .leaflet-control-zoom-out {
270 | font-size: 24px;
271 | }
272 |
273 |
274 | /* layers control */
275 |
276 | .leaflet-control-layers {
277 | box-shadow: 0 1px 5px rgba(0,0,0,0.4);
278 | background: #fff;
279 | border-radius: 5px;
280 | }
281 | .leaflet-control-layers-toggle {
282 | background-image: url(images/layers.png);
283 | width: 36px;
284 | height: 36px;
285 | }
286 | .leaflet-retina .leaflet-control-layers-toggle {
287 | background-image: url(images/layers-2x.png);
288 | background-size: 26px 26px;
289 | }
290 | .leaflet-touch .leaflet-control-layers-toggle {
291 | width: 44px;
292 | height: 44px;
293 | }
294 | .leaflet-control-layers .leaflet-control-layers-list,
295 | .leaflet-control-layers-expanded .leaflet-control-layers-toggle {
296 | display: none;
297 | }
298 | .leaflet-control-layers-expanded .leaflet-control-layers-list {
299 | display: block;
300 | position: relative;
301 | }
302 | .leaflet-control-layers-expanded {
303 | padding: 6px 10px 6px 6px;
304 | color: #333;
305 | background: #fff;
306 | }
307 | .leaflet-control-layers-selector {
308 | margin-top: 2px;
309 | position: relative;
310 | top: 1px;
311 | }
312 | .leaflet-control-layers label {
313 | display: block;
314 | }
315 | .leaflet-control-layers-separator {
316 | height: 0;
317 | border-top: 1px solid #ddd;
318 | margin: 5px -10px 5px -6px;
319 | }
320 |
321 |
322 | /* attribution and scale controls */
323 |
324 | .leaflet-container .leaflet-control-attribution {
325 | background: #fff;
326 | background: rgba(255, 255, 255, 0.7);
327 | margin: 0;
328 | }
329 | .leaflet-control-attribution,
330 | .leaflet-control-scale-line {
331 | padding: 0 5px;
332 | color: #333;
333 | }
334 | .leaflet-control-attribution a {
335 | text-decoration: none;
336 | }
337 | .leaflet-control-attribution a:hover {
338 | text-decoration: underline;
339 | }
340 | .leaflet-container .leaflet-control-attribution,
341 | .leaflet-container .leaflet-control-scale {
342 | font-size: 11px;
343 | }
344 | .leaflet-left .leaflet-control-scale {
345 | margin-left: 5px;
346 | }
347 | .leaflet-bottom .leaflet-control-scale {
348 | margin-bottom: 5px;
349 | }
350 | .leaflet-control-scale-line {
351 | border: 2px solid #777;
352 | border-top: none;
353 | line-height: 1.1;
354 | padding: 2px 5px 1px;
355 | font-size: 11px;
356 | white-space: nowrap;
357 | overflow: hidden;
358 | -moz-box-sizing: content-box;
359 | box-sizing: content-box;
360 |
361 | background: #fff;
362 | background: rgba(255, 255, 255, 0.5);
363 | }
364 | .leaflet-control-scale-line:not(:first-child) {
365 | border-top: 2px solid #777;
366 | border-bottom: none;
367 | margin-top: -2px;
368 | }
369 | .leaflet-control-scale-line:not(:first-child):not(:last-child) {
370 | border-bottom: 2px solid #777;
371 | }
372 |
373 | .leaflet-touch .leaflet-control-attribution,
374 | .leaflet-touch .leaflet-control-layers,
375 | .leaflet-touch .leaflet-bar {
376 | box-shadow: none;
377 | }
378 | .leaflet-touch .leaflet-control-layers,
379 | .leaflet-touch .leaflet-bar {
380 | border: 2px solid rgba(0,0,0,0.2);
381 | background-clip: padding-box;
382 | }
383 |
384 |
385 | /* popup */
386 |
387 | .leaflet-popup {
388 | position: absolute;
389 | text-align: center;
390 | }
391 | .leaflet-popup-content-wrapper {
392 | padding: 1px;
393 | text-align: left;
394 | border-radius: 12px;
395 | }
396 | .leaflet-popup-content {
397 | margin: 13px 19px;
398 | line-height: 1.4;
399 | }
400 | .leaflet-popup-content p {
401 | margin: 18px 0;
402 | }
403 | .leaflet-popup-tip-container {
404 | margin: 0 auto;
405 | width: 40px;
406 | height: 20px;
407 | position: relative;
408 | overflow: hidden;
409 | }
410 | .leaflet-popup-tip {
411 | width: 17px;
412 | height: 17px;
413 | padding: 1px;
414 |
415 | margin: -10px auto 0;
416 |
417 | -webkit-transform: rotate(45deg);
418 | -moz-transform: rotate(45deg);
419 | -ms-transform: rotate(45deg);
420 | -o-transform: rotate(45deg);
421 | transform: rotate(45deg);
422 | }
423 | .leaflet-popup-content-wrapper,
424 | .leaflet-popup-tip {
425 | background: white;
426 |
427 | box-shadow: 0 3px 14px rgba(0,0,0,0.4);
428 | }
429 | .leaflet-container a.leaflet-popup-close-button {
430 | position: absolute;
431 | top: 0;
432 | right: 0;
433 | padding: 4px 4px 0 0;
434 | text-align: center;
435 | width: 18px;
436 | height: 14px;
437 | font: 16px/14px Tahoma, Verdana, sans-serif;
438 | color: #c3c3c3;
439 | text-decoration: none;
440 | font-weight: bold;
441 | background: transparent;
442 | }
443 | .leaflet-container a.leaflet-popup-close-button:hover {
444 | color: #999;
445 | }
446 | .leaflet-popup-scrolled {
447 | overflow: auto;
448 | border-bottom: 1px solid #ddd;
449 | border-top: 1px solid #ddd;
450 | }
451 |
452 | .leaflet-oldie .leaflet-popup-content-wrapper {
453 | zoom: 1;
454 | }
455 | .leaflet-oldie .leaflet-popup-tip {
456 | width: 24px;
457 | margin: 0 auto;
458 |
459 | -ms-filter: "progid:DXImageTransform.Microsoft.Matrix(M11=0.70710678, M12=0.70710678, M21=-0.70710678, M22=0.70710678)";
460 | filter: progid:DXImageTransform.Microsoft.Matrix(M11=0.70710678, M12=0.70710678, M21=-0.70710678, M22=0.70710678);
461 | }
462 | .leaflet-oldie .leaflet-popup-tip-container {
463 | margin-top: -1px;
464 | }
465 |
466 | .leaflet-oldie .leaflet-control-zoom,
467 | .leaflet-oldie .leaflet-control-layers,
468 | .leaflet-oldie .leaflet-popup-content-wrapper,
469 | .leaflet-oldie .leaflet-popup-tip {
470 | border: 1px solid #999;
471 | }
472 |
473 |
474 | /* div icon */
475 |
476 | .leaflet-div-icon {
477 | background: #fff;
478 | border: 1px solid #666;
479 | }
480 |
--------------------------------------------------------------------------------