├── .dockerignore ├── .editorconfig ├── .github └── workflows │ └── test-build-publish.yaml ├── .gitignore ├── .vscode └── settings.json ├── Dockerfile ├── LICENSE.txt ├── README.md ├── examples ├── example-bboxes.csv ├── san-francisco-downtown.osm.pbf └── test.geojson ├── planetutils.sh ├── planetutils ├── __init__.py ├── bbox.py ├── download.py ├── elevation_tile_download.py ├── elevation_tile_downloader.py ├── elevation_tile_merge.py ├── log.py ├── osm_extract_download.py ├── osm_extract_downloader.py ├── osm_planet_extract.py ├── osm_planet_get_timestamp.py ├── osm_planet_update.py ├── planet.py ├── tilepack_download.py ├── tilepack_downloader.py └── tilepack_list.py ├── run.sh ├── setup.py └── tests ├── test_bbox.py ├── test_commands.py ├── test_elevation_tile_downloader.py └── test_planet.py /.dockerignore: -------------------------------------------------------------------------------- 1 | *.osm 2 | .git 3 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | # http://editorconfig.org 2 | 3 | # based on https://github.com/django/django/blob/master/.editorconfig 4 | 5 | root = true 6 | 7 | [*] 8 | indent_style = space 9 | insert_final_newline = true 10 | trim_trailing_whitespace = true 11 | end_of_line = lf 12 | charset = utf-8 13 | 14 | # Docstrings and comments use max_line_length = 79 15 | [*.py] 16 | max_line_length = 119 17 | indent_size = 4 18 | 19 | [*.rb] 20 | indent_size = 2 21 | -------------------------------------------------------------------------------- /.github/workflows/test-build-publish.yaml: -------------------------------------------------------------------------------- 1 | name: "Test, Build, and Publish" 2 | on: 3 | release: 4 | types: 5 | - created 6 | env: 7 | REGISTRY: ghcr.io 8 | IMAGE_NAME: ${{ github.repository }} 9 | jobs: 10 | test-build: 11 | runs-on: ubuntu-latest 12 | permissions: 13 | contents: 
read 14 | packages: write 15 | steps: 16 | - uses: actions/checkout@v3 17 | 18 | - name: Log in to the Container registry 19 | uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9 20 | with: 21 | registry: ${{ env.REGISTRY }} 22 | username: ${{ github.actor }} 23 | password: ${{ secrets.GITHUB_TOKEN }} 24 | 25 | - name: Extract metadata (tags, labels) for Docker 26 | id: meta 27 | uses: docker/metadata-action@98669ae865ea3cffbcbaa878cf57c20bbf1c6c38 28 | with: 29 | images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} 30 | 31 | - name: Build and push Docker image 32 | uses: docker/build-push-action@ad44023a93711e3deb337508980b4b5e9bcdc5dc 33 | with: 34 | context: . 35 | push: true 36 | tags: ${{ steps.meta.outputs.tags }} 37 | labels: ${{ steps.meta.outputs.labels }} 38 | 39 | release-notes: 40 | needs: test-build 41 | if: ${{ github.event_name == 'release' }} 42 | runs-on: ubuntu-latest 43 | steps: 44 | - uses: actions/checkout@v2 45 | - uses: actions/setup-node@v3 46 | with: 47 | node-version: '16.x' 48 | - run: npm install github-release-notes -g 49 | - run: gren release --override 50 | env: 51 | GREN_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 52 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # https://github.com/github/gitignore/blob/master/Python.gitignore 2 | # Byte-compiled / optimized / DLL files 3 | __pycache__/ 4 | *.py[cod] 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | 27 | # PyInstaller 28 | # Usually these files are written by a python script from a template 29 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
30 | *.manifest 31 | *.spec 32 | 33 | # Installer logs 34 | pip-log.txt 35 | pip-delete-this-directory.txt 36 | 37 | # Unit test / coverage reports 38 | htmlcov/ 39 | .tox/ 40 | .coverage 41 | .coverage.* 42 | .cache 43 | nosetests.xml 44 | coverage.xml 45 | *,cover 46 | 47 | # Translations 48 | *.mo 49 | *.pot 50 | 51 | # Django stuff: 52 | *.log 53 | 54 | # Sphinx documentation 55 | docs/_build/ 56 | 57 | # PyBuilder 58 | target/ 59 | 60 | # Virtual Environment 61 | venv/ 62 | data 63 | 64 | # PyTest 65 | .pytest_cache/ 66 | virtualenv 67 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "python.pythonPath": "${workspaceFolder}/virtualenv/bin/python" 3 | } -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ubuntu:20.04 2 | LABEL maintainer="Ian Rees ,Drew Dara-Abrams " 3 | 4 | ENV DEBIAN_FRONTEND=noninteractive 5 | RUN apt-get update -y && apt-get install \ 6 | python3 \ 7 | python3-pip \ 8 | pypy-setuptools \ 9 | curl \ 10 | osmosis \ 11 | osmctools \ 12 | osmium-tool \ 13 | pyosmium \ 14 | libgdal-dev \ 15 | gdal-bin \ 16 | awscli \ 17 | software-properties-common \ 18 | -y 19 | 20 | # Ubuntu Java SSL issue - https://stackoverflow.com/questions/6784463/error-trustanchors-parameter-must-be-non-empty/25188331#25188331 21 | RUN /usr/bin/printf '\xfe\xed\xfe\xed\x00\x00\x00\x02\x00\x00\x00\x00\xe2\x68\x6e\x45\xfb\x43\xdf\xa4\xd9\x92\xdd\x41\xce\xb6\xb2\x1c\x63\x30\xd7\x92' > /etc/ssl/certs/java/cacerts 22 | RUN /var/lib/dpkg/info/ca-certificates-java.postinst configure 23 | 24 | WORKDIR /app 25 | COPY . /app 26 | RUN python3 setup.py test 27 | RUN pip3 install . 
&& pip3 install boto3 28 | 29 | COPY planetutils.sh /scripts/planetutils.sh 30 | 31 | WORKDIR /data 32 | 33 | CMD [ "/scripts/planetutils.sh" ] 34 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | Unless otherwise indicated: 2 | 3 | Copyright 2018 - , Interline Techologies LLC. 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 6 | 7 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 8 | 9 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![current release version](https://img.shields.io/github/release/interline-io/planetutils.svg)](https://github.com/interline-io/planetutils/releases) 2 | 3 | # Interline PlanetUtils 4 | 5 | 6 | 7 | 8 | 9 | - [Features](#features) 10 | - [Installation](#installation) 11 | * [Using Docker container](#using-docker-container) 12 | * [Using Homebrew on Mac OS](#using-homebrew-on-mac-os) 13 | * [Using Python package](#using-python-package) 14 | - [Command-line Usage](#command-line-usage) 15 | * [osm_planet_update](#osm_planet_update) 16 | * [osm_planet_extract](#osm_planet_extract) 17 | * [osm_extract_download](#osm_extract_download) 18 | * [osm_planet_get_timestamp](#osm_planet_get_timestamp) 19 | * [elevation_tile_download](#elevation_tile_download) 20 | * [elevation_tile_merge](#elevation_tile_merge) 21 | * [valhalla_tilepack_list](#valhalla_tilepack_list) 22 | * [valhalla_tilepack_download](#valhalla_tilepack_download) 23 | - [Specifying extract extents](#specifying-extract-extents) 24 | * [Bounding box file: CSV format](#bounding-box-file-csv-format) 25 | * [Bounding box/polygon file: GeoJSON format](#bounding-boxpolygon-file-geojson-format) 26 | - [Switching toolchains](#switching-toolchains) 27 | - [Support](#support) 28 | 29 | 30 | 31 | ## Features 32 | 33 | Python-based scripts and a Docker container to work with planet-scale geographic data. 
Using PlanetUtils, you can: 34 | 35 | - maintain your own copy of the [OpenStreetMap](http://www.openstreetmap.org) planet (by applying incremental updates) 36 | - cut your copy of the OSM planet into named bounding boxes 37 | - download [OSM Extracts from Interline](https://www.interline.io/osm/extracts/) for popular cities and regions 38 | - download [Mapzen Terrain Tiles from AWS](https://aws.amazon.com/public-datasets/terrain/) for the planet or your bounding boxes 39 | - merge and resample Terrain Tiles 40 | - download [Valhalla Tilepacks from Interline](https://www.interline.io/valhalla/tilepacks) for the planet (subscription required) 41 | 42 | PlanetUtils is packaged for use as a: 43 | 44 | - Docker container, for use on any operating system 45 | - Python package, for use on any operating system 46 | - Homebrew formula, for use on Mac OS 47 | 48 | PlanetUtils is a "high level" library that makes use of [Osmosis](https://wiki.openstreetmap.org/wiki/Osmosis), [OSM C tools](https://gitlab.com/osm-c-tools/osmctools/), and [Osmium](https://osmcode.org/osmium-tool/) among other great open-source components. 49 | 50 | ## Installation 51 | 52 | ### Using Docker container 53 | 54 | Make sure you have [Docker](https://www.docker.com/community-edition) installed. Then: 55 | 56 | ```sh 57 | docker pull ghcr.io/interline-io/planetutils:v0.4.14 58 | ``` 59 | 60 | Any of the example commands below can be executed with `docker run`. It may be helpful to mount a local directory inside the container for persistence and to access output files. 61 | 62 | - Example of using `docker run` with the `data` directory mounted as `/data`: 63 | 64 | ```sh 65 | docker run --rm -v ${PWD}/data:/data -t ghcr.io/interline-io/planetutils:v0.4.14 66 | ``` 67 | 68 | ### Using Homebrew on Mac OS 69 | 70 | Make sure you have [Homebrew](https://brew.sh/) installed. 
Then: 71 | 72 | ```sh 73 | brew install interline-io/planetutils/planetutils 74 | ``` 75 | 76 | ### Using Python package 77 | 78 | If you want to install and use the Python package directly, you'll need to provide: 79 | 80 | - Python 2.x or 3.x 81 | - Java and [Osmosis](https://wiki.openstreetmap.org/wiki/Osmosis) 82 | - [OSM C tools](https://gitlab.com/osm-c-tools/osmctools/) 83 | - [Osmium Tool](https://osmcode.org/osmium-tool/) 84 | - [PyOsmium](https://osmcode.org/pyosmium/) 85 | - [GDAL](https://www.gdal.org/) (both binaries and Python scripts) 86 | 87 | Then clone this repo, run the tests, and install the Python package: 88 | 89 | ```sh 90 | git clone https://github.com/interline-io/planetutils.git 91 | python ./setup.py test 92 | pip install . 93 | ``` 94 | 95 | ## Command-line Usage 96 | 97 | PlanetUtils supplies the following command-line utilities: 98 | 99 | ### osm_planet_update 100 | 101 | Update a local OSM planet. For example: 102 | 103 | ```sh 104 | osm_planet_update planet-recent.osm.pbf planet-with-updates.osm.pbf 105 | ``` 106 | 107 | If `planet-recent.osm.pbf` does not exist locally, the most recent planet file will be downloaded, before applying hourly updates to it. (Note: This download is nearly 40Gb.) By default, files are downloaded from planet.openstreetmap.org. Amazon Web Services also provides [OSM planets through its Public Datasets program](https://aws.amazon.com/public-datasets/osm/). To instead download the planet file from AWS: 108 | 109 | 1. Make sure you have your [AWS credentials configured locally](http://boto3.readthedocs.io/en/latest/guide/configuration.html). 110 | 2. Append the `--s3` flag. 111 | 112 | Note that an entire OSM planet may be upwards of 40Gb in size! In other words, you should have ~80Gb free disk space before running this command. 
113 | 114 | For complete help on command-line arguments: 115 | 116 | ```sh 117 | osm_planet_update -h 118 | ``` 119 | 120 | ### osm_planet_extract 121 | 122 | Cut up an OSM planet file into one or more extracts, defined by bounding boxes or polygons. Each extract is assigned a name. (This is like a mini version of Mapzen Metro Extracts!) 123 | 124 | To create a single extract: 125 | 126 | ```sh 127 | osm_planet_extract --outpath=data/osm_extracts --bbox=-122.737,37.449,-122.011,37.955 --name=san-francisco planet-latest.osm.pbf 128 | ``` 129 | 130 | To specify more than one bounding box of tiles to download, list the extents in a [CSV file or GeoJSON file](#bounding-box). For example: 131 | 132 | ```sh 133 | osm_planet_extract --outpath=data/osm_extracts --csv=data/bboxes.csv planet-latest.osm.pbf 134 | ``` 135 | 136 | For complete help on command-line arguments: 137 | 138 | ```sh 139 | osm_planet_extract -h 140 | ``` 141 | 142 | ### osm_extract_download 143 | 144 | Download regularly updated OSM extracts for popular cities and regions from [OSM Extracts by Interline](https://www.interline.io/osm/extracts). Browse available extracts using [the web interface]((https://www.interline.io/osm/extracts)) or [the GeoJSON file](https://github.com/interline-io/osm-extracts/blob/master/cities.geojson). Anyone can browse the available extracts or propose changes to the extract bounding boxes on [GitHub](https://github.com/interline-io/osm-extracts). A subscription is required to download extracts, to cover hosting costs and keep the service sustainable. (See the OSM Extracts website for more information on how profits are donated to OpenStreetMap and other "open" efforts.) 
145 | 146 | To download the latest copy of an extract (if `abcd` is your Interline API token and `abidjan_ivory-coast` is the ID for your chosen extract region): 147 | 148 | ```sh 149 | osm_extract_download --api-token=abcd abidjan_ivory-coast 150 | ``` 151 | 152 | You can also download extracts in GeoJSON format by using `--data-format=geojson`. Warning: these can be very large files, but may be useful for filtering and displaying on a web map. 153 | 154 | For complete help on command-line arguments: 155 | 156 | ```sh 157 | osm_extract_download -h 158 | ``` 159 | 160 | (Note: OSM Extracts is a hosted and managed version of the PlanetUtils library. Every day, the pipeline runs the `osm_planet_update` and `osm_planet_extract` commands.) 161 | 162 | ### osm_planet_get_timestamp 163 | 164 | A simple utility to print the timestamp of an OpenStreetMap PBF file. 165 | 166 | ```sh 167 | osm_planet_get_timestamp planet-latest.osm.pbf 168 | ``` 169 | 170 | ### elevation_tile_download 171 | 172 | Download elevation tiles from the [Terrain Tiles in the AWS Public Datasets program](https://aws.amazon.com/public-datasets/terrain/). Download for the entire planet, only tiles within a single bounding box, or within multiple bounding boxes. 173 | 174 | Elevation tiles are available in [a variety of formats](https://mapzen.com/documentation/terrain-tiles/formats/). 
This command supports the download of: 175 | - GeoTIFF (default): extension `.tif` in Web Mercator projection, 512x512 tiles 176 | - Skadi: extension `.hgt` in unprojected latlng, 1°x1° tiles 177 | 178 | To download the entire planet in Skadi tiles (__which will require about 1.6Tb of space!__): 179 | 180 | ```sh 181 | elevation_tile_download --format=skadi --outpath=data/elevation 182 | ``` 183 | 184 | To download GeoTIFF tiles to cover a single bounding box at a specified zoom level: 185 | 186 | ```sh 187 | elevation_tile_download --outpath=data/elevation --bbox=-122.737,37.449,-122.011,37.955 --zoom=10 188 | ``` 189 | 190 | To specify more than one bounding box of tiles to download, list the bounding boxes in a [CSV file or GeoJSON file](#bounding-box). For example: 191 | 192 | ```sh 193 | elevation_tile_download --outpath=data/elevation --csv=data/bboxes.csv 194 | ``` 195 | 196 | By default tiles are downloaded from the AWS `us-east-1` region. To instead download from the `eu-central-1` region: 197 | 198 | ```sh 199 | elevation_tile_download --outpath=data/elevation --region=eu-central-1 200 | ``` 201 | 202 | For complete help on command-line arguments: 203 | 204 | ```sh 205 | elevation_tile_download -h 206 | ``` 207 | 208 | ### elevation_tile_merge 209 | 210 | After downloading elevation tiles using the `elevation_tile_download` command, use this command to merge together multiple tiles. You can optionally resample elevation values as part of the merge process. 211 | 212 | This command only operates on GeoTIFF format elevation tiles. 213 | 214 | Warnings: merging lots of tiles can be resource intensive! 
215 | 216 | To merge a directory of GeoTIFF files: 217 | 218 | ```sh 219 | elevation_tile_merge single_tile.tif geo_tiff_tiles/ 220 | ``` 221 | 222 | For complete help on command-line arguments: 223 | 224 | ```sh 225 | elevation_tile_merge -h 226 | ``` 227 | 228 | ### valhalla_tilepack_list 229 | 230 | Use [Valhalla Tilepacks from Interline](https://www.interline.io/valhalla/tilepacks/) to power your own instances of the [Valhalla routing engine](https://www.interline.io/valhalla/). Anyone can list available planet tilepacks. A subscription and an API key are required to [download tilepacks](#valhalla_tilepack_download). 231 | 232 | To list all available planet tilepacks: 233 | 234 | ```sh 235 | valhalla_tilepack_list 236 | ``` 237 | 238 | For complete help on command-line arguments: 239 | 240 | ```sh 241 | valhalla_tilepack_list -h 242 | ``` 243 | 244 | ### valhalla_tilepack_download 245 | 246 | Download [Valhalla Tilepacks from Interline](https://www.interline.io/valhalla/tilepacks/) to power your own instances of the [Valhalla routing engine](https://www.interline.io/valhalla/). A subscription and an API key are required to download tilepacks. 247 | 248 | Initial set-up: 249 | 250 | 1. Sign up for [Valhalla Tilepacks from Interline](https://www.interline.io/valhalla/tilepacks/). 251 | 2. 
Set your API token as an environment variable (`INTERLINE_API_TOKEN`) or use it as an argument to the command 252 | 253 | To download the latest planet tilepack (if `abcd` is your Interline API token): 254 | 255 | ```sh 256 | valhalla_tilepack_download --api-token=abcd 257 | ``` 258 | 259 | or set your API token as an environment variable, and download the latest planet tilepack: 260 | 261 | ```sh 262 | export INTERLINE_API_TOKEN=abcd 263 | valhalla_tilepack_download 264 | ``` 265 | 266 | For complete help on command-line arguments: 267 | 268 | ```sh 269 | valhalla_tilepack_download -h 270 | ``` 271 | 272 | ## Specifying extract extents 273 | 274 | 275 | When extracting multiple bounding boxes or polygons from an OSM planet, or when downloading multiple bounding boxes of elevation tiles, you can specify your extents in a single file, either CSV or GeoJSON format. 276 | 277 | ### Bounding box file: CSV format 278 | 279 | Do not include a header row. The format is as follows: 280 | 281 | ```csv 282 | [name for extract],[left longitude],[bottom latitude],[right longitude],[top latitude] 283 | ``` 284 | 285 | For example: 286 | ```csv 287 | san-francisco,-122.737,37.449,-122.011,37.955 288 | dar-es-salaam,38.894,-7.120,39.661,-6.502 289 | ``` 290 | 291 | To determine a bounding box, try the tool at http://bboxfinder.com/ 292 | 293 | ### Bounding box/polygon file: GeoJSON format 294 | 295 | Alternatively, you can specify the bounding boxes or polygons as features in a GeoJSON file, using the `--geojson` argument. 296 | 297 | ```sh 298 | osm_planet_extract --geojson=examples/test.geojson examples/san-francisco-downtown.osm.pbf 299 | ``` 300 | 301 | To draw extents in GeoJSON, try the tool at http://geojson.io/ 302 | 303 | ## Switching toolchains 304 | 305 | PlanetUtils wraps up a number of libraries, including Osmosis, Osmium, and OSM C Tools. 
Some PlanetUtils commands allow you to switch which library is used to perform the operation: 306 | 307 | | PlanetUtils command | argument flag | default | options | 308 | | ------------------- | ------------- | ------- | ------- | 309 | | `osm_planet_update` | `--toolchain` | `osmosis` | `osmosis`, `osmium` | 310 | | `osm_planet_extract` | `--toolchain` | `osmosis` | `osmosis`, `osmium`, `osmctools` | 311 | 312 | If you are using `osm_planet_extract` with `--toolchain=osmium`, you can also use the `--strategy=` option to select `simple, complete_ways (default) or smart`. 313 | 314 | If you are using `osm_planet_update` with `--toolchain=osmium`, you can also use the `--size=` option to limit the amount of updates downloaded from the OSM replication server. Osmium requires this data to be held in memory. The default is `1024` megabytes. 315 | 316 | ## Support 317 | 318 | To report a bug, please [open an issue](https://github.com/interline-io/planetutils). 319 | 320 | Interline Technologies also provides professional support and consulting services around this and other related tools. Contact us at info@interline.io for more information. 
321 | -------------------------------------------------------------------------------- /examples/example-bboxes.csv: -------------------------------------------------------------------------------- 1 | california,-126.386719,32.157012,-113.532715,42.244785 2 | san-francisco,-122.588882,37.670234,-122.323151,37.863470 3 | san-francisco-downtown,-122.430439,37.766508,-122.379670,37.800052 4 | -------------------------------------------------------------------------------- /examples/san-francisco-downtown.osm.pbf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/interline-io/planetutils/0ccd381f816e10e3e186fe9ca1ea1b5da7070308/examples/san-francisco-downtown.osm.pbf -------------------------------------------------------------------------------- /examples/test.geojson: -------------------------------------------------------------------------------- 1 | { 2 | "type": "FeatureCollection", 3 | "features": [ 4 | { 5 | "type": "Feature", 6 | "properties": {"id":"union"}, 7 | "geometry": { 8 | "type": "Polygon", 9 | "coordinates": [ 10 | [ 11 | [ 12 | -122.42400169372557, 13 | 37.7860125252054 14 | ], 15 | [ 16 | -122.40559101104735, 17 | 37.7860125252054 18 | ], 19 | [ 20 | -122.40559101104735, 21 | 37.7985943621788 22 | ], 23 | [ 24 | -122.42400169372557, 25 | 37.7985943621788 26 | ], 27 | [ 28 | -122.42400169372557, 29 | 37.7860125252054 30 | ] 31 | ] 32 | ] 33 | } 34 | }, 35 | { 36 | "type": "Feature", 37 | "properties": {"id":"pentagon"}, 38 | "geometry": { 39 | "type": "Polygon", 40 | "coordinates": [ 41 | [ 42 | [ 43 | -122.39494800567627, 44 | 37.791879793952084 45 | ], 46 | [ 47 | -122.39975452423094, 48 | 37.78808138412046 49 | ], 50 | [ 51 | -122.39898204803465, 52 | 37.78390969122026 53 | ], 54 | [ 55 | -122.39439010620116, 56 | 37.78370618798191 57 | ], 58 | [ 59 | -122.38949775695801, 60 | 37.78740307610388 61 | ], 62 | [ 63 | -122.39494800567627, 64 | 37.791879793952084 65 | ] 66 | ] 67 | ] 68 
| } 69 | } 70 | ] 71 | } -------------------------------------------------------------------------------- /planetutils.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | OSM_PLANET=${OSM_PLANET:-"planet-latest.osm.pbf"} 5 | OSM_PLANET_TMP=${OSM_PLANET_TMP:-"planet-new.osm.pbf"} 6 | OSM_TOOLCHAIN=${OSM_TOOLCHAIN:-"osmium"} 7 | OSM_UPDATE_MEMORY=${OSM_UPDATE_MEMORY:-"1024"} 8 | 9 | osm_planet_update --toolchain=${OSM_TOOLCHAIN} --size=${OSM_UPDATE_MEMORY} ${OSM_PLANET} ${OSM_PLANET_TMP} 10 | mv ${OSM_PLANET_TMP} ${OSM_PLANET} 11 | 12 | if [ -n "${BBOX}" ]; then 13 | osm_planet_extract --toolchain=${OSM_TOOLCHAIN} --csv=${BBOX} --outpath=${EXTRACTS} ${OSM_PLANET} 14 | fi 15 | 16 | if [ -n "${ELEVATION}" ]; then 17 | elevation_tile_download --csv=${BBOX} --outpath=${ELEVATION} 18 | fi 19 | 20 | -------------------------------------------------------------------------------- /planetutils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/interline-io/planetutils/0ccd381f816e10e3e186fe9ca1ea1b5da7070308/planetutils/__init__.py -------------------------------------------------------------------------------- /planetutils/bbox.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import absolute_import, unicode_literals 3 | import json 4 | import os 5 | import csv 6 | 7 | def flatcoords(coords, fc=None): 8 | if fc is None: 9 | fc = [] 10 | try: 11 | coords[0][0] # check if iterable of iterables 12 | for c in coords: 13 | flatcoords(c, fc) 14 | except: 15 | fc.append(coords) 16 | return fc 17 | 18 | class Feature(object): 19 | def __init__(self, properties=None, geometry=None, **kwargs): 20 | self.properties = properties or {} 21 | self.geometry = geometry or {} 22 | if not self.geometry: 23 | self.set_bbox([0.0, 0.0, 0.0, 0.0]) 24 | 25 | def bbox(self): 
26 | gt = self.geometry.get('type') 27 | coords = self.geometry.get('coordinates', []) 28 | fc = flatcoords(coords) 29 | lons = [i[0] for i in fc] 30 | lats = [i[1] for i in fc] 31 | left, right = min(lons), max(lons) 32 | bottom, top = min(lats), max(lats) 33 | return validate_bbox([left, bottom, right, top]) 34 | 35 | def set_bbox(self, bbox): 36 | left, bottom, right, top = validate_bbox(bbox) 37 | self.geometry = { 38 | "type": "LineString", 39 | "coordinates": [ 40 | [left, bottom], 41 | [right, top], 42 | ] 43 | } 44 | 45 | def is_rectangle(self): 46 | fc = flatcoords(self.geometry.get('coordinates', [])) 47 | lons = set([i[0] for i in fc]) 48 | lats = set([i[1] for i in fc]) 49 | return len(lons) <= 2 and len(lats) <= 2 50 | 51 | # act like [left, bottom, right, top] 52 | def __getitem__(self, item): 53 | return self.bbox()[item] 54 | 55 | 56 | def validate_bbox(bbox): 57 | left, bottom, right, top = map(float, bbox) 58 | assert -180 <= left <= 180 59 | assert -180 <= right <= 180 60 | assert -90 <= bottom <= 90 61 | assert -90 <= top <= 90 62 | assert top >= bottom 63 | assert right >= left 64 | return [left, bottom, right, top] 65 | 66 | def load_feature_string(bbox): 67 | f = Feature() 68 | f.set_bbox(bbox.split(',')) 69 | return f 70 | 71 | def load_features_csv(csvpath): 72 | # bbox csv format: 73 | # name, left, bottom, right, top 74 | if not os.path.exists(csvpath): 75 | raise Exception('file does not exist: %s'%csvpath) 76 | bboxes = {} 77 | with open(csvpath) as f: 78 | reader = csv.reader(f) 79 | for row in reader: 80 | if len(row) != 5: 81 | raise Exception('5 columns required') 82 | f = Feature() 83 | f.set_bbox(row[1:]) 84 | bboxes[row[0]] = f 85 | return bboxes 86 | 87 | def load_features_geojson(path): 88 | if not os.path.exists(path): 89 | raise Exception('file does not exist: %s'%path) 90 | with open(path) as f: 91 | data = json.load(f) 92 | # check if this is a single feature 93 | if data.get('type') == 'FeatureCollection': 94 | features = 
def download(url, outpath):
    """Stream url to outpath via requests.

    Fix: raises requests.HTTPError on a non-2xx response instead of
    silently writing the error body (e.g. a 404 page) to outpath.
    """
    r = requests.get(url, stream=True)
    r.raise_for_status()
    with open(outpath, 'wb') as fd:
        # 64 KiB chunks; the original 128-byte chunks caused one write
        # syscall per 128 bytes on multi-gigabyte planet files.
        for chunk in r.iter_content(chunk_size=65536):
            fd.write(chunk)

def download_gzip(url, outpath):
    """Download url with curl and gunzip the stream into outpath.

    Fix: the original ignored both child exit codes and could leave a
    truncated or empty file behind on failure; it also never closed its
    copy of the pipe, so curl could not receive SIGPIPE if gzip exited.
    """
    with open(outpath, 'wb') as f:
        ps1 = subprocess.Popen(['curl', '-L', '--fail', '-s', url], stdout=subprocess.PIPE)
        ps2 = subprocess.Popen(['gzip', '-d'], stdin=ps1.stdout, stdout=f)
        # Close our reference so gzip's exit propagates SIGPIPE to curl.
        ps1.stdout.close()
        gzip_status = ps2.wait()
        curl_status = ps1.wait()
    if curl_status != 0 or gzip_status != 0:
        raise Exception("Error downloading %s (curl exit %s, gzip exit %s)" % (url, curl_status, gzip_status))

def download_curl(url, outpath, compressed=False):
    """Download url to outpath with curl, following redirects.

    compressed=False (default) adds --compressed so curl negotiates a
    compressed transfer encoding and transparently decompresses; pass
    compressed=True when the target is already a compressed artifact
    that must be kept as-is.

    Raises on a non-zero curl exit status.
    """
    if os.path.exists(outpath):
        log.warning("Warning: output path %s already exists."%outpath)
    args = ['curl', '-L', '--fail', '-o', outpath, url]
    if not compressed:
        args.append('--compressed')

    log.info("Downloading to %s"%outpath)
    log.debug(url)
    log.debug(' '.join(args))
    p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = p.communicate()
    # communicate() already reaped the process; the extra p.wait() in the
    # original was redundant.
    if p.returncode != 0:
        raise Exception("Error downloading")
    else:
        log.info("Done")
def main():
    """CLI entry point: download elevation tiles for a single bbox, a CSV
    of named bboxes, or the whole planet, in GeoTIFF or Skadi format."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--outpath', help='Output path for elevation tiles.', default='.')
    parser.add_argument('--csv', help='Path to CSV file with bounding box definitions.')
    parser.add_argument('--bbox', help='Bounding box for extract file. Format for coordinates: left,bottom,right,top')
    parser.add_argument('--verbose', help="Verbose output", action='store_true')
    parser.add_argument('--format', help='Download format', default='geotiff')
    parser.add_argument('--zoom', help='Zoom level', default=0, type=int)
    parser.add_argument('--region', help='AWS region for downloads (us-east-1, eu-central-1)', default='us-east-1')
    args = parser.parse_args()

    if args.verbose:
        log.set_verbose()

    # Pick a downloader implementation for the requested tile format.
    fmt = args.format
    if fmt == 'geotiff':
        downloader = ElevationGeotiffDownloader(args.outpath, zoom=args.zoom, region=args.region)
    elif fmt == 'skadi':
        downloader = ElevationSkadiDownloader(args.outpath, region=args.region)
    else:
        print("Unknown format: %s"%fmt)
        sys.exit(1)

    # Extent selection: CSV of named bboxes > single bbox string > planet.
    if args.csv:
        downloader.download_bboxes(load_features_csv(args.csv))
    elif args.bbox:
        downloader.download_bbox(load_feature_string(args.bbox))
    else:
        downloader.download_planet()


if __name__ == '__main__':
    main()
import log 9 | from .bbox import validate_bbox 10 | 11 | def makedirs(path): 12 | try: 13 | os.makedirs(path) 14 | except OSError as e: 15 | pass 16 | 17 | class ElevationDownloader(object): 18 | """Downloads elevation tiles from AWS Open Data Registry's Terrain Tiles dataset. 19 | 20 | This class handles downloading of elevation data from the Terrain Tiles dataset 21 | hosted on AWS S3. The dataset is available in both US (us-east-1) and EU (eu-central-1) 22 | regions through the buckets elevation-tiles-prod and elevation-tiles-prod-eu respectively. 23 | 24 | Data source: https://registry.opendata.aws/terrain-tiles/ 25 | """ 26 | def __init__(self, outpath='.', region='us-east-1'): 27 | self.outpath = outpath 28 | self.region = region 29 | # Map regions to buckets 30 | self.region_buckets = { 31 | 'us-east-1': 'elevation-tiles-prod', 32 | 'eu-central-1': 'elevation-tiles-prod-eu' 33 | } 34 | 35 | def get_bucket_for_region(self, default_bucket): 36 | """Get the appropriate bucket based on region, falling back to default""" 37 | return self.region_buckets.get(self.region, default_bucket) 38 | 39 | def download_planet(self): 40 | self.download_bbox([-180, -90, 180, 90]) 41 | 42 | def download_bboxes(self, bboxes): 43 | for name, bbox in bboxes.items(): 44 | self.download_bbox(bbox) 45 | 46 | def download_bbox(self, bbox, bucket='elevation-tiles-prod', prefix='geotiff'): 47 | tiles = self.get_bbox_tiles(bbox) 48 | found = set() 49 | download = set() 50 | for z,x,y in tiles: 51 | od = self.tile_path(z, x, y) 52 | op = os.path.join(self.outpath, *od) 53 | if self.tile_exists(op): 54 | found.add((x,y)) 55 | else: 56 | download.add((x,y)) 57 | log.info("found %s tiles; %s to download"%(len(found), len(download))) 58 | for x,y in sorted(download): 59 | self.download_tile(bucket, prefix, z, x, y) 60 | 61 | def tile_exists(self, op): 62 | if os.path.exists(op): 63 | return True 64 | 65 | def download_tile(self, bucket, prefix, z, x, y, suffix=''): 66 | od = self.tile_path(z, 
x, y) 67 | op = os.path.join(self.outpath, *od) 68 | makedirs(os.path.join(self.outpath, *od[:-1])) 69 | if prefix: 70 | od = [prefix]+od 71 | 72 | # Use the region-specific bucket if available 73 | actual_bucket = self.get_bucket_for_region(bucket) 74 | 75 | # Use virtual-hosted style URL 76 | if self.region == 'us-east-1': 77 | url = f'https://{actual_bucket}.s3.amazonaws.com/{"/".join(od)}{suffix}' 78 | else: 79 | url = f'https://{actual_bucket}.s3.{self.region}.amazonaws.com/{"/".join(od)}{suffix}' 80 | 81 | log.info("downloading %s to %s"%(url, op)) 82 | self._download(url, op) 83 | 84 | def tile_path(self, z, x, y): 85 | raise NotImplementedError 86 | 87 | def get_bbox_tiles(self, bbox): 88 | raise NotImplementedError 89 | 90 | def _download(self, url, op): 91 | download.download(url, op) 92 | 93 | class ElevationGeotiffDownloader(ElevationDownloader): 94 | def __init__(self, *args, **kwargs): 95 | self.zoom = kwargs.pop('zoom', 0) 96 | super(ElevationGeotiffDownloader, self).__init__(*args, **kwargs) 97 | 98 | def get_bbox_tiles(self, bbox): 99 | left, bottom, right, top = validate_bbox(bbox) 100 | ybound = 85.0511 101 | if bottom <= -ybound: 102 | bottom = -ybound 103 | if top > ybound: 104 | top = ybound 105 | if right >= 180: 106 | right = 179.999 107 | size = 2**self.zoom 108 | xt = lambda x:int((x + 180.0) / 360.0 * size) 109 | yt = lambda y:int((1.0 - math.log(math.tan(math.radians(y)) + (1 / math.cos(math.radians(y)))) / math.pi) / 2.0 * size) 110 | tiles = [] 111 | for x in range(xt(left), xt(right)+1): 112 | for y in range(yt(top), yt(bottom)+1): 113 | tiles.append([self.zoom, x, y]) 114 | return tiles 115 | 116 | def tile_path(self, z, x, y): 117 | return list(map(str, [z, x, str(y)+'.tif'])) 118 | 119 | class ElevationSkadiDownloader(ElevationDownloader): 120 | HGT_SIZE = (3601 * 3601 * 2) 121 | 122 | def get_bbox_tiles(self, bbox): 123 | left, bottom, right, top = validate_bbox(bbox) 124 | min_x = int(math.floor(left)) 125 | max_x = 
int(math.ceil(right)) 126 | min_y = int(math.floor(bottom)) 127 | max_y = int(math.ceil(top)) 128 | expect = (max_x - min_x + 1) * (max_y - min_y + 1) 129 | tiles = set() 130 | for x in range(min_x, max_x): 131 | for y in range(min_y, max_y): 132 | tiles.add((0, x, y)) 133 | return tiles 134 | 135 | def tile_exists(self, op): 136 | if os.path.exists(op) and os.stat(op).st_size == self.HGT_SIZE: 137 | return True 138 | 139 | def download_tile(self, bucket, prefix, z, x, y, suffix=''): 140 | super(ElevationSkadiDownloader, self).download_tile(bucket, 'skadi', z, x, y, suffix='.gz') 141 | 142 | def tile_path(self, z, x, y): 143 | ns = lambda i:'S%02d'%abs(i) if i < 0 else 'N%02d'%abs(i) 144 | ew = lambda i:'W%03d'%abs(i) if i < 0 else 'E%03d'%abs(i) 145 | return [ns(y), '%s%s.hgt'%(ns(y), ew(x))] 146 | 147 | def _download(self, url, op): 148 | download.download_gzip(url, op) 149 | -------------------------------------------------------------------------------- /planetutils/elevation_tile_merge.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import absolute_import, unicode_literals, print_function 3 | import argparse 4 | import sys 5 | import fnmatch 6 | import os 7 | import subprocess 8 | import tempfile 9 | 10 | from . 
import log 11 | 12 | def main(): 13 | parser = argparse.ArgumentParser() 14 | parser.add_argument('--scale', help="Resample to 8 bit with (min,max) range") 15 | parser.add_argument('outpath', help='Output filename') 16 | parser.add_argument('inpath', help='Input directory') 17 | args = parser.parse_args() 18 | 19 | outpath = args.outpath 20 | tmppath = args.outpath 21 | 22 | if args.scale and len(args.scale.split(',')) != 2: 23 | print("Must provide min, max values") 24 | sys.exit(1) 25 | elif args.scale: 26 | # Output to tmp file 27 | _, tmppath = tempfile.mkstemp(suffix='.tif') 28 | 29 | matches = [] 30 | for root, dirnames, filenames in os.walk(args.inpath): 31 | for filename in fnmatch.filter(filenames, '*.tif'): 32 | matches.append(os.path.join(root, filename)) 33 | 34 | if len(matches) == 0: 35 | print("No input files") 36 | sys.exit(0) 37 | 38 | print("Found %s files:"%len(matches)) 39 | for i in matches: 40 | print("\t%s"%(i)) 41 | 42 | # gdal_merge.py -init 0 -o out.tif 43 | print("Merging... 
%s"%(tmppath)) 44 | cmd = ['gdal_merge.py', '-init', '0', '-o', tmppath] 45 | cmd += matches 46 | p = subprocess.check_call(cmd) 47 | 48 | # gdal_translate -of GTiff -ot Byte -scale 0 255 0 255 out.tif out8.tif 49 | if args.scale: 50 | print("Scaling: %s -> %s"%(tmppath, outpath)) 51 | a = args.scale.split(",") 52 | cmd = ['gdal_translate', '-of', 'GTiff', '-ot', 'Byte', '-scale', a[0], a[1], '0', '255', tmppath, outpath] 53 | subprocess.check_call(cmd) 54 | # cleanup 55 | try: os.unlink('%s.aux.xml'%outpath) 56 | except: pass 57 | try: os.unlink(tmppath) 58 | except: pass 59 | 60 | if __name__ == '__main__': 61 | main() 62 | -------------------------------------------------------------------------------- /planetutils/log.py: -------------------------------------------------------------------------------- 1 | import logging 2 | logging.basicConfig(format='[%(levelname)s] %(message)s') 3 | logger = logging.getLogger(__name__) 4 | 5 | def set_quiet(): 6 | logger.setLevel(logging.ERROR) 7 | 8 | def set_verbose(): 9 | logger.setLevel(logging.DEBUG) 10 | 11 | def set_default(): 12 | logger.setLevel(logging.INFO) 13 | 14 | set_default() 15 | 16 | info = logger.info 17 | debug = logger.debug 18 | warning = logger.warning 19 | error = logger.error -------------------------------------------------------------------------------- /planetutils/osm_extract_download.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import absolute_import, unicode_literals 3 | import os 4 | import argparse 5 | 6 | from . 
import log 7 | from .osm_extract_downloader import OsmExtractDownloader 8 | 9 | def main(): 10 | parser = argparse.ArgumentParser(usage="OSM Extract Download tool.") 11 | parser.add_argument('id', help='Extract ID') 12 | # parser.add_argument('--osm-extract-version', help='OSM Extract version', default='latest') 13 | parser.add_argument('--outpath', help='Output path for Extract; default is .osm.pbf') 14 | parser.add_argument('--data-format', help='Download format: pbf, geojson, geojsonl', default='pbf') 15 | parser.add_argument('--api-token', help='Interline Auth Token; default is read from $INTERLINE_API_TOKEN') 16 | parser.add_argument('--verbose', help="Verbose output", action='store_true') 17 | args = parser.parse_args() 18 | 19 | if args.verbose: 20 | log.set_verbose() 21 | 22 | defaultpath = "%s.osm.pbf"%(args.id) 23 | if args.data_format != "pbf": 24 | defaultpath = "%s.%s"%(args.id, args.data_format) 25 | outpath = args.outpath or defaultpath 26 | if os.path.exists(outpath): 27 | log.warning("Warning: output path %s already exists."%outpath) 28 | 29 | downloader = OsmExtractDownloader() 30 | downloader.download( 31 | outpath, 32 | osm_extract_id=args.id, 33 | data_format=args.data_format, 34 | api_token=args.api_token or os.getenv('INTERLINE_API_TOKEN') 35 | ) 36 | 37 | if __name__ == '__main__': 38 | main() 39 | -------------------------------------------------------------------------------- /planetutils/osm_extract_downloader.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import, unicode_literals 2 | from future.standard_library import install_aliases 3 | install_aliases() 4 | from urllib.parse import urlparse, urlencode, urlsplit, parse_qs, urlunsplit 5 | from urllib.request import urlopen 6 | 7 | import subprocess 8 | import json 9 | 10 | from . import log 11 | from . 
import download 12 | 13 | class OsmExtractDownloader(object): 14 | HOST = 'https://app.interline.io' 15 | def download(self, outpath, osm_extract_id, osm_extract_version='latest', data_format='pbf', api_token=None): 16 | # Endpoint 17 | url = '%s/osm_extracts/%s/download'%(self.HOST, osm_extract_version) 18 | if osm_extract_version == 'latest': 19 | url = '%s/osm_extracts/download_latest'%(self.HOST) 20 | 21 | # Make url 22 | u = list(urlsplit(url)) 23 | q = parse_qs(u[3]) 24 | q['data_format'] = data_format 25 | if osm_extract_version == "latest": 26 | q['string_id'] = osm_extract_id 27 | if api_token: 28 | q['api_token'] = api_token 29 | u[3] = urlencode(q) 30 | url = urlunsplit(u) 31 | 32 | # Download 33 | download.download_curl(url, outpath) -------------------------------------------------------------------------------- /planetutils/osm_planet_extract.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import absolute_import, unicode_literals, print_function 3 | import argparse 4 | from .planet import * 5 | from . import bbox 6 | from .bbox import load_feature_string, load_features_csv 7 | 8 | def main(): 9 | parser = argparse.ArgumentParser() 10 | parser.add_argument('osmpath', help='Name or path to OSM planet file. Use planet_update if you do not have a copy locally.') 11 | parser.add_argument('--outpath', help='Extract output directory', default='.') 12 | parser.add_argument('--csv', help='Path to CSV file with bounding box definitions.') 13 | parser.add_argument('--geojson', help='Path to GeoJSON file: bbox for each feature is extracted.') 14 | parser.add_argument('--name', help='Name to give to extract file.') 15 | parser.add_argument('--bbox', help='Bounding box for extract file. 
Format for coordinates: left,bottom,right,top') 16 | parser.add_argument('--verbose', help="Verbose output", action='store_true') 17 | parser.add_argument('--toolchain', help='OSM toolchain', default='osmosis') 18 | parser.add_argument('--strategy', help='Osmium extract strategy: simple, complete_ways, or smart', default='complete_ways') 19 | parser.add_argument('--commands', help='Output a command list instead of performing action, e.g. for parallel usage', action='store_true') 20 | args = parser.parse_args() 21 | 22 | if args.verbose: 23 | log.set_verbose() 24 | 25 | if args.toolchain == 'osmosis': 26 | p = PlanetExtractorOsmosis(args.osmpath) 27 | elif args.toolchain == 'osmctools': 28 | p = PlanetExtractorOsmconvert(args.osmpath) 29 | elif args.toolchain == 'osmium': 30 | p = PlanetExtractorOsmium(args.osmpath) 31 | else: 32 | parser.error('unknown toolchain: %s'%args.toolchain) 33 | 34 | bboxes = {} 35 | if args.csv: 36 | bboxes = bbox.load_features_csv(args.csv) 37 | elif args.geojson: 38 | bboxes = bbox.load_features_geojson(args.geojson) 39 | elif (args.bbox and args.name): 40 | bboxes[args.name] = bbox.load_feature_string(args.bbox) 41 | else: 42 | parser.error('must specify --csv, --geojson, or --bbox and --name') 43 | 44 | if args.commands: 45 | commands = p.extract_commands(bboxes, outpath=args.outpath, strategy=args.strategy) 46 | for i in commands: 47 | print(" ".join(i)) 48 | else: 49 | p.extract_bboxes(bboxes, outpath=args.outpath, strategy=args.strategy) 50 | 51 | if __name__ == '__main__': 52 | main() 53 | -------------------------------------------------------------------------------- /planetutils/osm_planet_get_timestamp.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import absolute_import, unicode_literals, print_function 3 | import argparse 4 | from .planet import * 5 | from . 
import log 6 | 7 | def main(): 8 | parser = argparse.ArgumentParser() 9 | parser.add_argument('osmpath', help='OSM file') 10 | args = parser.parse_args() 11 | p = Planet(args.osmpath) 12 | log.set_quiet() 13 | print(p.get_timestamp()) 14 | 15 | if __name__ == '__main__': 16 | main() 17 | -------------------------------------------------------------------------------- /planetutils/osm_planet_update.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import absolute_import, unicode_literals 3 | import argparse 4 | 5 | from . import log 6 | from .planet import * 7 | 8 | def main(): 9 | parser = argparse.ArgumentParser() 10 | parser.add_argument('osmpath', help='Name or path to existing OSM planet file. Will be created and downloaded, if it does not exist.') 11 | parser.add_argument('outpath', help='Name or path to where updated output file should be placed.') 12 | parser.add_argument('--toolchain', help='OSM toolchain', default='osmosis') 13 | parser.add_argument('--s3', action='store_true', help='Download using S3 client from AWS Public Datasets program. 
AWS credentials required.') 14 | parser.add_argument('--workdir', help="Osmosis replication workingDirectory.", default='.') 15 | parser.add_argument('--verbose', help="Verbose output", action='store_true') 16 | parser.add_argument('--size', help='Osmium update memory limit', default='1024') 17 | parser.add_argument('--mirror', help='Base URL for OSM mirror', default='https://planet.osm.org') 18 | args = parser.parse_args() 19 | 20 | planet_source = "%s/pbf/planet-latest.osm.pbf"%args.mirror 21 | diff_source = "%s/replication/hour"%args.mirror 22 | 23 | if args.verbose: 24 | log.set_verbose() 25 | 26 | if not os.path.exists(args.osmpath): 27 | log.info("planet does not exist; downloading") 28 | if args.s3: 29 | d = PlanetDownloaderS3(args.osmpath) 30 | d.download_planet() 31 | else: 32 | d = PlanetDownloaderHttp(args.osmpath) 33 | d.download_planet(url=planet_source) 34 | 35 | if args.toolchain == 'osmosis': 36 | p = PlanetUpdaterOsmosis(args.osmpath) 37 | elif args.toolchain == 'osmium': 38 | p = PlanetUpdaterOsmium(args.osmpath) 39 | else: 40 | parser.error('unknown toolchain: %s'%args.toolchain) 41 | 42 | p.update_planet(args.outpath, size=args.size, changeset_url=diff_source) 43 | 44 | if __name__ == '__main__': 45 | main() 46 | -------------------------------------------------------------------------------- /planetutils/planet.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import absolute_import, unicode_literals 3 | from future.standard_library import install_aliases 4 | install_aliases() 5 | from urllib.parse import urlparse, urlencode 6 | from urllib.request import urlopen 7 | 8 | import re 9 | import os 10 | import subprocess 11 | import tempfile 12 | import json 13 | 14 | from . 
import log 15 | from .bbox import validate_bbox 16 | 17 | try: 18 | import boto3 19 | except ImportError: 20 | boto3 = None 21 | 22 | class PlanetBase(object): 23 | def __init__(self, osmpath=None, grain='hour', changeset_url=None, osmosis_workdir=None): 24 | self.osmpath = osmpath 25 | d, p = os.path.split(osmpath) 26 | self.osmosis_workdir = osmosis_workdir or os.path.join(d, '%s.workdir'%p) 27 | 28 | def command(self, args): 29 | log.debug(args) 30 | return subprocess.check_output( 31 | args, 32 | shell=False 33 | ).decode('utf-8') 34 | 35 | def osmosis(self, *args): 36 | return self.command(['osmosis'] + list(args)) 37 | 38 | def osmconvert(self, *args): 39 | return self.command(['osmconvert'] + list(args)) 40 | 41 | def get_timestamp(self): 42 | timestamp = self.osmconvert( 43 | self.osmpath, 44 | '--out-timestamp' 45 | ) 46 | if 'invalid' in timestamp: 47 | log.debug('no timestamp; falling back to osmconvert --out-statistics') 48 | statistics = self.osmconvert( 49 | self.osmpath, 50 | '--out-statistics' 51 | ) 52 | timestamp = [ 53 | i.partition(':')[2].strip() for i in statistics.split('\n') 54 | if i.startswith('timestamp max') 55 | ][0] 56 | return timestamp.strip() 57 | 58 | class Planet(PlanetBase): 59 | pass 60 | 61 | class PlanetExtractor(PlanetBase): 62 | def extract_bboxes(self, bboxes, workers=1, outpath='.'): 63 | raise NotImplementedError 64 | 65 | def extract_bbox(self, name, bbox, workers=1, outpath='.'): 66 | return self.extract_bboxes({name: bbox}, outpath=outpath, workers=workers) 67 | 68 | def extract_commands(self, bboxes, outpath='.', **kw): 69 | args = [] 70 | self.command = lambda x:args.append(x) 71 | self.extract_bboxes(bboxes, outpath=outpath, **kw) 72 | return args 73 | 74 | class PlanetExtractorOsmosis(PlanetExtractor): 75 | def extract_bboxes(self, bboxes, workers=1, outpath='.', **kw): 76 | args = [] 77 | args += ['--read-pbf-fast', self.osmpath, 'workers=%s'%int(workers)] 78 | args += ['--tee', str(len(bboxes))] 79 | for name, 
bbox in bboxes.items(): 80 | validate_bbox(bbox) 81 | left, bottom, right, top = bbox 82 | arg = [ 83 | '--bounding-box', 84 | 'left=%0.5f'%left, 85 | 'bottom=%0.5f'%bottom, 86 | 'right=%0.5f'%right, 87 | 'top=%0.5f'%top, 88 | '--write-pbf', 89 | os.path.join(outpath, '%s.osm.pbf'%name) 90 | ] 91 | args += arg 92 | self.osmosis(*args) 93 | 94 | class PlanetExtractorOsmconvert(PlanetExtractor): 95 | def extract_bboxes(self, bboxes, workers=1, outpath='.', **kw): 96 | for name, bbox in bboxes.items(): 97 | self.extract_bbox(name, bbox, outpath=outpath) 98 | 99 | def extract_bbox(self, name, bbox, workers=1, outpath='.', **kw): 100 | validate_bbox(bbox) 101 | left, bottom, right, top = bbox 102 | args = [ 103 | self.osmpath, 104 | '-b=%s,%s,%s,%s'%(left, bottom, right, top), 105 | '-o=%s'%os.path.join(outpath, '%s.osm.pbf'%name) 106 | ] 107 | self.osmconvert(*args) 108 | 109 | class PlanetExtractorOsmium(PlanetExtractor): 110 | def extract_bboxes(self, bboxes, workers=1, outpath='.', strategy='complete_ways', **kw): 111 | extracts = [] 112 | for name, bbox in bboxes.items(): 113 | ext = { 114 | 'output': '%s.osm.pbf'%name, 115 | 'output_format': 'pbf', 116 | } 117 | if bbox.is_rectangle(): 118 | left, bottom, right, top = bbox.bbox() 119 | ext['bbox'] = {'left': left, 'right': right, 'top': top, 'bottom':bottom} 120 | else: 121 | ftype = bbox.geometry.get('type', '').lower() 122 | ext[ftype] = bbox.geometry.get('coordinates', []) 123 | extracts.append(ext) 124 | config = {'directory': outpath, 'extracts': extracts} 125 | path = None 126 | with tempfile.NamedTemporaryFile(mode='w', delete=False) as f: 127 | json.dump(config, f) 128 | path = f.name 129 | self.command(['osmium', 'extract', '-s', strategy, '-c', path, self.osmpath]) 130 | os.unlink(path) 131 | 132 | class PlanetDownloader(PlanetBase): 133 | def download_planet(self): 134 | raise NotImplementedError 135 | 136 | class PlanetDownloaderHttp(PlanetBase): 137 | def _download(self, url, outpath): 138 | 
subprocess.check_output([ 139 | 'curl', 140 | '-L', 141 | '-o', outpath, 142 | url 143 | ]) 144 | 145 | def download_planet(self, url=None): 146 | if os.path.exists(self.osmpath): 147 | raise Exception('planet file exists: %s'%self.osmpath) 148 | url = url or 'https://planet.openstreetmap.org/pbf/planet-latest.osm.pbf' 149 | self._download(url, self.osmpath) 150 | 151 | class PlanetDownloaderS3(PlanetBase): 152 | def download_planet(self): 153 | self.download_planet_latest() 154 | 155 | def download_planet_latest(self, bucket=None, prefix=None, match=None): 156 | if os.path.exists(self.osmpath): 157 | raise Exception('planet file exists: %s'%self.osmpath) 158 | match = match or '.*(planet[-_:T0-9]+.osm.pbf)$' 159 | bucket = bucket or 'osm-pds' 160 | objs = self._get_planets(bucket, prefix, match) 161 | objs = sorted(objs, key=lambda x:x.key) 162 | for i in objs: 163 | log.info('found planet: s3://%s/%s'%(i.bucket_name, i.key)) 164 | planet = objs[-1] 165 | log.info('downloading: s3://%s/%s to %s'%(planet.bucket_name, planet.key, self.osmpath)) 166 | self._download(planet.bucket_name, planet.key) 167 | 168 | def _download(self, bucket_name, key): 169 | if not boto3: 170 | raise Exception('please install boto3') 171 | s3 = boto3.client('s3') 172 | s3.download_file(bucket_name, key, self.osmpath) 173 | 174 | def _get_planets(self, bucket, prefix, match): 175 | if not boto3: 176 | raise Exception('please install boto3') 177 | r = re.compile(match) 178 | s3 = boto3.resource('s3') 179 | s3bucket = s3.Bucket(bucket) 180 | objs = [] 181 | for obj in s3bucket.objects.filter(Prefix=(prefix or '')): 182 | if r.match(obj.key): 183 | objs.append(obj) 184 | return objs 185 | 186 | 187 | class PlanetUpdater(PlanetBase): 188 | def update_planet(self, outpath, grain='hour', changeset_url=None, **kw): 189 | raise NotImplementedError 190 | 191 | class PlanetUpdaterOsmupdate(PlanetBase): 192 | pass 193 | 194 | class PlanetUpdaterOsmium(PlanetBase): 195 | def update_planet(self, 
outpath, grain='minute', changeset_url=None, size='1024', **kw): 196 | changeset_url = changeset_url or 'https://planet.openstreetmap.org/replication/%s'%grain 197 | if not os.path.exists(self.osmpath): 198 | raise Exception('planet file does not exist: %s'%self.osmpath) 199 | self.command(['pyosmium-up-to-date', '-s', size, '--server', changeset_url, '-v', self.osmpath, '-o', outpath]) 200 | 201 | class PlanetUpdaterOsmosis(PlanetBase): 202 | def update_planet(self, outpath, grain='minute', changeset_url=None, **kw): 203 | if not os.path.exists(self.osmpath): 204 | raise Exception('planet file does not exist: %s'%self.osmpath) 205 | self.changeset_url = changeset_url or 'https://planet.openstreetmap.org/replication/%s'%grain 206 | self._initialize() 207 | self._initialize_state() 208 | self._get_changeset() 209 | self._apply_changeset(outpath) 210 | 211 | def _initialize(self): 212 | configpath = os.path.join(self.osmosis_workdir, 'configuration.txt') 213 | if os.path.exists(configpath): 214 | return 215 | if os.path.exists(self.osmosis_workdir) and not os.path.isdir(self.osmosis_workdir): 216 | raise Exception('workdir exists and is not a directory: %s'%self.osmosis_workdir) 217 | try: 218 | os.makedirs(self.osmosis_workdir) 219 | except OSError as e: 220 | pass 221 | self.osmosis( 222 | '--read-replication-interval-init', 223 | 'workingDirectory=%s'%self.osmosis_workdir 224 | ) 225 | with open(configpath, 'w') as f: 226 | f.write(''' 227 | baseUrl=%s 228 | maxInterval=0 229 | '''%self.changeset_url) 230 | 231 | def _initialize_state(self): 232 | statepath = os.path.join(self.osmosis_workdir, 'state.txt') 233 | if os.path.exists(statepath): 234 | return 235 | timestamp = self.get_timestamp() 236 | url = 'https://replicate-sequences.osm.mazdermind.de/?%s'%timestamp 237 | state = urlopen(url).read() 238 | with open(statepath, 'w') as f: 239 | f.write(state) 240 | 241 | def _get_changeset(self): 242 | self.osmosis( 243 | '--read-replication-interval', 244 | 
'workingDirectory=%s'%self.osmosis_workdir, 245 | '--simplify-change', 246 | '--write-xml-change', 247 | os.path.join(self.osmosis_workdir, 'changeset.osm.gz') 248 | ) 249 | 250 | def _apply_changeset(self, outpath): 251 | self.osmosis( 252 | '--read-xml-change', 253 | os.path.join(self.osmosis_workdir, 'changeset.osm.gz'), 254 | '--read-pbf', 255 | self.osmpath, 256 | '--apply-change', 257 | '--write-pbf', 258 | outpath 259 | ) 260 | -------------------------------------------------------------------------------- /planetutils/tilepack_download.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import absolute_import, unicode_literals 3 | import os 4 | import argparse 5 | 6 | from . import log 7 | from .tilepack_downloader import TilepackDownloader 8 | 9 | def main(): 10 | parser = argparse.ArgumentParser(usage="Valhalla Tilepack Download tool. If no Tilepack ID is provided, the latest Tilepack is used.") 11 | parser.add_argument('--id', help='Tilepack ID', default='latest') 12 | parser.add_argument('--outpath', help='Output path for Valhalla Tilepack; default is tiles.tar', default='tiles.tar') 13 | parser.add_argument('--api-token', help='Interline Auth Token; default is read from $INTERLINE_API_TOKEN') 14 | parser.add_argument('--compressed', help='Do not decompress Tilepack', action='store_true') 15 | parser.add_argument('--verbose', help="Verbose output", action='store_true') 16 | args = parser.parse_args() 17 | 18 | if args.verbose: 19 | log.set_verbose() 20 | 21 | outpath = args.outpath 22 | if args.compressed: 23 | if not (outpath.endswith('.tar') or outpath.endswith('.tgz')): 24 | log.warning("Warning: compressed output path %s does not in end in .tar.gz or .tgz"%outpath) 25 | else: 26 | if not outpath.endswith('.tar'): 27 | log.warning("Warning: decompressed output path %s does not end in .tar"%outpath) 28 | 29 | downloader = TilepackDownloader() 30 | downloader.download( 31 | 
outpath, 32 | version=args.id, 33 | compressed=args.compressed, 34 | api_token=args.api_token or os.getenv('INTERLINE_API_TOKEN') 35 | ) 36 | 37 | if __name__ == '__main__': 38 | main() 39 | -------------------------------------------------------------------------------- /planetutils/tilepack_downloader.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import, unicode_literals, print_function 2 | from future.standard_library import install_aliases 3 | install_aliases() 4 | from urllib.parse import urlparse, urlencode, urlsplit, urlunsplit, parse_qs 5 | from urllib.request import urlopen 6 | 7 | import os 8 | import subprocess 9 | import json 10 | 11 | from . import log 12 | from . import download 13 | 14 | class TilepackDownloader(object): 15 | HOST = 'https://app.interline.io' 16 | def download(self, outpath, version='latest', api_token=None, compressed=False): 17 | # Endpoint 18 | url = '%s/valhalla_planet_tilepacks/%s/download'%(self.HOST, version) 19 | if version == 'latest': 20 | url = '%s/valhalla_planet_tilepacks/download_latest'%(self.HOST) 21 | # Make url 22 | u = list(urlsplit(url)) 23 | q = parse_qs(u[3]) 24 | if api_token: 25 | q['api_token'] = api_token 26 | u[3] = urlencode(q) 27 | url = urlunsplit(u) 28 | # Download 29 | download.download_curl(url, outpath, compressed=compressed) 30 | 31 | def list(self): 32 | url = "%s/valhalla_planet_tilepacks.json"%(self.HOST) 33 | contents = urlopen(url).read() 34 | tilepacks = json.loads(contents).get('data', []) 35 | tilepacks = sorted(tilepacks, key=lambda x:int(x.get('id'))) 36 | for tilepack in tilepacks: 37 | a = tilepack.get('attributes', {}) 38 | if a.get('bucket_provider') == 'gcp': 39 | bucket = 'gs://%s/%s'%(a['bucket_name'], a['bucket_key']) 40 | elif a.get('bucket_provider') == 's3': 41 | bucket = 's3://%s/%s'%(a['bucket_name'], a['bucket_key']) 42 | print(""" 43 | Tilepack ID: %s 44 | Timestamp: %s 45 | Filename: %s 46 | Storage 
provider: %s 47 | Data sources: %s 48 | URL: %s 49 | Versions: 50 | valhalla: %s 51 | planetutils: %s 52 | tilepack_cutter: %s 53 | """%( 54 | tilepack['id'], 55 | a['osm_planet_datetime'], 56 | os.path.basename(a['bucket_key']), 57 | a['bucket_provider'], 58 | ", ".join(a.get('data_contents', [])), 59 | tilepack.get('links',{}).get('self'), 60 | a['valhalla_version'], 61 | a['interline_planetutils_version'], 62 | a['interline_valhalla_tile_cutter_version'] 63 | )) 64 | -------------------------------------------------------------------------------- /planetutils/tilepack_list.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import absolute_import, unicode_literals 3 | import os 4 | import argparse 5 | 6 | from .tilepack_downloader import TilepackDownloader 7 | 8 | def main(): 9 | parser = argparse.ArgumentParser(usage="List Valhalla Tilepacks.") 10 | args = parser.parse_args() 11 | downloader = TilepackDownloader() 12 | downloader.list() 13 | 14 | if __name__ == '__main__': 15 | main() 16 | -------------------------------------------------------------------------------- /run.sh: -------------------------------------------------------------------------------- 1 | docker run --rm -v $HOME/data:/data -w /data/planets -e AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID} -e AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY} -it planetutils "$@" 2 | 3 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup, find_packages 2 | from codecs import open 3 | from os import path 4 | 5 | here = path.abspath(path.dirname(__file__)) 6 | 7 | # Get the long description from the README file 8 | with open(path.join(here, 'README.md'), encoding='utf-8') as f: 9 | long_description = f.read() 10 | 11 | setup(name='interline-planetutils', 12 | version='0.4.13', 13 | 
class TestFeature(unittest.TestCase):
    """Tests for bbox.Feature's rectangle detection."""

    def test_is_rectangle(self):
        # A two-point LineString spans its bounding box exactly.
        feat = bbox.Feature(geometry={
            "type": "LineString",
            "coordinates": [[30, 10], [20, 40]],
        })
        self.assertEqual(feat.is_rectangle(), True)

    def test_is_rectangle_set(self):
        # A feature whose geometry comes from set_bbox() is rectangular.
        feat = bbox.Feature()
        feat.set_bbox([
            -122.42400169372557, 37.7860125252054,
            -122.40559101104735, 37.7985943621788,
        ])
        self.assertEqual(feat.is_rectangle(), True)

    def test_is_rectangle_polygon(self):
        # Axis-aligned closed ring: four corners plus the repeated start.
        ring = [
            [-123.64, 36.791],
            [-123.64, 38.719],
            [-121.025, 38.719],
            [-121.025, 36.791],
            [-123.64, 36.791],
        ]
        feat = bbox.Feature(geometry={"type": "Polygon", "coordinates": [ring]})
        self.assertEqual(feat.is_rectangle(), True)

    def test_is_rectangle_polygon2(self):
        # Same shape of ring, different coordinates (winding starts at
        # the south-west corner and goes counter-clockwise).
        ring = [
            [-74.501, 40.345],
            [-74.501, 41.097],
            [-73.226, 41.097],
            [-73.226, 40.345],
            [-74.501, 40.345],
        ]
        feat = bbox.Feature(geometry={"type": "Polygon", "coordinates": [ring]})
        self.assertEqual(feat.is_rectangle(), True)

    def test_is_not_rectangle(self):
        # An irregular quadrilateral must not be reported as a rectangle.
        feat = bbox.Feature(geometry={
            "type": "Polygon",
            "coordinates": [
                [[30, 10], [40, 40], [20, 40], [10, 20], [30, 10]],
            ],
        })
        self.assertEqual(feat.is_rectangle(), False)
class TestFlatcoords(unittest.TestCase):
    """Tests for bbox.flatcoords, which flattens GeoJSON coordinate
    arrays of any nesting depth into a flat list of positions."""

    def test_point(self):
        # A bare position comes back wrapped in a single-element list.
        self.assertEqual(bbox.flatcoords([30, 10]), [[30, 10]])

    def test_linestring(self):
        # An already-flat position list is returned unchanged.
        coords = [[30, 10], [10, 30], [40, 40]]
        self.assertEqual(
            bbox.flatcoords(coords),
            [[30, 10], [10, 30], [40, 40]],
        )

    def test_polygon(self):
        # Outer ring followed by an interior ring; the rings are
        # concatenated in order.
        coords = [
            [[35, 10], [45, 45], [15, 40], [10, 20], [35, 10]],
            [[20, 30], [35, 35], [30, 20], [20, 30]],
        ]
        expected = [
            [35, 10], [45, 45], [15, 40], [10, 20], [35, 10],
            [20, 30], [35, 35], [30, 20], [20, 30],
        ]
        self.assertEqual(bbox.flatcoords(coords), expected)

    def test_multipoint(self):
        coords = [[10, 40], [40, 30], [20, 20], [30, 10]]
        self.assertEqual(
            bbox.flatcoords(coords),
            [[10, 40], [40, 30], [20, 20], [30, 10]],
        )

    def test_multilinestring(self):
        coords = [
            [[10, 10], [20, 20], [10, 40]],
            [[40, 40], [30, 30], [40, 20], [30, 10]],
        ]
        expected = [
            [10, 10], [20, 20], [10, 40],
            [40, 40], [30, 30], [40, 20], [30, 10],
        ]
        self.assertEqual(bbox.flatcoords(coords), expected)

    def test_multipolygon(self):
        # Deepest nesting: a list of polygons, each a list of rings.
        coords = [
            [
                [[40, 40], [20, 45], [45, 30], [40, 40]],
            ],
            [
                [[20, 35], [10, 30], [10, 10], [30, 5], [45, 20], [20, 35]],
                [[30, 20], [20, 15], [20, 25], [30, 20]],
            ],
        ]
        expected = [
            [40, 40], [20, 45], [45, 30], [40, 40],
            [20, 35], [10, 30], [10, 10], [30, 5], [45, 20], [20, 35],
            [30, 20], [20, 15], [20, 25], [30, 20],
        ]
        self.assertEqual(bbox.flatcoords(coords), expected)
class TestGeotiffDownloader(unittest.TestCase):
    """Tests for ElevationGeotiffDownloader tile addressing and
    bounding-box tile enumeration."""

    def test_tile_path(self):
        downloader = ElevationGeotiffDownloader('.')
        parts = downloader.tile_path(0, 37, 122)
        # Components are (zoom, x, "<y>.tif"), all as strings.
        self.assertEqual(parts[0], '0')
        self.assertEqual(parts[1], '37')
        self.assertEqual(parts[2], '122.tif')

    def test_get_bbox_tiles(self):
        downloader = ElevationGeotiffDownloader('.', zoom=8)
        # A California-sized bbox covers 100 zoom-8 tiles.
        self.assertEqual(len(downloader.get_bbox_tiles(CA)), 100)
        # The full planet at zoom 8 is 2**8 x 2**8 = 2**16 tiles.
        self.assertEqual(
            len(downloader.get_bbox_tiles([-180, -90, 180, 90])),
            2**16,
        )
    def test_download_bbox_found(self):
        # Verify that a tile already on disk at the expected size is
        # skipped, while a truncated (wrong-size) file is re-downloaded.
        d = tempfile.mkdtemp()
        e = ElevationSkadiDownloader(d)
        # correct size: write exactly HGT_SIZE bytes so this tile counts
        # as already downloaded.
        path = e.tile_path(0, -119, 37)
        os.makedirs(os.path.join(d, path[0]))
        dp1 = os.path.join(d, *path)
        with open(dp1, 'w') as f:
            f.write('0'*e.HGT_SIZE)
        # incorrect size: a 1-byte file must not count as complete.
        path = e.tile_path(0, -119, 36)
        os.makedirs(os.path.join(d, path[0]))
        dp2 = os.path.join(d, *path)
        with open(dp2, 'w') as f:
            f.write('0')
        # expect 154 - 1: the CA bbox needs 154 tiles (see
        # test_download_bbox above); one is already complete on disk, so
        # the mocked download_tile should be invoked only 153 times.
        self.download_bbox(e, e.download_bbox, [CA], 154-1)
        # cleanup: remove both tile files, then their (distinct)
        # latitude-band directories, then the temp root.
        for i in [dp1, dp2]:
            os.unlink(i)
        for i in [dp1, dp2]:
            os.rmdir(os.path.dirname(i))
        os.rmdir(d)
class TestPlanetDownloaderHttp(unittest.TestCase):
    """Tests for planet.PlanetDownloaderHttp without touching the network."""

    def test_download_planet(self):
        downloader = planet.PlanetDownloaderHttp('test.osm.pbf')
        calls = []

        # Replace the curl-backed _download with a recorder so the test
        # only checks which URL/outpath the downloader asks for.
        def record(self, url, outpath):
            calls.append([url, outpath])

        downloader._download = types.MethodType(record, planet.PlanetDownloaderHttp)
        downloader.download_planet()
        self.assertEqual(
            calls[0],
            ['https://planet.openstreetmap.org/pbf/planet-latest.osm.pbf', 'test.osm.pbf'],
        )
--------------------------------------------------------------------------------