├── .github └── workflows │ ├── ci.yml │ └── python-package.yml ├── .gitignore ├── .pre-commit-config.yaml ├── .readthedocs.yml ├── CITATION.cff ├── LICENSE ├── MANIFEST.in ├── Makefile ├── PYPI_README.md ├── README.md ├── binder ├── environment.yml ├── postBuild └── requirements.txt ├── docs ├── Makefile ├── README.md ├── _build │ └── .gitkeep ├── _static │ └── .gitkeep ├── _templates │ └── .gitkeep ├── about.rst ├── basic_usage.rst ├── conf.py ├── geograph.rst ├── geograph.utils.rst ├── geograph.visualisation.rst ├── images │ ├── CEZ_levels_timestack_analysis.png │ ├── CEZ_node_dynamics.png │ ├── CEZ_node_growth_rates.png │ ├── geograph_logo.png │ ├── geograph_logo_small.png │ └── viewer_demo.gif ├── index.rst ├── installation.rst ├── make.bat ├── notebooks │ ├── 1-demo-landscape-metrics-comparison-to-pylandstats.nblink │ ├── 2-demo-landscape-timeseries-metrics.nblink │ ├── 3-demo-geographviewer-polesia.nblink │ ├── 4-demo-geographviewer-chernobyl.nblink │ └── 5-demo-nodediff.nblink ├── tutorials.rst └── utils.py ├── geograph ├── README.md ├── __init__.py ├── _version.py ├── binary_graph_operations.py ├── constants.py ├── demo │ ├── binder_constants.py │ └── plot_settings.py ├── geograph.py ├── geotimeline.py ├── metrics.py ├── tests │ ├── __init__.py │ ├── create_data_test.py │ ├── testdata │ │ ├── adjacent │ │ │ ├── full.gpkg │ │ │ ├── lower_left.gpkg │ │ │ ├── lower_right.gpkg │ │ │ ├── upper_left.gpkg │ │ │ └── upper_right.gpkg │ │ ├── overlapping │ │ │ ├── full.gpkg │ │ │ ├── lower_left.gpkg │ │ │ ├── lower_right.gpkg │ │ │ ├── upper_left.gpkg │ │ │ └── upper_right.gpkg │ │ └── timestack │ │ │ ├── time_0.gpkg │ │ │ ├── time_1.gpkg │ │ │ ├── time_2.gpkg │ │ │ ├── time_3.gpkg │ │ │ └── time_4.gpkg │ └── utils.py ├── utils │ ├── __init__.py │ ├── geopandas_utils.py │ ├── polygon_utils.py │ └── rasterio_utils.py └── visualisation │ ├── __init__.py │ ├── control_widgets.py │ ├── folium_utils.py │ ├── geoviewer.py │ ├── graph_utils.py │ ├── style.py │ └── widget_utils.py ├── notebooks ├── 1-demo-landscape-metrics-comparison-to-pylandstats.ipynb ├── 2-demo-landscape-timeseries-metrics.ipynb ├── 3-demo-geographviewer-polesia.ipynb ├── 4-demo-geographviewer-chernobyl.ipynb ├── 5-demo-nodediff.ipynb ├── 6-tutorial.ipynb └── README.md ├── pylintrc ├── pytest.ini ├── requirements ├── README.md ├── dev-requirements.txt ├── doc-requirements.txt ├── environment.yml ├── requirements.txt └── test-requirements.txt └── setup.py /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | # This workflow will install Python dependencies, run tests and lint with a single version of Python 2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions 3 | 4 | name: Continuous integration loop 5 | 6 | # Triggers the workflow on pull request 7 | # events but only for the main branch 8 | on: 9 | pull_request: 10 | branches: [main] 11 | 12 | jobs: 13 | build: 14 | runs-on: ubuntu-latest # OS to run the tests on 15 | 16 | steps: 17 | - uses: actions/checkout@v2 18 | - name: Set up Python 3.8 19 | uses: actions/setup-python@v2 20 | with: 21 | python-version: 3.8 22 | - name: Install dependencies 23 | run: | 24 | python -m pip install --upgrade pip 25 | pip install pylint pytest 26 | if [ -f requirements/test-requirements.txt ]; then pip install -r requirements/test-requirements.txt; fi 27 | 28 | - name: Lint with pylint 29 | run: | 30 | # lint with the rules defined in pylintrc 31 | pylint --ignore-patterns=lint_test geograph 32 | 33 | # For unit tests later 34 | # - name: Test with pytest 35 | # run: | 36 | # pytest 37 | # - name: Test with coverage 38 | # run: | 39 | # pip3 install coverage 40 | # make test 41 | # env: 42 | # VIRTUAL_ENV: TRUE 43 | -------------------------------------------------------------------------------- /.github/workflows/python-package.yml: -------------------------------------------------------------------------------- 1 | # This workflow will install Python dependencies, run tests and lint with a variety of Python versions 2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions 3 | 4 | name: Python package 5 | 6 | on: 7 | push: 8 | branches: [ "main", "feature/unit-tests" ] 9 | pull_request: 10 | branches: [ "main" ] 11 | 12 | jobs: 13 | build: 14 | 15 | runs-on: ${{matrix.os}} 16 | strategy: 17 | fail-fast: false 18 | matrix: 19 | os: [ubuntu-latest, macos-latest] 20 | python-version: ["3.8", "3.9", "3.10"] 21 | 22 | steps: 23 | - uses: actions/checkout@v3 24 | - name: Set up Python ${{ matrix.python-version }} 25 | uses: actions/setup-python@v3 26 | with: 27 | python-version: ${{ matrix.python-version }} 28 | - name: Install dependencies 29 | run: | 30 | python -m pip install --upgrade pip 31 | python -m pip install pytest 32 | pip install -e . 33 | - name: Test with pytest 34 | run: | 35 | pytest geograph 36 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | 5 | # C extensions 6 | *.so 7 | 8 | # Distribution / packaging 9 | .Python 10 | env/ 11 | dask-worker-space/ 12 | runs/ 13 | build/ 14 | develop-eggs/ 15 | dist/ 16 | downloads/ 17 | outputs/ 18 | .vscode/ 19 | eggs/ 20 | .eggs/ 21 | lib/ 22 | lib64/ 23 | parts/ 24 | sdist/ 25 | var/ 26 | *.egg-info/ 27 | .installed.cfg 28 | *.egg 29 | wandb/ 30 | 31 | # PyInstaller 32 | # Usually these files are written by a python script from a template 33 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 34 | *.manifest 35 | *.spec 36 | 37 | # Installer logs 38 | pip-log.txt 39 | pip-delete-this-directory.txt 40 | 41 | # Unit test / coverage reports 42 | htmlcov/ 43 | .tox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | 51 | # Translations 52 | *.mo 53 | *.pot 54 | 55 | # Django stuff: 56 | *.log 57 | 58 | # Sphinx documentation 59 | docs/_build/ 60 | docs/_static/ 61 | docs/_templates/ 62 | 63 | # PyBuilder 64 | target/ 65 | 66 | # DotEnv configuration 67 | .env 68 | 69 | # Database 70 | *.db 71 | *.rdb 72 | 73 | # Pycharm 74 | .idea 75 | 76 | # VS Code 77 | .vscode/ 78 | 79 | # Spyder 80 | .spyproject/ 81 | 82 | # Jupyter NB Checkpoints 83 | .ipynb_checkpoints/ 84 | 85 | # exclude data from source control by default 86 | io/ 87 | logs 88 | logs 89 | wandb 90 | wandb/ 91 | data 92 | 93 | # Mac OS-specific storage files 94 | .DS_Store 95 | 96 | # vim 97 | *.swp 98 | *.swo 99 | 100 | # Mypy cache 101 | .mypy_cache/ 102 | 103 | # History files 104 | .history 105 | 106 | # VSCode - custom 107 | workspace.code-workspace 108 | gtc-biodiversity.code-workspace 109 | 110 | # GDrive secrets 111 | secrets 112 | token.pickle 113 | 114 | # General pickle files 115 | *.pickle 116 | 117 | # Don't keep large files 118 | *.mp4 119 | *.nc 120 | *.model 121 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/psf/black 3 | rev: 20.8b1 4 | hooks: 5 | - id: black 6 | language_version: python3.8 7 | - repo: https://github.com/pycqa/pylint 8 | rev: pylint-2.6.0 9 | hooks: 10 | - id: pylint 11 | -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | # .readthedocs.yaml 2 | # Read the Docs configuration file 3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 4 | 5 | # Required 6 | version: 2 7 | 8 | # Remove report submodule from docs build 9 | submodules: 10 | exclude: all 11 | 12 | 13 | # Build documentation in the docs/ directory with Sphinx 14 | sphinx: 15 | configuration: docs/conf.py 16 | 17 | # Optionally build your docs in additional formats such as PDF 18 | formats: 19 | - pdf 20 | 21 | # Optionally set the version of Python and requirements required to build your docs 22 | python: 23 | version: 3.8 24 | install: 25 | - requirements: requirements/doc-requirements.txt -------------------------------------------------------------------------------- /CITATION.cff: -------------------------------------------------------------------------------- 1 | cff-version: 1.2.0 2 | message: "If you use this software, please cite it as below." 3 | authors: 4 | - family-names: "Bradley" 5 | given-names: "Herbie" 6 | orcid: "https://orcid.org/0000-0001-5390-1257" 7 | - family-names: "Mathis" 8 | given-names: "Simon Valentin" 9 | orcid: "https://orcid.org/0000-0002-5246-6481" 10 | - family-names: "Findeis" 11 | given-names: "Arduin" 12 | orcid: "https://orcid.org/0000-0003-2163-3854" 13 | - family-names: "Thomas" 14 | given-names: "Simon D.A." 15 | orcid: "https://orcid.org/0000-0001-7911-1659" 16 | - family-names: "Li" 17 | given-names: "Yilin" 18 | orcid: "https://orcid.org/0000-0002-7378-8615" 19 | - family-names: "Green" 20 | given-names: "Katherine" 21 | orcid: "https://orcid.org/0000-0003-4664-3354" 22 | - family-names: "Ashton-Butt" 23 | given-names: "Adham" 24 | orcid: "https://orcid.org/0000-0002-6926-6099" 25 | - family-names: "Rogers" 26 | given-names: "Martin" 27 | orcid: "https://orcid.org/0000-0003-0056-2030" 28 | - family-names: "Dowling" 29 | given-names: "Tom" 30 | orcid: "https://orcid.org/0000-0003-0569-4462" 31 | title: "Geograph" 32 | version: 0.0.2 33 | doi: 10.5281/zenodo.4774141 34 | date-released: 2021-05-19 35 | url: "https://github.com/ai4er-cdt/geograph" 36 | 37 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | Copyright (c) 2020, Biodiversity Team 3 | 4 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 5 | 6 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 7 | 8 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 9 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include MANIFEST.in 2 | include PYPI_README.md 3 | include LICENSE 4 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: clean lint format env jupyter_pro jupyter_dark vscode_pro 2 | 3 | ################################################################################# 4 | # GLOBALS # 5 | ################################################################################# 6 | 7 | PROJECT_DIR := $(shell dirname $(realpath $(lastword $(MAKEFILE_LIST)))) 8 | PROJECT_NAME = gtc-biodiversity 9 | PYTHON_INTERPRETER = python3 10 | PYTHON_ENV_VERSION = 3.8 11 | 12 | ifeq (,$(shell which conda)) 13 | HAS_CONDA=False 14 | else 15 | HAS_CONDA=True 16 | endif 17 | 18 | ################################################################################# 19 | # COMMANDS # 20 | ################################################################################# 21 | 22 | ## Delete all compiled Python files 23 | clean: 24 | find . -type f -name "*.py[co]" -delete 25 | find . -type d -name "__pycache__" -delete 26 | 27 | ## Lint geograph directory using flake8 28 | lint: 29 | pylint --ignore-patterns=lint_test geograph 30 | 31 | ## Format geograph directory using black 32 | format: 33 | black geograph 34 | 35 | ## Set up pre-commit hooks 36 | precommit: 37 | pip install pre-commit black pylint 38 | pre-commit install 39 | 40 | ## Set up python interpreter environment and install basic dependencies 41 | env: 42 | ifeq (True,$(HAS_CONDA)) 43 | @echo ">>> Detected conda, creating conda environment." 44 | 45 | # Create the conda environment 46 | conda env create --prefix=./env -f requirements/environment.yml 47 | 48 | @echo ">>> New conda env created. Activate from project directory with:\nconda activate ./env" 49 | else 50 | @echo ">>> No conda detected. Falling back to virtualenv instead. The python verison will be that of your python3 interpreter." 51 | $(PYTHON_INTERPRETER) -m pip install -q virtualenv virtualenvwrapper 52 | @echo ">>> Installing virtualenvwrapper if not already installed.\nMake sure the following lines are in shell startup file\n\ 53 | export WORKON_HOME=$$HOME/.virtualenvs\nexport PROJECT_HOME=$$HOME/Devel\nsource /usr/local/bin/virtualenvwrapper.sh\n" 54 | @bash -c "source `which virtualenvwrapper.sh`;mkvirtualenv $(PROJECT_NAME) --python=$(PYTHON_INTERPRETER)" 55 | @echo ">>> New virtualenv created. Activate with:\nworkon $(PROJECT_NAME)" 56 | endif 57 | 58 | ################################################################################# 59 | # Self Documenting Commands # 60 | ################################################################################# 61 | 62 | .DEFAULT_GOAL := help 63 | 64 | # Inspired by 65 | # sed script explained: 66 | # /^##/: 67 | # * save line in hold space 68 | # * purge line 69 | # * Loop: 70 | # * append newline + line to hold space 71 | # * go to next line 72 | # * if line starts with doc comment, strip comment character off and loop 73 | # * remove target prerequisites 74 | # * append hold space (+ newline) to line 75 | # * replace newline plus comments by `---` 76 | # * print line 77 | # Separate expressions are necessary because labels cannot be delimited by 78 | # semicolon; see 79 | .PHONY: help 80 | help: 81 | @echo "$$(tput bold)Available rules:$$(tput sgr0)" 82 | @echo 83 | @sed -n -e "/^## / { \ 84 | h; \ 85 | s/.*//; \ 86 | :doc" \ 87 | -e "H; \ 88 | n; \ 89 | s/^## //; \ 90 | t doc" \ 91 | -e "s/:.*//; \ 92 | G; \ 93 | s/\\n## /---/; \ 94 | s/\\n/ /g; \ 95 | p; \ 96 | }" ${MAKEFILE_LIST} \ 97 | | LC_ALL='C' sort --ignore-case \ 98 | | awk -F '---' \ 99 | -v ncol=$$(tput cols) \ 100 | -v indent=19 \ 101 | -v col_on="$$(tput setaf 6)" \ 102 | -v col_off="$$(tput sgr0)" \ 103 | '{ \ 104 | printf "%s%*s%s ", col_on, -indent, $$1, col_off; \ 105 | n = split($$2, words, " "); \ 106 | line_length = ncol - indent; \ 107 | for (i = 1; i <= n; i++) { \ 108 | line_length -= length(words[i]) + 1; \ 109 | if (line_length <= 0) { \ 110 | line_length = ncol - indent - length(words[i]) - 1; \ 111 | printf "\n%*s ", -indent, " "; \ 112 | } \ 113 | printf "%s ", words[i]; \ 114 | } \ 115 | printf "\n"; \ 116 | }' \ 117 | | more $(shell test $(shell uname) = Darwin && echo '--no-init --raw-control-chars') 118 | -------------------------------------------------------------------------------- /PYPI_README.md: -------------------------------------------------------------------------------- 1 |

2 | GeoGraph 3 |

4 | 5 | [![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/ai4er-cdt/geograph/main?urlpath=lab%2Ftree%2Fnotebooks) 6 | [![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg)](https://opensource.org/licenses/MIT) 7 | Code style: black 8 | [![Documentation Status](https://readthedocs.org/projects/geograph/badge/?version=latest)](https://geograph.readthedocs.io/en/latest/?badge=latest) 9 | 10 | 11 | __Table of contents:__ 12 | 1. Description 13 | 2. Installation 14 | 3. Requirements 15 | 4. Documentation 16 | 17 | ## 1. Description 18 | 19 | GeoGraph provides a tool for analysing habitat fragmentation and related problems in landscape ecology. GeoGraph builds a geospatially referenced graph from land cover or field survey data and enables graph-based landscape ecology analysis as well as interactive visualizations. Beyond the graph-based features, GeoGraph also enables the computation of common landscape metrics. 20 | 21 | ## 2. Installation 22 | 23 | GeoGraph is available via pip, so you can install it using 24 | 25 | ``` 26 | pip install geograph 27 | ``` 28 | 29 | Done, you're ready to go! 30 | 31 | You can also visit the [Github repository](https://github.com/ai4er-cdt/geograph). 32 | 33 | See the [documentation](https://geograph.readthedocs.io/) for a full getting started guide or check out the [binder](https://mybinder.org/v2/gh/ai4er-cdt/geograph/main?urlpath=lab%2Ftree%2Fnotebooks) for tutorials on how to get started . 34 | 35 | ## 3. Requirements 36 | 37 | GeoGraph is written in Python 3.8 and builds on [NetworkX](https://github.com/NetworkX/NetworkX), [ipyleaflet](https://github.com/jupyter-widgets/ipyleaflet), [geopandas](https://geopandas.org/), [rasterio](https://rasterio.readthedocs.io/en/latest/) and many more packages. See the [requirements directory](./requirements) for a full list of dependencies. 38 | 39 | ## 4. Documentation 40 | 41 | Our documentation is available at [geograph.readthedocs.io](https://geograph.readthedocs.io/). 42 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |

2 | GeoGraph 3 |

4 | 5 | 6 | [![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/ai4er-cdt/geograph/main?urlpath=lab%2Ftree%2Fnotebooks) 7 | [![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg)](https://opensource.org/licenses/MIT) 8 | Code style: black 9 | [![Documentation Status](https://readthedocs.org/projects/geograph/badge/?version=latest)](https://geograph.readthedocs.io/en/latest/?badge=latest) 10 | [![PyPI version](https://badge.fury.io/py/geograph.svg)](https://badge.fury.io/py/geograph) 11 | [![DOI](https://zenodo.org/badge/320347092.svg)](https://zenodo.org/badge/latestdoi/320347092) 12 | 13 | 14 | ![GeoGraphViewer demo gif](docs/images/viewer_demo.gif) 15 | 16 | 17 | __Table of contents:__ 18 | 1. Description 19 | 1. Installation 20 | 1. Requirements 21 | 1. Documentation 22 | 23 | ## 1. Description 24 | 25 | GeoGraph provides a tool for analysing habitat fragmentation, related problems in landscape ecology. GeoGraph builds a geospatially referenced graph from land cover or field survey data and enables graph-based landscape ecology analysis as well as interactive visualizations. Beyond the graph-based features, GeoGraph also enables the computation of common landscape metrics. 26 | 27 | ## 2. Installation 28 | 29 | GeoGraph is available via pip, so you can install it using 30 | 31 | ``` 32 | pip install geograph 33 | ``` 34 | 35 | Done, you're ready to go! 36 | 37 | See the [documentation](https://geograph.readthedocs.io/) for a full getting started guide. 38 | 39 | ## 3. Requirements 40 | 41 | GeoGraph is written in Python 3.8 and builds on [NetworkX](https://github.com/NetworkX/NetworkX), [ipyleaflet](https://github.com/jupyter-widgets/ipyleaflet) and many more packages. See the [requirements directory](./requirements) for a full list of dependencies. 42 | 43 | ## 4. Documentation 44 | 45 | Our documentation is available at [geograph.readthedocs.io](https://geograph.readthedocs.io/). 46 | -------------------------------------------------------------------------------- /binder/environment.yml: -------------------------------------------------------------------------------- 1 | channels: 2 | - conda-forge 3 | - defaults 4 | dependencies: 5 | - python=3.8 6 | - pip 7 | - pip: 8 | - -r requirements.txt 9 | -------------------------------------------------------------------------------- /binder/postBuild: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo "Running postBuild from" $(pwd) 3 | 4 | echo "Downloading data for binder:" 5 | wget -q -O binder_data.zip "https://www.dropbox.com/s/ghcf4q447nxddy5/binder_data.zip?dl=1" 6 | 7 | echo "Unzipping data for demonstration into data directory" 8 | unzip -d data binder_data.zip 9 | 10 | echo "Removing data zip-file" 11 | rm binder_data.zip 12 | 13 | echo "unset PROJ_LIB environment variable for rasterio > 1.2.0" 14 | unset PROJ_LIB -------------------------------------------------------------------------------- /binder/requirements.txt: -------------------------------------------------------------------------------- 1 | # NOTE: Your final requirements for production/publishing the repo go here. 2 | 3 | # local package 4 | -e ./.. 5 | 6 | # linear algebra and general data analysis 7 | numpy==1.22.0 # arrays, linear algebra 8 | pandas==1.2.3 # tabular data analysis 9 | 10 | # interactive computing 11 | jupyter==1.0.0 # for opening jupyter notebooks in binder 12 | tqdm==4.59.0 # progress bars 13 | 14 | # plotting 15 | matplotlib==3.3.4 # general python plotting 16 | seaborn==0.11.1 # fancier plotting styles # TODO: Check for removal 17 | folium==0.12.1 # plotting maps 18 | ipyleaflet==0.13.6 # plotting ipywidget maps 19 | 20 | # geospatial analysis requirements 21 | # vector data 22 | geopandas==0.9.0 # manipulating geospatial vector data 23 | shapely==1.7.1 # working with vector shapes 24 | rtree==0.9.7 # efficiently querying polygon data 25 | descartes==1.1.0 # plotting geopandas vector data 26 | # raster data 27 | rasterio==1.1.8 # opening and loading raster data (Note: version >= 1.2 requires unsetting PROJ_LIB environment variable which is set by fiona 28 | xarray==0.17.0 # useful data structures 29 | rioxarray==0.3.1 # adaptation of xarray for raterio. # TODO: Check for removal 30 | 31 | # graph requirements 32 | networkx==2.5 # manipulating graph data 33 | 34 | # comparison to fragmentation metrics in binder 35 | pylandstats==2.2.1 36 | 37 | # saving and loading netcdf4 files 38 | netcdf4 -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /docs/README.md: -------------------------------------------------------------------------------- 1 | # How to create Sphinx docs 2 | 3 | 4 | With the dev environment activated, and Sphinx installed you can create the html version by running the following command from this directory: 5 | 6 | ``` 7 | make html 8 | ``` 9 | 10 | And for the pdf version use 11 | 12 | ``` 13 | make latexpdf 14 | ``` 15 | 16 | Note: this last command requires a latex installation, which Jasmin servers don't seem to have. 17 | 18 | ## Other important commands 19 | 20 | To update the module references in the rst files 21 | 22 | ``` 23 | sphinx-apidoc -f -o . .. 24 | ``` 25 | -------------------------------------------------------------------------------- /docs/_build/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ai4er-cdt/geograph/e9436a3a73d9b96bc2c7e139d65a518e0b48c28d/docs/_build/.gitkeep -------------------------------------------------------------------------------- /docs/_static/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ai4er-cdt/geograph/e9436a3a73d9b96bc2c7e139d65a518e0b48c28d/docs/_static/.gitkeep -------------------------------------------------------------------------------- /docs/_templates/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ai4er-cdt/geograph/e9436a3a73d9b96bc2c7e139d65a518e0b48c28d/docs/_templates/.gitkeep -------------------------------------------------------------------------------- /docs/about.rst: -------------------------------------------------------------------------------- 1 | About 2 | ============================== 3 | 4 | This package was created by Herbie Bradley, Arduin Findeis, 5 | Katherine Green, Yilin Li, Simon Mathis, and Simon Thomas, graduate 6 | students in the `AI4ER CDT`_ at the University of Cambridge. It was 7 | initially created as part of the AI4ER Group Team Challenge 2021. 8 | 9 | .. _AI4ER CDT: https://ai4er-cdt.esc.cam.ac.uk/ -------------------------------------------------------------------------------- /docs/basic_usage.rst: -------------------------------------------------------------------------------- 1 | Basic Usage 2 | ----------- 3 | 4 | Given a variable ``data`` that is one of 5 | 6 | - a path to a pickle file or compressed pickle file to load the graph from, 7 | - a path to vector data in GPKG or Shapefile format, 8 | - a path to raster data in GeoTiff format, 9 | - a numpy array containing raster data, 10 | - a dataframe containing polygons, 11 | 12 | you can create a ``GeoGraph`` using 13 | 14 | .. code-block:: python 15 | 16 | from geograph import GeoGraph 17 | graph = GeoGraph(data) 18 | 19 | 20 | To visualise this graph use the following code in a jupyter notebook 21 | 22 | .. code-block:: python 23 | 24 | from geograph.visualisation.geoviewer import GeoGraphViewer 25 | viewer = GeoGraphViewer() 26 | viewer.add_graph(graph, name='my_graph') 27 | viewer.enable_graph_controls() 28 | viewer 29 | 30 | This should then look something like 31 | 32 | .. image:: images/viewer_demo.gif 33 | :width: 500 34 | :alt: viewer demo 35 | 36 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # pylint: disable=missing-module-docstring 2 | # Configuration file for the Sphinx documentation builder. 3 | # 4 | # This file only contains a selection of the most common options. For a full 5 | # list see the documentation: 6 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 7 | 8 | # -- Path setup -------------------------------------------------------------- 9 | 10 | # If extensions (or modules to document with autodoc) are in another directory, 11 | # add these directories to sys.path here. If the directory is relative to the 12 | # documentation root, use os.path.abspath to make it absolute, like shown here. 13 | # 14 | import os 15 | import sys 16 | 17 | sys.path.insert(0, os.path.abspath("..")) 18 | 19 | 20 | # -- Project information ----------------------------------------------------- 21 | 22 | project = "GeoGraph - Package Documentation" 23 | copyright = ( # pylint: disable=redefined-builtin 24 | "2021, Herbie Bradley, Arduin Findeis, Katherine Green," 25 | " Yilin Li, Simon Mathis, Simon Thomas" 26 | ) 27 | author = "" 28 | 29 | 30 | # -- General configuration --------------------------------------------------- 31 | 32 | # Add any Sphinx extension module names here, as strings. They can be 33 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 34 | # ones. 35 | 36 | extensions = [ 37 | "sphinx.ext.autodoc", 38 | "sphinx.ext.napoleon", 39 | "sphinx_autodoc_typehints", 40 | "nbsphinx", 41 | "nbsphinx_link", 42 | ] 43 | 44 | # Add any paths that contain templates here, relative to this directory. 45 | templates_path = ["_templates"] 46 | 47 | # List of patterns, relative to source directory, that match files and 48 | # directories to ignore when looking for source files. 49 | # This pattern also affects html_static_path and html_extra_path. 50 | exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] 51 | 52 | 53 | # -- Options for HTML output ------------------------------------------------- 54 | 55 | # The theme to use for HTML and HTML Help pages. See the documentation for 56 | # a list of builtin themes. 57 | # 58 | html_theme = "sphinx_rtd_theme" 59 | 60 | # Add any paths that contain custom static files (such as style sheets) here, 61 | # relative to this directory. They are copied after the builtin static files, 62 | # so a file named "default.css" will overwrite the builtin "default.css". 63 | html_static_path = ["_static"] 64 | 65 | html_logo = "images/geograph_logo.png" 66 | 67 | html_theme_options = { 68 | "style_nav_header_background": "#a0a0a0", 69 | "logo_only": True, 70 | "display_version": True, 71 | } 72 | 73 | html_favicon = "images/geograph_logo_small.png" 74 | 75 | # This adds the 'edit on github' banner on top right corner 76 | html_context = { 77 | "display_github": True, 78 | "github_user": "ai4er-cdt", 79 | "github_repo": "gtc-biodiversity", 80 | "github_version": "main/docs/", 81 | } 82 | 83 | # Latex options 84 | latex_logo = "./images/geograph_logo.png" 85 | latex_elements = { 86 | "extraclassoptions": "openany,oneside", 87 | "papersize": "a4paper", 88 | } 89 | -------------------------------------------------------------------------------- /docs/geograph.rst: -------------------------------------------------------------------------------- 1 | GeoGraph API Reference 2 | ====================== 3 | 4 | Subpackages 5 | ----------- 6 | 7 | .. toctree:: 8 | :maxdepth: 4 9 | 10 | geograph.utils 11 | geograph.visualisation 12 | 13 | Submodules 14 | ---------- 15 | 16 | geograph.binary\_graph\_operations module 17 | ----------------------------------------- 18 | 19 | .. automodule:: geograph.binary_graph_operations 20 | :members: 21 | :undoc-members: 22 | :show-inheritance: 23 | :special-members: __init__ 24 | 25 | geograph.constants module 26 | ------------------------- 27 | 28 | .. automodule:: geograph.constants 29 | :members: 30 | :undoc-members: 31 | :show-inheritance: 32 | :special-members: __init__ 33 | 34 | geograph.geograph module 35 | ------------------------ 36 | 37 | .. automodule:: geograph.geograph 38 | :members: 39 | :undoc-members: 40 | :show-inheritance: 41 | :special-members: __init__ 42 | 43 | geograph.geotimeline module 44 | --------------------------- 45 | 46 | .. automodule:: geograph.geotimeline 47 | :members: 48 | :undoc-members: 49 | :show-inheritance: 50 | :special-members: __init__ 51 | 52 | geograph.metrics module 53 | ----------------------- 54 | 55 | .. automodule:: geograph.metrics 56 | :members: 57 | :undoc-members: 58 | :show-inheritance: 59 | :special-members: __init__ 60 | 61 | Module contents 62 | --------------- 63 | 64 | .. automodule:: geograph 65 | :members: 66 | :undoc-members: 67 | :show-inheritance: 68 | :special-members: __init__ 69 | -------------------------------------------------------------------------------- /docs/geograph.utils.rst: -------------------------------------------------------------------------------- 1 | geograph.utils package 2 | ====================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | geograph.utils.geopandas\_utils module 8 | -------------------------------------- 9 | 10 | .. automodule:: geograph.utils.geopandas_utils 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | :special-members: __init__ 15 | 16 | geograph.utils.polygon\_utils module 17 | ------------------------------------ 18 | 19 | .. automodule:: geograph.utils.polygon_utils 20 | :members: 21 | :undoc-members: 22 | :show-inheritance: 23 | :special-members: __init__ 24 | 25 | geograph.utils.rasterio\_utils module 26 | ------------------------------------- 27 | 28 | .. automodule:: geograph.utils.rasterio_utils 29 | :members: 30 | :undoc-members: 31 | :show-inheritance: 32 | :special-members: __init__ 33 | 34 | Module contents 35 | --------------- 36 | 37 | .. automodule:: geograph.utils 38 | :members: 39 | :undoc-members: 40 | :show-inheritance: 41 | :special-members: __init__ 42 | -------------------------------------------------------------------------------- /docs/geograph.visualisation.rst: -------------------------------------------------------------------------------- 1 | geograph.visualisation package 2 | ============================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | geograph.visualisation.control\_widgets module 8 | ---------------------------------------------- 9 | 10 | .. automodule:: geograph.visualisation.control_widgets 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | :special-members: __init__ 15 | 16 | geograph.visualisation.folium\_utils module 17 | ------------------------------------------- 18 | 19 | .. automodule:: geograph.visualisation.folium_utils 20 | :members: 21 | :undoc-members: 22 | :show-inheritance: 23 | :special-members: __init__ 24 | 25 | geograph.visualisation.geoviewer module 26 | --------------------------------------- 27 | 28 | .. automodule:: geograph.visualisation.geoviewer 29 | :members: 30 | :undoc-members: 31 | :show-inheritance: 32 | :special-members: __init__ 33 | 34 | geograph.visualisation.graph\_utils module 35 | ------------------------------------------ 36 | 37 | .. automodule:: geograph.visualisation.graph_utils 38 | :members: 39 | :undoc-members: 40 | :show-inheritance: 41 | :special-members: __init__ 42 | 43 | geograph.visualisation.style module 44 | ----------------------------------- 45 | 46 | .. automodule:: geograph.visualisation.style 47 | :members: 48 | :undoc-members: 49 | :show-inheritance: 50 | :special-members: __init__ 51 | 52 | geograph.visualisation.widget\_utils module 53 | ------------------------------------------- 54 | 55 | .. automodule:: geograph.visualisation.widget_utils 56 | :members: 57 | :undoc-members: 58 | :show-inheritance: 59 | :special-members: __init__ 60 | 61 | Module contents 62 | --------------- 63 | 64 | .. automodule:: geograph.visualisation 65 | :members: 66 | :undoc-members: 67 | :show-inheritance: 68 | :special-members: __init__ 69 | -------------------------------------------------------------------------------- /docs/images/CEZ_levels_timestack_analysis.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ai4er-cdt/geograph/e9436a3a73d9b96bc2c7e139d65a518e0b48c28d/docs/images/CEZ_levels_timestack_analysis.png -------------------------------------------------------------------------------- /docs/images/CEZ_node_dynamics.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ai4er-cdt/geograph/e9436a3a73d9b96bc2c7e139d65a518e0b48c28d/docs/images/CEZ_node_dynamics.png -------------------------------------------------------------------------------- /docs/images/CEZ_node_growth_rates.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ai4er-cdt/geograph/e9436a3a73d9b96bc2c7e139d65a518e0b48c28d/docs/images/CEZ_node_growth_rates.png -------------------------------------------------------------------------------- /docs/images/geograph_logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ai4er-cdt/geograph/e9436a3a73d9b96bc2c7e139d65a518e0b48c28d/docs/images/geograph_logo.png -------------------------------------------------------------------------------- /docs/images/geograph_logo_small.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ai4er-cdt/geograph/e9436a3a73d9b96bc2c7e139d65a518e0b48c28d/docs/images/geograph_logo_small.png -------------------------------------------------------------------------------- /docs/images/viewer_demo.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ai4er-cdt/geograph/e9436a3a73d9b96bc2c7e139d65a518e0b48c28d/docs/images/viewer_demo.gif -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. GeoGraph documentation master file, created by 2 | sphinx-quickstart on Thu Mar 18 17:37:46 2021. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Home - GeoGraph Documentation 7 | ============================== 8 | Welcome to the GeoGraph documentation! 9 | 10 | What is GeoGraph? 11 | ----------------- 12 | 13 | The Python package GeoGraph is built around the idea of geospatially 14 | referenced graph - a *GeoGraph*. Given either raster 15 | or polygon data as input, a GeoGraph is constructed by assigning each 16 | separate patch a graph node. In a second step, edges are added between nodes whenever the 17 | patches corresponding to two nodes are within a user-specificed distance. Based on this basic idea, 18 | the GeoGraph package provides a wide range of visualisation and analysis tools. 19 | 20 | What can it be used for? 21 | ------------------------ 22 | 23 | Landscape Ecology 24 | *Standard Analysis* 25 | Building on the graph-based data structure, the GeoGraph package is able to 26 | compute most of the standard metrics used in landscape ecology. Combined with 27 | an interactive user interface, it provides a powerful Python tool for 28 | fragmentation and connectivity analysis. 29 | 30 | *Policy Advice* 31 | Using the tools provided for landscape ecology, the GeoGraph package can be 32 | used to give two key insights for policy decisions: 33 | 34 | 1. Recommend conservation areas 35 | 2. Flag areas at potential risk of fragmentation 36 | 37 | *Temporal Analysis* 38 | The graph-based nature of the GeoGraph package allows us to track individual 39 | patches over time, and use this information for detailed temporal analysis of 40 | habitats. 41 | 42 | Polygon Data Visualisation 43 | Whilst our primary use-cases are in landscape ecology, this package can be used 44 | to investigate any kind of polygon data files, including ``.shp`` shape files. 45 | The :class:`GeoGraphViewer` allows for the data can be interactively viewed. 46 | 47 | 48 | 49 | 50 | 51 | 52 | .. toctree:: 53 | :maxdepth: 2 54 | :caption: Contents: 55 | 56 | self 57 | installation 58 | tutorials 59 | geograph 60 | about 61 | 62 | 63 | Indices and tables 64 | ================== 65 | 66 | * :ref:`genindex` 67 | * :ref:`modindex` 68 | * :ref:`search` 69 | 70 | 71 | -------------------------------------------------------------------------------- /docs/installation.rst: -------------------------------------------------------------------------------- 1 | Installation 2 | ============ 3 | 4 | You can install GeoGraph via pip using 5 | 6 | .. code-block:: shell 7 | 8 | pip install geograph 9 | 10 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=. 11 | set BUILDDIR=_build 12 | 13 | if "%1" == "" goto help 14 | 15 | %SPHINXBUILD% >NUL 2>NUL 16 | if errorlevel 9009 ( 17 | echo. 18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 19 | echo.installed, then set the SPHINXBUILD environment variable to point 20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 21 | echo.may add the Sphinx directory to PATH. 22 | echo. 23 | echo.If you don't have Sphinx installed, grab it from 24 | echo.http://sphinx-doc.org/ 25 | exit /b 1 26 | ) 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /docs/notebooks/1-demo-landscape-metrics-comparison-to-pylandstats.nblink: -------------------------------------------------------------------------------- 1 | 2 | { 3 | "path": "../../notebooks/1-demo-landscape-metrics-comparison-to-pylandstats.ipynb" 4 | } 5 | -------------------------------------------------------------------------------- /docs/notebooks/2-demo-landscape-timeseries-metrics.nblink: -------------------------------------------------------------------------------- 1 | 2 | { 3 | "path": "../../notebooks/2-demo-landscape-timeseries-metrics.ipynb" 4 | } 5 | -------------------------------------------------------------------------------- /docs/notebooks/3-demo-geographviewer-polesia.nblink: -------------------------------------------------------------------------------- 1 | 2 | { 3 | "path": "../../notebooks/3-demo-geographviewer-polesia.ipynb" 4 | } 5 | -------------------------------------------------------------------------------- /docs/notebooks/4-demo-geographviewer-chernobyl.nblink: -------------------------------------------------------------------------------- 1 | 2 | { 3 | "path": "../../notebooks/4-demo-geographviewer-chernobyl.ipynb" 4 | } 5 | -------------------------------------------------------------------------------- /docs/notebooks/5-demo-nodediff.nblink: -------------------------------------------------------------------------------- 1 | 2 | { 3 | "path": "../../notebooks/5-demo-nodediff.ipynb" 4 | } 5 | -------------------------------------------------------------------------------- /docs/tutorials.rst: -------------------------------------------------------------------------------- 1 | 2 | Tutorials 3 | ====================== 4 | 5 | Here we have a number of tutorials introducing the different features of the GeoGraph package. 6 | You can also run the tutorials just in your browser without installing anything locally, by using binder with `this link`_. 7 | 8 | .. _this link: https://mybinder.org/v2/gh/ai4er-cdt/gtc-biodiversity/main?urlpath=lab%2Ftree%2Fnotebooks 9 | 10 | .. toctree:: 11 | :maxdepth: 1 12 | 13 | basic_usage 14 | notebooks/1-demo-landscape-metrics-comparison-to-pylandstats 15 | notebooks/2-demo-landscape-timeseries-metrics 16 | notebooks/3-demo-geographviewer-polesia 17 | notebooks/4-demo-geographviewer-chernobyl 18 | notebooks/5-demo-nodediff -------------------------------------------------------------------------------- /docs/utils.py: -------------------------------------------------------------------------------- 1 | """ Module with utility methods for docs""" 2 | 3 | 4 | import glob 5 | 6 | 7 | def create_notebook_links(): 8 | """Create links and entries for notebook in sphinx docs. 9 | 10 | Example usage (in home repo directory): 11 | > import docs.utils 12 | > docs.utils.create_notebook_links() 13 | """ 14 | notebooks = glob.glob("./notebooks/*.ipynb") 15 | notebooks.sort() 16 | 17 | file_name_tmp = "docs/notebooks/{}.nblink" 18 | file_content_tmp = """ 19 | {{ 20 | "path": "../../notebooks/{}.ipynb" 21 | }} 22 | """ 23 | 24 | rst_index = """ 25 | Advanced Tutorials 26 | ====================== 27 | 28 | .. toctree:: 29 | :maxdepth: 1 30 | """ 31 | 32 | for path in notebooks: 33 | nb_name = path.split("/")[-1].split(".")[0] 34 | file_name = file_name_tmp.format(nb_name) 35 | file_content = file_content_tmp.format(nb_name) 36 | with open(file_name, "w") as f: 37 | f.write(file_content) 38 | rst_index += "\n notebooks/{}".format(nb_name) 39 | 40 | with open("docs/tutorials.rst", "w") as f: 41 | f.write(rst_index) 42 | 43 | 44 | if __name__ == "__main__": 45 | create_notebook_links() 46 | -------------------------------------------------------------------------------- /geograph/README.md: -------------------------------------------------------------------------------- 1 | # Source code folder 2 | 3 | This folder contains the GeoGraph package. -------------------------------------------------------------------------------- /geograph/__init__.py: -------------------------------------------------------------------------------- 1 | # pylint: disable=missing-module-docstring 2 | from geograph._version import __version__ 3 | from geograph.geograph import ComponentGeoGraph, GeoGraph, HabitatGeoGraph 4 | -------------------------------------------------------------------------------- /geograph/_version.py: -------------------------------------------------------------------------------- 1 | """Module to store the __version__ string.""" 2 | __version__ = "0.0.2" 3 | -------------------------------------------------------------------------------- /geograph/binary_graph_operations.py: -------------------------------------------------------------------------------- 1 | """Contains tools for binary operations between GeoGraph objects.""" 2 | from __future__ import annotations 3 | from typing import TYPE_CHECKING, Dict, List, Tuple 4 | import geopandas as gpd 5 | from shapely.geometry.base import BaseGeometry 6 | from shapely.geometry.polygon import Polygon 7 | import geograph.utils.geopandas_utils as gpd_utils 8 | from geograph.utils.polygon_utils import EMPTY_POLYGON, collapse_empty_polygon 9 | 10 | if TYPE_CHECKING: 11 | import geograph 12 | 13 | 14 | class NodeMap: 15 | """Class to store node mappings between two graphs (the src_graph and trg_graph).""" 16 | 17 | def __init__( 18 | self, 19 | src_graph: geograph.GeoGraph, 20 | trg_graph: geograph.GeoGraph, 21 | mapping: Dict[int, List[int]], 22 | ) -> None: 23 | """ 24 | Class to store node mappings between two graphs (`trg_graph` and `src_graph`). 25 | 26 | This class stores a dictionary of node one-to-many relationships of nodes from 27 | `src_graph` to `trg_graph`. It also provides support for convenient methods for 28 | inverting the mapping and bundles the mapping information with references to 29 | the `src_graph` and `trg_graph` 30 | 31 | Args: 32 | src_graph (GeoGraph): Domain of the node map (keys in `mapping` correspond 33 | to indices from the `src_graph`). 34 | trg_graph (GeoGraph): Image of the node map (values in `mapping` correspond 35 | to indices from the `trg_graph`) 36 | mapping (Dict[int, List[int]], optional): A lookup table for the map which 37 | maps nodes form `src_graph` to `trg_graph`. 38 | """ 39 | self._src_graph = src_graph 40 | self._trg_graph = trg_graph 41 | self._mapping = mapping 42 | 43 | @property 44 | def src_graph(self) -> geograph.GeoGraph: 45 | """Keys in the mapping dict correspond to node indices in the `src_graph`.""" 46 | return self._src_graph 47 | 48 | @property 49 | def trg_graph(self) -> geograph.GeoGraph: 50 | """Values in the mapping dict correspond to node indices in the `trg_graph`.""" 51 | return self._trg_graph 52 | 53 | @property 54 | def mapping(self) -> Dict[int, List[int]]: 55 | """Look-up table connecting node indices from `src_graph` to `trg_graph`.""" 56 | return self._mapping 57 | 58 | def __invert__(self) -> NodeMap: 59 | """Compute the inverse NodeMap.""" 60 | return self.invert() 61 | 62 | def __eq__(self, other: object) -> bool: 63 | """Check two NodeMaps for equality.""" 64 | if not isinstance(other, NodeMap): 65 | return False 66 | return ( 67 | self.src_graph == other.src_graph 68 | and self.trg_graph == other.trg_graph 69 | and self.mapping == other.mapping 70 | ) 71 | 72 | def invert(self) -> NodeMap: 73 | """Compute the inverse NodeMap from `trg_graph` to `src_graph`.""" 74 | inverted_mapping: Dict = {index: [] for index in self.trg_graph.df.index} 75 | 76 | for src_node in self.src_graph.df.index: 77 | for trg_node in self.mapping[src_node]: 78 | inverted_mapping[trg_node].append(src_node) 79 | 80 | return NodeMap( 81 | src_graph=self.trg_graph, trg_graph=self.src_graph, mapping=inverted_mapping 82 | ) 83 | 84 | 85 | def identify_node( 86 | node: dict, other_graph: geograph.GeoGraph, mode: str = "corner" 87 | ) -> List[int]: 88 | """ 89 | Return list of all node ids in `other_graph` which identify with the given `node`. 90 | 91 | Args: 92 | node (dict): The node for which to find nodes in `other_graphs` that can be 93 | identified with `node`. 94 | other_graph (GeoGraph): The GeoGraph object in which to search for 95 | identifications 96 | mode (str, optional): Must be one of `corner`, `edge` or `interior`. Defaults 97 | to "corner". 98 | The different modes correspond to different rules for identification: 99 | 100 | - corner: Polygons of the same `class_label` which overlap, touch in their 101 | edges or corners will be identified with each other. (fastest) 102 | - edge: Polygons of the same `class_label` which overlap or touch in their 103 | edges will be identified with each other. 104 | - interior: Polygons of the same `class_label` which overlap will be 105 | identified with each other. Touching corners or edges are not counted. 106 | 107 | Returns: 108 | List[int]: List of node ids in `other_graph` which identify with `node`. 109 | """ 110 | return gpd_utils.identify_node(node, other_graph.df, mode=mode) 111 | 112 | 113 | def identify_graphs( 114 | graph1: geograph.GeoGraph, graph2: geograph.GeoGraph, mode: str 115 | ) -> NodeMap: 116 | """ 117 | Idenitfy all nodes from `graph1` with nodes from `graph2` based on the given `mode`. 118 | 119 | Args: 120 | graph1 (GeoGraph): The GeoGraph whose node indicies will form the domain 121 | graph2 (GeoGraph): The GeoGraph whose node indices will form the image (target) 122 | mode (str): The mode to use for node identification. Must be one of `corner`, 123 | `edge` or `interior`. 124 | The different modes correspond to different rules for identification: 125 | 126 | - corner: Polygons of the same `class_label` which overlap, touch in their 127 | edges or corners will be identified with each other. (fastest) 128 | - edge: Polygons of the same `class_label` which overlap or touch in their 129 | edges will be identified with each other. 130 | - interior: Polygons of the same `class_label` which overlap will be 131 | identified with each other. Touching corners or edges are not counted. 132 | 133 | Returns: 134 | NodeMap: A NodeMap containing the map from `graph1` to `graph2`. 135 | """ 136 | mapping = gpd_utils.identify_dfs(graph1.df, graph2.df, mode=mode) 137 | 138 | return NodeMap(src_graph=graph1, trg_graph=graph2, mapping=mapping) 139 | 140 | 141 | def graph_polygon_diff(node_map: NodeMap) -> Tuple[gpd.GeoDataFrame, gpd.GeoDataFrame]: 142 | """ 143 | Return the polygons that were added/removed going from `src_graph` to `trg_graph`. 144 | 145 | Args: 146 | node_map (NodeMap): The node map from `src_graph` to `trg_graph` 147 | 148 | Returns: 149 | Tuple[GeoDataFrame, GeoDataFrame]: Added parts and removed parts as geopandas 150 | GeoDataFrame objects with the same index and crs as the src graph. 151 | """ 152 | assert ( 153 | node_map.src_graph.crs == node_map.trg_graph.crs 154 | ), "CRS systems of graphs do not agree." 155 | 156 | trg_minus_src = [] 157 | src_minus_trg = [] 158 | for index in node_map.src_graph.df.index: 159 | added_part, removed_part = node_polygon_diff(index, node_map) 160 | trg_minus_src.append(added_part) 161 | src_minus_trg.append(removed_part) 162 | 163 | trg_minus_src = gpd.GeoDataFrame( 164 | index=node_map.src_graph.df.index, 165 | geometry=trg_minus_src, 166 | crs=node_map.src_graph.crs, 167 | ) 168 | 169 | src_minus_trg = gpd.GeoDataFrame( 170 | index=node_map.src_graph.df.index, 171 | geometry=src_minus_trg, 172 | crs=node_map.src_graph.crs, 173 | ) 174 | 175 | return trg_minus_src, src_minus_trg 176 | 177 | 178 | def node_polygon_diff( 179 | src_node_id: int, node_map: NodeMap 180 | ) -> Tuple[BaseGeometry, BaseGeometry]: 181 | """ 182 | Return the (multi)polygon areas that were added/removed from the given node. 183 | 184 | Args: 185 | src_node_id (int): The id of the node in `src_graph` to check. 186 | node_map (NodeMap): The node map object between `src_graph` and `trg_graph` 187 | 188 | Returns: 189 | Tuple[BaseGeometry, BaseGeometry]: Added part and removed part as shapely 190 | BaseGeometry objects. 191 | """ 192 | src_polygon: Polygon = node_map.src_graph.df.geometry.loc[src_node_id] 193 | trg_node_ids: List[int] = node_map.mapping[src_node_id] 194 | 195 | if len(trg_node_ids) > 0: 196 | trg_polygon: Polygon = node_map.trg_graph.df.geometry.loc[ 197 | trg_node_ids 198 | ].unary_union 199 | removed_part = collapse_empty_polygon(src_polygon.difference(trg_polygon)) 200 | added_part = collapse_empty_polygon(trg_polygon.difference(src_polygon)) 201 | 202 | else: 203 | removed_part = src_polygon 204 | added_part = EMPTY_POLYGON 205 | 206 | return added_part, removed_part 207 | -------------------------------------------------------------------------------- /geograph/constants.py: -------------------------------------------------------------------------------- 1 | """All project wide constants are saved in this module.""" 2 | # Place all your constants here 3 | import os 4 | import pathlib 5 | 6 | # Note: constants should be UPPER_CASE 7 | constants_path = pathlib.Path(os.path.realpath(__file__)) 8 | SRC_PATH = pathlib.Path(os.path.dirname(constants_path)) 9 | PROJECT_PATH = pathlib.Path(os.path.dirname(SRC_PATH)) 10 | 11 | # Coordinate reference systems (crs) 12 | WGS84 = "EPSG:4326" # WGS84 standard crs (latitude, longitude) 13 | UTM35N = "EPSG:32635" # https://epsg.io/32635 - preferred crs for chernobyl region 14 | 15 | # Coordinates 16 | CHERNOBYL_COORDS_WGS84 = ( 17 | 51.389167, 18 | 30.099444, 19 | ) # coordinates of chernobyl power reactor 20 | CHERNOBYL_COORDS_UTM35N = (715639.1222290158, 5697662.734402668) 21 | -------------------------------------------------------------------------------- /geograph/demo/binder_constants.py: -------------------------------------------------------------------------------- 1 | """This file is for constants relevant to the binder demo.""" 2 | from geograph.constants import PROJECT_PATH 3 | 4 | 5 | # Data directory on GWS 6 | DATA_DIR = PROJECT_PATH / "data" 7 | # Polygon data of Chernobyl Exclusion Zone (CEZ) 8 | ROIS = DATA_DIR / "chernobyl" / "chernobyl_rois.geojson" 9 | 10 | # Link to ESA CCI Land cover Legend 11 | ESA_CCI_LEGEND_LINK = ( 12 | "https://www.dropbox.com/s/bget0phawnahd8v/ESACCI-LC-Legend.csv?dl=1" 13 | ) 14 | -------------------------------------------------------------------------------- /geograph/demo/plot_settings.py: -------------------------------------------------------------------------------- 1 | """ 2 | plot_settings.py 3 | ================ 4 | 5 | usage: 6 | 7 | from geograph.plot_settings import ( 8 | ps_defaults, 9 | label_subplots, 10 | get_dim, 11 | set_dim, 12 | PALETTE, 13 | STD_CLR_LIST, 14 | CAM_BLUE, 15 | BRICK_RED, 16 | OX_BLUE, 17 | ) 18 | 19 | ps_defaults(use_tex=True) 20 | 21 | # ---- example set of graphs --- 22 | 23 | import numpy as np 24 | import matplotlib.pyplot as plt 25 | 26 | 27 | fig, axs = plt.subplots(2, 2) 28 | 29 | x = np.linspace(0, np.pi, num=100) 30 | axs[0, 0].plot(x, np.sin(x), color=STD_CLR_LIST[0]) 31 | axs[0, 1].plot(x, np.cos(x), color=STD_CLR_LIST[1]) 32 | axs[1, 0].plot(x, np.sinc(x), color=STD_CLR_LIST[2]) 33 | axs[1, 1].plot(x, np.abs(x), color=STD_CLR_LIST[3]) 34 | 35 | # set size 36 | set_dim(fig, fraction_of_line_width=1, ratio=(5 ** 0.5 - 1) / 2) 37 | 38 | # label subplots 39 | label_subplots(axs, start_from=0, fontsize=10) 40 | 41 | """ 42 | import itertools 43 | from distutils.spawn import find_executable 44 | from typing import Sequence, Tuple 45 | import matplotlib 46 | import matplotlib.style 47 | import numpy as np 48 | import seaborn as sns 49 | 50 | 51 | def ps_defaults(use_tex: bool = True) -> None: 52 | """Apply plotting style to produce nice looking figures. 53 | Call this at the start of a script which uses `matplotlib`. 54 | Can enable `matplotlib` LaTeX backend if it is available. 55 | 56 | Args: 57 | use_tex (bool, optional): Whether or not to use latex matplotlib backend. 58 | Defaults to True. 59 | 60 | Example:: 61 | >>> from geograph.demo.plot_settings import ps_defaults 62 | >>> ps_defaults(use_tex=False) 63 | """ 64 | # matplotlib.use('agg') this used to be required for jasmin 65 | p_general = { 66 | "font.family": "STIXGeneral", # Nice alternative font. 67 | # "font.family": "serif", 68 | # "font.serif": [], 69 | # Use 10pt font in plots, to match 10pt font in document 70 | "axes.labelsize": 10, 71 | "font.size": 10, 72 | # Make the legend/label fonts a little smaller 73 | "legend.fontsize": 10, 74 | "xtick.labelsize": 9, 75 | "ytick.labelsize": 9, 76 | # Set the font for maths 77 | "mathtext.fontset": "cm", 78 | # "font.sans-serif": ["DejaVu Sans"], # gets rid of error messages 79 | # "font.monospace": [], 80 | "lines.linewidth": 1.0, 81 | "scatter.marker": "+", 82 | "image.cmap": "RdYlBu_r", 83 | } 84 | matplotlib.rcParams.update(p_general) 85 | matplotlib.style.use("seaborn-colorblind") 86 | 87 | if use_tex and find_executable("latex"): 88 | p_setting = { 89 | "pgf.texsystem": "pdflatex", 90 | "text.usetex": True, 91 | "pgf.preamble": ( 92 | r"\usepackage[utf8x]{inputenc} \usepackage[T1]{fontenc}" 93 | + r"\usepackage[separate -uncertainty=true]{siunitx}" 94 | ), 95 | } 96 | else: 97 | p_setting = { 98 | "text.usetex": False, 99 | } 100 | matplotlib.rcParams.update(p_setting) 101 | 102 | 103 | def label_subplots( 104 | axs: Sequence[matplotlib.pyplot.axes], 105 | labels: Sequence[str] = [chr(ord("`") + z) for z in range(1, 27)], 106 | start_from: int = 0, 107 | fontsize: int = 10, 108 | x_pos: float = 0.02, 109 | y_pos: float = 0.95, 110 | ) -> None: 111 | """Adds (a), (b), (c) at the top left of each subplot panel. 112 | Labelling order achieved through ravelling the input list / array. 113 | 114 | Args: 115 | axs (Sequence[matplotlib.axes]): list or array of subplot axes. 116 | labels (Sequence[str]): A sequence of labels for the subplots. 117 | start_from (int, optional): skips first ${start_from} labels. Defaults to 0. 118 | fontsize (int, optional): Font size for labels. Defaults to 10. 119 | x_pos (float, optional): Relative x position of labels. Defaults to 0.02. 120 | y_pos (float, optional): Relative y position of labels. Defaults to 0.95. 121 | 122 | Returns: 123 | void; alters the `matplotlib.pyplot.axes` objects 124 | 125 | """ 126 | if isinstance(axs, list): 127 | axs = np.asarray(axs) 128 | assert len(axs.ravel()) + start_from <= len(labels) 129 | subset_labels = [] 130 | for i in range(len(axs.ravel())): 131 | subset_labels.append(labels[i + start_from]) 132 | for i, label in enumerate(subset_labels): 133 | axs.ravel()[i].text( 134 | x_pos, 135 | y_pos, 136 | str("(" + label + ")"), 137 | color="black", 138 | transform=axs.ravel()[i].transAxes, 139 | fontsize=fontsize, 140 | fontweight="bold", 141 | va="top", 142 | ) 143 | 144 | 145 | def get_dim( 146 | width: float = 600, 147 | fraction_of_line_width: float = 1, 148 | ratio: float = (5**0.5 - 1) / 2, 149 | ) -> Tuple[float, float]: 150 | """Return figure height, width in inches to avoid scaling in latex. 151 | 152 | Default is golden ratio, with figur occupying full page width. 153 | 154 | Args: 155 | width (float): Textwidth of the report to make fontsizes match. 156 | fraction_of_line_width (float, optional): Fraction of the document width 157 | which you wish the figure to occupy. Defaults to 1. 158 | ratio (float, optional): Fraction of figure width that the figure height 159 | should be. Defaults to (5 ** 0.5 - 1)/2. 160 | 161 | Returns: 162 | fig_dim (tuple): 163 | Dimensions of figure in inches 164 | """ 165 | 166 | # Width of figure 167 | fig_width_pt = width * fraction_of_line_width 168 | 169 | # Convert from pt to inches 170 | inches_per_pt = 1 / 72.27 171 | 172 | # Figure width in inches 173 | fig_width_in = fig_width_pt * inches_per_pt 174 | # Figure height in inches 175 | fig_height_in = fig_width_in * ratio 176 | 177 | fig_dim = (fig_width_in, fig_height_in) 178 | 179 | return fig_dim 180 | 181 | 182 | def set_dim( 183 | fig: matplotlib.pyplot.figure, 184 | width: float = 600, 185 | fraction_of_line_width: float = 1, 186 | ratio: float = (5**0.5 - 1) / 2, 187 | ) -> None: 188 | """Set aesthetic figure dimensions to avoid scaling in latex. 189 | 190 | Args: 191 | fig (matplotlib.pyplot.figure): Figure object to resize. 192 | width (float): Textwidth of the report to make fontsizes match. 193 | fraction_of_line_width (float, optional): Fraction of the document width 194 | which you wish the figure to occupy. Defaults to 1. 195 | ratio (float, optional): Fraction of figure width that the figure height 196 | should be. Defaults to (5 ** 0.5 - 1)/2. 197 | 198 | Returns: 199 | void; alters current figure to have the desired dimensions 200 | """ 201 | fig.set_size_inches( 202 | get_dim(width=width, fraction_of_line_width=fraction_of_line_width, ratio=ratio) 203 | ) 204 | 205 | 206 | STD_CLR_LIST = [ 207 | "#4d2923ff", 208 | "#494f1fff", 209 | "#38734bff", 210 | "#498489ff", 211 | "#8481baff", 212 | "#c286b2ff", 213 | "#d7a4a3ff", 214 | ] 215 | _paper_colors = sns.color_palette(STD_CLR_LIST) 216 | # Note: To inspect colors, call `sns.palplot(_paper_colors)` 217 | PALETTE = itertools.cycle(_paper_colors) 218 | CAM_BLUE = "#a3c1ad" 219 | OX_BLUE = "#002147" 220 | BRICK_RED = "#CB4154" 221 | -------------------------------------------------------------------------------- /geograph/metrics.py: -------------------------------------------------------------------------------- 1 | """Functions for calculating metrics from a GeoGraph.""" 2 | from __future__ import annotations 3 | from dataclasses import dataclass 4 | from typing import TYPE_CHECKING, Any, Optional, Union 5 | import networkx as nx 6 | import numpy as np 7 | 8 | if TYPE_CHECKING: 9 | import geograph 10 | 11 | 12 | # define a metric dataclass with < <= => > == comparisons that work as you would 13 | # expect intuitively 14 | @dataclass() 15 | class Metric: 16 | """Class to represent a metric for a GeoGraph, with associated metadata.""" 17 | 18 | value: Any # No good Numpy type hints 19 | name: str 20 | description: str 21 | variant: Optional[str] 22 | unit: Optional[str] = None 23 | 24 | def __eq__(self, o: object) -> bool: 25 | if not isinstance(o, Metric): 26 | return False 27 | return self.value == o.value 28 | 29 | def __lt__(self, o: object) -> bool: 30 | if not isinstance(o, Metric): 31 | return False 32 | return self.value < o.value 33 | 34 | def __le__(self, o: object) -> bool: 35 | if not isinstance(o, Metric): 36 | return False 37 | return self.value <= o.value 38 | 39 | def __gt__(self, o: object) -> bool: 40 | if not isinstance(o, Metric): 41 | return False 42 | return self.value > o.value 43 | 44 | def __ge__(self, o: object) -> bool: 45 | if not isinstance(o, Metric): 46 | return False 47 | return self.value >= o.value 48 | 49 | 50 | ######################################################################################## 51 | # 1. Landscape level metrics 52 | ######################################################################################## 53 | def _num_patches(geo_graph: geograph.GeoGraph) -> Metric: 54 | return Metric( 55 | value=len(geo_graph.df), 56 | name="num_patches", 57 | description="The number of patches in the graph.", 58 | variant="conventional", 59 | unit="dimensionless", 60 | ) 61 | 62 | 63 | def _avg_patch_area(geo_graph: geograph.GeoGraph) -> Metric: 64 | 65 | total_area = np.sum(geo_graph.df.area.values) 66 | num_patches = geo_graph.get_metric("num_patches").value 67 | 68 | return Metric( 69 | value=total_area / num_patches, 70 | name="avg_patch_area", 71 | description="The average area of the patches in the graph.", 72 | variant="conventional", 73 | unit="CRS.unit**2", 74 | ) 75 | 76 | 77 | def _total_area(geo_graph: geograph.GeoGraph) -> Metric: 78 | return Metric( 79 | value=np.sum(geo_graph.df.area.values), 80 | name="total_area", 81 | description="The total area of all the patches in the graph.", 82 | variant="conventional", 83 | unit="CRS.unit**2", 84 | ) 85 | 86 | 87 | def _patch_density(geo_graph: geograph.GeoGraph) -> Metric: 88 | return Metric( 89 | value=1.0 / geo_graph.get_metric("avg_patch_area").value, 90 | name="patch_density", 91 | description="Number of patches divided by total area of the graph.", 92 | variant="conventional", 93 | unit="1 / CRS.unit**2", 94 | ) 95 | 96 | 97 | def _largest_patch_index(geo_graph: geograph.GeoGraph) -> Metric: 98 | 99 | total_area = np.sum(geo_graph.df.area.values) 100 | max_patch_area = max(geo_graph.df.area.values) 101 | 102 | return Metric( 103 | value=max_patch_area / total_area, 104 | name="largest_patch_index", 105 | description="The proportion of landscape comprised by the largest patch.", 106 | variant="conventional", 107 | unit="dimensionless", 108 | ) 109 | 110 | 111 | def _shannon_diversity_index(geo_graph: geograph.GeoGraph) -> Metric: 112 | """ 113 | Calculate Shannon diversity of a GeoGraph. 114 | 115 | Further reference: 116 | https://pylandstats.readthedocs.io/en/latest/landscape.html 117 | https://en.wikipedia.org/wiki/Diversity_index 118 | """ 119 | class_prop_of_landscape = np.array( 120 | [ 121 | geo_graph.get_metric( 122 | "proportion_of_landscape", class_value=class_value 123 | ).value 124 | for class_value in geo_graph.classes 125 | ] 126 | ) 127 | 128 | description = ( 129 | "SHDI approaches 0 when the entire landscape consists of a single " 130 | "patch, and increases as the number of classes increases and/or the " 131 | "proportional distribution of area among classes becomes more equitable." 132 | ) 133 | 134 | return Metric( 135 | value=-np.sum(class_prop_of_landscape * np.log(class_prop_of_landscape)), 136 | name="shannon_diversity_index", 137 | description=description, 138 | variant="conventional", 139 | unit="dimensionless", 140 | ) 141 | 142 | 143 | def _simpson_diversity_index(geo_graph: geograph.GeoGraph) -> Metric: 144 | """ 145 | Calculate Simpson diversity of a GeoGraph. 146 | 147 | Reference: 148 | umass.edu/landeco/teaching/landscape_ecology/schedule/chapter9_metrics.pdf 149 | """ 150 | class_prop_of_landscape = np.array( 151 | [ 152 | geo_graph.get_metric( 153 | "proportion_of_landscape", class_value=class_value 154 | ).value 155 | for class_value in geo_graph.classes 156 | ] 157 | ) 158 | 159 | description = ( 160 | "Probability that any two pixels drawn at random will be of different" 161 | "class types" 162 | ) 163 | 164 | return Metric( 165 | value=1 - np.sum(class_prop_of_landscape**2), 166 | name="simpson_diversity_index", 167 | description=description, 168 | variant="conventional", 169 | unit="dimensionless", 170 | ) 171 | 172 | 173 | LANDSCAPE_METRICS_DICT = { 174 | "avg_patch_area": _avg_patch_area, 175 | "total_area": _total_area, 176 | "num_patches": _num_patches, 177 | "patch_density": _patch_density, 178 | "largest_patch_index": _largest_patch_index, 179 | "shannon_diversity_index": _shannon_diversity_index, 180 | "simpson_diversity_index": _simpson_diversity_index, 181 | } 182 | 183 | ######################################################################################## 184 | # 2. Landcover class level metrics 185 | ######################################################################################## 186 | 187 | 188 | def _class_total_area( 189 | geo_graph: geograph.GeoGraph, class_value: Union[int, str] 190 | ) -> Metric: 191 | 192 | class_areas = geo_graph.df["geometry"][ 193 | geo_graph.df["class_label"] == class_value 194 | ].area.values 195 | 196 | return Metric( 197 | value=np.sum(class_areas), 198 | name=f"total_area_class={class_value}", 199 | description=f"Total area of all patches of class {class_value} in the graph.", 200 | variant="conventional", 201 | unit="CRS.unit**2", 202 | ) 203 | 204 | 205 | def _class_avg_patch_area( 206 | geo_graph: geograph.GeoGraph, class_value: Union[int, str] 207 | ) -> Metric: 208 | 209 | class_num_patches = geo_graph.get_metric( 210 | "num_patches", class_value=class_value 211 | ).value 212 | class_total_area = geo_graph.get_metric("total_area", class_value=class_value).value 213 | 214 | return Metric( 215 | value=class_total_area / class_num_patches, 216 | name=f"avg_patch_area_class={class_value}", 217 | description=f"The average area of patches of class {class_value} in the graph.", 218 | variant="conventional", 219 | unit="CRS.unit**2", 220 | ) 221 | 222 | 223 | def _class_num_patches( 224 | geo_graph: geograph.GeoGraph, class_value: Union[int, str] 225 | ) -> Metric: 226 | return Metric( 227 | value=np.sum(geo_graph.df["class_label"] == class_value), 228 | name=f"num_patches_class={class_value}", 229 | description=f"The number of patches of class {class_value} in the graph.", 230 | variant="conventional", 231 | unit="dimensionless", 232 | ) 233 | 234 | 235 | def _class_proportion_of_landscape( 236 | geo_graph: geograph.GeoGraph, class_value: Union[int, str] 237 | ) -> Metric: 238 | 239 | class_total_area = geo_graph.get_metric("total_area", class_value=class_value).value 240 | total_area = geo_graph.get_metric("total_area").value 241 | 242 | return Metric( 243 | value=class_total_area / total_area, 244 | name=f"proportion_of_landscape_class={class_value}", 245 | description=f"The proportional abundance of {class_value} in the graph.", 246 | variant="conventional", 247 | unit="dimensionless", 248 | ) 249 | 250 | 251 | def _class_patch_density( 252 | geo_graph: geograph.GeoGraph, class_value: Union[int, str] 253 | ) -> Metric: 254 | 255 | class_num_patches = geo_graph.get_metric( 256 | "num_patches", class_value=class_value 257 | ).value 258 | total_area = geo_graph.get_metric("total_area").value 259 | 260 | return Metric( 261 | value=class_num_patches / total_area, 262 | name=f"patch_density_class={class_value}", 263 | description=f"Density of patches of class {class_value} in the graph.", 264 | variant="conventional", 265 | unit="1 / CRS.unit**2", 266 | ) 267 | 268 | 269 | def _class_largest_patch_index( 270 | geo_graph: geograph.GeoGraph, class_value: Union[int, str] 271 | ) -> Metric: 272 | """ 273 | Return proportion of total landscape comprised by largest patch of given class. 274 | 275 | Definition taken from: 276 | https://pylandstats.readthedocs.io/en/latest/landscape.html 277 | """ 278 | total_area = geo_graph.get_metric("total_area").value 279 | class_areas = geo_graph.df["geometry"][ 280 | geo_graph.df["class_label"] == class_value 281 | ].area 282 | 283 | description = ( 284 | "Proportion of total landscape compriesed by largest patch of " 285 | f"class {class_value} in the graph." 286 | ) 287 | 288 | return Metric( 289 | value=max(class_areas) / total_area, 290 | name=f"patch_density_class={class_value}", 291 | description=description, 292 | variant="conventional", 293 | unit="dimensionless", 294 | ) 295 | 296 | 297 | def _class_total_edge( 298 | geo_graph: geograph.GeoGraph, class_value: Union[int, str] 299 | ) -> Metric: 300 | # TODO: Implement option for not counting edges. 301 | 302 | class_perimeters = geo_graph.df["geometry"][ 303 | geo_graph.df["class_label"] == class_value 304 | ].length 305 | 306 | return Metric( 307 | value=np.sum(class_perimeters), 308 | name=f"total_edge_class={class_value}", 309 | description=f"Total edgelength of patches of class {class_value} in the graph.", 310 | variant="conventional", 311 | unit="CRS.unit", 312 | ) 313 | 314 | 315 | def _class_edge_density( 316 | geo_graph: geograph.GeoGraph, class_value: Union[int, str] 317 | ) -> Metric: 318 | """ 319 | Return total length of class edges for the given class. 320 | 321 | Note: Currently the boundary also counted towards the edge length. 322 | 323 | Definition taken from: 324 | https://pylandstats.readthedocs.io/en/latest/landscape.html 325 | """ 326 | # TODO: Implement option for not counting edges. 327 | 328 | total_edge = geo_graph.get_metric("total_edge", class_value=class_value).value 329 | total_area = geo_graph.get_metric("total_area", class_value=class_value).value 330 | 331 | description = ( 332 | f"Edge length per unit area of patches of class {class_value} in the graph." 333 | ) 334 | 335 | return Metric( 336 | value=total_edge / total_area, 337 | name=f"total_edge_density_class={class_value}", 338 | description=description, 339 | variant="conventional", 340 | unit="1 / CRS.unit", 341 | ) 342 | 343 | 344 | def _class_shape_index( 345 | geo_graph: geograph.GeoGraph, class_value: Union[int, str] 346 | ) -> Metric: 347 | """ 348 | Return shape index of given class. 349 | 350 | Definition taken from: 351 | https://pylandstats.readthedocs.io/en/latest/landscape.html 352 | """ 353 | total_edge = geo_graph.get_metric("total_edge", class_value=class_value).value 354 | total_area = geo_graph.get_metric("total_area", class_value=class_value).value 355 | 356 | description = ( 357 | "SI >=1 ; LSI equals 1 when the entire landscape consists of a single patch of " 358 | f"class {class_value}, and increases without limit as the patches of class " 359 | f"{class_value} become more disaggregated." 360 | ) 361 | 362 | return Metric( 363 | value=0.25 * total_edge / np.sqrt(total_area), 364 | name=f"shape_index_class={class_value}", 365 | description=description, 366 | variant="conventional", 367 | unit="dimensionless", 368 | ) 369 | 370 | 371 | def _class_effective_mesh_size( 372 | geo_graph: geograph.GeoGraph, class_value: Union[int, str] 373 | ) -> Metric: 374 | """ 375 | Return effective mesh size of given class. 376 | 377 | Definition taken from: 378 | https://pylandstats.readthedocs.io/en/latest/landscape.html 379 | """ 380 | class_areas = geo_graph.df["geometry"][ 381 | geo_graph.df["class_label"] == class_value 382 | ].area 383 | total_area = geo_graph.get_metric("total_area").value 384 | 385 | description = ( 386 | "A <= MESH <= A ; MESH approaches its minimum when there is a single" 387 | " corresponding patch of one pixel, and approaches its maximum when the " 388 | "landscape consists of a single patch." 389 | ) 390 | 391 | return Metric( 392 | value=np.sum(class_areas**2) / total_area, 393 | name=f"effective_mesh_size_class={class_value}", 394 | description=description, 395 | variant="conventional", 396 | unit="CRS.unit**2", 397 | ) 398 | 399 | 400 | CLASS_METRICS_DICT = { 401 | "num_patches": _class_num_patches, 402 | "avg_patch_area": _class_avg_patch_area, 403 | "total_area": _class_total_area, 404 | "proportion_of_landscape": _class_proportion_of_landscape, 405 | "patch_density": _class_patch_density, 406 | "largest_patch_index": _class_largest_patch_index, 407 | "total_edge": _class_total_edge, 408 | "edge_density": _class_edge_density, 409 | "shape_index": _class_shape_index, 410 | "effective_mesh_size": _class_effective_mesh_size, 411 | } 412 | 413 | ######################################################################################## 414 | # 3. Habitat componment level metrics 415 | ######################################################################################## 416 | 417 | 418 | def _num_components(geo_graph: geograph.GeoGraph) -> Metric: 419 | return Metric( 420 | value=nx.number_connected_components(geo_graph.graph), 421 | name="num_components", 422 | description="The number of connected components in the graph.", 423 | variant="component", 424 | unit="dimensionless", 425 | ) 426 | 427 | 428 | def _avg_component_area(geo_graph: geograph.GeoGraph) -> Metric: 429 | if not geo_graph.components.has_df: 430 | print("Calculating component polygons...") 431 | geo_graph.components = geo_graph.get_graph_components(calc_polygons=True) 432 | comp_geograph = geo_graph.components 433 | return Metric( 434 | value=np.mean(comp_geograph.df.area.values), 435 | name="avg_component_area", 436 | description="The average area of the components in the graph", 437 | variant="component", 438 | unit="CRS.unit**2", 439 | ) 440 | 441 | 442 | def _avg_component_isolation(geo_graph: geograph.GeoGraph) -> Metric: 443 | """ 444 | Calculate the average distance to the next-nearest component. 445 | 446 | Warning: very computationally expensive for graphs with more than ~100 447 | components. 448 | """ 449 | if not geo_graph.components.has_df or not geo_graph.components.has_distance_edges: 450 | print( 451 | """Warning: very computationally expensive for graphs with more 452 | than ~100 components.""" 453 | ) 454 | geo_graph.components = geo_graph.get_graph_components( 455 | calc_polygons=True, add_distance_edges=True 456 | ) 457 | comp_geograph = geo_graph.components 458 | if len(comp_geograph.components_list) == 1: 459 | val: Any = 0 460 | else: 461 | dist_set = set() 462 | for comp in comp_geograph.graph.nodes: 463 | for nbr in comp_geograph.graph.adj[comp]: 464 | dist_set.update( 465 | [ 466 | dist 467 | for u, v, dist in comp_geograph.graph.edges( 468 | nbr, data="distance" 469 | ) 470 | if v != comp 471 | ] 472 | ) 473 | val = np.mean(np.fromiter(dist_set, np.float32, len(dist_set))) 474 | return Metric( 475 | value=val, 476 | name="avg_component_isolation", 477 | description="The average distance to the next-nearest component", 478 | variant="component", 479 | unit="CRS.unit", 480 | ) 481 | 482 | 483 | COMPONENT_METRICS_DICT = { 484 | "num_components": _num_components, 485 | "avg_component_area": _avg_component_area, 486 | "avg_component_isolation": _avg_component_isolation, 487 | } 488 | 489 | 490 | ######################################################################################## 491 | # 4. Define access interface for GeoGraph 492 | ######################################################################################## 493 | 494 | STANDARD_METRICS = ["num_components", "avg_patch_area", "total_area"] 495 | 496 | 497 | def _get_metric( 498 | name: str, 499 | geo_graph: geograph.GeoGraph, 500 | class_value: Optional[Union[int, str]] = None, 501 | **metric_kwargs, 502 | ) -> Metric: 503 | """ 504 | Calculate selected metric for the given GeoGraph. 505 | 506 | Args: 507 | name (str): Name of the metric to compute 508 | geo_graph (geograph.GeoGraph): GeoGraph object to compute the metric for 509 | class_value (Optional[int], optional): The value of the desired class if a 510 | class level metric is desired. None if a landscape/component level metric 511 | is desired. Defaults to None. 512 | 513 | Returns: 514 | Metric: The desired metric. 515 | """ 516 | # Landscape level metrics 517 | if class_value is None: 518 | try: 519 | try: 520 | return LANDSCAPE_METRICS_DICT[name](geo_graph, **metric_kwargs) 521 | except KeyError as key_error: 522 | return COMPONENT_METRICS_DICT[name](geo_graph, **metric_kwargs) 523 | except KeyError as key_error: 524 | raise ValueError( 525 | f"Argument `{name}` is not a valid landscape/component metric.\n" 526 | "Available landscape metrics are: " 527 | f"\n{list(LANDSCAPE_METRICS_DICT.keys())}.\n" 528 | "Available component metrics are: " 529 | f"\n{list(COMPONENT_METRICS_DICT.keys())}." 530 | ) from key_error 531 | 532 | # Class level metrics 533 | else: 534 | try: 535 | return CLASS_METRICS_DICT[name]( 536 | geo_graph, class_value=class_value, **metric_kwargs 537 | ) 538 | except KeyError as key_error: 539 | raise ValueError( 540 | "Argument `name` is not a valid class metric. " 541 | f"Available class metrics are: \n{list(CLASS_METRICS_DICT.keys())}" 542 | ) from key_error 543 | -------------------------------------------------------------------------------- /geograph/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ai4er-cdt/geograph/e9436a3a73d9b96bc2c7e139d65a518e0b48c28d/geograph/tests/__init__.py -------------------------------------------------------------------------------- /geograph/tests/create_data_test.py: -------------------------------------------------------------------------------- 1 | """Script to quickly and reproducibly create test data 2 | 3 | Note: We may want to delete this at some point. 4 | """ 5 | import numpy as np 6 | from typing import Dict, Iterable, Tuple 7 | import geopandas as gpd 8 | import pytest 9 | from geograph.constants import SRC_PATH 10 | from geograph.tests.utils import get_array_transform, polygonise_sub_array 11 | from geograph.utils.rasterio_utils import polygonise 12 | 13 | 14 | def _polygonise_splits( 15 | arr: np.ndarray, named_slices: Iterable[Dict[str, Tuple]] 16 | ) -> Dict[str, gpd.GeoDataFrame]: 17 | """ 18 | Create polygons from multiple sub-arrays of the given array. 19 | 20 | Note: 21 | Indices for `named_slices` must be given in x-y convention. 22 | x-y indexing is used for convenience with plotting later. The origin for 23 | x-y indexing is taken to be at the lower left corner of the array. The 24 | x-index increases horizontally to the right, y-index increases vertically to 25 | the top. 26 | 27 | Args: 28 | arr (np.ndarray): The array from which to select sub-arrays and polygonise them 29 | named_slices (Iterable[Dict[str, Tuple]]): An iterable of dictionaries 30 | containing the x-y limits of the sub-arrays to polygonise. x-y indices 31 | must be >= 0 or None. 32 | 33 | Returns: 34 | Dict[str, gpd.GeoDataFrame]: [description] 35 | """ 36 | 37 | result = {} 38 | for name, (x_lims, y_lims) in named_slices.items(): 39 | result[name] = polygonise_sub_array(arr, x_lims, y_lims) 40 | 41 | return result 42 | 43 | 44 | @pytest.mark.unit 45 | def test_create_data() -> None: 46 | """Create the test data.""" 47 | print("Creating test data ... ") 48 | TEST_DATA_FOLDER = SRC_PATH / "tests" / "testdata" 49 | TEST_DATA_FOLDER.mkdir(parents=True, exist_ok=True) 50 | 51 | # 1. Create non-overlapping polygons 52 | print("(1/3) Create non-overlapping polygon data") 53 | # Create array 54 | np.random.seed(28) 55 | arr1 = np.random.randint(low=1, high=5, size=(6, 4), dtype=np.uint8) 56 | # Define splits 57 | splits_of_interest = { 58 | "lower_left": ((0, 2), (0, 3)), 59 | "upper_right": ((2, 4), (3, 6)), 60 | "upper_left": ((0, 2), (3, 6)), 61 | "lower_right": ((2, 4), (0, 3)), 62 | } 63 | # Poligonise 64 | polygons1 = _polygonise_splits(arr1, splits_of_interest) 65 | polygons1["full"] = polygonise(arr1, transform=get_array_transform(arr1)) 66 | # Save 67 | for save_name, df in polygons1.items(): 68 | save_path = TEST_DATA_FOLDER / "adjacent" / f"{save_name}.gpkg" 69 | df.to_file(save_path, driver="GPKG") 70 | 71 | # 2. Create overlapping polygons 72 | print("(2/3) Create overlapping polygon data") 73 | # Create array 74 | np.random.seed(285) 75 | arr2 = np.random.randint(low=1, high=5, size=(4, 4), dtype=np.uint8) 76 | # Define splits 77 | splits_of_interest = { 78 | "lower_left": ((0, 3), (0, 3)), 79 | "upper_right": ((1, 4), (1, 4)), 80 | "upper_left": ((0, 3), (1, 4)), 81 | "lower_right": ((1, 4), (0, 3)), 82 | } 83 | # Polygonise 84 | polygons2 = _polygonise_splits(arr2, splits_of_interest) 85 | polygons2["full"] = polygonise(arr2, transform=get_array_transform(arr2)) 86 | # Save 87 | for save_name, df in polygons2.items(): 88 | save_path = TEST_DATA_FOLDER / "overlapping" / f"{save_name}.gpkg" 89 | df.to_file(save_path, driver="GPKG") 90 | 91 | # 3. Create time-stacked polygons 92 | print("(3/3) Create time slice data") 93 | # Settings 94 | np.random.seed(184) 95 | # Create polygons 96 | for i in range(5): 97 | arr_t = np.random.randint(low=1, high=4, size=(4, 4), dtype=np.uint8) 98 | polygons_t = polygonise(arr_t, transform=get_array_transform(arr_t)) 99 | save_path = TEST_DATA_FOLDER / "timestack" / f"time_{i}.gpkg" 100 | polygons_t.to_file(save_path, driver="GPKG") 101 | 102 | 103 | if __name__ == "__main__": 104 | test_create_data() 105 | -------------------------------------------------------------------------------- /geograph/tests/testdata/adjacent/full.gpkg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ai4er-cdt/geograph/e9436a3a73d9b96bc2c7e139d65a518e0b48c28d/geograph/tests/testdata/adjacent/full.gpkg -------------------------------------------------------------------------------- /geograph/tests/testdata/adjacent/lower_left.gpkg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ai4er-cdt/geograph/e9436a3a73d9b96bc2c7e139d65a518e0b48c28d/geograph/tests/testdata/adjacent/lower_left.gpkg -------------------------------------------------------------------------------- /geograph/tests/testdata/adjacent/lower_right.gpkg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ai4er-cdt/geograph/e9436a3a73d9b96bc2c7e139d65a518e0b48c28d/geograph/tests/testdata/adjacent/lower_right.gpkg -------------------------------------------------------------------------------- /geograph/tests/testdata/adjacent/upper_left.gpkg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ai4er-cdt/geograph/e9436a3a73d9b96bc2c7e139d65a518e0b48c28d/geograph/tests/testdata/adjacent/upper_left.gpkg -------------------------------------------------------------------------------- /geograph/tests/testdata/adjacent/upper_right.gpkg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ai4er-cdt/geograph/e9436a3a73d9b96bc2c7e139d65a518e0b48c28d/geograph/tests/testdata/adjacent/upper_right.gpkg -------------------------------------------------------------------------------- /geograph/tests/testdata/overlapping/full.gpkg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ai4er-cdt/geograph/e9436a3a73d9b96bc2c7e139d65a518e0b48c28d/geograph/tests/testdata/overlapping/full.gpkg -------------------------------------------------------------------------------- /geograph/tests/testdata/overlapping/lower_left.gpkg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ai4er-cdt/geograph/e9436a3a73d9b96bc2c7e139d65a518e0b48c28d/geograph/tests/testdata/overlapping/lower_left.gpkg -------------------------------------------------------------------------------- /geograph/tests/testdata/overlapping/lower_right.gpkg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ai4er-cdt/geograph/e9436a3a73d9b96bc2c7e139d65a518e0b48c28d/geograph/tests/testdata/overlapping/lower_right.gpkg -------------------------------------------------------------------------------- /geograph/tests/testdata/overlapping/upper_left.gpkg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ai4er-cdt/geograph/e9436a3a73d9b96bc2c7e139d65a518e0b48c28d/geograph/tests/testdata/overlapping/upper_left.gpkg -------------------------------------------------------------------------------- /geograph/tests/testdata/overlapping/upper_right.gpkg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ai4er-cdt/geograph/e9436a3a73d9b96bc2c7e139d65a518e0b48c28d/geograph/tests/testdata/overlapping/upper_right.gpkg -------------------------------------------------------------------------------- /geograph/tests/testdata/timestack/time_0.gpkg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ai4er-cdt/geograph/e9436a3a73d9b96bc2c7e139d65a518e0b48c28d/geograph/tests/testdata/timestack/time_0.gpkg -------------------------------------------------------------------------------- /geograph/tests/testdata/timestack/time_1.gpkg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ai4er-cdt/geograph/e9436a3a73d9b96bc2c7e139d65a518e0b48c28d/geograph/tests/testdata/timestack/time_1.gpkg -------------------------------------------------------------------------------- /geograph/tests/testdata/timestack/time_2.gpkg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ai4er-cdt/geograph/e9436a3a73d9b96bc2c7e139d65a518e0b48c28d/geograph/tests/testdata/timestack/time_2.gpkg -------------------------------------------------------------------------------- /geograph/tests/testdata/timestack/time_3.gpkg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ai4er-cdt/geograph/e9436a3a73d9b96bc2c7e139d65a518e0b48c28d/geograph/tests/testdata/timestack/time_3.gpkg -------------------------------------------------------------------------------- /geograph/tests/testdata/timestack/time_4.gpkg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ai4er-cdt/geograph/e9436a3a73d9b96bc2c7e139d65a518e0b48c28d/geograph/tests/testdata/timestack/time_4.gpkg -------------------------------------------------------------------------------- /geograph/tests/utils.py: -------------------------------------------------------------------------------- 1 | """Convenience functions for creating and analysing test data for GeoGraph""" 2 | from typing import Iterable, Tuple 3 | import affine 4 | import geograph 5 | import geopandas as gpd 6 | import matplotlib.pyplot as plt 7 | import numpy as np 8 | import seaborn as sns 9 | from geograph.utils.rasterio_utils import polygonise 10 | 11 | 12 | # Mirror the x axis 13 | AFFINE_MIRROR_X = affine.Affine(-1, 0, 0, 0, 1, 0) 14 | # Mirror the y axis 15 | AFFINE_MIRROR_Y = affine.Affine(1, 0, 0, 0, -1, 0) 16 | 17 | 18 | def get_array_transform(arr: np.ndarray, xoff: int = 0, yoff: int = 0) -> affine.Affine: 19 | """ 20 | Return affine transform for np.array such that lower-left corner conicides with 21 | (xoff, yoff). 22 | 23 | Note: 24 | This function is meant for use with `polygonise` to create simple test cases 25 | of polygon data and position them at the desired offset. 26 | 27 | Args: 28 | arr (np.ndarray): The numpy array for which the affine transform will be 29 | calculated 30 | xoff (int, optional): x-offset (horizontal) of the origin. Defaults to 0. 31 | yoff (int, optional): y-offset (vertical) of the origin. Defaults to 0. 32 | 33 | Returns: 34 | affine.Affine: The affine transformation that places the lower-left corner of 35 | the given numpy array at (xoff, yoff). 36 | """ 37 | 38 | return affine.Affine.translation(xoff, yoff + arr.shape[0]) * AFFINE_MIRROR_Y 39 | 40 | 41 | def _xy_to_rowcol( 42 | arr: np.ndarray, x: Tuple[int, int], y: Tuple[int, int] 43 | ) -> Tuple[Tuple[int, int], Tuple[int, int]]: 44 | """ 45 | Transform x-y indexing to row-column indexing for accessing the given numpy array. 46 | 47 | Convenience function to transfrom x-y indices (origin: lower left corner) 48 | to row-column indices (origin: upper left corner). 49 | 50 | Note: 51 | x,y indices must be non-negative or `None`. 52 | 53 | Args: 54 | arr (np.ndarray): The numpy array for which to transform the indices 55 | x (Tuple[int, int]): The x indices. Must be >= 0 or None. 56 | y (Tuple[int, int]): The y indices. Must be >= 0 or None. 57 | 58 | Returns: 59 | Tuple[Tuple[int, int], Tuple[int, int]]: The row and column indicies 60 | for accessing the numpy array `arr` 61 | """ 62 | 63 | # Throw error if negative inidices are given (positive & None indices are valid) 64 | is_valid = lambda _val: (_val is None) or _val >= 0 65 | assert all(map(is_valid, x)) and all(map(is_valid, y)), "invalid xy coordinates" 66 | 67 | # Convert 68 | row = (arr.shape[0] - (y[1] or arr.shape[0]), arr.shape[0] - (y[0] or 0)) 69 | col = x 70 | 71 | return row, col 72 | 73 | 74 | def polygonise_sub_array( 75 | arr: np.ndarray, x_lims: Tuple[int, int], y_lims: Tuple[int, int] 76 | ) -> gpd.GeoDataFrame: 77 | """ 78 | Convert sub-array of a given numpy array into polygons. 79 | 80 | Note: 81 | x-y indexing is used for convenience with plotting later. The origin for 82 | x-y indexing is taken to be at the lower left corner of the array. The 83 | x-index increases horizontally to the right, y-index increases vertically to 84 | the top. 85 | 86 | Args: 87 | arr (np.ndarray): The numpy array from which to select the sub-array 88 | x_lims (Tuple[int, int]): The x-limits of the sub-array. Must be >=0 or None. 89 | y_lims (Tuple[int, int]): The y-limits of the sub-array. Must be >=0 or None. 90 | 91 | Returns: 92 | gpd.GeoDataFrame: The polygons created from the numpy array. 93 | """ 94 | 95 | # Convert x-y indexing to row-col indexing 96 | row_lims, col_lims = _xy_to_rowcol(arr, x_lims, y_lims) 97 | # Select sub array 98 | sub_array = arr[row_lims[0] : row_lims[1], col_lims[0] : col_lims[1]] 99 | 100 | return polygonise( 101 | sub_array, 102 | transform=get_array_transform(sub_array, xoff=x_lims[0], yoff=y_lims[0]), 103 | ) 104 | 105 | 106 | def plot_identified_nodes( 107 | node: dict, other_graph: geograph.GeoGraph, identified_nodes: Iterable[int] 108 | ) -> None: 109 | """ 110 | Plot nodes that identify with `node` in `other_graph` 111 | 112 | Args: 113 | node (dict): The node for which identification checks were performed (will be 114 | colored with a blue frame) 115 | other_graph (GeoGraph): The geograph of nodes with which the given node was 116 | compared 117 | identified_nodes (Iterable[int]): The list of node ids in `other_graph` with 118 | which the current `node` was identified 119 | """ 120 | 121 | candidate_ids = list(other_graph.rtree.intersection(node["geometry"].bounds)) 122 | 123 | # Create color palette dependent on existing class labels 124 | class_labels = set(other_graph.df.loc[candidate_ids, "class_label"]) 125 | class_labels.add(node["class_label"]) 126 | colors = sns.color_palette("hls", len(class_labels)) 127 | map_to_color = dict(zip(class_labels, colors)) 128 | 129 | xs, ys = node["geometry"].exterior.xy 130 | plt.fill(xs, ys, alpha=0.4, fc=map_to_color[node["class_label"]], ec=None) 131 | plt.plot(xs, ys, color="blue", linewidth=6) 132 | 133 | for node_id in candidate_ids: 134 | other_node = other_graph.df.iloc[node_id] 135 | xs, ys = other_node["geometry"].exterior.xy 136 | plt.fill(xs, ys, alpha=0.4, fc=map_to_color[other_node["class_label"]], ec=None) 137 | 138 | for node_id in identified_nodes: 139 | other_node = other_graph.df.iloc[node_id] 140 | xs, ys = other_node["geometry"].exterior.xy 141 | plt.fill(xs, ys, alpha=0.4, fc=map_to_color[other_node["class_label"]], ec=None) 142 | plt.plot(xs, ys, color="green", linewidth=3) 143 | 144 | plt.show() 145 | -------------------------------------------------------------------------------- /geograph/utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ai4er-cdt/geograph/e9436a3a73d9b96bc2c7e139d65a518e0b48c28d/geograph/utils/__init__.py -------------------------------------------------------------------------------- /geograph/utils/geopandas_utils.py: -------------------------------------------------------------------------------- 1 | """Helper functions for operating with geopandas objects.""" 2 | from typing import Dict, List 3 | import geopandas as gpd 4 | import networkx as nx 5 | import tqdm 6 | from geograph.utils.polygon_utils import ( 7 | connect_with_interior_bulk, 8 | connect_with_interior_or_edge_bulk, 9 | connect_with_interior_or_edge_or_corner_bulk, 10 | ) 11 | from shapely.geometry import MultiPolygon 12 | 13 | 14 | # For switching identifiction mode in `identify_node` 15 | _BULK_SPATIAL_IDENTIFICATION_FUNCTION = { 16 | "corner": connect_with_interior_or_edge_or_corner_bulk, 17 | "edge": connect_with_interior_or_edge_bulk, 18 | "interior": connect_with_interior_bulk, 19 | } 20 | 21 | 22 | def identify_node( 23 | node: dict, other_df: gpd.GeoDataFrame, mode: str = "corner" 24 | ) -> List[int]: 25 | """ 26 | Return list of all `loc` in `other_df` which identify with the given `node`. 27 | 28 | Args: 29 | node (dict): The node for which to find nodes in `other_df` that can be 30 | identified with `node`. 31 | other_df (GeoDataFrame): The GeoDataFrame object in which to search for 32 | identifications 33 | mode (str, optional): Must be one of `corner`, `edge` or `interior`. Defaults 34 | to "corner". 35 | The different modes correspond to different rules for identification: 36 | 37 | - corner: Polygons of the same `class_label` which overlap, touch in their 38 | edges or corners will be identified with each other. (fastest) 39 | - edge: Polygons of the same `class_label` which overlap or touch in their 40 | edges will be identified with each other. 41 | - interior: Polygons of the same `class_label` which overlap will be 42 | identified with each other. Touching corners or edges are not counted. 43 | 44 | Returns: 45 | np.ndarray: List of node `loc` in `other_df` which identify with `node`. 46 | """ 47 | # Mode switch 48 | assert mode in ["corner", "edge", "interior"] 49 | have_valid_overlap = _BULK_SPATIAL_IDENTIFICATION_FUNCTION[mode] 50 | 51 | # Get potential candidates for overlap 52 | candidate_ids = other_df.sindex.query(node["geometry"], sort=True) 53 | # Filter candidates according to the same class label 54 | candidate_ids = candidate_ids[ 55 | other_df["class_label"].values[candidate_ids] == node["class_label"] 56 | ] 57 | # Filter candidates accroding to correct spatial overlap 58 | candidate_ids = candidate_ids[ 59 | have_valid_overlap(node["geometry"], other_df.geometry.values[candidate_ids]) 60 | ] 61 | 62 | return other_df.index.values[candidate_ids].tolist() 63 | 64 | 65 | def identify_dfs( 66 | df1: gpd.GeoDataFrame, df2: gpd.GeoDataFrame, mode: str 67 | ) -> Dict[int, List[int]]: 68 | """ 69 | Idenitfy all nodes from `graph1` with nodes from `graph2` based on the given `mode` 70 | 71 | Args: 72 | df1 (GeoDataFrame): The dataframe whose node indicies will form the domain 73 | df2 (GeoDataFrame): The dataframe whose node indices will form the 74 | image (target) 75 | mode (str): The mode to use for node identification. Must be one of `corner`, 76 | `edge` or `interior`. 77 | The different modes correspond to different rules for identification: 78 | 79 | - corner: Polygons of the same `class_label` which overlap, touch in their 80 | edges or corners will be identified with each other. (fastest) 81 | - edge: Polygons of the same `class_label` which overlap or touch in their 82 | edges will be identified with each other. 83 | - interior: Polygons of the same `class_label` which overlap will be 84 | identified with each other. Touching corners or edges are not counted. 85 | 86 | Returns: 87 | mapping (Dict[int, np.ndarray]): A dictionary that represents the map from 88 | elements of `df1` to `df2`. 89 | """ 90 | 91 | assert df1.crs == df2.crs, "CRS systems do not agree." 92 | mapping = {index1: [] for index1 in df1.index} 93 | 94 | progress_bar = tqdm.tqdm(df1.index, desc="Identifying nodes") 95 | for index in progress_bar: # TODO: Speed up & enable trivial parallelisation 96 | mapping[index] = identify_node(df1.loc[index], df2, mode=mode) 97 | 98 | return mapping 99 | 100 | 101 | def merge_diagonally_connected_polygons(df: gpd.GeoDataFrame) -> gpd.GeoDataFrame: 102 | """ 103 | Return a new dataframe with all geometries of `df` which touch at corners merged. 104 | 105 | Merged geometries will be of type shapely.geometry.MultiPolygon 106 | 107 | Args: 108 | df (gpd.GeoDataFrame): The dataframe to analyse for geometries which touch 109 | at corners 110 | 111 | Returns: 112 | gpd.GeoDataFrame: The dataframe with patches that touch at corners merged 113 | """ 114 | 115 | # Identify the nodes that will be merged 116 | mapping = identify_dfs(df, df, mode="corner") 117 | mapping_graph = nx.from_dict_of_lists(mapping) 118 | nodes_to_merge = [ 119 | list(group) 120 | for group in nx.algorithms.connected_components(mapping_graph) 121 | if len(group) > 1 122 | ] 123 | 124 | # Remove nodes that will be merged 125 | nodes_to_merge_flattened = [item for sublist in nodes_to_merge for item in sublist] 126 | new_df = df.drop(nodes_to_merge_flattened) 127 | 128 | # Add the new, merged nodes and reset the index 129 | new_nodes = {"geometry": [], "class_label": []} 130 | for nodes in nodes_to_merge: 131 | new_nodes["class_label"].append(df["class_label"].loc[nodes[0]]) 132 | new_nodes["geometry"].append(MultiPolygon(df["geometry"].loc[nodes].values)) 133 | 134 | return new_df.append(gpd.GeoDataFrame(new_nodes), ignore_index=True) 135 | -------------------------------------------------------------------------------- /geograph/utils/polygon_utils.py: -------------------------------------------------------------------------------- 1 | """Helper functions for overlap computations with polygons in shapely.""" 2 | from typing import List 3 | from geopandas.array import GeometryArray 4 | from numpy import ndarray 5 | from shapely.geometry.polygon import Polygon 6 | 7 | 8 | # Note: All DE-9IM patterns below are streamlined to work well with polygons. 9 | # They are not guaranteed to work on lower dimensional objects (points/lines) 10 | CORNER_ONLY_PATTERN = "FF*F0****" 11 | EDGE_ONLY_PATTERN = "FF*F1****" 12 | # Note: since we deal with polygons only, we can use a simplified overlap pattern: 13 | # If polygons overlap with more than just their edge, they will automatically overlap 14 | # with their interiors 15 | OVERLAP_PATTERN = "T********" 16 | 17 | # Create empty polygon 18 | EMPTY_POLYGON = Polygon() 19 | 20 | 21 | def de9im_match(pattern: str, target_pattern: str) -> bool: 22 | """ 23 | Check a DE-9IM pattern `pattern` against a target DE-9IM pattern. 24 | 25 | Note: 26 | To enable maximal speed, patterns are not parsed for correctness. For 27 | correct patterns consult https://en.wikipedia.org/wiki/DE-9IM. 28 | 29 | Args: 30 | pattern (str): DE-9IM pattern to check as string 31 | target_pattern (str): DE-9IM pattern against which to check as string 32 | 33 | Returns: 34 | bool: True, iff pattern matches with target_pattern 35 | """ 36 | for char, target_char in zip(pattern, target_pattern): 37 | if target_char == "*": 38 | continue 39 | elif target_char == "T" and char in "012": 40 | continue 41 | elif char == target_char: 42 | continue 43 | else: 44 | return False 45 | return True 46 | 47 | 48 | def connect_with_interior_or_edge_or_corner( 49 | polygon1: Polygon, polygon2: Polygon 50 | ) -> bool: 51 | """ 52 | Return True iff `polygon1` and `polygon2` overlap in interior, edges or corners. 53 | 54 | Args: 55 | polygon1 (Polygon): A shapely Polygon 56 | polygon2 (Polygon): The other shapely Polygon 57 | 58 | Returns: 59 | bool: True, iff `polygon1` and `polygon2` intersect. 60 | """ 61 | return polygon1.intersects(polygon2) 62 | 63 | 64 | def connect_with_interior_or_edge(polygon1: Polygon, polygon2: Polygon) -> bool: 65 | """ 66 | Return True iff `polygon1` and `polygon2` overlap in interior/edge, but not corner. 67 | 68 | Args: 69 | polygon1 (Polygon): A shapely Polygon 70 | polygon2 (Polygon): The other shapely Polygon 71 | 72 | Returns: 73 | bool: True, iff `polygon1` and `polygon2` overlap in their interior/edge. 74 | """ 75 | pattern = polygon1.relate(polygon2) 76 | return de9im_match(pattern, EDGE_ONLY_PATTERN) or de9im_match( 77 | pattern, OVERLAP_PATTERN 78 | ) 79 | 80 | 81 | def connect_with_interior(polygon1: Polygon, polygon2: Polygon) -> bool: 82 | """ 83 | Return True iff `polygon1` and `polygon2` overlap in interior, but not edge/corner. 84 | 85 | Args: 86 | polygon1 (Polygon): A shapely Polygon 87 | polygon2 (Polygon): The other shapely Polygon 88 | 89 | Returns: 90 | bool: True, iff `polygon1` and `polygon2` overlap in their interior. 91 | """ 92 | return polygon1.relate_pattern(polygon2, OVERLAP_PATTERN) 93 | 94 | 95 | def connect_with_interior_or_edge_or_corner_bulk( 96 | polygon: Polygon, polygon_array: GeometryArray 97 | ) -> ndarray: 98 | """ 99 | Return boolean array with True iff polygons overlap in interior, edges or corners. 100 | 101 | Args: 102 | polygon (Polygon): A shapely Polygon 103 | polygon_array (GeometryArray): The other shapely Polygons in a geopandas 104 | geometry array 105 | 106 | Returns: 107 | np.array: Boolean array with value True, iff `polygon` and the polygon in 108 | `polygon_array` at the given location intersect. 109 | """ 110 | return polygon_array.intersects(polygon) 111 | 112 | 113 | def connect_with_interior_or_edge_bulk( 114 | polygon: Polygon, polygon_array: GeometryArray 115 | ) -> List[bool]: 116 | """ 117 | Return boolean array with True iff polys overlap in interior/edge, but not corner. 118 | 119 | Args: 120 | polygon (Polygon): A shapely Polygon 121 | polygon_array (GeometryArray): The other shapely Polygons in a geopandas 122 | geometry array 123 | 124 | Returns: 125 | List[bool]: Boolean array with value True, iff `polygon` and the polygon in 126 | `polygon_array` at the given location overlap in their interior/edge. 127 | """ 128 | patterns = polygon_array.relate(polygon) 129 | return [ 130 | de9im_match(pattern, EDGE_ONLY_PATTERN) or de9im_match(pattern, OVERLAP_PATTERN) 131 | for pattern in patterns 132 | ] 133 | 134 | 135 | def connect_with_interior_bulk( 136 | polygon: Polygon, polygon_array: GeometryArray 137 | ) -> List[bool]: 138 | """ 139 | Return boolean array with True iff polys overlap in interior, but not corner/edge. 140 | 141 | Args: 142 | polygon (Polygon): A shapely Polygon 143 | polygon_array (GeometryArray): The other shapely Polygons in a geopandas 144 | geometry array 145 | 146 | Returns: 147 | List[bool]: Boolean array with value True, iff `polygon` and the polygon in 148 | `polygon_array` at the given location overlap in their interior. 149 | """ 150 | patterns = polygon_array.relate(polygon) 151 | return [de9im_match(pattern, OVERLAP_PATTERN) for pattern in patterns] 152 | 153 | 154 | def collapse_empty_polygon(polygon: Polygon) -> Polygon: 155 | """ 156 | Collapse `polygon` to an `EMPTY_POLYGON` if it is empty. 157 | 158 | Args: 159 | polygon (Polygon): The polygon to collapse if empty 160 | 161 | Returns: 162 | Polygon: Either the original, unchanges polygon or an empty polygon 163 | """ 164 | if polygon.is_empty: 165 | return EMPTY_POLYGON 166 | else: 167 | return polygon 168 | -------------------------------------------------------------------------------- /geograph/utils/rasterio_utils.py: -------------------------------------------------------------------------------- 1 | """A collection of utility functions for data loading with rasterio.""" 2 | from typing import Iterable, Optional, Tuple, Union 3 | import affine 4 | import geograph.utils.geopandas_utils as gpd_utils 5 | import geopandas as gpd 6 | import numpy as np 7 | from rasterio.crs import CRS 8 | from rasterio.features import shapes 9 | from rasterio.io import DatasetReader 10 | 11 | 12 | class CoordinateSystemError(Exception): 13 | """Basic exception for coordinate system errors.""" 14 | 15 | 16 | class InvalidUseError(Exception): 17 | """Basic exception for invalid usage of functions.""" 18 | 19 | 20 | def get_thumbnail( 21 | data: DatasetReader, 22 | band_idx: Optional[int] = 1, 23 | height: Optional[int] = None, 24 | width: Optional[int] = None, 25 | ) -> np.ndarray: 26 | """ 27 | Calculate a thumbnail for a given band of a rasterio data. 28 | 29 | Args: 30 | data (DatasetReader): rasterio data handle 31 | band_idx (int, optional): The band index for which to calculate the 32 | thumbnail. Defaults to 1. 33 | height (int, optional): The desired height of the thumbnail. If only the 34 | height is set, the width will be automatically determined from the datas 35 | aspect ratio. Defaults to 100. 36 | width (int, optional): The desired width of the thumbnail. Defaults to 37 | None. 38 | 39 | Returns: 40 | np.ndarray: The 2D numpy array representing the thumbnail as calculated 41 | from nearest neighbour resampling. 42 | """ 43 | aspect_ratio = data.height / data.width 44 | if height is None and width is None: 45 | height = 100 46 | elif height is not None and width is None: 47 | width = int(height / aspect_ratio) 48 | elif width is not None and height is None: 49 | height = int(width * aspect_ratio) 50 | 51 | # Output height and/or width must be specified. 52 | assert height > 0 and width > 0 53 | 54 | return data.read(band_idx, out_shape=(int(height), int(width))) 55 | 56 | 57 | def read_from_lat_lon( 58 | data: DatasetReader, 59 | band_idxs: Union[int, Iterable[int]], 60 | lat: Tuple[float, float], 61 | lon: Tuple[float, float], 62 | **kwargs, 63 | ) -> np.ndarray: 64 | """ 65 | Read in a tile of raster data form specified latitude and longitude values. 66 | 67 | Note: This function only works if `data` is provided in the WGS geographical 68 | coordinate system (Note: WGS84 = EPSG4326). 69 | 70 | Args: 71 | data (DatasetReader): rasterio data handle 72 | band_idxs (Union[int, Iterable[int]]): The band index or indices for which to 73 | read the information from the underlying rasterio `data`. 74 | lat (Tuple[float]): A tuple containing (latitude_min, latitude_max). 75 | Latitudes must be in the range (-90, 90). 76 | lon (Tuple[float]): A tuple containing (longitude_min, longitude_max). 77 | Longitudes must be in the range (-180, 180). 78 | 79 | Returns: 80 | np.ndarray: A multidimensional numpy array containing the specified bands in 81 | the given latitude, longitude bounds. 82 | """ 83 | # Check that geographical cooridnate reference system of the data is 84 | # WGS84 (i.e. epsg 4326). If this is not the case, prompt user to retransform. 85 | if not data.crs == CRS.from_epsg(4326): 86 | raise CoordinateSystemError( 87 | "Latitude, Longitude based reading requires EPSG 4326 (WGS84) coordinate " 88 | f"reference system. Current CRS is {data.crs}. To use this method, first " 89 | "transform your data to EPSG 4326." 90 | ) 91 | 92 | lat_min, lat_max = lat 93 | lon_min, lon_max = lon 94 | 95 | # Check if latitude and longitude are within physical bounds 96 | if not -90 <= lat_min < lat_max <= 90: 97 | raise InvalidUseError( 98 | "Latitudinal coordinates must be between -90 and 90 degrees" 99 | ) 100 | if not -180 <= lon_min < lon_max <= 180: 101 | raise InvalidUseError( 102 | "Longitudinal coordinates must be between -180 and 180 degrees" 103 | ) 104 | 105 | # Note: rows increase from top to bottom, latitude increases from bottom to top 106 | row_min, col_min = data.index(lon_min, lat_max) # index swaps values internally 107 | row_max, col_max = data.index(lon_max, lat_min) 108 | 109 | # Read specified region 110 | return data.read( 111 | indexes=band_idxs, window=((row_min, row_max), (col_min, col_max)), **kwargs 112 | ) 113 | 114 | 115 | def polygonise( 116 | data_array: np.ndarray, 117 | mask: Optional[np.ndarray] = None, 118 | transform: affine.Affine = affine.identity, 119 | crs: Optional[str] = None, 120 | connectivity: int = 4, 121 | apply_buffer: bool = True, 122 | ): 123 | """ 124 | Convert 2D numpy array containing raster data into polygons. 125 | 126 | This implementation uses rasterio.features.shapes, which uses GDALpolygonize 127 | under the hood. 128 | 129 | References: 130 | (1) https://rasterio.readthedocs.io/en/latest/api/rasterio.features.html 131 | (2) https://gdal.org/programs/gdal_polygonize.html 132 | 133 | Args: 134 | data_array (np.ndarray): 2D numpy array with the raster data. 135 | mask (np.ndarray, optional): Boolean mask that can be applied over 136 | the polygonisation. Defaults to None. 137 | transform (affine.Affine, optional): Affine transformation to apply 138 | when polygonising. Defaults to the identity transform. 139 | crs (str, optional): Coordinate reference system to set on the 140 | resulting dataframe. Defaults to None. 141 | connectivity (int, optional): Use 4 or 8 pixel connectivity for 142 | grouping pixels into features. Defaults to 4. 143 | apply_buffer (bool, optional): Apply shapely buffer function to the 144 | polygons after polygonising. This can fix issues with the 145 | polygonisation creating invalid geometries. 146 | 147 | 148 | Returns: 149 | gpd.GeoDataFrame: GeoDataFrame containing polygon objects. 150 | """ 151 | assert connectivity in (4, 8) 152 | # Note: we handle connectivity=8 differently due to issues with self intersecting 153 | # polygons returned from shapely. Instead of using connectivity=8 we use 154 | # the stable connectivity=4 and post-process the polygons to achieve connectivity=8 155 | # with valid geometries. 156 | polygon_generator = shapes( 157 | data_array, mask=mask, connectivity=4, transform=transform 158 | ) 159 | results = list( 160 | {"properties": {"class_label": int(val)}, "geometry": shape} 161 | for shape, val in polygon_generator 162 | ) 163 | df = gpd.GeoDataFrame.from_features(results, crs=crs) 164 | 165 | if apply_buffer: 166 | # Redraw geometries to ensure polygons are valid. 167 | df.geometry = df.geometry.buffer(0) 168 | 169 | if connectivity == 8: 170 | df = gpd_utils.merge_diagonally_connected_polygons(df) 171 | 172 | return df 173 | -------------------------------------------------------------------------------- /geograph/visualisation/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ai4er-cdt/geograph/e9436a3a73d9b96bc2c7e139d65a518e0b48c28d/geograph/visualisation/__init__.py -------------------------------------------------------------------------------- /geograph/visualisation/folium_utils.py: -------------------------------------------------------------------------------- 1 | """Module with utility functions to plot graphs in folium.""" 2 | from __future__ import annotations 3 | from typing import Callable, List, Optional, Tuple 4 | import folium 5 | import geograph 6 | import geopandas as gpd 7 | from geograph.constants import CHERNOBYL_COORDS_WGS84, UTM35N 8 | from geograph.visualisation import graph_utils 9 | 10 | 11 | def add_graph_to_folium_map( 12 | folium_map: folium.Map = None, 13 | polygon_gdf: gpd.GeoDataFrame = None, 14 | color_column: str = "index", 15 | graph: Optional[geograph.GeoGraph] = None, 16 | name: str = "data", 17 | folium_tile_list: Optional[List[str]] = None, 18 | location: Tuple[float, float] = CHERNOBYL_COORDS_WGS84, 19 | crs: str = UTM35N, 20 | add_layer_control: bool = False, 21 | ) -> folium.Map: 22 | """Create a visualisation map of the given polygons and `graph` in folium. 23 | 24 | The polygons in `polygon_gdf` and `graph` are displayed on a folum map. 25 | It is intended that the graph was build from `polygon_gdf`, but it is not required. 26 | If given `map`, it will be put on this existing folium map. 27 | 28 | Args: 29 | folium_map (folium.Map, optional): map to add polygons and graph to. 30 | Defaults to None. 31 | polygon_gdf (gpd.GeoDataFrame, optional): data containing polygon. 32 | Defaults to None. 33 | color_column (str, optional): column in polygon_gdf that determines which color 34 | is given to each polygon. Can be categorical values. Defaults to "index". 35 | graph (Optional[geograph.GeoGraph], optional): graph to be plotted. 36 | Defaults to None. 37 | name (str, optional): prefix to all the folium layer names shown in layer 38 | control of map (if added). Defaults to "data". 39 | folium_tile_list (Optional[List[str]], optional): list of folium.Map tiles to be 40 | add to the map. See folium.Map docs for options. Defaults to None. 41 | location (Tuple[float, float], optional): starting location in WGS84 coordinates 42 | Defaults to CHERNOBYL_COORDS_WGS84. 43 | crs (str, optional): coordinates reference system to be used. 44 | Defaults to UTM35N. 45 | add_layer_control (bool, optional): whether to add layer controls to map. 46 | Warning: only use this when you don't intend to add any additional data 47 | after calling this function to the map. May cause bugs otherwise. 48 | Defaults to False. 49 | 50 | Returns: 51 | folium.Map: map with polygons and graph displayed as described 52 | """ 53 | 54 | if folium_tile_list is None: 55 | folium_tile_list = ["OpenStreetMap"] 56 | 57 | if folium_map is None: 58 | folium_map = folium.Map(location, zoom_start=8, tiles=folium_tile_list.pop(0)) 59 | 60 | # Adding standard folium raster tiles 61 | for tiles in folium_tile_list: 62 | # special esri satellite data case 63 | if tiles == "esri": 64 | folium.TileLayer( 65 | tiles=( 66 | "https://server.arcgisonline.com/ArcGIS/rest/" 67 | "services/World_Imagery/MapServer/tile/{z}/{y}/{x}" 68 | ), 69 | attr="esri", 70 | name="esri satellite", 71 | overlay=False, 72 | control=True, 73 | ).add_to(folium_map) 74 | else: 75 | folium.TileLayer(tiles=tiles).add_to(folium_map) 76 | 77 | # Adding polygon data 78 | if polygon_gdf is not None: 79 | # creating a color index that maps each category 80 | # in the color_column to an integer 81 | polygon_gdf["index"] = polygon_gdf.index 82 | polygon_gdf["color_index"] = ( 83 | polygon_gdf[color_column].astype("category").cat.codes.astype("int64") 84 | ) 85 | 86 | choropleth = folium.Choropleth( 87 | polygon_gdf, 88 | data=polygon_gdf, 89 | key_on="feature.properties.index", 90 | columns=["index", "color_index"], 91 | fill_color="YlOrBr", 92 | name=name + "_polygons", 93 | ) 94 | choropleth = remove_choropleth_color_legend(choropleth) 95 | choropleth.add_to(folium_map) 96 | 97 | # adding popup markers with class name 98 | folium.features.GeoJsonPopup(fields=[color_column], labels=True).add_to( 99 | choropleth.geojson 100 | ) 101 | 102 | # Adding graph data 103 | if graph is not None: 104 | node_gdf, edge_gdf = graph_utils.create_node_edge_geometries(graph, crs=crs) 105 | 106 | # add graph edges to map 107 | if not edge_gdf.empty: 108 | edges = folium.features.GeoJson( 109 | edge_gdf, 110 | name=name + "_graph_edges", 111 | style_function=get_style_function("#dd0000"), 112 | ) 113 | edges.add_to(folium_map) 114 | 115 | # add graph nodes/vertices to map 116 | node_marker = folium.vector_layers.Circle(radius=100, color="black") 117 | nodes = folium.features.GeoJson( 118 | node_gdf, marker=node_marker, name=name + "_graph_vetrices" 119 | ) 120 | nodes.add_to(folium_map) 121 | 122 | if add_layer_control: 123 | folium.LayerControl().add_to(folium_map) 124 | 125 | return folium_map 126 | 127 | 128 | def get_style_function(color: str = "#ff0000") -> Callable[[], dict]: 129 | """Return lambda function that returns a dict with the `color` given. 130 | 131 | The returned lambda function can be used as a style function for folium. 132 | 133 | Args: 134 | color (str, optional): color to be used in dict. Defaults to "#ff0000". 135 | 136 | Returns: 137 | Callable[[], dict]: style function 138 | """ 139 | 140 | return lambda x: {"fillColor": color, "color": color} 141 | 142 | 143 | def add_cez_to_map( 144 | folium_map: folium.Map, 145 | exclusion_json_path: Optional[str] = None, 146 | add_layer_control: bool = False, 147 | ) -> folium.Map: 148 | """Add polygons of the Chernobyl Exclusion Zone (CEZ) to a folium map. 149 | 150 | Args: 151 | folium_map (folium.Map): [description] 152 | exclusion_json_path (Optional[str], optional): path to the json file containing 153 | the CEZ polygons. Defaults to None. 154 | add_layer_control (bool, optional): whether to add layer controls to map. 155 | Warning: only use this when you don't intend to add any additional data 156 | after calling this function to the map. May cause bugs otherwise. 157 | Defaults to False. 158 | Returns: 159 | folium.Map: map with CEZ polygons added 160 | """ 161 | 162 | exc_data = gpd.read_file(exclusion_json_path) 163 | 164 | colors = ["#808080", "#ffff99", "#ff9933", "#990000", "#ff0000", "#000000"] 165 | 166 | for index, row in exc_data.iterrows(): 167 | folium.GeoJson( 168 | row["geometry"], 169 | name=row["name"], 170 | style_function=get_style_function(colors[index]), 171 | ).add_to(folium_map) 172 | 173 | if add_layer_control: 174 | folium.LayerControl().add_to(folium_map) 175 | 176 | return folium_map 177 | 178 | 179 | def remove_choropleth_color_legend( 180 | choropleth_map: folium.features.Choropleth, 181 | ) -> folium.features.Choropleth: 182 | """Remove color legend from Choropleth folium map. 183 | 184 | Solution proposed by `nhpackard` in the following GitHub issue in the folium repo: 185 | https://github.com/python-visualization/folium/issues/956 186 | 187 | Args: 188 | choropleth_map (folium.features.Choropleth): a Choropleth map 189 | 190 | Returns: 191 | folium.features.Choropleth: the same map without color legend 192 | """ 193 | for key in choropleth_map._children: # pylint: disable=protected-access 194 | if key.startswith("color_map"): 195 | del choropleth_map._children[key] # pylint: disable=protected-access 196 | 197 | return choropleth_map 198 | -------------------------------------------------------------------------------- /geograph/visualisation/geoviewer.py: -------------------------------------------------------------------------------- 1 | """This module contains the GeoGraphViewer to visualise GeoGraphs""" 2 | from __future__ import annotations 3 | import logging 4 | import threading 5 | import time 6 | from typing import TYPE_CHECKING, List, Optional, Union 7 | import folium 8 | import geograph 9 | import ipyleaflet 10 | import ipywidgets as widgets 11 | import pandas as pd 12 | import traitlets 13 | from geograph import metrics 14 | from geograph.constants import CHERNOBYL_COORDS_WGS84, WGS84 15 | from geograph.visualisation import ( 16 | control_widgets, 17 | folium_utils, 18 | graph_utils, 19 | style, 20 | widget_utils, 21 | ) 22 | 23 | if TYPE_CHECKING: 24 | import geopandas as gpd 25 | 26 | 27 | class GeoGraphViewer(ipyleaflet.Map): 28 | """Class for interactively viewing a GeoGraph.""" 29 | 30 | def __init__( 31 | self, 32 | center: List[int, int] = CHERNOBYL_COORDS_WGS84, 33 | zoom: int = 7, 34 | layout: Union[widgets.Layout, None] = None, 35 | metric_list: Optional[List[str]] = None, 36 | small_screen: bool = True, 37 | logging_level: str = "WARNING", 38 | max_log_len: int = 20, 39 | layer_update_delay: float = 0.0, 40 | **kwargs, 41 | ) -> None: 42 | """Class for interactively viewing a GeoGraph. 43 | 44 | Args: 45 | center (List[int, int], optional): center of the map. Defaults to 46 | CHERNOBYL_COORDS_WGS84. 47 | zoom (int, optional): initial zoom level. Defaults to 7. 48 | layout (Union[widgets.Layout, None], optional): layout passed to 49 | ipyleaflet.Map. Defaults to None. 50 | metric_list (List[str], optional): list of GeoGraph metrics to be shown. 51 | Defaults to None. 52 | small_screen (bool, optional): whether to reduce the control widget height 53 | for better usability on smaller screens. Defaults to True. 54 | logging_level (str, optional): python logging level. Defaults to 55 | "WARNING". 56 | max_log_len (int, optional): how many log messages should be displayed in 57 | in log tab. Note that a long log may slow down the viewer. 58 | Defaults to 20. 59 | layer_update_delay (float, optional): how long the viewer should wait 60 | before updating layer. Whilst waiting other layer update requests 61 | are caught. This reduces the amount of traffic between the client (your 62 | browser) and the python kernel. Experimental. Defaults to 0.0. 63 | 64 | """ 65 | super().__init__( 66 | center=center, 67 | zoom=zoom, 68 | scroll_wheel_zoom=True, 69 | crs=ipyleaflet.projections.EPSG3857, # EPSG code for WGS84 CRS 70 | zoom_snap=0.1, 71 | **kwargs, 72 | ) 73 | # There seems to be no easy way to add UTM35N to ipyleaflet.Map(), hence WGS84. 74 | self.gpd_crs_code = WGS84 75 | self.small_screen = small_screen 76 | self.layer_update_delay = layer_update_delay 77 | 78 | if metric_list is None: 79 | self.metrics = metrics.STANDARD_METRICS 80 | else: 81 | self.metrics = metric_list 82 | if layout is None: 83 | self.layout = widgets.Layout(height="700px") 84 | 85 | # Setting log with handler, allows access to log via handler.show_logs() 86 | self.logger = logging.getLogger(type(self).__name__) 87 | self.logger.setLevel(logging_level) 88 | self.log_handler = widget_utils.OutputWidgetHandler(max_len=max_log_len) 89 | self.logger.addHandler(self.log_handler) 90 | 91 | default_map_layer = ipyleaflet.TileLayer( 92 | url="https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png", 93 | base=True, 94 | max_zoom=19, 95 | min_zoom=4, 96 | ) 97 | 98 | # Note: entries in layer_dict follow the convention: 99 | # ipywidgets_layer = layer_dict[type][name][subtype]["layer"] 100 | # Layers of type "maps" only have subtype "map". 101 | # The layer_dict overrules the ipyleaflet.Map() attribute .layers 102 | self.layer_dict = dict( 103 | maps=dict( 104 | OpenStreetMap=dict(map=dict(layer=default_map_layer, active=True)) 105 | ), 106 | graphs=dict(), 107 | ) 108 | self.layer_style = style.DEFAULT_LAYER_STYLE 109 | 110 | self.graph_subtypes = [ 111 | "pgons", 112 | "graph", 113 | "components", 114 | "disconnected_nodes", 115 | "poorly_connected_nodes", 116 | "node_dynamics", 117 | "node_change", 118 | ] 119 | 120 | # Setting the current view of graph and map as traits. Together with layer_dict 121 | # these two determine the state of the widget. 122 | self.add_traits( 123 | current_graph=traitlets.Unicode().tag(sync=True), 124 | current_map=traitlets.Unicode().tag(sync=True), 125 | ) 126 | self.current_graph = "" 127 | self.current_map = "Map" # set to the default map added above 128 | self.layer_update_requested = False 129 | 130 | self.logger.info("Viewer successfully initialised.") 131 | 132 | def set_layer_visibility( 133 | self, layer_type: str, layer_name: str, layer_subtype: str, active: bool 134 | ) -> None: 135 | """Set visiblity for a specific layer 136 | 137 | Set the visibility for layer in 138 | `layer_dict[layer_type][layer_name][layer_subtype]`. 139 | 140 | Args: 141 | layer_type (str): type of layer (e.g. "maps","graphs") 142 | layer_name (str): name of layer 143 | layer_subtype (str): subtype of layer (e.g. "map","components") 144 | active (bool): whether layer is activate (=visible) 145 | """ 146 | self.logger.debug( 147 | "Set visibility of %s: %s to %s", layer_name, layer_subtype, active 148 | ) 149 | self.layer_dict[layer_type][layer_name][layer_subtype]["active"] = active 150 | 151 | def hide_all_layers(self) -> None: 152 | """Hide all layers in self.layer_dict.""" 153 | for layer_type, type_dict in self.layer_dict.items(): 154 | for layer_name in type_dict: 155 | if layer_type == "maps": 156 | self.set_layer_visibility(layer_type, layer_name, "map", False) 157 | elif layer_type == "graphs": 158 | for layer_subtype in self.graph_subtypes: 159 | self.set_layer_visibility( 160 | layer_type, layer_name, layer_subtype, False 161 | ) 162 | self.layer_update() 163 | 164 | def add_layer(self, layer: Union[dict, ipyleaflet.Layer], name=None) -> None: 165 | """Add a layer on the map. 166 | 167 | Args: 168 | layer (Layer instance): the new layer to add 169 | name (str): name for the layer. This shows up in viewer control widgets. 170 | """ 171 | if isinstance(layer, dict): 172 | if name is None: 173 | name = layer["name"] 174 | layer = ipyleaflet.basemap_to_tiles(layer) 175 | else: 176 | if name is None: 177 | name = layer.name 178 | if layer.model_id in self._layer_ids or name in self.layer_dict["maps"].keys(): 179 | raise ipyleaflet.LayerException( 180 | "layer with same name already on map, change name argument: %r" % layer 181 | ) 182 | 183 | self.layer_dict["maps"][name] = dict(map=dict(layer=layer, active=True)) 184 | self.layer_update() 185 | 186 | def add_graph( 187 | self, 188 | graph: geograph.GeoGraph, 189 | name: str = "Graph", 190 | with_components: bool = True, 191 | ) -> None: 192 | """Add GeoGraph to viewer. 193 | 194 | Args: 195 | graph (geograph.GeoGraph): graph to be added 196 | name (str, optional): name shown in control panel. Defaults to "Graph". 197 | with_components(bool, optional): Iff True the graph components are 198 | calculated. Warning, this can make the loading of the viewer slow. 199 | Defaults to True. 200 | """ 201 | self.logger.info("Started adding GeoGraph.") 202 | if name in graph.habitats.keys(): 203 | raise ValueError( 204 | "Name given cannot be same as habitat name in given GeoGraph." 205 | ) 206 | if name in self.layer_dict["graphs"]: 207 | raise ValueError( 208 | "Graph with the same name already added to GeoGraphViewer." 209 | ) 210 | 211 | graphs = {name: graph, **graph.habitats} 212 | 213 | for idx, (current_name, current_graph) in enumerate(graphs.items()): 214 | self.logger.info( 215 | "Started adding graph %s of %s: %s", idx + 1, len(graphs), current_name 216 | ) 217 | 218 | # Calculate patch metrics for current graph 219 | current_graph.get_patch_metrics() 220 | is_habitat = not current_name == name 221 | 222 | # Creating layer with geometries representing graph on map 223 | self.logger.debug("Creating graph geometries layer (graph_geo_data).") 224 | nodes, edges = graph_utils.create_node_edge_geometries( 225 | current_graph, crs=self.gpd_crs_code 226 | ) 227 | graph_geo_data = ipyleaflet.GeoData( 228 | geo_dataframe=edges.append(nodes) 229 | .to_frame(name="geometry") 230 | .reset_index(), 231 | name=current_name + "_graph", 232 | **self.layer_style["graph"], 233 | ) 234 | 235 | # Creating choropleth layer for patch polygons 236 | self.logger.debug("Creating patch polygons layer (pgon_choropleth).") 237 | pgon_choropleth = self._get_choropleth_from_df( 238 | current_graph.df, colname="class_label", **self.layer_style["pgons"] 239 | ) 240 | 241 | # Creating choropleth layer for node identification 242 | if "node_dynamic" in current_graph.df.columns: 243 | self.logger.debug("Adding node dynamics layer.") 244 | dynamics_choropleth = self._get_choropleth_from_df( 245 | graph_utils.map_dynamic_to_int(current_graph.df), 246 | colname="dynamic_class", 247 | **style._NODE_DYNAMICS_STYLE, # pylint: disable=protected-access 248 | ) 249 | abs_growth_choropleth = self._get_choropleth_from_df( 250 | current_graph.df, 251 | colname="absolute_growth", 252 | **style._ABS_GROWTH_STYLE, # pylint: disable=protected-access 253 | ) 254 | else: 255 | dynamics_choropleth = None 256 | abs_growth_choropleth = None 257 | 258 | # Creating layer for graph components 259 | if with_components: 260 | self.logger.debug("Creating components layer (component_choropleth).") 261 | component_df = current_graph.get_graph_components( 262 | calc_polygons=True 263 | ).df.copy() 264 | if is_habitat: 265 | component_df.geometry = component_df.geometry.buffer( 266 | current_graph.max_travel_distance 267 | ) 268 | component_choropleth = ipyleaflet.GeoData( 269 | geo_dataframe=component_df.to_crs(WGS84), 270 | name=current_name + "_components", 271 | **self.layer_style["components"], 272 | ) 273 | else: 274 | component_choropleth = None 275 | 276 | # Creating layer for disconnected (no-edge) nodes 277 | self.logger.debug( 278 | "Creating disconnected node layer (discon_nodes_geo_data)." 279 | ) 280 | disconnected = [ 281 | node 282 | for node in current_graph.graph.nodes() 283 | if current_graph.graph.degree[node] == 0 284 | ] 285 | discon_nodes_geo_data = ipyleaflet.GeoData( 286 | geo_dataframe=nodes.loc[disconnected].to_frame(name="geometry"), 287 | name=current_name + "_disconnected_nodes", 288 | **self.layer_style["disconnected_nodes"], 289 | ) 290 | 291 | # Creating layer for poorly connected (one-edge) nodes 292 | self.logger.debug( 293 | "Creating poorly connected node layer (poorly_con_nodes_geo_data)." 294 | ) 295 | poorly_connected = [ 296 | node 297 | for node in current_graph.graph.nodes() 298 | if current_graph.graph.degree[node] == 1 299 | ] 300 | poorly_con_nodes_geo_data = ipyleaflet.GeoData( 301 | geo_dataframe=nodes.loc[poorly_connected].to_frame(name="geometry"), 302 | name=current_name + "_poorly_connected_nodes", 303 | **self.layer_style["poorly_connected_nodes"], 304 | ) 305 | 306 | # Getting graph metrics 307 | self.logger.debug("Add graph metrics.") 308 | graph_metrics = [] 309 | for metric in self.metrics: 310 | graph_metrics.append( 311 | current_graph.get_metric(metric) 312 | ) # pylint: disable=protected-access 313 | 314 | # Combining all layers and adding them to layer_dict 315 | self.logger.debug("Assembling layer dict (layer).") 316 | layer = dict( 317 | is_habitat=is_habitat, 318 | graph=dict(layer=graph_geo_data, active=True), 319 | pgons=dict(layer=pgon_choropleth, active=True), 320 | components=dict(layer=component_choropleth, active=False), 321 | disconnected_nodes=dict(layer=discon_nodes_geo_data, active=False), 322 | poorly_connected_nodes=dict( 323 | layer=poorly_con_nodes_geo_data, active=False 324 | ), 325 | node_dynamics=dict(layer=dynamics_choropleth, active=False), 326 | node_change=dict(layer=abs_growth_choropleth, active=False), 327 | metrics=graph_metrics, 328 | original_graph=current_graph, 329 | ) 330 | if is_habitat: 331 | layer["parent"] = name 332 | 333 | self.layer_dict["graphs"][current_name] = layer 334 | self.logger.info("Finished adding graph: %s.", current_name) 335 | 336 | self.current_graph = name 337 | self.layer_update() 338 | self.logger.info("Added graph.") 339 | 340 | def _get_choropleth_from_df( 341 | self, df: gpd.GeoDataFrame, colname: str = "class_label", **choropleth_args 342 | ) -> ipyleaflet.Choropleth: 343 | """Create ipyleaflet.Choropleth from GeoDataFrame of polygons. 344 | 345 | Args: 346 | df (gpd.GeoDataFrame): dataframe to visualise 347 | colname (str): name of the column to display as choropleth data 348 | **choropleth_args: Keywordarguments passed to `ipyleaflet.Choropleth`. 349 | 350 | Returns: 351 | ipyleaflet.Choropleth: choropleth layer 352 | """ 353 | geo_data = df.to_crs(WGS84).__geo_interface__ # ipyleaflet works with WGS84 354 | if pd.api.types.is_numeric_dtype(df[colname]): 355 | # for numeric types, display the numeric data directly 356 | choro_data = {str(key): val for key, val in df[colname].items()} 357 | else: 358 | # for categorical types, convert to numbers and display those 359 | col_as_categories = df[colname].astype("category").cat.codes 360 | choro_data = {str(key): val for key, val in col_as_categories.items()} 361 | 362 | # create ipyleaflet layer 363 | choropleth_layer = ipyleaflet.Choropleth( 364 | geo_data=geo_data, choro_data=choro_data, **choropleth_args 365 | ) 366 | 367 | return choropleth_layer 368 | 369 | def layer_update(self) -> None: 370 | """Update `self.layer` tuple from `self.layer_dict`.""" 371 | layers = [ 372 | map_layer["map"]["layer"] 373 | for map_layer in self.layer_dict["maps"].values() 374 | if map_layer["map"]["active"] 375 | ] 376 | for graph in self.layer_dict["graphs"].values(): 377 | for graph_subtype in self.graph_subtypes: 378 | if graph[graph_subtype]["active"]: 379 | layers.append(graph[graph_subtype]["layer"]) 380 | 381 | self.layers = tuple(layers) 382 | self.logger.debug("layer_update() called.") 383 | 384 | def request_layer_update(self): 385 | """Request layer_update to be called. 386 | 387 | After receiving the first request the viewer waits for a set time, and then 388 | executes its layer_update method. If new requests come in whilst this time 389 | is passing no further action is taken. This helps avoid calling layer_update 390 | for each button in control widgets separately, slowing down the viewer. 391 | """ 392 | 393 | if self.layer_update_delay > 0: 394 | self.logger.debug("Layer update requested.") 395 | 396 | if not self.layer_update_requested: 397 | self.layer_update_requested = True 398 | 399 | def wait_and_update(viewer): 400 | time.sleep(self.layer_update_delay) 401 | viewer.layer_update() 402 | viewer.layer_update_requested = False 403 | viewer.logger.debug("Layer update request executed.") 404 | 405 | thread = threading.Thread( 406 | target=wait_and_update, 407 | args=(self,), 408 | ) 409 | thread.start() 410 | else: 411 | pass 412 | else: 413 | self.layer_update() 414 | 415 | def set_graph_style(self, radius: float = 10, node_color: str = None) -> None: 416 | """Set the style of any graphs added to viewer. 417 | 418 | Args: 419 | radius (float): radius of nodes in graph. Defaults to 10. 420 | node_color (str): (CSS) color of graph node (e.g. "blue") 421 | """ 422 | for name, graph in self.layer_dict["graphs"].items(): 423 | layer = graph["graph"]["layer"] 424 | 425 | # Below doesn't work because traitlet change not observed 426 | # layer.point_style['radius'] = radius 427 | 428 | self.layer_style["graph"]["point_style"]["radius"] = radius 429 | self.layer_style["graph"]["style"]["fillColor"] = node_color 430 | layer = ipyleaflet.GeoData( 431 | geo_dataframe=layer.geo_dataframe, 432 | name=layer.name, 433 | **self.layer_style["graph"], 434 | ) 435 | self.layer_dict["graphs"][name]["graph"]["layer"] = layer 436 | self.layer_update() 437 | 438 | def enable_graph_controls(self) -> None: 439 | """Add controls for graphs to GeoGraphViewer.""" 440 | 441 | if not self.layer_dict["graphs"]: 442 | raise AttributeError( 443 | ( 444 | "GeoGraphViewer has no graph. Add graph using viewer.add_graph() " 445 | "method before adding and showing controls." 446 | ) 447 | ) 448 | 449 | # Add combined control widgets to viewer 450 | control_widget = control_widgets.GraphControlWidget(viewer=self) 451 | control = ipyleaflet.WidgetControl(widget=control_widget, position="topright") 452 | self.add_control(control) 453 | 454 | # Add hover widget to viewer 455 | hover_widget = control_widgets.HoverWidget(viewer=self) 456 | hover_control = ipyleaflet.WidgetControl( 457 | widget=hover_widget, position="topright" 458 | ) 459 | self.add_control(hover_control) 460 | 461 | # Add GeoGraph branding 462 | header = widgets.HTML( 463 | """GeoGraph""", layout=widgets.Layout(padding="3px 10px 3px 10px") 464 | ) 465 | self.add_control( 466 | ipyleaflet.WidgetControl(widget=header, position="bottomright") 467 | ) 468 | 469 | # Add default ipyleaflet controls: fullscreen and scale 470 | self.add_control(ipyleaflet.FullScreenControl()) 471 | self.add_control(ipyleaflet.ScaleControl(position="bottomleft")) 472 | 473 | 474 | class FoliumGeoGraphViewer: 475 | """Class for viewing GeoGraph object without ipywidgets""" 476 | 477 | def __init__(self) -> None: 478 | """Class for viewing GeoGraph object without ipywidgets.""" 479 | self.widget = None 480 | 481 | def _repr_html_(self) -> str: 482 | """Return raw html of widget as string. 483 | 484 | This method gets called by IPython.display.display(). 485 | """ 486 | 487 | if self.widget is not None: 488 | return self.widget._repr_html_() # pylint: disable=protected-access 489 | 490 | def add_graph(self, graph: geograph.GeoGraph) -> None: 491 | """Add graph to viewer. 492 | 493 | The added graph is visualised in the viewer. 494 | 495 | Args: 496 | graph (geograph.GeoGraph): GeoGraph to be shown 497 | """ 498 | 499 | self._add_graph_to_folium_map(graph) 500 | 501 | def add_layer_control(self) -> None: 502 | """Add layer control to the viewer.""" 503 | folium.LayerControl().add_to(self.widget) 504 | 505 | def _add_graph_to_folium_map(self, graph: geograph.GeoGraph) -> None: 506 | """Add graph to folium map. 507 | 508 | Args: 509 | graph (geograph.GeoGraph): GeoGraph to be added 510 | """ 511 | self.widget = folium_utils.add_graph_to_folium_map( 512 | folium_map=self.widget, graph=graph 513 | ) 514 | -------------------------------------------------------------------------------- /geograph/visualisation/graph_utils.py: -------------------------------------------------------------------------------- 1 | """This module contains utility function for generally plotting graphs.""" 2 | from __future__ import annotations 3 | from typing import Tuple 4 | import geograph 5 | import geopandas as gpd 6 | import shapely.geometry 7 | from geograph.constants import WGS84 8 | 9 | 10 | def create_node_edge_geometries( 11 | graph: geograph.GeoGraph, 12 | crs: str = WGS84, 13 | ) -> Tuple[gpd.GeoSeries, gpd.GeoSeries]: 14 | """Create node and edge geometries for the networkx graph G. 15 | 16 | Returns node and edge geometries in two GeoDataFrames. The output can be used for 17 | plotting a graph. 18 | 19 | Args: 20 | graph (GeoGraph): graph with nodes and edges 21 | crs (str, optional): coordinate reference system of graph. Defaults to UTM35N. 22 | 23 | Returns: 24 | Tuple[gpd.GeoSeries, gpd.GeoSeries]: dataframes of nodes and edges 25 | respectively. 26 | """ 27 | 28 | node_geoms = graph.df.geometry.to_crs(crs).representative_point() 29 | rep_points = node_geoms.to_dict() 30 | 31 | edge_lines = {} 32 | for idx, (node_a, node_b) in enumerate(graph.graph.edges()): 33 | point_a = rep_points[node_a] 34 | point_b = rep_points[node_b] 35 | edge_lines[idx] = shapely.geometry.LineString([point_a, point_b]) 36 | edge_geoms = gpd.GeoSeries(edge_lines) 37 | 38 | return node_geoms, edge_geoms 39 | 40 | 41 | _NODE_DYNAMIC_TO_INT = { 42 | "split": 0, 43 | "shrank": 1, 44 | "unchanged": 2, 45 | "complex": 3, 46 | "grew": 4, 47 | "merged": 5, 48 | "birth": 6, 49 | } 50 | 51 | _map_dynamic_to_int = lambda x: _NODE_DYNAMIC_TO_INT[x] 52 | 53 | 54 | def map_dynamic_to_int(df: gpd.GeoDataFrame) -> gpd.Series: 55 | df["dynamic_class"] = df["node_dynamic"].map(_map_dynamic_to_int) 56 | return df 57 | -------------------------------------------------------------------------------- /geograph/visualisation/style.py: -------------------------------------------------------------------------------- 1 | """Module providing constants that define style of graph visualisation.""" 2 | import branca.colormap 3 | 4 | _GRAPH_STYLE = dict( 5 | style={"color": "black", "fillColor": "snow", "fillOpacity": 0.7}, 6 | hover_style={"fillOpacity": 0.5}, 7 | point_style={"radius": 10}, 8 | ) 9 | 10 | _PGONS_STYLE = dict( 11 | style={"fillOpacity": 0.8}, 12 | hover_style={"fillOpacity": 0.6}, 13 | border_color="black", 14 | colormap=branca.colormap.linear.YlOrBr_03, # See https://colorbrewer2.org/ 15 | ) 16 | 17 | _COMPONENT_STYLE = dict( 18 | style={"color": "black", "fillColor": "snow", "fillOpacity": 0.7}, 19 | hover_style={"color": "#be3f00", "fillColor": "snow", "fillOpacity": 0.6}, 20 | ) 21 | 22 | _DISCONNECTED_STYLE = dict( 23 | style={"color": "red", "fillColor": "red", "fillOpacity": 0.6}, 24 | hover_style={"fillOpacity": 0.4}, 25 | point_style={"radius": 20}, 26 | ) 27 | 28 | _POORLY_CONNECTED_STYLE = dict( 29 | style={"color": "orange", "fillColor": "orange", "fillOpacity": 0.6}, 30 | hover_style={"fillOpacity": 0.4}, 31 | point_style={"radius": 20}, 32 | ) 33 | 34 | _node_dynamics_cmap = branca.colormap.StepColormap( 35 | colors=[ 36 | "#ff7f00", # split 37 | "#fdbf6f", # shrank 38 | "#f3f3f3", # unchanged 39 | "#a6cee3", # complex 40 | "#b2df8a", # grew 41 | "#33a02c", # merged 42 | "#6a3d9a", # birth 43 | ], 44 | vmin=0, 45 | vmax=6, 46 | ) 47 | _NODE_DYNAMICS_STYLE = dict( 48 | style={"fillOpacity": 0.75, "weight": 0.1}, 49 | hover_style={"fillOpacity": 0.98, "weight": 1}, 50 | colormap=_node_dynamics_cmap, # See https://colorbrewer2.org/ 51 | value_min=0, 52 | value_max=6, 53 | ) 54 | 55 | _abs_growth_cmap = branca.colormap.LinearColormap( 56 | colors=["red", "white", "green"], index=[-10e5, 0, 10e5], vmin=-10e5, vmax=10e5 57 | ) 58 | _ABS_GROWTH_STYLE = dict( 59 | style={"fillOpacity": 0.75, "weight": 0.1}, 60 | hover_style={"fillOpacity": 0.98, "weight": 1}, 61 | colormap=_abs_growth_cmap, # See https://colorbrewer2.org/ 62 | value_min=-10e5, 63 | value_max=10e5, 64 | ) 65 | 66 | DEFAULT_LAYER_STYLE = dict( 67 | graph=_GRAPH_STYLE, 68 | pgons=_PGONS_STYLE, 69 | components=_COMPONENT_STYLE, 70 | disconnected_nodes=_DISCONNECTED_STYLE, 71 | poorly_connected_nodes=_POORLY_CONNECTED_STYLE, 72 | ) 73 | -------------------------------------------------------------------------------- /geograph/visualisation/widget_utils.py: -------------------------------------------------------------------------------- 1 | """Module with utils for logging, debugging and styling ipywidgets.""" 2 | import logging 3 | import IPython.display 4 | import ipywidgets as widgets 5 | 6 | 7 | # Styling widgets 8 | HRULE = widgets.HTML('
') 9 | 10 | 11 | def create_html_header(text: str, level=1) -> widgets.HTML: 12 | """Create html header widget from text.""" 13 | opacity_levels = [1.0, 0.68, 0.55, 0.4] 14 | opacity = opacity_levels[level] 15 | html_template = '{}' 16 | return widgets.HTML( 17 | html_template.format(opacity, text) 18 | ) # "{}".format(text)) 19 | 20 | 21 | # Logging widgets 22 | class OutputWidgetHandler(logging.Handler): 23 | """Custom logging handler sending logs to an output widget. 24 | 25 | Copied with minor adaptations from 26 | https://ipywidgets.readthedocs.io/en/latest/examples/Output%20Widget.html 27 | """ 28 | 29 | def __init__(self, *args, max_len=30, **kwargs): 30 | super().__init__(*args, **kwargs) 31 | self.max_len = max_len 32 | layout = { 33 | "width": "100%", 34 | "border": "1px solid black", 35 | } 36 | self.out = widgets.Output(layout=layout) 37 | formatter = logging.Formatter( 38 | "%(asctime)s - %(name)s - %(levelname)s - %(message)s" 39 | ) 40 | self.setFormatter(formatter) 41 | 42 | def emit(self, record): 43 | """Overload of logging.Handler method""" 44 | formatted_record = self.format(record) 45 | new_output = { 46 | "name": "stdout", 47 | "output_type": "stream", 48 | "text": formatted_record + "\n", 49 | } 50 | if len(self.out.outputs) > self.max_len: 51 | self.out.outputs = self.out.outputs[1:] + (new_output,) 52 | else: 53 | self.out.outputs = self.out.outputs + (new_output,) 54 | 55 | def show_logs(self): 56 | """Show the logs""" 57 | IPython.display.display(self.out) 58 | 59 | def clear_logs(self): 60 | """Clear the current logs""" 61 | self.out.clear_output() 62 | -------------------------------------------------------------------------------- /notebooks/3-demo-geographviewer-polesia.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "unlimited-specialist", 6 | "metadata": {}, 7 | "source": [ 8 | "# Visualising GeoGraph Interactively" 9 | ] 10 | }, 11 | { 12 | "cell_type": "markdown", 13 | "id": "86ba856b-c101-4414-945e-75a016237f7e", 14 | "metadata": {}, 15 | "source": [ 16 | "[![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/ai4er-cdt/gtc-biodiversity/main?filepath=notebooks%2F3-demo-geographviewer-polesia.ipynb)" 17 | ] 18 | }, 19 | { 20 | "cell_type": "markdown", 21 | "id": "be46c56e-6f37-4528-a0de-fb61fa65932d", 22 | "metadata": {}, 23 | "source": [ 24 | "This tutorial shows how to visualise a GeoGraph on an interactive map." 25 | ] 26 | }, 27 | { 28 | "cell_type": "markdown", 29 | "id": "cooked-caution", 30 | "metadata": {}, 31 | "source": [ 32 | "---\n", 33 | "\n", 34 | "## 1. Setup and Loading package" 35 | ] 36 | }, 37 | { 38 | "cell_type": "code", 39 | "execution_count": 1, 40 | "id": "measured-modern", 41 | "metadata": { 42 | "ExecuteTime": { 43 | "end_time": "2021-03-24T18:30:41.287059Z", 44 | "start_time": "2021-03-24T18:30:40.997209Z" 45 | }, 46 | "execution": { 47 | "iopub.execute_input": "2021-03-25T11:54:50.866123Z", 48 | "iopub.status.busy": "2021-03-25T11:54:50.865818Z", 49 | "iopub.status.idle": "2021-03-25T11:54:50.912854Z", 50 | "shell.execute_reply": "2021-03-25T11:54:50.907825Z", 51 | "shell.execute_reply.started": "2021-03-25T11:54:50.866022Z" 52 | }, 53 | "nbsphinx": "hidden", 54 | "tags": [] 55 | }, 56 | "outputs": [], 57 | "source": [ 58 | "%load_ext autoreload\n", 59 | "%autoreload 2\n", 60 | "%config IPCompleter.greedy=True" 61 | ] 62 | }, 63 | { 64 | "cell_type": "code", 65 | "execution_count": 1, 66 | "id": "federal-virginia", 67 | "metadata": { 68 | "ExecuteTime": { 69 | "end_time": "2021-03-24T18:30:50.743648Z", 70 | "start_time": "2021-03-24T18:30:42.247097Z" 71 | }, 72 | "execution": { 73 | "iopub.execute_input": "2021-03-25T21:27:25.852124Z", 74 | "iopub.status.busy": "2021-03-25T21:27:25.851792Z", 75 | "iopub.status.idle": "2021-03-25T21:27:28.964203Z", 76 | "shell.execute_reply": "2021-03-25T21:27:28.963093Z", 77 | "shell.execute_reply.started": "2021-03-25T21:27:25.852039Z" 78 | }, 79 | "tags": [] 80 | }, 81 | "outputs": [], 82 | "source": [ 83 | "import ipyleaflet\n", 84 | "import geograph\n", 85 | "from geograph.visualisation import geoviewer\n", 86 | "from geograph.constants import UTM35N\n", 87 | "from geograph.demo.binder_constants import DATA_DIR" 88 | ] 89 | }, 90 | { 91 | "cell_type": "markdown", 92 | "id": "looking-elephant", 93 | "metadata": {}, 94 | "source": [ 95 | "---\n", 96 | "\n", 97 | "## 2. Loading Data" 98 | ] 99 | }, 100 | { 101 | "cell_type": "markdown", 102 | "id": "searching-serial", 103 | "metadata": {}, 104 | "source": [ 105 | "---\n", 106 | "\n", 107 | "## 3. Creating `GeoGraph`" 108 | ] 109 | }, 110 | { 111 | "cell_type": "code", 112 | "execution_count": 3, 113 | "id": "entitled-period", 114 | "metadata": { 115 | "ExecuteTime": { 116 | "end_time": "2021-03-24T18:31:10.520498Z", 117 | "start_time": "2021-03-24T18:31:08.879315Z" 118 | }, 119 | "execution": { 120 | "iopub.execute_input": "2021-03-25T21:27:33.188180Z", 121 | "iopub.status.busy": "2021-03-25T21:27:33.187907Z", 122 | "iopub.status.idle": "2021-03-25T21:27:33.903881Z", 123 | "shell.execute_reply": "2021-03-25T21:27:33.902949Z", 124 | "shell.execute_reply.started": "2021-03-25T21:27:33.188153Z" 125 | }, 126 | "tags": [] 127 | }, 128 | "outputs": [ 129 | { 130 | "name": "stderr", 131 | "output_type": "stream", 132 | "text": [ 133 | "Step 1 of 2: Creating nodes and finding neighbours: 100%|██████████| 323/323 [00:00<00:00, 569.78it/s]\n", 134 | "Step 2 of 2: Adding edges: 100%|██████████| 323/323 [00:00<00:00, 66710.67it/s]" 135 | ] 136 | }, 137 | { 138 | "name": "stdout", 139 | "output_type": "stream", 140 | "text": [ 141 | "Graph successfully loaded with 323 nodes and 816 edges.\n" 142 | ] 143 | }, 144 | { 145 | "name": "stderr", 146 | "output_type": "stream", 147 | "text": [ 148 | "\n" 149 | ] 150 | } 151 | ], 152 | "source": [ 153 | "# Building the main graph structure\n", 154 | "graph = geograph.GeoGraph(\n", 155 | " gdf, crs=UTM35N, columns_to_rename={\"Eunis_name\": \"class_label\", \"AREA\": \"area\"}\n", 156 | ")" 157 | ] 158 | }, 159 | { 160 | "cell_type": "markdown", 161 | "id": "forced-ballet", 162 | "metadata": {}, 163 | "source": [ 164 | "---\n", 165 | "\n", 166 | "## 4. Creating Habitats" 167 | ] 168 | }, 169 | { 170 | "cell_type": "code", 171 | "execution_count": 4, 172 | "id": "ruled-progress", 173 | "metadata": { 174 | "ExecuteTime": { 175 | "end_time": "2021-03-24T18:31:13.001569Z", 176 | "start_time": "2021-03-24T18:31:12.883560Z" 177 | }, 178 | "execution": { 179 | "iopub.execute_input": "2021-03-25T21:27:35.951711Z", 180 | "iopub.status.busy": "2021-03-25T21:27:35.951274Z", 181 | "iopub.status.idle": "2021-03-25T21:27:35.958675Z", 182 | "shell.execute_reply": "2021-03-25T21:27:35.957805Z", 183 | "shell.execute_reply.started": "2021-03-25T21:27:35.951679Z" 184 | }, 185 | "tags": [] 186 | }, 187 | "outputs": [ 188 | { 189 | "data": { 190 | "text/plain": [ 191 | "['Subcontinental moss Scots pine forests',\n", 192 | " 'Subcontinental lichen Scots pine forests',\n", 193 | " 'Subcontinental moorgrass Scots pine forests',\n", 194 | " 'Boreal Labrador tea Scots pine bog woods',\n", 195 | " 'Boreal cottonsedge Scots pine bog woods',\n", 196 | " 'Boreal neutrocline sphagnum Scots pine fen woods',\n", 197 | " 'Mixed Scots pine-birch woodland']" 198 | ] 199 | }, 200 | "execution_count": 4, 201 | "metadata": {}, 202 | "output_type": "execute_result" 203 | } 204 | ], 205 | "source": [ 206 | "# First selecting the classes that make up our habitat\n", 207 | "# We chose all classes with 'pine' in the name.\n", 208 | "pine_classes = [label for label in graph.df.class_label.unique() if \"pine\" in label]\n", 209 | "pine_classes" 210 | ] 211 | }, 212 | { 213 | "cell_type": "code", 214 | "execution_count": 5, 215 | "id": "regular-unemployment", 216 | "metadata": { 217 | "ExecuteTime": { 218 | "end_time": "2021-03-24T18:31:19.697701Z", 219 | "start_time": "2021-03-24T18:31:13.509316Z" 220 | }, 221 | "execution": { 222 | "iopub.execute_input": "2021-03-25T21:27:40.115028Z", 223 | "iopub.status.busy": "2021-03-25T21:27:40.114714Z", 224 | "iopub.status.idle": "2021-03-25T21:27:44.702417Z", 225 | "shell.execute_reply": "2021-03-25T21:27:44.701420Z", 226 | "shell.execute_reply.started": "2021-03-25T21:27:40.114996Z" 227 | }, 228 | "tags": [] 229 | }, 230 | "outputs": [ 231 | { 232 | "name": "stderr", 233 | "output_type": "stream", 234 | "text": [ 235 | "Generating habitat graph: 100%|██████████| 95/95 [00:00<00:00, 2926.68it/s]\n", 236 | "Constructing graph: 100%|██████████| 39/39 [00:00<00:00, 11990.75it/s]\n" 237 | ] 238 | }, 239 | { 240 | "name": "stdout", 241 | "output_type": "stream", 242 | "text": [ 243 | "Calculating components...\n", 244 | "Habitat successfully loaded with 95 nodes and 78 edges.\n" 245 | ] 246 | }, 247 | { 248 | "name": "stderr", 249 | "output_type": "stream", 250 | "text": [ 251 | "Generating habitat graph: 100%|██████████| 95/95 [00:00<00:00, 2645.28it/s]\n", 252 | "Constructing graph: 100%|██████████| 36/36 [00:00<00:00, 12607.08it/s]\n" 253 | ] 254 | }, 255 | { 256 | "name": "stdout", 257 | "output_type": "stream", 258 | "text": [ 259 | "Calculating components...\n", 260 | "Habitat successfully loaded with 95 nodes and 86 edges.\n" 261 | ] 262 | }, 263 | { 264 | "name": "stderr", 265 | "output_type": "stream", 266 | "text": [ 267 | "Generating habitat graph: 100%|██████████| 95/95 [00:00<00:00, 2717.04it/s]\n", 268 | "Constructing graph: 100%|██████████| 14/14 [00:00<00:00, 7885.09it/s]" 269 | ] 270 | }, 271 | { 272 | "name": "stdout", 273 | "output_type": "stream", 274 | "text": [ 275 | "Calculating components...\n", 276 | "Habitat successfully loaded with 95 nodes and 214 edges.\n" 277 | ] 278 | }, 279 | { 280 | "name": "stderr", 281 | "output_type": "stream", 282 | "text": [ 283 | "\n" 284 | ] 285 | } 286 | ], 287 | "source": [ 288 | "# Distances: mobile (<100m), semi mobile (<25m) and sessile (<5m)\n", 289 | "# (proposed by Adham Ashton-Butt at BTO)\n", 290 | "graph.add_habitat(\"Sessile\", max_travel_distance=5, valid_classes=pine_classes)\n", 291 | "\n", 292 | "graph.add_habitat(\"Semi mobile\", max_travel_distance=25, valid_classes=pine_classes)\n", 293 | "\n", 294 | "graph.add_habitat(\"Mobile\", max_travel_distance=500, valid_classes=pine_classes)" 295 | ] 296 | }, 297 | { 298 | "cell_type": "markdown", 299 | "id": "ambient-windsor", 300 | "metadata": {}, 301 | "source": [ 302 | "---\n", 303 | "\n", 304 | "## 5. Interactive Graph" 305 | ] 306 | }, 307 | { 308 | "cell_type": "code", 309 | "execution_count": 6, 310 | "id": "tired-shirt", 311 | "metadata": { 312 | "execution": { 313 | "iopub.execute_input": "2021-03-25T21:27:48.746288Z", 314 | "iopub.status.busy": "2021-03-25T21:27:48.745942Z", 315 | "iopub.status.idle": "2021-03-25T21:28:03.518738Z", 316 | "shell.execute_reply": "2021-03-25T21:28:03.517787Z", 317 | "shell.execute_reply.started": "2021-03-25T21:27:48.746254Z" 318 | }, 319 | "tags": [] 320 | }, 321 | "outputs": [ 322 | { 323 | "name": "stderr", 324 | "output_type": "stream", 325 | "text": [ 326 | "Constructing graph: 100%|██████████| 1/1 [00:00<00:00, 1093.69it/s]\n", 327 | "Constructing graph: 100%|██████████| 39/39 [00:00<00:00, 9548.09it/s]\n", 328 | "Constructing graph: 100%|██████████| 36/36 [00:00<00:00, 10671.77it/s]\n", 329 | "Constructing graph: 100%|██████████| 14/14 [00:00<00:00, 7224.44it/s]\n" 330 | ] 331 | }, 332 | { 333 | "data": { 334 | "application/vnd.jupyter.widget-view+json": { 335 | "model_id": "33843074778f4669bbaf8d396567384b", 336 | "version_major": 2, 337 | "version_minor": 0 338 | }, 339 | "text/plain": [ 340 | "GeoGraphViewer(center=[51.389167, 30.099444], controls=(ZoomControl(options=['position', 'zoom_in_text', 'zoom…" 341 | ] 342 | }, 343 | "metadata": {}, 344 | "output_type": "display_data" 345 | } 346 | ], 347 | "source": [ 348 | "viewer = geoviewer.GeoGraphViewer(small_screen=True)\n", 349 | "viewer.add_layer(ipyleaflet.basemaps.Esri.WorldImagery)\n", 350 | "viewer.add_graph(graph, name=\"Polesia data\", with_components=True)\n", 351 | "viewer.enable_graph_controls()\n", 352 | "viewer" 353 | ] 354 | }, 355 | { 356 | "cell_type": "markdown", 357 | "id": "9928be0d-dc99-4d8c-9376-1d41a3414e0e", 358 | "metadata": {}, 359 | "source": [ 360 | "> Note: an interactive viewer will show up here." 361 | ] 362 | } 363 | ], 364 | "metadata": { 365 | "hide_input": false, 366 | "kernelspec": { 367 | "display_name": "Python 3.9.1 64-bit", 368 | "language": "python", 369 | "name": "python3" 370 | }, 371 | "language_info": { 372 | "codemirror_mode": { 373 | "name": "ipython", 374 | "version": 3 375 | }, 376 | "file_extension": ".py", 377 | "mimetype": "text/x-python", 378 | "name": "python", 379 | "nbconvert_exporter": "python", 380 | "pygments_lexer": "ipython3", 381 | "version": "3.9.1" 382 | }, 383 | "varInspector": { 384 | "cols": { 385 | "lenName": 16, 386 | "lenType": 16, 387 | "lenVar": 40 388 | }, 389 | "kernels_config": { 390 | "python": { 391 | "delete_cmd_postfix": "", 392 | "delete_cmd_prefix": "del ", 393 | "library": "var_list.py", 394 | "varRefreshCmd": "print(var_dic_list())" 395 | }, 396 | "r": { 397 | "delete_cmd_postfix": ") ", 398 | "delete_cmd_prefix": "rm(", 399 | "library": "var_list.r", 400 | "varRefreshCmd": "cat(var_dic_list()) " 401 | } 402 | }, 403 | "types_to_exclude": [ 404 | "module", 405 | "function", 406 | "builtin_function_or_method", 407 | "instance", 408 | "_Feature" 409 | ], 410 | "window_display": false 411 | }, 412 | "vscode": { 413 | "interpreter": { 414 | "hash": "aee8b7b246df8f9039afb4144a1f6fd8d2ca17a180786b69acc140d282b71a49" 415 | } 416 | } 417 | }, 418 | "nbformat": 4, 419 | "nbformat_minor": 5 420 | } 421 | -------------------------------------------------------------------------------- /notebooks/4-demo-geographviewer-chernobyl.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "black-appraisal", 6 | "metadata": {}, 7 | "source": [ 8 | "# Visualising Temporal Changes in GeoGraphs Interactively" 9 | ] 10 | }, 11 | { 12 | "cell_type": "markdown", 13 | "id": "88247979-721e-438e-9c2b-f823c03f9d05", 14 | "metadata": {}, 15 | "source": [ 16 | "[![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/ai4er-cdt/gtc-biodiversity/main?filepath=notebooks%2F4-demo-geographviewer-chernobyl.ipynb)" 17 | ] 18 | }, 19 | { 20 | "cell_type": "markdown", 21 | "id": "866ee1ad-931c-4a89-8538-55f35e85d458", 22 | "metadata": {}, 23 | "source": [ 24 | "This tutorial shows how to create and visualise a timeline of GeoGraphs." 25 | ] 26 | }, 27 | { 28 | "cell_type": "markdown", 29 | "id": "unknown-button", 30 | "metadata": { 31 | "execution": { 32 | "iopub.execute_input": "2021-03-25T00:07:53.493619Z", 33 | "iopub.status.busy": "2021-03-25T00:07:53.493296Z", 34 | "iopub.status.idle": "2021-03-25T00:07:53.509659Z", 35 | "shell.execute_reply": "2021-03-25T00:07:53.508340Z", 36 | "shell.execute_reply.started": "2021-03-25T00:07:53.493590Z" 37 | } 38 | }, 39 | "source": [ 40 | "---\n", 41 | "\n", 42 | "## 1. Setup and Loading package" 43 | ] 44 | }, 45 | { 46 | "cell_type": "code", 47 | "execution_count": 1, 48 | "id": "preceding-albany", 49 | "metadata": { 50 | "ExecuteTime": { 51 | "end_time": "2021-05-13T14:38:41.122907Z", 52 | "start_time": "2021-05-13T14:38:41.085188Z" 53 | }, 54 | "execution": { 55 | "iopub.execute_input": "2021-03-25T21:28:16.706070Z", 56 | "iopub.status.busy": "2021-03-25T21:28:16.705353Z", 57 | "iopub.status.idle": "2021-03-25T21:28:16.970961Z", 58 | "shell.execute_reply": "2021-03-25T21:28:16.970299Z", 59 | "shell.execute_reply.started": "2021-03-25T21:28:16.705864Z" 60 | }, 61 | "nbsphinx": "hidden", 62 | "tags": [] 63 | }, 64 | "outputs": [], 65 | "source": [ 66 | "%load_ext autoreload\n", 67 | "%autoreload 2\n", 68 | "%config IPCompleter.greedy=True" 69 | ] 70 | }, 71 | { 72 | "cell_type": "markdown", 73 | "id": "greenhouse-subscriber", 74 | "metadata": {}, 75 | "source": [ 76 | "Let us start by installing all relevant dependencies" 77 | ] 78 | }, 79 | { 80 | "cell_type": "code", 81 | "execution_count": 2, 82 | "id": "ahead-duncan", 83 | "metadata": { 84 | "ExecuteTime": { 85 | "end_time": "2021-05-13T14:38:44.695951Z", 86 | "start_time": "2021-05-13T14:38:41.125450Z" 87 | }, 88 | "execution": { 89 | "iopub.execute_input": "2021-03-25T21:28:32.226379Z", 90 | "iopub.status.busy": "2021-03-25T21:28:32.226099Z", 91 | "iopub.status.idle": "2021-03-25T21:28:38.563747Z", 92 | "shell.execute_reply": "2021-03-25T21:28:38.563028Z", 93 | "shell.execute_reply.started": "2021-03-25T21:28:32.226346Z" 94 | }, 95 | "tags": [] 96 | }, 97 | "outputs": [ 98 | { 99 | "name": "stderr", 100 | "output_type": "stream", 101 | "text": [ 102 | "/home/users/svm/Code/gtc-biodiversity/env/lib/python3.8/site-packages/geopandas/_compat.py:84: UserWarning: The Shapely GEOS version (3.8.0-CAPI-1.13.1 ) is incompatible with the GEOS version PyGEOS was compiled with (3.9.0-CAPI-1.16.2). Conversions between both will be slow.\n", 103 | " warnings.warn(\n" 104 | ] 105 | } 106 | ], 107 | "source": [ 108 | "import ipyleaflet\n", 109 | "import pandas as pd\n", 110 | "import geopandas as gpd\n", 111 | "import rioxarray as rxr\n", 112 | "import geograph\n", 113 | "from geograph import geotimeline\n", 114 | "from geograph.visualisation import geoviewer\n", 115 | "from geograph.constants import UTM35N\n", 116 | "from geograph.demo.binder_constants import DATA_DIR, ROIS, ESA_CCI_LEGEND_LINK\n", 117 | "from geograph.metrics import LANDSCAPE_METRICS_DICT, COMPONENT_METRICS_DICT" 118 | ] 119 | }, 120 | { 121 | "cell_type": "markdown", 122 | "id": "military-security", 123 | "metadata": { 124 | "execution": { 125 | "iopub.execute_input": "2021-03-25T00:08:04.986653Z", 126 | "iopub.status.busy": "2021-03-25T00:08:04.986358Z", 127 | "iopub.status.idle": "2021-03-25T00:08:05.016681Z", 128 | "shell.execute_reply": "2021-03-25T00:08:05.015799Z", 129 | "shell.execute_reply.started": "2021-03-25T00:08:04.986625Z" 130 | } 131 | }, 132 | "source": [ 133 | "---\n", 134 | "\n", 135 | "## 2. Loading Data" 136 | ] 137 | }, 138 | { 139 | "cell_type": "markdown", 140 | "id": "sticky-distributor", 141 | "metadata": {}, 142 | "source": [ 143 | "Next, we will load the data for the Chernobyl region. For this example we will use land cover maps from the [ESA CCI land cover](http://www.esa-landcover-cci.org/) dataset. Specifically, we will look at the years 2013 and 2014 for the [Chernobyl exclusion zone](https://en.wikipedia.org/wiki/Chernobyl_Exclusion_Zone). All data comes pre-installed on the binder in the `DATA_DIR` that we imported from the `binder_constants`. If you are following this demo on your local machine, you can download the data with this link." 144 | ] 145 | }, 146 | { 147 | "cell_type": "code", 148 | "execution_count": 3, 149 | "id": "funky-youth", 150 | "metadata": { 151 | "ExecuteTime": { 152 | "end_time": "2021-05-13T14:38:44.804412Z", 153 | "start_time": "2021-05-13T14:38:44.698833Z" 154 | }, 155 | "execution": { 156 | "iopub.execute_input": "2021-03-25T21:28:38.565031Z", 157 | "iopub.status.busy": "2021-03-25T21:28:38.564810Z", 158 | "iopub.status.idle": "2021-03-25T21:28:38.699629Z", 159 | "shell.execute_reply": "2021-03-25T21:28:38.698758Z", 160 | "shell.execute_reply.started": "2021-03-25T21:28:38.565003Z" 161 | }, 162 | "tags": [] 163 | }, 164 | "outputs": [], 165 | "source": [ 166 | "# Parse geotif landcover data\n", 167 | "chernobyl_path = (\n", 168 | " lambda year: DATA_DIR / \"chernobyl\" / \"esa_cci\" / f\"esa_cci_{year}_chernobyl.tif\"\n", 169 | ")\n", 170 | "\n", 171 | "# Parse ROIS\n", 172 | "rois = gpd.read_file(ROIS)\n", 173 | "# Load the shape of the chernobyl exclusion zone\n", 174 | "cez = rois[rois[\"name\"] == \"Chernobyl Exclusion Zone\"]" 175 | ] 176 | }, 177 | { 178 | "cell_type": "code", 179 | "execution_count": 4, 180 | "id": "figured-reducing", 181 | "metadata": { 182 | "ExecuteTime": { 183 | "end_time": "2021-05-13T14:38:44.848347Z", 184 | "start_time": "2021-05-13T14:38:44.806591Z" 185 | }, 186 | "execution": { 187 | "iopub.execute_input": "2021-03-25T21:28:49.615703Z", 188 | "iopub.status.busy": "2021-03-25T21:28:49.615394Z", 189 | "iopub.status.idle": "2021-03-25T21:28:50.073012Z", 190 | "shell.execute_reply": "2021-03-25T21:28:50.071564Z", 191 | "shell.execute_reply.started": "2021-03-25T21:28:49.615664Z" 192 | }, 193 | "tags": [] 194 | }, 195 | "outputs": [], 196 | "source": [ 197 | "def clip_and_reproject(xrdata, clip_geometry=None, to_crs=UTM35N, x_res=300, y_res=300):\n", 198 | "\n", 199 | " if clip_geometry is not None:\n", 200 | " clipped_data = xrdata.rio.clip(clip_geometry)\n", 201 | " else:\n", 202 | " clipped_data = xrdata\n", 203 | "\n", 204 | " if to_crs is not None:\n", 205 | " reprojected_data = clipped_data.rio.reproject(to_crs, resolution=(x_res, y_res))\n", 206 | " else:\n", 207 | " reprojected_data = clipped_data\n", 208 | "\n", 209 | " return reprojected_data" 210 | ] 211 | }, 212 | { 213 | "cell_type": "code", 214 | "execution_count": 5, 215 | "id": "organizational-thriller", 216 | "metadata": { 217 | "ExecuteTime": { 218 | "end_time": "2021-05-13T14:38:45.807415Z", 219 | "start_time": "2021-05-13T14:38:44.850400Z" 220 | } 221 | }, 222 | "outputs": [], 223 | "source": [ 224 | "# Loading ESA CCI land cover raster data\n", 225 | "years = list(range(2013, 2015))\n", 226 | "cez_rasters = {\n", 227 | " year: clip_and_reproject(\n", 228 | " rxr.open_rasterio(chernobyl_path(year)), clip_geometry=cez.geometry\n", 229 | " )\n", 230 | " for year in years\n", 231 | "}" 232 | ] 233 | }, 234 | { 235 | "cell_type": "code", 236 | "execution_count": 6, 237 | "id": "nearby-persian", 238 | "metadata": { 239 | "ExecuteTime": { 240 | "end_time": "2021-05-13T14:38:46.772366Z", 241 | "start_time": "2021-05-13T14:38:45.809006Z" 242 | } 243 | }, 244 | "outputs": [ 245 | { 246 | "name": "stdout", 247 | "output_type": "stream", 248 | "text": [ 249 | "There are 38 classes.\n" 250 | ] 251 | } 252 | ], 253 | "source": [ 254 | "# Loading ESA CCI land cover legend to translate land cover labels to classes\n", 255 | "esa_cci_legend = pd.read_csv(ESA_CCI_LEGEND_LINK, delimiter=\";\", index_col=0)\n", 256 | "print(f\"There are {len(esa_cci_legend)} classes.\")\n", 257 | "\n", 258 | "class_value_to_label = {\n", 259 | " class_val: row.LCCOwnLabel for class_val, row in esa_cci_legend.iterrows()\n", 260 | "}" 261 | ] 262 | }, 263 | { 264 | "cell_type": "markdown", 265 | "id": "personalized-athens", 266 | "metadata": {}, 267 | "source": [ 268 | "---\n", 269 | "\n", 270 | "## 3. Creating `GeoGraph`" 271 | ] 272 | }, 273 | { 274 | "cell_type": "code", 275 | "execution_count": 7, 276 | "id": "elementary-poster", 277 | "metadata": { 278 | "ExecuteTime": { 279 | "end_time": "2021-05-13T14:39:22.388521Z", 280 | "start_time": "2021-05-13T14:38:46.774711Z" 281 | }, 282 | "execution": { 283 | "iopub.execute_input": "2021-03-25T21:28:51.204862Z", 284 | "iopub.status.busy": "2021-03-25T21:28:51.204604Z", 285 | "iopub.status.idle": "2021-03-25T21:29:30.176505Z", 286 | "shell.execute_reply": "2021-03-25T21:29:30.175567Z", 287 | "shell.execute_reply.started": "2021-03-25T21:28:51.204830Z" 288 | }, 289 | "tags": [] 290 | }, 291 | "outputs": [ 292 | { 293 | "name": "stdout", 294 | "output_type": "stream", 295 | "text": [ 296 | "Analysing year 2013\n" 297 | ] 298 | }, 299 | { 300 | "name": "stderr", 301 | "output_type": "stream", 302 | "text": [ 303 | "Identifying nodes: 100%|██████████| 2923/2923 [00:05<00:00, 556.09it/s]\n", 304 | "Step 1 of 2: Creating nodes and finding neighbours: 100%|██████████| 2003/2003 [00:10<00:00, 191.28it/s]\n", 305 | "Step 2 of 2: Adding edges: 100%|██████████| 2003/2003 [00:00<00:00, 69617.74it/s]\n" 306 | ] 307 | }, 308 | { 309 | "name": "stdout", 310 | "output_type": "stream", 311 | "text": [ 312 | "Graph successfully loaded with 2003 nodes and 5140 edges.\n", 313 | "Analysing year 2014\n" 314 | ] 315 | }, 316 | { 317 | "name": "stderr", 318 | "output_type": "stream", 319 | "text": [ 320 | "Identifying nodes: 100%|██████████| 2915/2915 [00:05<00:00, 541.60it/s]\n", 321 | "Step 1 of 2: Creating nodes and finding neighbours: 100%|██████████| 1999/1999 [00:11<00:00, 179.66it/s]\n", 322 | "Step 2 of 2: Adding edges: 100%|██████████| 1999/1999 [00:00<00:00, 60868.21it/s]" 323 | ] 324 | }, 325 | { 326 | "name": "stdout", 327 | "output_type": "stream", 328 | "text": [ 329 | "Graph successfully loaded with 1999 nodes and 5117 edges.\n" 330 | ] 331 | }, 332 | { 333 | "name": "stderr", 334 | "output_type": "stream", 335 | "text": [ 336 | "\n" 337 | ] 338 | } 339 | ], 340 | "source": [ 341 | "# Polygonising raster and transforming into graph\n", 342 | "cez_graphs = {}\n", 343 | "for year, raster in cez_rasters.items():\n", 344 | " print(f\"Analysing year {year}\")\n", 345 | " # Load geograph from the raster data (construction takes ~10s)\n", 346 | " cez_graphs[year] = geograph.GeoGraph(\n", 347 | " data=raster.data,\n", 348 | " transform=raster.rio.transform(),\n", 349 | " crs=UTM35N,\n", 350 | " mask=raster.data > 0,\n", 351 | " connectivity=8,\n", 352 | " )\n", 353 | " # Map the ESA CCI land cover class value (int) to the plain text label\n", 354 | " # to have the plain text labels available in the interactive viewer.\n", 355 | " cez_graphs[year].df.class_label = cez_graphs[year].df.class_label.apply(\n", 356 | " lambda x: class_value_to_label[x]\n", 357 | " )" 358 | ] 359 | }, 360 | { 361 | "cell_type": "markdown", 362 | "id": "classified-canvas", 363 | "metadata": { 364 | "execution": { 365 | "iopub.execute_input": "2021-03-25T00:14:12.973100Z", 366 | "iopub.status.busy": "2021-03-25T00:14:12.972772Z", 367 | "iopub.status.idle": "2021-03-25T00:14:13.867921Z", 368 | "shell.execute_reply": "2021-03-25T00:14:13.866972Z", 369 | "shell.execute_reply.started": "2021-03-25T00:14:12.973069Z" 370 | } 371 | }, 372 | "source": [ 373 | "---\n", 374 | "\n", 375 | "## 4. Creating `Timeline` and identifying nodes:" 376 | ] 377 | }, 378 | { 379 | "cell_type": "code", 380 | "execution_count": 8, 381 | "id": "theoretical-tactics", 382 | "metadata": { 383 | "ExecuteTime": { 384 | "end_time": "2021-05-13T14:39:31.891754Z", 385 | "start_time": "2021-05-13T14:39:22.392050Z" 386 | }, 387 | "execution": { 388 | "iopub.execute_input": "2021-03-25T21:29:30.177956Z", 389 | "iopub.status.busy": "2021-03-25T21:29:30.177742Z", 390 | "iopub.status.idle": "2021-03-25T21:29:39.866220Z", 391 | "shell.execute_reply": "2021-03-25T21:29:39.865586Z", 392 | "shell.execute_reply.started": "2021-03-25T21:29:30.177928Z" 393 | }, 394 | "tags": [] 395 | }, 396 | "outputs": [ 397 | { 398 | "name": "stderr", 399 | "output_type": "stream", 400 | "text": [ 401 | "Identifying nodes: 100%|██████████| 2003/2003 [00:03<00:00, 514.92it/s]\n" 402 | ] 403 | } 404 | ], 405 | "source": [ 406 | "cez_timeline = geotimeline.GeoGraphTimeline(cez_graphs)\n", 407 | "# Perform node identification\n", 408 | "cez_timeline.timestack()\n", 409 | "# Classify node dynamics for the year 2014\n", 410 | "cez_timeline.calculate_node_dynamics(2014);" 411 | ] 412 | }, 413 | { 414 | "cell_type": "markdown", 415 | "id": "painted-header", 416 | "metadata": {}, 417 | "source": [ 418 | "---\n", 419 | "\n", 420 | "## 5. Inspect in interactive viewer" 421 | ] 422 | }, 423 | { 424 | "cell_type": "code", 425 | "execution_count": 9, 426 | "id": "worldwide-progressive", 427 | "metadata": { 428 | "ExecuteTime": { 429 | "end_time": "2021-05-13T14:39:56.468959Z", 430 | "start_time": "2021-05-13T14:39:31.894193Z" 431 | }, 432 | "execution": { 433 | "iopub.execute_input": "2021-03-25T21:30:10.460571Z", 434 | "iopub.status.busy": "2021-03-25T21:30:10.460086Z", 435 | "iopub.status.idle": "2021-03-25T21:30:33.037970Z", 436 | "shell.execute_reply": "2021-03-25T21:30:33.037097Z", 437 | "shell.execute_reply.started": "2021-03-25T21:30:10.460525Z" 438 | }, 439 | "scrolled": true, 440 | "tags": [] 441 | }, 442 | "outputs": [ 443 | { 444 | "name": "stdout", 445 | "output_type": "stream", 446 | "text": [ 447 | "Calculating component polygons...\n" 448 | ] 449 | }, 450 | { 451 | "name": "stderr", 452 | "output_type": "stream", 453 | "text": [ 454 | "Constructing graph: 100%|██████████| 1/1 [00:00<00:00, 2499.59it/s]\n" 455 | ] 456 | }, 457 | { 458 | "name": "stdout", 459 | "output_type": "stream", 460 | "text": [ 461 | "Warning: very computationally expensive for graphs with more\n", 462 | " than ~100 components.\n" 463 | ] 464 | }, 465 | { 466 | "name": "stderr", 467 | "output_type": "stream", 468 | "text": [ 469 | "Constructing graph: 100%|██████████| 1/1 [00:00<00:00, 3179.91it/s]\n", 470 | "Calculating edge weights: 0it [00:00, ?it/s]\n" 471 | ] 472 | } 473 | ], 474 | "source": [ 475 | "# Choose metrics to display:\n", 476 | "metric_list = list(LANDSCAPE_METRICS_DICT.keys()) + list(COMPONENT_METRICS_DICT.keys())\n", 477 | "# Build up the viewer\n", 478 | "viewer = geoviewer.GeoGraphViewer(small_screen=True, metric_list=metric_list)\n", 479 | "viewer.add_layer(ipyleaflet.basemaps.Esri.WorldImagery)\n", 480 | "viewer.add_graph(cez_timeline[2014], name=\"Chernobyl data\", with_components=False)\n", 481 | "viewer.enable_graph_controls()" 482 | ] 483 | }, 484 | { 485 | "cell_type": "code", 486 | "execution_count": 10, 487 | "id": "global-fellow", 488 | "metadata": { 489 | "ExecuteTime": { 490 | "end_time": "2021-05-13T14:39:56.700682Z", 491 | "start_time": "2021-05-13T14:39:56.470983Z" 492 | } 493 | }, 494 | "outputs": [ 495 | { 496 | "data": { 497 | "application/vnd.jupyter.widget-view+json": { 498 | "model_id": "ace3e478b4354a0db1272e689b7262f5", 499 | "version_major": 2, 500 | "version_minor": 0 501 | }, 502 | "text/plain": [ 503 | "GeoGraphViewer(center=[51.389167, 30.099444], controls=(ZoomControl(options=['position', 'zoom_in_text', 'zoom…" 504 | ] 505 | }, 506 | "metadata": {}, 507 | "output_type": "display_data" 508 | } 509 | ], 510 | "source": [ 511 | "# Display the interactive map\n", 512 | "viewer" 513 | ] 514 | }, 515 | { 516 | "cell_type": "markdown", 517 | "id": "bd741fd6-4bf4-4f2f-ac27-cc73e4971b43", 518 | "metadata": {}, 519 | "source": [ 520 | "> Note: an interactive viewer will show up here." 521 | ] 522 | } 523 | ], 524 | "metadata": { 525 | "celltoolbar": "Edit Metadata", 526 | "hide_input": false, 527 | "kernelspec": { 528 | "display_name": "Python 3.9.1 64-bit", 529 | "language": "python", 530 | "name": "python3" 531 | }, 532 | "language_info": { 533 | "codemirror_mode": { 534 | "name": "ipython", 535 | "version": 3 536 | }, 537 | "file_extension": ".py", 538 | "mimetype": "text/x-python", 539 | "name": "python", 540 | "nbconvert_exporter": "python", 541 | "pygments_lexer": "ipython3", 542 | "version": "3.9.1" 543 | }, 544 | "varInspector": { 545 | "cols": { 546 | "lenName": 16, 547 | "lenType": 16, 548 | "lenVar": 40 549 | }, 550 | "kernels_config": { 551 | "python": { 552 | "delete_cmd_postfix": "", 553 | "delete_cmd_prefix": "del ", 554 | "library": "var_list.py", 555 | "varRefreshCmd": "print(var_dic_list())" 556 | }, 557 | "r": { 558 | "delete_cmd_postfix": ") ", 559 | "delete_cmd_prefix": "rm(", 560 | "library": "var_list.r", 561 | "varRefreshCmd": "cat(var_dic_list()) " 562 | } 563 | }, 564 | "types_to_exclude": [ 565 | "module", 566 | "function", 567 | "builtin_function_or_method", 568 | "instance", 569 | "_Feature" 570 | ], 571 | "window_display": false 572 | }, 573 | "vscode": { 574 | "interpreter": { 575 | "hash": "aee8b7b246df8f9039afb4144a1f6fd8d2ca17a180786b69acc140d282b71a49" 576 | } 577 | } 578 | }, 579 | "nbformat": 4, 580 | "nbformat_minor": 5 581 | } 582 | -------------------------------------------------------------------------------- /notebooks/README.md: -------------------------------------------------------------------------------- 1 | # Notebooks 2 | ## Structure 3 | We have the following five demo notebooks you can try: 4 | - `1-demo-landscape-metrics-comparison-to-pylandstats.ipynb`: a comparison between our method and the PyLandStats package. 5 | - `2-demo-landscape-timeseries-metrics.ipynb`: an illustration on how to use GeoGraph for time-series analysis of landscape-level, class-level, habitat-level and patch-distribution-level metrics around the Chernobyl exclusion zone. 6 | - `3-demo-nodediff.ipynb`: an investigation of the spatially resolved qualitative and quantitative dynamics of land cover in the Chernobyl Exclusion Zone. 7 | - `4-demo-geographviewer-polesia.ipynb`: a demo of our `GeoGraphViewer` user interface on the Polesia data1 (as seen in presentation). 8 | - `5-demo-geographviewer-chernobyl.ipynb`: a demo of our `GeoGraphViewer` user interface on ESA CCI data in the Chernobyl Exclusion Zone. This demo also shows how to use our temporal analysis of node dynamics and growth. 9 | 10 | 1The Polesia data set used here was created by Dmitri Grummo, for the Endangered Landscapes Program (https://www.endangeredlandscapes.org/), and funded by Arcadia, a charitable fund of Lisbet Rausing and Peter Baldwin. 11 | 12 | ## Useful initialization cell 13 | To avoid having to reload the notebook when you change code from underlying imports, we recommend the following handy initialization cell for jupyter notebooks: 14 | ``` 15 | %load_ext autoreload # loads the autoreload package into ipython kernel 16 | %autoreload 2 # sets autoreload mode to automatically reload modules when they change 17 | %config IPCompleter.greedy=True # enables tab completion 18 | ``` 19 | 20 | 21 | ``` 22 | from jupyterthemes import jtplot 23 | jtplot.style(theme=’monokai’, context=’notebook’, ticks=True, grid=False) 24 | ``` 25 | -------------------------------------------------------------------------------- /pylintrc: -------------------------------------------------------------------------------- 1 | # This Pylint rcfile contains a best-effort configuration to uphold the 2 | # best-practices and style described in the Google Python style guide: 3 | # https://google.github.io/styleguide/pyguide.html 4 | # 5 | # Its canonical open-source location is: 6 | # https://google.github.io/styleguide/pylintrc 7 | 8 | [MASTER] 9 | 10 | # Add files or directories to the blacklist. They should be base names, not 11 | # paths. 12 | ignore=third_party 13 | 14 | # Add files or directories matching the regex patterns to the blacklist. The 15 | # regex matches against base names, not paths. 16 | ignore-patterns= 17 | 18 | # Pickle collected data for later comparisons. 19 | persistent=no 20 | 21 | # List of plugins (as comma separated values of python modules names) to load, 22 | # usually to register additional checkers. 23 | load-plugins= 24 | 25 | # Use multiple processes to speed up Pylint. 26 | jobs=4 27 | 28 | # Allow loading of arbitrary C extensions. Extensions are imported into the 29 | # active Python interpreter and may run arbitrary code. 30 | unsafe-load-any-extension=no 31 | 32 | # A comma-separated list of package or module names from where C extensions may 33 | # be loaded. Extensions are loading into the active Python interpreter and may 34 | # run arbitrary code 35 | extension-pkg-whitelist= 36 | 37 | 38 | [MESSAGES CONTROL] 39 | 40 | # Only show warnings with the listed confidence levels. Leave empty to show 41 | # all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED 42 | confidence= 43 | 44 | # Enable the message, report, category or checker with the given id(s). You can 45 | # either give multiple identifier separated by comma (,) or put this option 46 | # multiple time (only on the command line, not in the configuration file where 47 | # it should appear only once). See also the "--disable" option for examples. 48 | #enable= 49 | 50 | # Disable the message, report, category or checker with the given id(s). You 51 | # can either give multiple identifiers separated by comma (,) or put this 52 | # option multiple times (only on the command line, not in the configuration 53 | # file where it should appear only once).You can also use "--disable=all" to 54 | # disable everything first and then reenable specific checks. For example, if 55 | # you want to run only the similarities checker, you can use "--disable=all 56 | # --enable=similarities". If you want to run only the classes checker, but have 57 | # no Warning level messages displayed, use"--disable=all --enable=classes 58 | # --disable=W" 59 | disable=abstract-method, 60 | apply-builtin, 61 | arguments-differ, 62 | attribute-defined-outside-init, 63 | backtick, 64 | bad-option-value, 65 | basestring-builtin, 66 | buffer-builtin, 67 | c-extension-no-member, 68 | consider-using-enumerate, 69 | cmp-builtin, 70 | cmp-method, 71 | coerce-builtin, 72 | coerce-method, 73 | delslice-method, 74 | div-method, 75 | duplicate-code, 76 | eq-without-hash, 77 | execfile-builtin, 78 | file-builtin, 79 | filter-builtin-not-iterating, 80 | fixme, 81 | getslice-method, 82 | global-statement, 83 | hex-method, 84 | idiv-method, 85 | implicit-str-concat-in-sequence, 86 | import-error, 87 | import-self, 88 | import-star-module-level, 89 | inconsistent-return-statements, 90 | input-builtin, 91 | intern-builtin, 92 | invalid-str-codec, 93 | locally-disabled, 94 | long-builtin, 95 | long-suffix, 96 | map-builtin-not-iterating, 97 | misplaced-comparison-constant, 98 | metaclass-assignment, 99 | next-method-called, 100 | next-method-defined, 101 | no-absolute-import, 102 | no-else-break, 103 | no-else-continue, 104 | no-else-raise, 105 | no-else-return, 106 | no-init, # added 107 | no-member, 108 | no-name-in-module, 109 | no-self-use, 110 | nonzero-method, 111 | oct-method, 112 | old-division, 113 | old-ne-operator, 114 | old-octal-literal, 115 | old-raise-syntax, 116 | parameter-unpacking, 117 | print-statement, 118 | raising-string, 119 | range-builtin-not-iterating, 120 | raw_input-builtin, 121 | rdiv-method, 122 | reduce-builtin, 123 | relative-import, 124 | reload-builtin, 125 | round-builtin, 126 | setslice-method, 127 | signature-differs, 128 | standarderror-builtin, 129 | suppressed-message, 130 | sys-max-int, 131 | too-few-public-methods, 132 | too-many-ancestors, 133 | too-many-arguments, 134 | too-many-boolean-expressions, 135 | too-many-branches, 136 | too-many-instance-attributes, 137 | too-many-locals, 138 | too-many-nested-blocks, 139 | too-many-public-methods, 140 | too-many-return-statements, 141 | too-many-statements, 142 | trailing-newlines, 143 | unichr-builtin, 144 | unicode-builtin, 145 | unnecessary-pass, 146 | unpacking-in-except, 147 | useless-else-on-loop, 148 | useless-suppression, 149 | using-cmp-argument, 150 | wrong-import-order, 151 | xrange-builtin, 152 | zip-builtin-not-iterating, 153 | bad-indentation 154 | 155 | 156 | [REPORTS] 157 | 158 | # Set the output format. Available formats are text, parseable, colorized, msvs 159 | # (visual studio) and html. You can also give a reporter class, eg 160 | # mypackage.mymodule.MyReporterClass. 161 | output-format=text 162 | 163 | # Put messages in a separate file for each module / package specified on the 164 | # command line instead of printing them on stdout. Reports (if any) will be 165 | # written in a file name "pylint_global.[txt|html]". This option is deprecated 166 | # and it will be removed in Pylint 2.0. 167 | files-output=no 168 | 169 | # Tells whether to display a full report or only the messages 170 | reports=no 171 | 172 | # Python expression which should return a note less than 10 (10 is the highest 173 | # note). You have access to the variables errors warning, statement which 174 | # respectively contain the number of errors / warnings messages and the total 175 | # number of statements analyzed. This is used by the global evaluation report 176 | # (RP0004). 177 | evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) 178 | 179 | # Template used to display messages. This is a python new-style format string 180 | # used to format the message information. See doc for all details 181 | #msg-template= 182 | 183 | 184 | [BASIC] 185 | 186 | # Good variable names which should always be accepted, separated by a comma 187 | good-names=main,_,G 188 | 189 | # Bad variable names which should always be refused, separated by a comma 190 | bad-names= 191 | 192 | # Colon-delimited sets of names that determine each other's naming style when 193 | # the name regexes allow several styles. 194 | name-group= 195 | 196 | # Include a hint for the correct naming format with invalid-name 197 | include-naming-hint=no 198 | 199 | # List of decorators that produce properties, such as abc.abstractproperty. Add 200 | # to this list to register other decorators that produce valid properties. 201 | property-classes=abc.abstractproperty,cached_property.cached_property,cached_property.threaded_cached_property,cached_property.cached_property_with_ttl,cached_property.threaded_cached_property_with_ttl 202 | 203 | # Regular expression matching correct function names 204 | function-rgx=^(?:(?PsetUp|tearDown|setUpModule|tearDownModule)|(?P_?[A-Z][a-zA-Z0-9]*)|(?P_?[a-z][a-z0-9_]*))$ 205 | 206 | # Regular expression matching correct variable names 207 | variable-rgx=^[a-z][a-z0-9_]*$ 208 | 209 | # Regular expression matching correct constant names 210 | const-rgx=^(_?[A-Z][A-Z0-9_]*|__[a-z0-9_]+__|_?[a-z][a-z0-9_]*)$ 211 | 212 | # Regular expression matching correct attribute names 213 | attr-rgx=^_{0,2}[a-z][a-z0-9_]*$ 214 | 215 | # Regular expression matching correct argument names 216 | argument-rgx=^[a-z][a-z0-9_]*$ 217 | 218 | # Regular expression matching correct class attribute names 219 | class-attribute-rgx=^(_?[A-Z][A-Z0-9_]*|__[a-z0-9_]+__|_?[a-z][a-z0-9_]*)$ 220 | 221 | # Regular expression matching correct inline iteration names 222 | inlinevar-rgx=^[a-z][a-z0-9_]*$ 223 | 224 | # Regular expression matching correct class names 225 | class-rgx=^_?[A-Z][a-zA-Z0-9]*$ 226 | 227 | # Regular expression matching correct module names 228 | module-rgx=^(_?[a-z][a-z0-9_]*|__init__)$ 229 | 230 | # Regular expression matching correct method names 231 | method-rgx=(?x)^(?:(?P_[a-z0-9_]+__|runTest|setUp|tearDown|setUpTestCase|tearDownTestCase|setupSelf|tearDownClass|setUpClass|(test|assert)_*[A-Z0-9][a-zA-Z0-9_]*|next)|(?P_{0,2}[A-Z][a-zA-Z0-9_]*)|(?P_{0,2}[a-z][a-z0-9_]*))$ 232 | 233 | # Regular expression which should only match function or class names that do 234 | # not require a docstring. 235 | no-docstring-rgx=(__.*__|main|test.*|.*test|.*Test)$ 236 | 237 | # Minimum line length for functions/classes that require docstrings, shorter 238 | # ones are exempt. 239 | docstring-min-length=10 240 | 241 | 242 | [TYPECHECK] 243 | 244 | # List of decorators that produce context managers, such as 245 | # contextlib.contextmanager. Add to this list to register other decorators that 246 | # produce valid context managers. 247 | contextmanager-decorators=contextlib.contextmanager,contextlib2.contextmanager 248 | 249 | # Tells whether missing members accessed in mixin class should be ignored. A 250 | # mixin class is detected if its name ends with "mixin" (case insensitive). 251 | ignore-mixin-members=yes 252 | 253 | # List of module names for which member attributes should not be checked 254 | # (useful for modules/projects where namespaces are manipulated during runtime 255 | # and thus existing member attributes cannot be deduced by static analysis. It 256 | # supports qualified module names, as well as Unix pattern matching. 257 | ignored-modules= 258 | 259 | # List of class names for which member attributes should not be checked (useful 260 | # for classes with dynamically set attributes). This supports the use of 261 | # qualified names. 262 | ignored-classes=optparse.Values,thread._local,_thread._local 263 | 264 | # List of members which are set dynamically and missed by pylint inference 265 | # system, and so shouldn't trigger E1101 when accessed. Python regular 266 | # expressions are accepted. 267 | generated-members= 268 | 269 | 270 | [FORMAT] 271 | 272 | # Maximum number of characters on a single line. 273 | max-line-length=88 274 | 275 | # TODO(https://github.com/PyCQA/pylint/issues/3352): Direct pylint to exempt 276 | # lines made too long by directives to pytype. 277 | 278 | # Regexp for a line that is allowed to be longer than the limit. 279 | ignore-long-lines=(?x)( 280 | ^\s*(\#\ )??$| 281 | ^\s*(from\s+\S+\s+)?import\s+.+$) 282 | 283 | # Allow the body of an if to be on the same line as the test if there is no 284 | # else. 285 | single-line-if-stmt=yes 286 | 287 | # List of optional constructs for which whitespace checking is disabled. `dict- 288 | # separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. 289 | # `trailing-comma` allows a space between comma and closing bracket: (a, ). 290 | # `empty-line` allows space-only lines. 291 | no-space-check= 292 | 293 | # Maximum number of lines in a module 294 | max-module-lines=99999 295 | 296 | # String used as indentation unit. The internal Google style guide mandates 2 297 | # spaces. Google's externaly-published style guide says 4, consistent with 298 | # PEP 8. Here, we use 2 spaces, for conformity with many open-sourced Google 299 | # projects (like TensorFlow). 300 | indent-string=' ' 301 | 302 | # Number of spaces of indent required inside a hanging or continued line. 303 | indent-after-paren=4 304 | 305 | # Expected format of line ending, e.g. empty (any line ending), LF or CRLF. 306 | expected-line-ending-format= 307 | 308 | 309 | [MISCELLANEOUS] 310 | 311 | # List of note tags to take in consideration, separated by a comma. 312 | notes=TODO 313 | 314 | 315 | [STRING] 316 | 317 | # This flag controls whether inconsistent-quotes generates a warning when the 318 | # character used as a quote delimiter is used inconsistently within a module. 319 | check-quote-consistency=yes 320 | 321 | 322 | [VARIABLES] 323 | 324 | # Tells whether we should check for unused import in __init__ files. 325 | init-import=no 326 | 327 | # A regular expression matching the name of dummy variables (i.e. expectedly 328 | # not used). 329 | dummy-variables-rgx=^\*{0,2}(_$|unused_|dummy_) 330 | 331 | # List of additional names supposed to be defined in builtins. Remember that 332 | # you should avoid to define new builtins when possible. 333 | additional-builtins= 334 | 335 | # List of strings which can identify a callback function by name. A callback 336 | # name must start or end with one of those strings. 337 | callbacks=cb_,_cb 338 | 339 | # List of qualified module names which can have objects that can redefine 340 | # builtins. 341 | redefining-builtins-modules=six,six.moves,past.builtins,future.builtins,functools 342 | 343 | 344 | [LOGGING] 345 | 346 | # Logging modules to check that the string format arguments are in logging 347 | # function parameter format 348 | logging-modules=logging,absl.logging,tensorflow.io.logging 349 | 350 | 351 | [SIMILARITIES] 352 | 353 | # Minimum lines number of a similarity. 354 | min-similarity-lines=4 355 | 356 | # Ignore comments when computing similarities. 357 | ignore-comments=yes 358 | 359 | # Ignore docstrings when computing similarities. 360 | ignore-docstrings=yes 361 | 362 | # Ignore imports when computing similarities. 363 | ignore-imports=no 364 | 365 | 366 | [SPELLING] 367 | 368 | # Spelling dictionary name. Available dictionaries: none. To make it working 369 | # install python-enchant package. 370 | spelling-dict= 371 | 372 | # List of comma separated words that should not be checked. 373 | spelling-ignore-words= 374 | 375 | # A path to a file that contains private dictionary; one word per line. 376 | spelling-private-dict-file= 377 | 378 | # Tells whether to store unknown words to indicated private dictionary in 379 | # --spelling-private-dict-file option instead of raising a message. 380 | spelling-store-unknown-words=no 381 | 382 | 383 | [IMPORTS] 384 | 385 | # Deprecated modules which should not be used, separated by a comma 386 | deprecated-modules=regsub, 387 | TERMIOS, 388 | Bastion, 389 | rexec, 390 | sets 391 | 392 | # Create a graph of every (i.e. internal and external) dependencies in the 393 | # given file (report RP0402 must not be disabled) 394 | import-graph= 395 | 396 | # Create a graph of external dependencies in the given file (report RP0402 must 397 | # not be disabled) 398 | ext-import-graph= 399 | 400 | # Create a graph of internal dependencies in the given file (report RP0402 must 401 | # not be disabled) 402 | int-import-graph= 403 | 404 | # Force import order to recognize a module as part of the standard 405 | # compatibility libraries. 406 | known-standard-library= 407 | 408 | # Force import order to recognize a module as part of a third party library. 409 | known-third-party=enchant, absl 410 | 411 | # Analyse import fallback blocks. This can be used to support both Python 2 and 412 | # 3 compatible code, which means that the block might have code that exists 413 | # only in one or another interpreter, leading to false positives when analysed. 414 | analyse-fallback-blocks=no 415 | 416 | 417 | [CLASSES] 418 | 419 | # List of method names used to declare (i.e. assign) instance attributes. 420 | defining-attr-methods=__init__, 421 | __new__, 422 | setUp 423 | 424 | # List of member names, which should be excluded from the protected access 425 | # warning. 426 | exclude-protected=_asdict, 427 | _fields, 428 | _replace, 429 | _source, 430 | _make 431 | 432 | # List of valid names for the first argument in a class method. 433 | valid-classmethod-first-arg=cls, 434 | class_ 435 | 436 | # List of valid names for the first argument in a metaclass class method. 437 | valid-metaclass-classmethod-first-arg=mcs 438 | 439 | 440 | [EXCEPTIONS] 441 | 442 | # Exceptions that will emit a warning when being caught. Defaults to 443 | # "Exception" 444 | overgeneral-exceptions=StandardError, 445 | Exception, 446 | BaseException 447 | -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | # content of pytest.ini 2 | [pytest] 3 | addopts = --doctest-modules 4 | doctest_encoding = latin1 5 | doctest_optionflags = NORMALIZE_WHITESPACE IGNORE_EXCEPTION_DETAIL 6 | 7 | -------------------------------------------------------------------------------- /requirements/README.md: -------------------------------------------------------------------------------- 1 | # Requirements 2 | 3 | ## Dependencies structure 4 | This directory contains the environment setup to reproduce all code in this repository. By listing all of your requirements 5 | in the repository you can easily track the packages needed to recreate the analysis. 6 | 7 | - `environment.yml`: Is the general environment specification for deployment. Per default, this automatically installs `dev-requirements.txt` into the python environment. 8 | - `dev-requirements.txt`: PIP requirements file for the packages needed for developing code (includes convenient dependencies, linters, formatters) 9 | - `test-requirements.txt`: PIP requirements file for the packages needed to run continuous integration (includes linting, unit test dependencies) 10 | - `requirements.txt`: PIP requirements file for the packages needed to run code for deployment (minimal dependencies only) 11 | 12 | ## Workflow 13 | A good workflow is: 14 | 1. `pip install` the packages that your analysis needs 15 | 2. Run `pip freeze > requirements.txt` to pin the exact package versions used to recreate the analysis 16 | 3. If you find you need to install another package, run `pip freeze > requirements.txt` again and commit the changes to version control. -------------------------------------------------------------------------------- /requirements/dev-requirements.txt: -------------------------------------------------------------------------------- 1 | # NOTE: These requirements are used for developing code on the repo. 2 | # As a standard they include certain formatters and linters. 3 | 4 | # local package 5 | -e ../. 6 | 7 | # external requirements (mostly linters and formatters) 8 | pylint # pylint linter 9 | mypy # python type checker 10 | black # automatic formatting provider 11 | pre-commit # for git precommit hooks 12 | isort # automatic import sorter 13 | python-dotenv # environment variable manager 14 | pydocstyle # set pydocstyle 15 | 16 | # linear algebra and general data analysis 17 | numpy # arrays, linear algebra 18 | scipy # linear algebra and numerical mathematics 19 | numba # speeding up array operations 20 | pandas # tabular data analysis 21 | 22 | # plotting 23 | matplotlib # general python plotting 24 | seaborn # fancier plotting styles 25 | descartes # geospatial plotting of shapefiles 26 | folium # plotting maps 27 | ipyleaflet # plotting ipywidget maps 28 | 29 | # interactive computing 30 | jupyterlab # jupyter notebooks 31 | tqdm # progress bars 32 | 33 | # geospatial analysis requirements 34 | rasterio # opening and loading raster data 35 | fiona # manipulating geospatial vector data 36 | geopandas # manipulating geospatial vector data 37 | shapely # working with vector shapes 38 | pycrs # working with coordinate reference systems 39 | geopy # convenient API requests to geocoders 40 | xarray # useful data structures 41 | rioxarray # adaptation of xarray for raterio. 42 | dask[array] # allows to composite multiple satellite images stored in different shards 43 | dask[dataframe] # allows more lazy operation for xarray. 44 | dask[dataframe] # allows more lazy operation for xarray. 45 | dask[distributed] # allows distributed computing 46 | netCDF4 # makes sure that the default driver is netCDF4. 47 | bottleneck # needed for fill forward (xarray.DataArray.ffil) 48 | 49 | # additional 50 | networkx # manipulating graph data 51 | rtree # rtree library 52 | 53 | # gdrive functionality 54 | google-api-python-client 55 | google-auth-httplib2 56 | google-auth-oauthlib 57 | 58 | # make videos for animating timeseries etc. 59 | imageio 60 | imageio-ffmpeg 61 | 62 | # xgboost 63 | xgboost # gradient boosted regression 64 | sklearn # sklearn 65 | graphviz # can plot the decission tree 66 | 67 | # 68 | pillow 69 | torch==1.8 70 | torchvision 71 | wandb 72 | pytorch-lightning 73 | git+https://github.com/qubvel/segmentation_models.pytorch 74 | hydra-core 75 | twine # for publishing to PyPI 76 | -------------------------------------------------------------------------------- /requirements/doc-requirements.txt: -------------------------------------------------------------------------------- 1 | # NOTE: These requirements are used for developing code on the repo. 2 | # As a standard they include certain formatters and linters. 3 | 4 | # local package 5 | # -e ../. 6 | 7 | # external requirements (mostly linters and formatters) 8 | pylint # pylint linter 9 | mypy # python type checker 10 | black # automatic formatting provider 11 | pre-commit # for git precommit hooks 12 | isort # automatic import sorter 13 | python-dotenv # environment variable manager 14 | pydocstyle # set pydocstyle 15 | 16 | # linear algebra and general data analysis 17 | numpy # arrays, linear algebra 18 | scipy # linear algebra and numerical mathematics 19 | numba # speeding up array operations 20 | pandas # tabular data analysis 21 | 22 | # plotting 23 | matplotlib # general python plotting 24 | seaborn # fancier plotting styles 25 | descartes # geospatial plotting of shapefiles 26 | folium # plotting maps 27 | ipyleaflet # plotting ipywidget maps 28 | 29 | # interactive computing 30 | jupyterlab # jupyter notebooks 31 | tqdm # progress bars 32 | 33 | # geospatial analysis requirements 34 | rasterio # opening and loading raster data 35 | fiona # manipulating geospatial vector data 36 | geopandas # manipulating geospatial vector data 37 | shapely # working with vector shapes 38 | pycrs # working with coordinate reference systems 39 | geopy # convenient API requests to geocoders 40 | xarray # useful data structures 41 | rioxarray # adaptation of xarray for raterio. 42 | dask[array] # allows to composite multiple satellite images stored in different shards 43 | dask[dataframe] # allows more lazy operation for xarray. 44 | dask[dataframe] # allows more lazy operation for xarray. 45 | dask[distributed] # allows distributed computing 46 | netCDF4 # makes sure that the default driver is netCDF4. 47 | bottleneck # needed for fill forward (xarray.DataArray.ffil) 48 | 49 | # additional 50 | networkx # manipulating graph data 51 | rtree # rtree library 52 | 53 | # gdrive functionality 54 | google-api-python-client 55 | google-auth-httplib2 56 | google-auth-oauthlib 57 | 58 | # make videos for animating timeseries etc. 59 | imageio 60 | imageio-ffmpeg 61 | 62 | # xgboost 63 | xgboost # gradient boosted regression 64 | sklearn # sklearn 65 | graphviz # can plot the decission tree 66 | 67 | # 68 | pillow 69 | torch==1.8 70 | torchvision 71 | wandb 72 | pytorch-lightning 73 | git+https://github.com/qubvel/segmentation_models.pytorch 74 | hydra-core 75 | 76 | #docs 77 | sphinx 78 | sphinx-autodoc-typehints 79 | nbsphinx 80 | nbsphinx-link -------------------------------------------------------------------------------- /requirements/environment.yml: -------------------------------------------------------------------------------- 1 | prefix: ./env 2 | channels: 3 | - conda-forge 4 | - defaults 5 | dependencies: 6 | - python=3.8 7 | - pip 8 | - pip: 9 | - -r dev-requirements.txt 10 | -------------------------------------------------------------------------------- /requirements/requirements.txt: -------------------------------------------------------------------------------- 1 | # NOTE: Your final requirements for production/publishing the repo go here. 2 | 3 | # local package 4 | -e . 5 | 6 | # linear algebra and general data analysis 7 | numpy # arrays, linear algebra 8 | pandas # tabular data analysis 9 | 10 | # plotting 11 | folium # plotting maps 12 | ipyleaflet # plotting ipywidget maps 13 | 14 | # interactive computing 15 | tqdm # progress bars 16 | 17 | # geospatial analysis requirements 18 | # vector data 19 | geopandas # manipulating geospatial vector data 20 | shapely # working with vector shapes 21 | rtree # efficiently querying polygon data 22 | # raster data 23 | rasterio==1.1.8 # opening and loading raster data 24 | xarray # useful data structures 25 | 26 | # graph requirements 27 | networkx # manipulating graph data 28 | -------------------------------------------------------------------------------- /requirements/test-requirements.txt: -------------------------------------------------------------------------------- 1 | # NOTE: These requirements are used for continuous integration (CI) 2 | # meta requirements 3 | virtualenv 4 | 5 | # local package 6 | -e . 7 | 8 | # external requirements 9 | click # BSD-3-Clause 10 | sphinx # BSD-3-Clause 11 | coverage # MIT 12 | flake8 # MIT 13 | python-dotenv # BSD-3-Clause -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | """ 5 | Module that defines a setup function and publishes the package to PyPI. 6 | 7 | Use the command `python setup.py upload`. 8 | """ 9 | # Note: To use the "upload" functionality of this file, you must: 10 | # $ pip install twine 11 | 12 | import io 13 | import os 14 | import sys 15 | from shutil import rmtree 16 | from typing import Dict, List 17 | 18 | from setuptools import Command, find_packages, setup 19 | 20 | # Package meta-data. 21 | NAME = "geograph" 22 | DESCRIPTION = "Group Team Challenge 2021 - Biodiversity Team" 23 | URL = "https://geograph.readthedocs.io/" 24 | EMAIL = "hb574@cam.ac.uk" 25 | AUTHOR = "Biodiversity Team" 26 | REQUIRES_PYTHON = ">=3.8.0" 27 | 28 | # What packages are required for this module to be executed? 29 | REQUIRED: List = [ 30 | "numpy", 31 | "pandas", 32 | "folium", 33 | "ipyleaflet", 34 | "tqdm", 35 | "matplotlib", 36 | "seaborn", 37 | "geopandas", 38 | "shapely", 39 | "rtree", 40 | "rasterio", 41 | "xarray", 42 | "networkx", 43 | ] 44 | 45 | # What packages are optional? 46 | EXTRAS: Dict = { 47 | # "fancy feature": ["django"], 48 | } 49 | 50 | here = os.path.abspath(os.path.dirname(__file__)) 51 | 52 | # Import the PYPI README and use it as the long-description. 53 | # Note: this will only work if "PYPI_README.md" is present in your MANIFEST.in file! 54 | try: 55 | with io.open(os.path.join(here, "PYPI_README.md"), encoding="utf-8") as f: 56 | long_description = "\n" + f.read() 57 | except FileNotFoundError: 58 | long_description = DESCRIPTION 59 | 60 | # Load the package"s _version.py module as a dictionary. 61 | about: Dict = {} 62 | with open(os.path.join(here, NAME, "_version.py")) as f: 63 | # pylint: disable=exec-used 64 | exec(f.read(), about) 65 | 66 | 67 | class UploadCommand(Command): 68 | """Support setup.py upload.""" 69 | 70 | description = "Build and publish the package." 71 | user_options: List = [] 72 | 73 | @staticmethod 74 | def status(s): 75 | """Print things in bold.""" 76 | print("\033[1m{0}\033[0m".format(s)) 77 | 78 | def initialize_options(self): 79 | pass 80 | 81 | def finalize_options(self): 82 | pass 83 | 84 | def run(self): 85 | """Publish package to PyPI.""" 86 | try: 87 | self.status("Removing previous builds…") 88 | rmtree(os.path.join(here, "dist")) 89 | except OSError: 90 | pass 91 | 92 | self.status("Building Source and Wheel (universal) distribution…") 93 | os.system("{0} setup.py sdist bdist_wheel --universal".format(sys.executable)) 94 | 95 | self.status("Uploading the package to PyPI via Twine…") 96 | os.system("twine upload dist/*") 97 | 98 | self.status("Pushing git tags…") 99 | os.system("git tag v{0}".format(about["__version__"])) 100 | os.system("git push --tags") 101 | 102 | sys.exit() 103 | 104 | 105 | # Where the magic happens: 106 | setup( 107 | name=NAME, 108 | version=about["__version__"], 109 | description=DESCRIPTION, 110 | long_description=long_description, 111 | long_description_content_type="text/markdown", 112 | author=AUTHOR, 113 | author_email=EMAIL, 114 | python_requires=REQUIRES_PYTHON, 115 | url=URL, 116 | packages=find_packages(exclude=["tests", "*.tests", "*.tests.*", "tests.*"]), 117 | # entry_points={}, 118 | install_requires=REQUIRED, 119 | extras_require=EXTRAS, 120 | include_package_data=True, 121 | license="MIT", 122 | classifiers=[ 123 | # Trove classifiers 124 | # Full list: https://pypi.python.org/pypi?%3Aaction=list_classifiers 125 | "Development Status :: 3 - Alpha", 126 | "License :: OSI Approved :: MIT License", 127 | "Intended Audience :: Science/Research", 128 | "Intended Audience :: Developers", 129 | "Programming Language :: Python", 130 | "Programming Language :: Python :: 3", 131 | "Programming Language :: Python :: 3.8", 132 | "Programming Language :: Python :: 3.9", 133 | "Programming Language :: Python :: 3.10", 134 | "Programming Language :: Python :: 3 :: Only", 135 | "Programming Language :: Python :: Implementation :: CPython", 136 | "Topic :: Scientific/Engineering :: GIS", 137 | "Typing :: Typed", 138 | "Operating System :: Microsoft :: Windows", 139 | "Operating System :: POSIX", 140 | "Operating System :: Unix", 141 | "Operating System :: MacOS", 142 | ], 143 | # $ setup.py publish support. 144 | cmdclass={ 145 | "upload": UploadCommand, 146 | }, 147 | test_suite="geograph.tests.test_all.suite", 148 | ) 149 | --------------------------------------------------------------------------------