├── .pylintc ├── doc ├── esda.rst ├── giddy.rst ├── libpysal.rst ├── mapping.rst ├── _static │ ├── images │ │ ├── vba_map.png │ │ ├── nonplanar.png │ │ ├── pysal_favicon.ico │ │ ├── lisa_cluster_map.png │ │ └── viz_local_autocorrelation.png │ ├── references.bib │ └── pysal-styles.css ├── references.rst ├── api.rst ├── Makefile ├── make.bat ├── index.rst ├── installation.rst └── conf.py ├── utils └── matplotlibrc ├── .gitattributes ├── figs ├── k9.png └── viz_local_autocorrelation.png ├── paper ├── figs │ ├── vba_choropleth.png │ └── local_autocorrelation.png ├── paper.md └── paper.bib ├── MANIFEST.in ├── splot ├── __init__.py ├── _bk.py ├── libpysal.py ├── tests │ ├── test_viz_utils.py │ ├── test_viz_libpysal_mpl.py │ ├── test_viz_bokeh.py │ ├── test_viz_giddy_mpl.py │ ├── test_viz_value_by_alpha_mapl.py │ └── test_viz_esda_mpl.py ├── mapping.py ├── giddy.py ├── esda.py ├── _viz_libpysal_mpl.py ├── _viz_utils.py ├── _viz_value_by_alpha_mpl.py ├── _viz_bokeh.py ├── _viz_giddy_mpl.py └── _version.py ├── requirements.txt ├── .coveragerc ├── notebooks └── Makefile ├── requirements_dev.txt ├── .github ├── CONTRIBUTING.md ├── release.yml ├── PULL_REQUEST_TEMPLATE.md ├── workflows │ ├── release_and_publish.yml │ └── unittests.yml └── ISSUE_TEMPLATE.md ├── ci ├── 311-DEV.yaml ├── 38-MIN.yaml ├── 39.yaml ├── 310.yaml ├── 311.yaml └── 38.yaml ├── .pre-commit-config.yaml ├── setup.cfg ├── readthedocs.yml ├── codecov.yml ├── LICENSE.txt ├── setup.py ├── .gitignore ├── README.md └── CHANGELOG.md /.pylintc: -------------------------------------------------------------------------------- 1 | disable= 2 | fixme 3 | -------------------------------------------------------------------------------- /doc/esda.rst: -------------------------------------------------------------------------------- 1 | .. 
automodule:: splot.esda -------------------------------------------------------------------------------- /utils/matplotlibrc: -------------------------------------------------------------------------------- 1 | backend : Agg 2 | -------------------------------------------------------------------------------- /doc/giddy.rst: -------------------------------------------------------------------------------- 1 | .. automodule:: splot.giddy -------------------------------------------------------------------------------- /doc/libpysal.rst: -------------------------------------------------------------------------------- 1 | .. automodule:: splot.libpysal -------------------------------------------------------------------------------- /doc/mapping.rst: -------------------------------------------------------------------------------- 1 | .. automodule:: splot.mapping -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | splot/_version.py export-subst 2 | -------------------------------------------------------------------------------- /figs/k9.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pysal/splot/HEAD/figs/k9.png -------------------------------------------------------------------------------- /doc/_static/images/vba_map.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pysal/splot/HEAD/doc/_static/images/vba_map.png -------------------------------------------------------------------------------- /paper/figs/vba_choropleth.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pysal/splot/HEAD/paper/figs/vba_choropleth.png -------------------------------------------------------------------------------- /doc/_static/images/nonplanar.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/pysal/splot/HEAD/doc/_static/images/nonplanar.png -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include splot/tests/*.py 2 | include *.txt 3 | include versioneer.py 4 | include splot/_version.py 5 | -------------------------------------------------------------------------------- /figs/viz_local_autocorrelation.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pysal/splot/HEAD/figs/viz_local_autocorrelation.png -------------------------------------------------------------------------------- /doc/_static/images/pysal_favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pysal/splot/HEAD/doc/_static/images/pysal_favicon.ico -------------------------------------------------------------------------------- /paper/figs/local_autocorrelation.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pysal/splot/HEAD/paper/figs/local_autocorrelation.png -------------------------------------------------------------------------------- /doc/_static/images/lisa_cluster_map.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pysal/splot/HEAD/doc/_static/images/lisa_cluster_map.png -------------------------------------------------------------------------------- /splot/__init__.py: -------------------------------------------------------------------------------- 1 | from . 
import _version 2 | 3 | __version__ = _version.get_versions()["version"] 4 | # import modules/functions 5 | -------------------------------------------------------------------------------- /doc/_static/images/viz_local_autocorrelation.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pysal/splot/HEAD/doc/_static/images/viz_local_autocorrelation.png -------------------------------------------------------------------------------- /doc/references.rst: -------------------------------------------------------------------------------- 1 | .. reference for the docs 2 | 3 | References 4 | ========== 5 | 6 | .. bibliography:: _static/references.bib 7 | :cited: 8 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | esda 2 | geopandas>=0.9.0 3 | giddy 4 | libpysal 5 | mapclassify 6 | matplotlib>=3.3.3 7 | numpy 8 | packaging 9 | seaborn>=0.11.0 10 | spreg 11 | -------------------------------------------------------------------------------- /splot/_bk.py: -------------------------------------------------------------------------------- 1 | from ._viz_bokeh import ( # noqa F401 2 | lisa_cluster, 3 | moran_scatterplot, 4 | plot_choropleth, 5 | plot_local_autocorrelation, 6 | ) 7 | -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | omit = 3 | __init__.py 4 | */contrib/* 5 | */test/* 6 | [report] 7 | omit = 8 | __init__.py 9 | */contrib/* 10 | */test/* 11 | -------------------------------------------------------------------------------- /notebooks/Makefile: -------------------------------------------------------------------------------- 1 | test: 2 | jupyter nbconvert --execute geotable_plot.ipynb 3 | 4 | test3: 5 | jupyter nbconvert --to 
notebook --nbformat 3 geotable_plot.ipynb --output geotable_plot.ipynb 6 | jupyter nbconvert --execute geotable_plot.ipynb 7 | -------------------------------------------------------------------------------- /requirements_dev.txt: -------------------------------------------------------------------------------- 1 | black 2 | bokeh 3 | codecov 4 | coverage 5 | flake8 6 | ipywidgets 7 | isort 8 | jupyter 9 | nbconvert 10 | numpydoc 11 | pre-commit 12 | pyflakes 13 | pytest 14 | pytest-cov 15 | sphinx 16 | sphinx_bootstrap_theme 17 | sphinxcontrib-bibtex 18 | -------------------------------------------------------------------------------- /.github/CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | Thank you for your interest in contributing! We work primarily on Github. Please 2 | review the [contributing procedures](https://github.com/pysal/pysal/wiki/GitHub-Standard-Operating-Procedures) so that we can accept your contributions! Alternatively, contact someone in the [development chat channel](https://gitter.im/pysal.pysal). 3 | -------------------------------------------------------------------------------- /doc/api.rst: -------------------------------------------------------------------------------- 1 | .. _api_ref: 2 | 3 | .. currentmodule:: splot 4 | 5 | API reference 6 | ============= 7 | 8 | .. _splot.giddy_api: 9 | 10 | .. automodule:: splot.giddy 11 | 12 | 13 | .. _splot.esda_api: 14 | 15 | .. automodule:: splot.esda 16 | 17 | 18 | .. _splot.libpysal_api: 19 | 20 | .. automodule:: splot.libpysal 21 | 22 | 23 | .. _splot.mapping_api: 24 | 25 | .. 
automodule:: splot.mapping -------------------------------------------------------------------------------- /splot/libpysal.py: -------------------------------------------------------------------------------- 1 | """ 2 | ``splot.libpysal`` 3 | ================== 4 | 5 | Provides visualisations for all core components of 6 | Python Spatial Analysis Library in `libpysal`. 7 | 8 | libpysal weights 9 | ---------------- 10 | 11 | .. autosummary:: 12 | :toctree: generated/ 13 | 14 | plot_spatial_weights 15 | 16 | """ 17 | 18 | from ._viz_libpysal_mpl import plot_spatial_weights # noqa F401 19 | -------------------------------------------------------------------------------- /ci/311-DEV.yaml: -------------------------------------------------------------------------------- 1 | name: test 2 | channels: 3 | - conda-forge 4 | dependencies: 5 | - python=3.11 6 | # testing 7 | - codecov 8 | - pytest 9 | - pytest-cov 10 | # required 11 | - bokeh 12 | - geopandas 13 | - ipywidgets 14 | - matplotlib 15 | - numpy 16 | - packaging 17 | - pip 18 | - seaborn>=0.11.0 19 | # formatting 20 | - black 21 | - flake8 22 | - isort 23 | - pyflakes 24 | -------------------------------------------------------------------------------- /.github/release.yml: -------------------------------------------------------------------------------- 1 | changelog: 2 | exclude: 3 | labels: 4 | - ignore-for-release 5 | authors: 6 | - octocat 7 | - dependabot 8 | categories: 9 | - title: Breaking Changes 🛠 10 | labels: 11 | - breaking-change 12 | - title: Exciting New Features 🎉 13 | labels: 14 | - enhancement 15 | - title: Other Changes 16 | labels: 17 | - "*" 18 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/pycqa/isort 3 | rev: 5.10.1 4 | hooks: 5 | - id: isort 6 | language_version: python3 7 | - repo: 
https://github.com/pycqa/flake8 8 | rev: 5.0.4 9 | hooks: 10 | - id: flake8 11 | language_version: python3 12 | - repo: https://github.com/psf/black 13 | rev: 22.10.0 14 | hooks: 15 | - id: black 16 | language_version: python3 17 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [versioneer] 2 | VCS = git 3 | style = pep440 4 | versionfile_source = splot/_version.py 5 | versionfile_build = splot/_version.py 6 | tag_prefix = v 7 | parentdir_prefix = splot- 8 | 9 | [flake8] 10 | max_line_length = 88 11 | exclude = doc/conf.py, versioneer.py, splot/_version.py 12 | 13 | [black] 14 | line-length = 88 15 | exclude = doc/conf.py, versioneer.py, splot/_version.py 16 | 17 | [isort] 18 | profile = black 19 | -------------------------------------------------------------------------------- /readthedocs.yml: -------------------------------------------------------------------------------- 1 | # Required 2 | version: 2 3 | 4 | # Build documentation in the docs/ directory with Sphinx 5 | sphinx: 6 | configuration: doc/conf.py 7 | 8 | # Optionally build your docs in additional formats such as PDF and ePub 9 | formats: all 10 | 11 | python: 12 | version: 3.7 13 | install: 14 | - requirements: requirements.txt 15 | - method: pip 16 | path: . 
17 | extra_requirements: 18 | - dev -------------------------------------------------------------------------------- /codecov.yml: -------------------------------------------------------------------------------- 1 | codecov: 2 | notify: 3 | after_n_builds: 6 4 | coverage: 5 | range: 50..95 6 | round: nearest 7 | precision: 1 8 | status: 9 | project: 10 | default: 11 | threshold: 2% 12 | patch: 13 | default: 14 | threshold: 2% 15 | target: 80% 16 | ignore: 17 | - "tests/*" 18 | comment: 19 | layout: "reach, diff, files" 20 | behavior: once 21 | after_n_builds: 6 22 | require_changes: true 23 | -------------------------------------------------------------------------------- /ci/38-MIN.yaml: -------------------------------------------------------------------------------- 1 | name: test 2 | channels: 3 | - conda-forge 4 | dependencies: 5 | - python=3.8 6 | # testing 7 | - codecov 8 | - pytest 9 | - pytest-cov 10 | # required 11 | - bokeh 12 | - esda 13 | - geopandas==0.9.0 14 | - giddy 15 | - libpysal 16 | - mapclassify 17 | - matplotlib==3.3.3 18 | - numpy 19 | - packaging 20 | - pip 21 | - seaborn==0.11.0 22 | - spreg 23 | # formatting 24 | - black 25 | - flake8 26 | - isort 27 | - pyflakes 28 | -------------------------------------------------------------------------------- /ci/39.yaml: -------------------------------------------------------------------------------- 1 | name: test 2 | channels: 3 | - conda-forge 4 | dependencies: 5 | - python=3.9 6 | # testing 7 | - codecov 8 | - pytest 9 | - pytest-cov 10 | # required 11 | - bokeh 12 | - esda 13 | - geopandas 14 | - giddy 15 | - ipywidgets 16 | - libpysal 17 | - mapclassify 18 | - matplotlib 19 | - numpy 20 | - packaging 21 | - pip 22 | - seaborn>=0.11.0 23 | - spreg 24 | # formatting 25 | - black 26 | - flake8 27 | - isort 28 | - pyflakes 29 | -------------------------------------------------------------------------------- /ci/310.yaml: 
-------------------------------------------------------------------------------- 1 | name: test 2 | channels: 3 | - conda-forge 4 | dependencies: 5 | - python=3.10 6 | # testing 7 | - codecov 8 | - pytest 9 | - pytest-cov 10 | # required 11 | - bokeh 12 | - esda 13 | - geopandas 14 | - giddy 15 | - ipywidgets 16 | - libpysal 17 | - mapclassify 18 | - matplotlib 19 | - numpy 20 | - packaging 21 | - pip 22 | - seaborn>=0.11.0 23 | - spreg 24 | # formatting 25 | - black 26 | - flake8 27 | - isort 28 | - pyflakes 29 | -------------------------------------------------------------------------------- /ci/311.yaml: -------------------------------------------------------------------------------- 1 | name: test 2 | channels: 3 | - conda-forge 4 | dependencies: 5 | - python=3.11 6 | # testing 7 | - codecov 8 | - pytest 9 | - pytest-cov 10 | # required 11 | - bokeh 12 | - esda 13 | - geopandas 14 | - giddy 15 | - ipywidgets 16 | - libpysal 17 | - mapclassify 18 | - matplotlib 19 | - numpy 20 | - packaging 21 | - pip 22 | - seaborn>=0.11.0 23 | - spreg 24 | # formatting 25 | - black 26 | - flake8 27 | - isort 28 | - pyflakes 29 | -------------------------------------------------------------------------------- /ci/38.yaml: -------------------------------------------------------------------------------- 1 | name: test 2 | channels: 3 | - conda-forge 4 | dependencies: 5 | - python=3.8 6 | # testing 7 | - codecov 8 | - pytest 9 | - pytest-cov 10 | # required 11 | - bokeh 12 | - esda 13 | - geopandas>=0.9.0 14 | - giddy 15 | - ipywidgets 16 | - libpysal 17 | - mapclassify 18 | - matplotlib>=3.3.3 19 | - numpy 20 | - packaging 21 | - pip 22 | - seaborn>=0.11.0 23 | - spreg 24 | # formatting 25 | - black 26 | - flake8 27 | - isort 28 | - pyflakes 29 | -------------------------------------------------------------------------------- /splot/tests/test_viz_utils.py: -------------------------------------------------------------------------------- 1 | import matplotlib as mpl 2 | 3 | from 
splot._viz_utils import shift_colormap, truncate_colormap 4 | 5 | 6 | def test_shift_colormap(): 7 | map_test = shift_colormap( 8 | "RdBu", start=0.1, midpoint=0.2, stop=0.9, name="shiftedcmap" 9 | ) 10 | assert isinstance(map_test, mpl.colors.LinearSegmentedColormap) 11 | 12 | 13 | def test_truncat_colormap(): 14 | map_test_truncate = truncate_colormap("RdBu", minval=0.1, maxval=0.9, n=99) 15 | assert isinstance(map_test_truncate, mpl.colors.LinearSegmentedColormap) 16 | -------------------------------------------------------------------------------- /doc/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | SPHINXPROJ = splot 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | 22 | clean: 23 | rm -rf $(BUILDDIR)/* 24 | rm -rf auto_examples/ 25 | -------------------------------------------------------------------------------- /splot/mapping.py: -------------------------------------------------------------------------------- 1 | """ 2 | ``splot.mapping`` 3 | ================= 4 | 5 | Provides Choropleth visualizations and mapping utilities. 6 | 7 | Value-by-Alpha maps 8 | ------------------- 9 | 10 | .. autosummary:: 11 | :toctree: generated/ 12 | 13 | value_by_alpha_cmap 14 | vba_choropleth 15 | vba_legend 16 | mapclassify_bin 17 | 18 | 19 | Colormap utilities 20 | ------------------ 21 | 22 | .. 
autosummary:: 23 | :toctree: generated/ 24 | 25 | shift_colormap 26 | truncate_colormap 27 | 28 | """ 29 | 30 | from ._viz_utils import shift_colormap, truncate_colormap # noqa F401 31 | from ._viz_value_by_alpha_mpl import ( # noqa F401 32 | mapclassify_bin, 33 | value_by_alpha_cmap, 34 | vba_choropleth, 35 | vba_legend, 36 | ) 37 | -------------------------------------------------------------------------------- /splot/giddy.py: -------------------------------------------------------------------------------- 1 | """ 2 | ``splot.giddy`` 3 | =============== 4 | 5 | Provides visualisations for the Geospatial Distribution Dynamics - `giddy` module. 6 | `giddy` provides a tool for space–time analytics that consider the role of space 7 | in the evolution of distributions over time. 8 | 9 | Directional LISA analytics 10 | -------------------------- 11 | 12 | .. autosummary:: 13 | :toctree: generated/ 14 | 15 | dynamic_lisa_heatmap 16 | dynamic_lisa_rose 17 | dynamic_lisa_vectors 18 | dynamic_lisa_composite 19 | dynamic_lisa_composite_explore 20 | 21 | """ 22 | 23 | from ._viz_giddy_mpl import ( # noqa F401 24 | dynamic_lisa_composite, 25 | dynamic_lisa_composite_explore, 26 | dynamic_lisa_heatmap, 27 | dynamic_lisa_rose, 28 | dynamic_lisa_vectors, 29 | ) 30 | -------------------------------------------------------------------------------- /splot/esda.py: -------------------------------------------------------------------------------- 1 | """ 2 | ``splot.esda`` 3 | =============== 4 | 5 | Provides visualisations for the `esda` subpackage. 6 | `esda` provides tools for exploratory spatial data analysis that 7 | consider the role of space in a distribution of attribute values. 8 | 9 | Moran analytics 10 | --------------- 11 | 12 | .. 
autosummary:: 13 | :toctree: generated/ 14 | 15 | moran_scatterplot 16 | plot_moran_simulation 17 | plot_moran 18 | plot_moran_bv_simulation 19 | plot_moran_bv 20 | lisa_cluster 21 | plot_local_autocorrelation 22 | moran_facet 23 | 24 | """ 25 | 26 | from ._viz_esda_mpl import ( # noqa F401 27 | lisa_cluster, 28 | moran_facet, 29 | moran_scatterplot, 30 | plot_local_autocorrelation, 31 | plot_moran, 32 | plot_moran_bv, 33 | plot_moran_bv_simulation, 34 | plot_moran_simulation, 35 | ) 36 | -------------------------------------------------------------------------------- /doc/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=python -msphinx 9 | ) 10 | set SOURCEDIR=. 11 | set BUILDDIR=_build 12 | set SPHINXPROJ=splot 13 | 14 | if "%1" == "" goto help 15 | 16 | %SPHINXBUILD% >NUL 2>NUL 17 | if errorlevel 9009 ( 18 | echo. 19 | echo.The Sphinx module was not found. Make sure you have Sphinx installed, 20 | echo.then set the SPHINXBUILD environment variable to point to the full 21 | echo.path of the 'sphinx-build' executable. Alternatively you may add the 22 | echo.Sphinx directory to PATH. 23 | echo. 24 | echo.If you don't have Sphinx installed, grab it from 25 | echo.http://sphinx-doc.org/ 26 | exit /b 1 27 | ) 28 | 29 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 30 | goto end 31 | 32 | :help 33 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 34 | 35 | :end 36 | popd 37 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | Hello! Please make sure to check all these boxes before submitting a Pull Request 2 | (PR). Once you have checked the boxes, feel free to remove all text except the 3 | justification in point 5. 
4 | 5 | 1. [ ] You have run tests on this submission, either by using [Travis Continuous Integration testing](https://github.com/pysal/pysal/wiki/GitHub-Standard-Operating-Procedures#automated-testing-w-travis-ci) testing or running `nosetests` on your changes? 6 | 2. [ ] This pull request is directed to the `pysal/dev` branch. 7 | 3. [ ] This pull introduces new functionality covered by 8 | [docstrings](https://en.wikipedia.org/wiki/Docstring#Python) and 9 | [unittests](https://docs.python.org/2/library/unittest.html)? 10 | 4. [ ] You have [assigned a 11 | reviewer](https://help.github.com/articles/assigning-issues-and-pull-requests-to-other-github-users/) and added relevant [labels](https://help.github.com/articles/applying-labels-to-issues-and-pull-requests/) 12 | 5. [ ] The justification for this PR is: 13 | -------------------------------------------------------------------------------- /doc/_static/references.bib: -------------------------------------------------------------------------------- 1 | %% This BibTeX bibliography file was created using BibDesk. 
2 | %% http://bibdesk.sourceforge.net/ 3 | 4 | 5 | %% Saved with string encoding Unicode (UTF-8) 6 | 7 | 8 | 9 | @Article{anselin2006geoda, 10 | author={Anselin, Luc and Syabri, Ibnu and Kho, Youngihn}, 11 | title={GeoDa: an introduction to spatial data analysis}, 12 | journal={Geographical analysis}, 13 | volume={38}, 14 | number={1}, 15 | pages={5--22}, 16 | year={2006}, 17 | publisher={Wiley Online Library} 18 | } 19 | 20 | 21 | @Article{Anselin95, 22 | author = {Anselin, Luc}, 23 | title = {Local Indicators of Spatial Association-{LISA}}, 24 | year = 1995, 25 | volume = 27, 26 | number = 2, 27 | month = {Sep}, 28 | pages = {93–115}, 29 | issn = {0016-7363}, 30 | doi = {10.1111/j.1538-4632.1995.tb00338.x}, 31 | url = {http://dx.doi.org/10.1111/j.1538-4632.1995.tb00338.x}, 32 | journal = {Geographical Analysis}, 33 | publisher = {Wiley} 34 | } -------------------------------------------------------------------------------- /.github/workflows/release_and_publish.yml: -------------------------------------------------------------------------------- 1 | name: Release & Publish 2 | 3 | on: 4 | push: 5 | # Sequence of patterns matched against refs/tags 6 | tags: 7 | - "v*" # Push events to matching v*, i.e. 
v1.0, v20.15.10 8 | 9 | jobs: 10 | build: 11 | name: Create release & publish to PyPI 12 | runs-on: ubuntu-latest 13 | steps: 14 | - name: Checkout repo 15 | uses: actions/checkout@v3 16 | 17 | - name: Set up python 18 | uses: actions/setup-python@v4 19 | with: 20 | python-version: "3.x" 21 | 22 | - name: Install Dependencies 23 | run: | 24 | python -m pip install --upgrade pip 25 | pip install setuptools wheel twine jupyter urllib3 pandas pyyaml 26 | python setup.py sdist bdist_wheel 27 | 28 | - name: Create Release Notes 29 | uses: actions/github-script@v6 30 | with: 31 | github-token: ${{secrets.GITHUB_TOKEN}} 32 | script: | 33 | await github.request(`POST /repos/${{ github.repository }}/releases`, { 34 | tag_name: "${{ github.ref }}", 35 | generate_release_notes: true 36 | }); 37 | 38 | - name: Publish distribution 📦 to PyPI 39 | uses: pypa/gh-action-pypi-publish@master 40 | with: 41 | user: __token__ 42 | password: ${{ secrets.PYPI_PASSWORD }} 43 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | Copyright 2017 PySAL-splot Developers 2 | 3 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 4 | 5 | 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 6 | 7 | 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 8 | 9 | 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. 
10 | 11 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 12 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | Thank you for filing this issue! To help troubleshoot this issue, please follow 2 | the following directions to the best of your ability before submitting an issue. 3 | Feel free to delete this text once you've filled out the relevant requests. 4 | 5 | Please include the output of the following in your issue submission. If you don't know how to provide the information, commands to get the relevant information from the Python interpreter will follow each bullet point. 6 | 7 | Feel free to delete the commands after you've filled out each bullet. 
8 | 9 | - Platform information: 10 | ```python 11 | >>> import os; print(os.name, os.sys.platform);print(os.uname()) 12 | ``` 13 | - Python version: 14 | ```python 15 | >>> import sys; print(sys.version) 16 | ``` 17 | - SciPy version: 18 | ```python 19 | >>> import scpiy; print(scipy.__version__) 20 | ``` 21 | - NumPy version: 22 | ```python 23 | >>> import numpy; print(numpy.__version__) 24 | ``` 25 | 26 | Also, please upload any relevant data as [a file 27 | attachment](https://help.github.com/articles/file-attachments-on-issues-and-pull-requests/). Please **do not** upload pickled objects, since it's nearly impossible to troubleshoot them without replicating your exact namespace. Instead, provide the minimal subset of the data required to replicate the problem. If it makes you more comfortable submitting the issue, feel free to: 28 | 29 | 1. remove personally identifying information from data or code 30 | 2. provide only the required subset of the full data or code 31 | -------------------------------------------------------------------------------- /splot/tests/test_viz_libpysal_mpl.py: -------------------------------------------------------------------------------- 1 | import geopandas as gpd 2 | import libpysal 3 | import matplotlib.pyplot as plt 4 | import pytest 5 | from libpysal import examples 6 | from libpysal.weights.contiguity import Queen 7 | 8 | from splot.libpysal import plot_spatial_weights 9 | 10 | 11 | @pytest.mark.filterwarnings("ignore:Geometry is in a geographic CRS.") 12 | def test_plot_spatial_weights(): 13 | # get data 14 | rio_grande_do_sul = examples.load_example("Rio Grande do Sul") 15 | gdf = gpd.read_file(rio_grande_do_sul.get_path("43MUE250GC_SIR.shp")) 16 | 17 | # calculate weights 18 | weights = Queen.from_dataframe(gdf, silence_warnings=True) 19 | 20 | # plot weights 21 | fig, _ = plot_spatial_weights(weights, gdf) 22 | plt.close(fig) 23 | 24 | # calculate nonplanar_joins 25 | wnp = 
libpysal.weights.util.nonplanar_neighbors(weights, gdf) 26 | # plot new joins 27 | fig2, _ = plot_spatial_weights(wnp, gdf) 28 | plt.close(fig2) 29 | 30 | # customize 31 | fig3, _ = plot_spatial_weights(wnp, gdf, nonplanar_edge_kws=dict(color="#4393c3")) 32 | plt.close(fig3) 33 | 34 | # plot in existing figure 35 | fig4, axs = plt.subplots(1, 3) 36 | plot_spatial_weights(wnp, gdf, ax=axs[0]) 37 | plt.close(fig4) 38 | 39 | # uses a column as the index for spatial weights object 40 | weights_index = Queen.from_dataframe( 41 | gdf, idVariable="CD_GEOCMU", silence_warnings=True 42 | ) 43 | fig, _ = plot_spatial_weights(weights_index, gdf, indexed_on="CD_GEOCMU") 44 | plt.close(fig) 45 | -------------------------------------------------------------------------------- /doc/_static/pysal-styles.css: -------------------------------------------------------------------------------- 1 | /* Make thumbnails with equal heights */ 2 | @media only screen and (min-width : 481px) { 3 | .row.equal-height { 4 | display: flex; 5 | flex-wrap: wrap; 6 | } 7 | .row.equal-height > [class*='col-'] { 8 | display: flex; 9 | flex-direction: column; 10 | } 11 | .row.equal-height.row:after, 12 | .row.equal-height.row:before { 13 | display: flex; 14 | } 15 | 16 | .row.equal-height > [class*='col-'] > .thumbnail, 17 | .row.equal-height > [class*='col-'] > .thumbnail > .caption { 18 | display: flex; 19 | flex: 1 0 auto; 20 | flex-direction: column; 21 | } 22 | .row.equal-height > [class*='col-'] > .thumbnail > .caption > .flex-text { 23 | flex-grow: 1; 24 | } 25 | .row.equal-height > [class*='col-'] > .thumbnail > img { 26 | width: 100%; 27 | height: 200px; /* force image's height */ 28 | 29 | /* force image fit inside it's "box" */ 30 | -webkit-object-fit: cover; 31 | -moz-object-fit: cover; 32 | -ms-object-fit: cover; 33 | -o-object-fit: cover; 34 | object-fit: cover; 35 | } 36 | } 37 | 38 | .row.extra-bottom-padding{ 39 | margin-bottom: 20px; 40 | } 41 | 42 | 43 | .topnavicons { 44 | 
margin-left: 10% !important; 45 | } 46 | 47 | .topnavicons li { 48 | margin-left: 0px !important; 49 | min-width: 100px; 50 | text-align: center; 51 | } 52 | 53 | .topnavicons .thumbnail { 54 | margin-right: 10px; 55 | border: none; 56 | box-shadow: none; 57 | text-align: center; 58 | font-size: 85%; 59 | font-weight: bold; 60 | line-height: 10px; 61 | height: 100px; 62 | } 63 | 64 | .topnavicons .thumbnail img { 65 | display: block; 66 | margin-left: auto; 67 | margin-right: auto; 68 | } 69 | 70 | 71 | /* Table with a scrollbar */ 72 | .bodycontainer { max-height: 600px; width: 100%; margin: 0; overflow-y: auto; } 73 | .table-scrollable { margin: 0; padding: 0; } 74 | 75 | div.body { 76 | max-width: 1080px; 77 | } 78 | -------------------------------------------------------------------------------- /splot/tests/test_viz_bokeh.py: -------------------------------------------------------------------------------- 1 | # Tests are enabled even though Bokeh functionality is private for now, 2 | # in order to keep code coverage good. 
3 | # Bokeh versions are not intended for release 4 | # but will be picked up later 5 | 6 | import esda 7 | import geopandas as gpd 8 | import pytest 9 | from libpysal import examples 10 | from libpysal.weights.contiguity import Queen 11 | 12 | from splot._bk import ( 13 | lisa_cluster, 14 | moran_scatterplot, 15 | plot_choropleth, 16 | plot_local_autocorrelation, 17 | ) 18 | 19 | 20 | @pytest.mark.skip(reason="to be deprecated") 21 | def test_plot_choropleth(): 22 | link = examples.get_path("columbus.shp") 23 | df = gpd.read_file(link) 24 | 25 | w = Queen.from_dataframe(df) 26 | w.transform = "r" 27 | 28 | TOOLS = "tap,help" 29 | plot_choropleth(df, "HOVAL", title="columbus", reverse_colors=True, tools=TOOLS) 30 | 31 | 32 | @pytest.mark.skip(reason="to be deprecated") 33 | def test_lisa_cluster(): 34 | link = examples.get_path("columbus.shp") 35 | df = gpd.read_file(link) 36 | 37 | y = df["HOVAL"].values 38 | w = Queen.from_dataframe(df) 39 | w.transform = "r" 40 | 41 | moran_loc = esda.moran.Moran_Local(y, w) 42 | 43 | TOOLS = "tap,reset,help" 44 | lisa_cluster(moran_loc, df, p=0.05, tools=TOOLS) 45 | 46 | 47 | @pytest.mark.skip(reason="to be deprecated") 48 | def test_moran_scatterplot(): 49 | link = examples.get_path("columbus.shp") 50 | df = gpd.read_file(link) 51 | 52 | y = df["HOVAL"].values 53 | w = Queen.from_dataframe(df) 54 | w.transform = "r" 55 | 56 | moran_loc = esda.moran.Moran_Local(y, w) 57 | 58 | moran_scatterplot(moran_loc, p=0.05) 59 | 60 | 61 | @pytest.mark.skip(reason="to be deprecated") 62 | def test_plot_local_autocorrelation(): 63 | link = examples.get_path("columbus.shp") 64 | df = gpd.read_file(link) 65 | 66 | y = df["HOVAL"].values 67 | w = Queen.from_dataframe(df) 68 | w.transform = "r" 69 | 70 | moran_loc = esda.moran.Moran_Local(y, w) 71 | 72 | plot_local_autocorrelation(moran_loc, df, "HOVAL") 73 | -------------------------------------------------------------------------------- /setup.py: 
-------------------------------------------------------------------------------- 1 | from os import path 2 | 3 | from setuptools import setup 4 | 5 | import versioneer 6 | 7 | package = "splot" 8 | 9 | 10 | def _get_requirements_from_files(groups_files): 11 | groups_reqlist = {} 12 | 13 | for k, v in groups_files.items(): 14 | with open(v, "r") as f: 15 | pkg_list = f.read().splitlines() 16 | groups_reqlist[k] = pkg_list 17 | 18 | return groups_reqlist 19 | 20 | 21 | _groups_files = { 22 | "base": "requirements.txt", # basic requirements 23 | "dev": "requirements_dev.txt", # requirements for dev, doc, test 24 | } 25 | 26 | reqs = _get_requirements_from_files(_groups_files) 27 | install_reqs = reqs.pop("base") 28 | extras_reqs = reqs 29 | 30 | # add long_description form README.md 31 | this_directory = path.abspath(path.dirname(__file__)) 32 | with open(path.join(this_directory, "README.md"), encoding="utf-8") as f: 33 | long_description = f.read() 34 | 35 | setup( 36 | name=package, # name of package 37 | version=versioneer.get_version(), 38 | description="Visual analytics for spatial analysis with PySAL.", 39 | long_description=long_description, 40 | long_description_content_type="text/markdown", 41 | url="https://github.com/pysal/splot", 42 | maintainer="Serge Rey, Stefanie Lumnitz", 43 | maintainer_email="sjsrey@gmail.com, stefanie.lumnitz@gmail.com", 44 | test_suite="nose.collector", 45 | tests_require=["nose"], 46 | keywords="spatial statistics visualization", 47 | classifiers=[ 48 | "Development Status :: 5 - Production/Stable", 49 | "Intended Audience :: Science/Research", 50 | "Intended Audience :: Developers", 51 | "Intended Audience :: Education", 52 | "Topic :: Scientific/Engineering", 53 | "Topic :: Scientific/Engineering :: GIS", 54 | "License :: OSI Approved :: BSD License", 55 | "Programming Language :: Python", 56 | "Programming Language :: Python :: 3.8", 57 | "Programming Language :: Python :: 3.9", 58 | "Programming Language :: Python :: 3.10", 
59 | ], 60 | license="3-Clause BSD", 61 | packages=["splot"], 62 | include_package_data=True, 63 | install_requires=install_reqs, 64 | extras_require=extras_reqs, 65 | zip_safe=False, 66 | cmdclass=versioneer.get_cmdclass(), 67 | ) 68 | -------------------------------------------------------------------------------- /doc/index.rst: -------------------------------------------------------------------------------- 1 | .. splot documentation master file, created by 2 | sphinx-quickstart on Sun Jun 10 10:22:03 2018. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to splot's documentation! 7 | ================================= 8 | 9 | 10 | :Release: |release| 11 | :Date: |today| 12 | 13 | `splot` provides `PySAL` users with a lightweight visualization interface 14 | to explore their data and quickly iterate through static and dynamic visualisations. 15 | 16 | 17 | .. raw:: html 18 | 19 |
20 |
21 |
22 |
23 | 31 | 39 | 49 |
50 |
51 |
52 |
53 | 54 | 55 | .. toctree:: 56 | :hidden: 57 | :maxdepth: 3 58 | :caption: Contents: 59 | 60 | Installation 61 | API 62 | References 63 | 64 | 65 | Indices and tables 66 | ================== 67 | 68 | * :ref:`genindex` 69 | * :ref:`modindex` 70 | * :ref:`search` 71 | 72 | 73 | .. _PySAL: https://github.com/pysal/pysal -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.py[cod] 2 | *.bak 3 | .ipynb_checkpoints/ 4 | # C extensions 5 | *.so 6 | node_modules/ 7 | 8 | # Packages 9 | *.egg 10 | *.egg-info 11 | dist 12 | build 13 | eggs 14 | parts 15 | bin 16 | var 17 | sdist 18 | develop-eggs 19 | .installed.cfg 20 | lib 21 | lib64 22 | __pycache__ 23 | 24 | 25 | # virtual environment 26 | venv/ 27 | 28 | # Installer logs 29 | pip-log.txt 30 | 31 | # Unit test / coverage reports 32 | .coverage 33 | .tox 34 | nosetests.xml 35 | 36 | # Translations 37 | *.mo 38 | 39 | # Mr Developer 40 | .mr.developer.cfg 41 | .project 42 | .pydevproject 43 | 44 | # OS generated files # 45 | ###################### 46 | .DS_Store 47 | .DS_Store? 48 | ._* 49 | .Spotlight-V100 50 | .Trashes 51 | Icon? 
52 | ehthumbs.db 53 | Thumbs.db 54 | 55 | 56 | # pysal 57 | # 58 | lattice.* 59 | .vagrant/ 60 | pysal/contrib/viz/.ipynb_checkpoints/ 61 | pysal/contrib/viz/bp.png 62 | pysal/contrib/viz/fj.png 63 | pysal/contrib/viz/fj_classless.png 64 | pysal/contrib/viz/lmet.tex 65 | pysal/contrib/viz/lmp.tex 66 | pysal/contrib/viz/lmplot.png 67 | pysal/contrib/viz/lmss.tex 68 | pysal/contrib/viz/lmt.tex 69 | pysal/contrib/viz/out.png 70 | pysal/contrib/viz/p.tex 71 | pysal/contrib/viz/quantiles.png 72 | pysal/contrib/viz/quantiles_HR60.png 73 | pysal/contrib/viz/quantiles_HR70.png 74 | pysal/contrib/viz/quantiles_HR80.png 75 | pysal/contrib/viz/quantiles_HR90.png 76 | pysal/contrib/viz/quatiles.png 77 | pysal/contrib/viz/region.ipynb 78 | pysal/contrib/viz/south_base.html 79 | pysal/contrib/viz/sp.tex 80 | pysal/contrib/viz/sss.tex 81 | pysal/examples/south.prj 82 | 83 | #Vi 84 | *.swp 85 | .ropeproject/ 86 | .eggs/ 87 | pysal/contrib/planar/ 88 | pysal/esda/.ropeproject/ 89 | pysal/esda/jenks_nb.ipynb 90 | pysal/examples/snow_maps/fake.dbf 91 | pysal/examples/snow_maps/fake.prj 92 | pysal/examples/snow_maps/fake.qpj 93 | pysal/examples/snow_maps/fake.shp 94 | pysal/examples/snow_maps/fake.shx 95 | pysal/examples/snow_maps/fixed.dbf 96 | pysal/examples/snow_maps/fixed.prj 97 | pysal/examples/snow_maps/fixed.qgs 98 | pysal/examples/snow_maps/fixed.qgs~ 99 | pysal/examples/snow_maps/fixed.qpj 100 | pysal/examples/snow_maps/fixed.shp 101 | pysal/examples/snow_maps/fixed.shx 102 | pysal/examples/snow_maps/snow.qgs 103 | pysal/examples/snow_maps/snow.qgs~ 104 | pysal/examples/snow_maps/soho_graph.dbf 105 | pysal/examples/snow_maps/soho_graph.prj 106 | pysal/examples/snow_maps/soho_graph.qpj 107 | pysal/examples/snow_maps/soho_graph.shp 108 | pysal/examples/snow_maps/soho_graph.shx 109 | 110 | doc/_build/ 111 | doc/generated/ 112 | -------------------------------------------------------------------------------- /doc/installation.rst: 
-------------------------------------------------------------------------------- 1 | .. Installation 2 | 3 | Installation 4 | ============ 5 | 6 | Installing dependencies 7 | ----------------------- 8 | 9 | `splot` is compatible with Python `3.8+` and 10 | depends on GeoPandas 0.9.0 or later and matplotlib 3.3.3 or later. 11 | Please make sure that you are operating in a Python 3 environment. 12 | 13 | splot also uses 14 | 15 | * numpy 16 | * seaborn 17 | * mapclassify 18 | * Ipywidgets 19 | 20 | Depending on your spatial analysis workflow and the PySAL objects 21 | you would like to visualize, splot relies on: 22 | 23 | PySAL >=2.0 24 | 25 | or the installation of separate packages found in the PySAL stack: 26 | 27 | * esda 28 | * libpysal 29 | * spreg 30 | * giddy 31 | 32 | 33 | Installing the newest release 34 | ----------------------------- 35 | 36 | There are two ways of accessing splot. First, splot is installed with 37 | the PySAL 2.0 metapackage through: 38 | 39 | ```$ pip install -U pysal``` 40 | 41 | or 42 | 43 | ```$ conda install -c conda-forge pysal``` 44 | 45 | Second, splot can be installed as a separate package. If you are 46 | using Anaconda, install splot via the conda utility: 47 | 48 | ```$ conda install -c conda-forge splot``` 49 | 50 | Otherwise, you can install splot from PyPI with pip: 51 | 52 | ```$ pip install splot``` 53 | 54 | 55 | Troubleshooting 56 | --------------- 57 | Most common installation errors are due to splot's dependency on GeoPandas. 58 | 59 | It often helps to first install GeoPandas separately from conda-forge with: 60 | 61 | ```$ conda install --channel conda-forge geopandas``` 62 | 63 | before installing splot (preferably also from conda, alternatively from pip). 64 | 65 | For more information on troubleshooting the installation of GeoPandas with pip, see the `GeoPandas`_ documentation.
66 | 67 | It is also possible to install splot with a later Python version (>3.8) 68 | through the separate installation of GeoPandas or through installation with conda-forge. 69 | 70 | 71 | Installing the development version 72 | ---------------------------------- 73 | 74 | Potentially, you might want to use the newest features in the development 75 | version of splot on github - `pysal/splot`_ while have not been incorporated 76 | in the Pypi released version. You can achieve that by installing `pysal/splot`_ 77 | by running the following from a command shell:: 78 | 79 | pip install git+https://github.com/pysal/splot.git 80 | 81 | You can also `fork`_ the `pysal/splot`_ repo and create a local clone of 82 | your fork. By making changes 83 | to your local clone and submitting a pull request to `pysal/splot`_, you can 84 | contribute to the splot development. 85 | 86 | 87 | .. _GeoPandas: http://geopandas.org/install.html 88 | .. _pysal/splot: https://github.com/pysal/splot 89 | .. _fork: https://help.github.com/articles/fork-a-repo/ 90 | 91 | -------------------------------------------------------------------------------- /splot/tests/test_viz_giddy_mpl.py: -------------------------------------------------------------------------------- 1 | import geopandas as gpd 2 | import matplotlib.pyplot as plt 3 | import numpy as np 4 | import pandas as pd 5 | import pytest 6 | from libpysal import examples 7 | from libpysal.weights.contiguity import Queen 8 | 9 | try: 10 | import ipywidgets # noqa F401 11 | 12 | HAS_IPYWIDGETS = True 13 | except (ImportError, ModuleNotFoundError): 14 | HAS_IPYWIDGETS = False 15 | 16 | 17 | def _data_generation(): 18 | from giddy.directional import Rose 19 | 20 | # get csv and shp 21 | shp_link = examples.get_path("us48.shp") 22 | df = gpd.read_file(shp_link) 23 | income_table = pd.read_csv(examples.get_path("usjoin.csv")) 24 | # calculate relative values 25 | for year in range(1969, 2010): 26 | income_table[str(year) + "_rel"] = ( 27 | 
income_table[str(year)] / income_table[str(year)].mean() 28 | ) 29 | # merge 30 | gdf = df.merge(income_table, left_on="STATE_NAME", right_on="Name") 31 | # retrieve spatial weights and data for two points in time 32 | w = Queen.from_dataframe(gdf) 33 | w.transform = "r" 34 | y1 = gdf["1969_rel"].values 35 | y2 = gdf["2000_rel"].values 36 | # calculate rose Object 37 | Y = np.array([y1, y2]).T 38 | rose = Rose(Y, w, k=5) 39 | return gdf, y1, rose 40 | 41 | 42 | def test_dynamic_lisa_heatmap(): 43 | from splot.giddy import dynamic_lisa_heatmap 44 | 45 | _, _, rose = _data_generation() 46 | fig, _ = dynamic_lisa_heatmap(rose) 47 | plt.close(fig) 48 | 49 | fig2, _ = dynamic_lisa_heatmap(rose, cmap="GnBu") 50 | plt.close(fig2) 51 | 52 | 53 | def test_dynamic_lisa_rose(): 54 | from splot.giddy import dynamic_lisa_rose 55 | 56 | _, y1, rose = _data_generation() 57 | fig1, _ = dynamic_lisa_rose(rose) 58 | plt.close(fig1) 59 | 60 | fig2, _ = dynamic_lisa_rose(rose, attribute=y1) 61 | plt.close(fig2) 62 | 63 | fig3, _ = dynamic_lisa_rose(rose, c="r") 64 | plt.close(fig3) 65 | 66 | pytest.raises(ValueError, dynamic_lisa_rose, rose, attribute=y1, color="blue") 67 | 68 | 69 | def test_dynamic_lisa_vectors(): 70 | from splot.giddy import dynamic_lisa_vectors 71 | 72 | _, _, rose = _data_generation() 73 | fig1, _ = dynamic_lisa_vectors(rose) 74 | plt.close(fig1) 75 | 76 | fig2, _ = dynamic_lisa_vectors(rose, arrows=False) 77 | plt.close(fig2) 78 | 79 | fig3, _ = dynamic_lisa_vectors(rose, c="r") 80 | plt.close(fig3) 81 | 82 | fig4, axs = plt.subplots(1, 3) 83 | dynamic_lisa_vectors(rose, ax=axs[0], color="r") 84 | plt.close(fig4) 85 | 86 | 87 | def test_dynamic_lisa_composite(): 88 | from splot.giddy import dynamic_lisa_composite 89 | 90 | gdf, _, rose = _data_generation() 91 | fig, _ = dynamic_lisa_composite(rose, gdf) 92 | plt.close(fig) 93 | 94 | 95 | @pytest.mark.skipif(HAS_IPYWIDGETS, reason="ipywidgets available") 96 | def test_import_ipywidgets_error(): 97 | with 
pytest.raises(ImportError, match="`ipywidgets` package is required"): 98 | from splot.giddy import dynamic_lisa_composite_explore 99 | 100 | gdf, _, rose = _data_generation() 101 | 102 | dynamic_lisa_composite_explore(rose, gdf) 103 | -------------------------------------------------------------------------------- /.github/workflows/unittests.yml: -------------------------------------------------------------------------------- 1 | name: Continuous Integration 2 | 3 | on: 4 | push: 5 | branches: 6 | - '*' 7 | pull_request: 8 | branches: 9 | - '*' 10 | schedule: 11 | - cron: '59 23 * * *' 12 | workflow_dispatch: 13 | inputs: 14 | version: 15 | description: Manual Unittest Run 16 | default: test 17 | required: false 18 | jobs: 19 | testing: 20 | env: 21 | RUN_TEST: pytest splot -v -r a --cov splot --cov-config .coveragerc --cov-report xml --color yes --cov-append --cov-report term-missing 22 | name: ${{ matrix.os }}, ${{ matrix.environment-file }} 23 | runs-on: ${{ matrix.os }} 24 | timeout-minutes: 30 25 | strategy: 26 | matrix: 27 | os: [ubuntu-latest] 28 | environment-file: 29 | - ci/38-MIN.yaml 30 | - ci/38.yaml 31 | - ci/39.yaml 32 | - ci/310.yaml 33 | - ci/311.yaml 34 | - ci/311-DEV.yaml 35 | include: 36 | - environment-file: ci/311.yaml 37 | os: macos-latest 38 | - environment-file: ci/311.yaml 39 | os: windows-latest 40 | fail-fast: false 41 | 42 | defaults: 43 | run: 44 | shell: bash -l {0} 45 | 46 | steps: 47 | - name: checkout repo 48 | uses: actions/checkout@v3 49 | 50 | - name: setup micromamba 51 | uses: mamba-org/setup-micromamba@v1 52 | with: 53 | environment-file: ${{ matrix.environment-file }} 54 | micromamba-version: 'latest' 55 | 56 | - name: install bleeding edge PySAL submodules (only Ubuntu / Python 3.10) 57 | run: | 58 | pip install git+https://github.com/pysal/libpysal.git@main 59 | pip install git+https://github.com/pysal/mapclassify.git@main 60 | pip install git+https://github.com/pysal/esda.git@main 61 | pip install 
git+https://github.com/pysal/spreg.git@main 62 | pip install git+https://github.com/pysal/giddy.git@main 63 | if: matrix.os == 'ubuntu-latest' && contains(matrix.environment-file, 'DEV') 64 | 65 | - name: install libpysal example datasets 66 | run: | 67 | python -c 'import libpysal; libpysal.examples.load_example("Guerry"); libpysal.examples.load_example("Rio Grande do Sul")' 68 | 69 | - name: install and import splot 70 | run: | 71 | python -m pip install --no-deps -e . 72 | python -c 'import splot' 73 | 74 | - name: copy matplotlibrc 75 | run: cp utils/matplotlibrc . 76 | 77 | - name: environment info 78 | run: | 79 | micromamba info 80 | micromamba list 81 | 82 | - name: spatial versions 83 | run: 'python -c "import geopandas; geopandas.show_versions();"' 84 | 85 | - name: run tests 86 | run: | 87 | pytest splot -v -r a --cov splot --cov-config .coveragerc --cov-report xml --color yes --cov-append --cov-report term-missing 88 | 89 | - name: codecov 90 | uses: codecov/codecov-action@v3 91 | with: 92 | token: ${{ secrets.CODECOV_TOKEN }} 93 | file: ./coverage.xml 94 | name: splot-codecov 95 | 96 | - name: Generate and publish the report 97 | if: | 98 | failure() 99 | && steps.status.outcome == 'failure' 100 | && github.event_name == 'schedule' 101 | && github.repository_owner == 'pysal' 102 | uses: xarray-contrib/issue-from-pytest-log@v1 103 | with: 104 | log-path: pytest-log.jsonl 105 | -------------------------------------------------------------------------------- /splot/tests/test_viz_value_by_alpha_mapl.py: -------------------------------------------------------------------------------- 1 | import geopandas as gpd 2 | import matplotlib.pyplot as plt 3 | from libpysal import examples 4 | 5 | from splot.mapping import ( 6 | mapclassify_bin, 7 | value_by_alpha_cmap, 8 | vba_choropleth, 9 | vba_legend, 10 | ) 11 | 12 | 13 | def test_value_by_alpha_cmap(): 14 | # data 15 | link_to_data = examples.get_path("columbus.shp") 16 | gdf = gpd.read_file(link_to_data) 17 | 
x = gdf["HOVAL"].values 18 | y = gdf["CRIME"].values 19 | 20 | # create cmap 21 | rgba, cmap = value_by_alpha_cmap(x, y) 22 | 23 | # create divergent rgba 24 | div_rgba, _ = value_by_alpha_cmap(x, y, cmap="seismic", divergent=True) 25 | 26 | # create reverted rgba 27 | rev_rgba, _ = value_by_alpha_cmap(x, y, cmap="RdBu", revert_alpha=True) 28 | 29 | 30 | def test_vba_choropleth(): 31 | 32 | # data 33 | link_to_data = examples.get_path("columbus.shp") 34 | gdf = gpd.read_file(link_to_data) 35 | 36 | # plot (string as input) 37 | fig, _ = vba_choropleth("HOVAL", "CRIME", gdf) 38 | plt.close(fig) 39 | 40 | # plot with divergent and reverted alpha 41 | fig, _ = vba_choropleth( 42 | "HOVAL", "CRIME", gdf, cmap="RdBu", divergent=True, revert_alpha=True 43 | ) 44 | plt.close(fig) 45 | 46 | # plot with classified alpha and rgb 47 | fig, _ = vba_choropleth( 48 | "HOVAL", 49 | "CRIME", 50 | gdf, 51 | cmap="RdBu", 52 | alpha_mapclassify=dict(classifier="quantiles"), 53 | rgb_mapclassify=dict(classifier="quantiles"), 54 | ) 55 | plt.close(fig) 56 | 57 | # plot classified with legend 58 | fig, _ = vba_choropleth( 59 | "HOVAL", 60 | "CRIME", 61 | gdf, 62 | alpha_mapclassify=dict(classifier="std_mean"), 63 | rgb_mapclassify=dict(classifier="std_mean"), 64 | legend=True, 65 | ) 66 | plt.close(fig) 67 | 68 | # plot (values as input) 69 | x = gdf["HOVAL"].values 70 | y = gdf["CRIME"].values 71 | fig, _ = vba_choropleth(x, y, gdf) 72 | plt.close(fig) 73 | 74 | # plot with divergent and reverted alpha 75 | fig, _ = vba_choropleth(x, y, gdf, cmap="RdBu", divergent=True, revert_alpha=True) 76 | plt.close(fig) 77 | 78 | # plot with classified alpha and rgb 79 | fig, _ = vba_choropleth( 80 | x, 81 | y, 82 | gdf, 83 | cmap="RdBu", 84 | alpha_mapclassify=dict(classifier="quantiles"), 85 | rgb_mapclassify=dict(classifier="quantiles"), 86 | ) 87 | plt.close(fig) 88 | 89 | # plot classified with legend 90 | fig, _ = vba_choropleth( 91 | x, 92 | y, 93 | gdf, 94 | 
alpha_mapclassify=dict(classifier="std_mean"), 95 | rgb_mapclassify=dict(classifier="std_mean"), 96 | legend=True, 97 | ) 98 | plt.close(fig) 99 | 100 | 101 | def test_vba_legend(): 102 | # data 103 | link_to_data = examples.get_path("columbus.shp") 104 | gdf = gpd.read_file(link_to_data) 105 | x = gdf["HOVAL"].values 106 | y = gdf["CRIME"].values 107 | # classify data 108 | rgb_bins = mapclassify_bin(x, "quantiles") 109 | alpha_bins = mapclassify_bin(y, "quantiles") 110 | 111 | # plot legend 112 | fig, _ = vba_legend(rgb_bins, alpha_bins, cmap="RdBu") 113 | plt.close(fig) 114 | 115 | 116 | def test_mapclassify_bin(): 117 | # data 118 | link_to_data = examples.get_path("columbus.shp") 119 | gdf = gpd.read_file(link_to_data) 120 | x = gdf["HOVAL"].values 121 | 122 | # quantiles 123 | mapclassify_bin(x, "quantiles") 124 | mapclassify_bin(x, "quantiles", k=3) 125 | 126 | # box_plot 127 | mapclassify_bin(x, "box_plot") 128 | mapclassify_bin(x, "box_plot", hinge=2) 129 | 130 | # headtail_breaks 131 | mapclassify_bin(x, "headtail_breaks") 132 | 133 | # percentiles 134 | mapclassify_bin(x, "percentiles") 135 | mapclassify_bin(x, "percentiles", pct=[25, 50, 75, 100]) 136 | 137 | # std_mean 138 | mapclassify_bin(x, "std_mean") 139 | mapclassify_bin(x, "std_mean", multiples=[-1, -0.5, 0.5, 1]) 140 | 141 | # maximum_breaks 142 | mapclassify_bin(x, "maximum_breaks") 143 | mapclassify_bin(x, "maximum_breaks", k=3, mindiff=0.1) 144 | 145 | # natural_breaks, max_p_classifier 146 | mapclassify_bin(x, "natural_breaks") 147 | mapclassify_bin(x, "max_p_classifier", k=3, initial=50) 148 | 149 | # user_defined 150 | mapclassify_bin(x, "user_defined", bins=[20, max(x)]) 151 | -------------------------------------------------------------------------------- /splot/_viz_libpysal_mpl.py: -------------------------------------------------------------------------------- 1 | import matplotlib.pyplot as plt 2 | import numpy as np 3 | from matplotlib.collections import LineCollection 4 | 5 | """ 
"""
Lightweight visualizations for libpysal using Matplotlib and Geopandas

TODO
* make gdf argument in plot_spatial_weights optional
"""

__author__ = "Stefanie Lumnitz "


def plot_spatial_weights(
    w,
    gdf,
    indexed_on=None,
    ax=None,
    figsize=(10, 10),
    node_kws=None,
    edge_kws=None,
    nonplanar_edge_kws=None,
):
    """
    Plot spatial weights network.
    NOTE: Additionally plots `w.non_planar_joins` if
    `libpysal.weights.util.nonplanar_neighbors()` was applied.

    Parameters
    ----------
    w : libpysal.W object
        Values of libpysal weights object.
    gdf : geopandas dataframe
        The original shapes whose topological relations are
        modelled in W.
    indexed_on : str, optional
        Column of gdf which the weights object uses as an index.
        Default =None, so the geodataframe's index is used.
    ax : matplotlib axis, optional
        Axis on which to plot the weights.
        Default =None, so plots on the current figure.
    figsize : tuple, optional
        W, h of figure. Default =(10,10)
    node_kws : keyword argument dictionary, optional
        Dictionary of keyword arguments to send to pyplot.scatter,
        which provide fine-grained control over the aesthetics
        of the nodes in the plot. Default =None.
    edge_kws : keyword argument dictionary, optional
        Dictionary of keyword arguments to send to pyplot.plot,
        which provide fine-grained control over the aesthetics
        of the edges in the plot. Default =None.
    nonplanar_edge_kws : keyword argument dictionary, optional
        Dictionary of keyword arguments to send to pyplot.plot,
        which provide fine-grained control over the aesthetics
        of the edges from `weights.non_planar_joins` in the plot.
        Default =None.

    Returns
    -------
    fig : matplotlib Figure instance
        Figure of spatial weight network.
    ax : matplotlib Axes instance
        Axes in which the figure is plotted.

    Examples
    --------
    Imports

    >>> from libpysal.weights.contiguity import Queen
    >>> import geopandas as gpd
    >>> import libpysal
    >>> from libpysal import examples
    >>> import matplotlib.pyplot as plt
    >>> from splot.libpysal import plot_spatial_weights

    Data preparation and statistical analysis

    >>> gdf = gpd.read_file(examples.get_path('map_RS_BR.shp'))
    >>> weights = Queen.from_dataframe(gdf)
    >>> wnp = libpysal.weights.util.nonplanar_neighbors(weights, gdf)

    Plot weights

    >>> plot_spatial_weights(weights, gdf)
    >>> plt.show()

    Plot corrected weights

    >>> plot_spatial_weights(wnp, gdf)
    >>> plt.show()

    """
    if ax is None:
        fig = plt.figure(figsize=figsize)
        ax = fig.add_subplot(111)
    else:
        fig = ax.get_figure()

    # default for node_kws
    if node_kws is None:
        node_kws = dict(markersize=10, facecolor="#4d4d4d", edgecolor="#4d4d4d")

    # default for edge_kws
    if edge_kws is None:
        edge_kws = dict(colors="#4393c3")

    # default for nonplanar_edge_kws: same styling as edge_kws, drawn in red.
    if nonplanar_edge_kws is None:
        # BUGFIX: operate on a copy so a caller-supplied ``edge_kws`` dict is
        # never mutated as a side effect of computing this default
        # (previously ``setdefault("lw", 0.7)`` wrote into the caller's dict).
        edge_kws = dict(edge_kws)
        edge_kws.setdefault("lw", 0.7)
        nonplanar_edge_kws = dict(edge_kws)
        nonplanar_edge_kws["colors"] = "#d6604d"

    node_has_nonplanar_join = []
    if hasattr(w, "non_planar_joins"):
        # This attribute is present when an instance is created by the user
        # calling `weights.util.nonplanar_neighbors`. If so, treat those
        # edges differently by default.
        node_has_nonplanar_join = w.non_planar_joins.keys()

    # Compute centroids once and reuse them both for the edge endpoints and
    # for plotting the nodes (previously ``gdf.centroid`` was computed twice).
    centroids = gdf.centroid
    centroids_shp = centroids.values

    segments = []
    non_planar_segments = []

    if indexed_on is not None:
        # Map each value of the index column to its positional index in gdf.
        dict_index = dict(zip(gdf[indexed_on].values, range(len(gdf))))
        for idx in w.id_order:
            if idx in w.islands:
                continue
            # Find the centroid of the polygon we're looking at now
            origin = np.array(centroids_shp[dict_index[idx]].coords)[0]
            for jdx in w.neighbors[idx]:
                dest = np.array(centroids_shp[dict_index[jdx]].coords)[0]
                if (idx in node_has_nonplanar_join) and (
                    jdx in w.non_planar_joins[idx]
                ):
                    # This is a non-planar edge
                    non_planar_segments.append([origin, dest])
                else:
                    segments.append([origin, dest])
    else:
        for idx in w.id_order:
            if idx in w.islands:
                continue

            # Find the centroid of the polygon we're looking at now
            origin = np.array(centroids_shp[idx].coords)[0]
            for j in w.neighbors[idx]:
                jdx = w.id2i[j]
                dest = np.array(centroids_shp[jdx].coords)[0]
                if (idx in node_has_nonplanar_join) and (
                    jdx in w.non_planar_joins[idx]
                ):
                    # This is a non-planar edge
                    non_planar_segments.append([origin, dest])
                else:
                    segments.append([origin, dest])

    # Plot the polygons from the geodataframe as a base layer
    gdf.plot(ax=ax, color="#bababa", edgecolor="w")

    # plot polygon centroids
    centroids.plot(ax=ax, **node_kws)

    # plot weight edges
    non_planar_segs_plot = LineCollection(
        np.array(non_planar_segments), **nonplanar_edge_kws
    )
    segs_plot = LineCollection(np.array(segments), **edge_kws)
    ax.add_collection(segs_plot)
    ax.add_collection(non_planar_segs_plot)

    ax.set_axis_off()
    ax.set_aspect("equal")
    return fig, ax
/README.md: -------------------------------------------------------------------------------- 1 | **`splot` is in the process of being archived. Its functionality is being integrated into associated PySAL projects.** 2 | 3 | # splot 4 | 5 | [![Continuous Integration](https://github.com/pysal/splot/actions/workflows/unittests.yml/badge.svg)](https://github.com/pysal/splot/actions/workflows/unittests.yml) 6 | [![codecov](https://codecov.io/gh/pysal/splot/branch/main/graph/badge.svg)](https://codecov.io/gh/pysal/splot) 7 | [![Documentation Status](https://readthedocs.org/projects/splot/badge/?version=latest)](https://splot.readthedocs.io/en/latest/?badge=latest) 8 | [![PyPI version](https://badge.fury.io/py/splot.svg)](https://badge.fury.io/py/splot) 9 | [![DOI](https://joss.theoj.org/papers/10.21105/joss.01882/status.svg)](https://doi.org/10.21105/joss.01882) 10 | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.3258810.svg)](https://doi.org/10.5281/zenodo.3258810) 11 | 12 | **Visual analytics for spatial analysis with PySAL.** 13 | 14 | ![Local Spatial Autocorrelation](figs/viz_local_autocorrelation.png) 15 | 16 | ## What is splot? 17 | 18 | `splot` connects spatial analysis done in [`PySAL`](https://github.com/pysal) to different popular visualization toolkits like [`matplotlib`](https://matplotlib.org). 19 | The `splot` package allows you to create both static plots ready for publication and interactive visualizations for quick iteration and spatial data exploration. The primary goal of `splot` is to enable you to visualize popular `PySAL` objects and give you different views on your spatial analysis workflow. 20 | 21 | If you are new to `splot` and `PySAL` you will best get started with our [documentation](https://splot.readthedocs.io/en/latest/) and the short introduction [video](https://youtu.be/kriQOJMycIQ?t=2403) of the package at the Scipy 2018 conference!
22 | 23 | ## Installing splot 24 | 25 | ### Installing dependencies 26 | 27 | `splot` is compatible with `Python` 3.8+ and depends on `geopandas` 0.9.0 or later and `matplotlib` 3.3.3 or later. 28 | 29 | splot also uses 30 | 31 | * `numpy` 32 | * `seaborn` 33 | * `mapclassify` 34 | * `Ipywidgets` 35 | 36 | Depending on your spatial analysis workflow and the `PySAL` objects you would like to visualize, `splot` relies on: 37 | 38 | * PySAL 2.0 39 | 40 | or separate packages found in the `PySAL` stack: 41 | 42 | * esda 43 | * libpysal 44 | * spreg 45 | * giddy 46 | 47 | ### Installing splot 48 | 49 | There are two ways of accessing `splot`. First, `splot` is installed with the [PySAL 2.0](https://pysal.readthedocs.io/en/latest/installation.html) metapackage through: 50 | 51 | $ pip install -U pysal 52 | 53 | or 54 | 55 | $ conda install -c conda-forge pysal 56 | 57 | Second, `splot` can be installed as a separate package. If you are using Anaconda, install `splot` via the `conda` utility: 58 | 59 | conda install -c conda-forge splot 60 | 61 | Otherwise you can install `splot` from `PyPI` with pip: 62 | 63 | pip install splot 64 | 65 | ## Usage 66 | 67 | Usage examples for different spatial statistical workflows are provided as [notebooks](https://github.com/pysal/splot/tree/main/notebooks): 68 | 69 | * [for creating value-by-alpha maps](https://github.com/pysal/splot/blob/main/notebooks/mapping_vba.ipynb) 70 | * [for assessing the relationship between neighboring polygons](https://github.com/pysal/splot/blob/main/notebooks/libpysal_non_planar_joins_viz.ipynb) 71 | * [for the visualization of space-time autocorrelation](https://github.com/pysal/splot/blob/main/notebooks/giddy_space_time.ipynb), also documented in [giddy](https://github.com/pysal/giddy/blob/main/notebooks/directional.ipynb) 72 | * for visualizing spatial autocorrelation of [univariate](https://github.com/pysal/splot/blob/main/notebooks/esda_morans_viz.ipynb) or 
[multivariate](https://github.com/pysal/splot/blob/main/notebooks/esda_moran_matrix_viz.ipynb) variable analysis 73 | 74 | You can also check our [documentation](https://splot.readthedocs.io/en/latest/) for examples on how to use each function. A detailed report about the development, structure and usage of `splot` can be found [here](https://gist.github.com/slumnitz/a86ef4a5b48b1b5fac41e91cfd05fff2). More tutorials for the whole `PySAL` ecosystem can be found in our [notebooks book](http://pysal.org/notebooks/intro.html) project. 75 | 76 | ## Contributing to splot 77 | 78 | `splot` is an open source project within the Python Spatial Analysis Library that is supported by a community of Geographers, visualization lovers, map fans, users and data scientists. As a community we work together to create splot as our own spatial visualization toolkit and will gratefully and humbly accept any contributions and ideas you might bring into this project. 79 | 80 | Feel free to check out our discussion spaces, add ideas and contributions: 81 | 82 | * [Idea collection](https://github.com/pysal/splot/issues/10) which PySAL objects to support and how new visualizations could look like 83 | * [Discussion](https://github.com/pysal/splot/issues/9) about the splot API 84 | * Ideas how to integrate [other popular visualization toolkits](https://github.com/pysal/splot/issues/22) like `Bokeh` or `Altair` 85 | 86 | If you have never contributed before or you are just discovering what `PySAL` and `splot` have to offer, reading through """Doc-strings""" and correcting our Documentation can be a great way to start. Check for spelling and grammar mistakes or use [pep8](https://pypi.org/project/pep8/) and [pyflakes](https://pypi.org/project/pyflakes/) to clean our `.py` files. This will allow you to get used to working with [git](https://try.github.io) and generally allows you to familiarize yourself with the `splot` and `PySAL` code base. 
87 | 88 | If you have already used `PySAL` and `splot` and you are missing object-specific views for your analysis feel free to add to our code-base or discuss your ideas. Please make sure you include unit tests, documentation and examples (or create an issue so someone else can work together with you). The common `splot` API design discussed [here](https://github.com/pysal/splot/issues/9) can help you to decide how to best integrate your visualization prototype into `splot`. 89 | 90 | Beyond working on documentation and prototyping new visualizations, you can always write a bug report or feature request on [Github issues](https://github.com/pysal/splot/issues). Whether large or small, any contribution makes a big difference and we hope you enjoy being part of our community as much as we do! The only thing we ask is that you abide by the principles of openness, respect, and consideration of others as described in the [PySAL Code of Conduct](https://github.com/pysal/code_of_conduct/blob/master/README.md). 91 | 92 | ## Road-map 93 | 94 | We are planning on extending `splot`'s visualization toolkit in the future. Functionality we plan to implement includes: 95 | 96 | * visualizations for [density methods](https://github.com/pysal/splot/issues/32) (mapping density estimations) 97 | * [cross-hatching fill styles](https://github.com/pysal/splot/issues/35) for maps (to allow choropleth visualizations without class intervals) 98 | * [legendgrams](https://github.com/pysal/splot/issues/34) (map legends that visualize the distribution of observations by color in a given map) 99 | 100 | If you are interested in working on one of these or any other methods, check out the linked issues or get in touch! 
101 | 102 | ## Community support 103 | 104 | * [PySAL 2.0](http://pysal.org) 105 | * [Gitter chat splot](https://gitter.im/pysal/splot?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) 106 | -------------------------------------------------------------------------------- /doc/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # splot documentation build configuration file, created by 4 | # sphinx-quickstart on Wed Jun 6 15:54:22 2018. 5 | # 6 | # This file is execfile()d with the current directory set to its 7 | # containing dir. 8 | # 9 | # Note that not all possible configuration values are present in this 10 | # autogenerated file. 11 | # 12 | # All configuration values have a default; values that are commented out 13 | # serve to show the default. 14 | 15 | import os 16 | 17 | # If extensions (or modules to document with autodoc) are in another directory, 18 | # add these directories to sys.path here. If the directory is relative to the 19 | # documentation root, use os.path.abspath to make it absolute, like shown here. 20 | # 21 | import sys 22 | 23 | import sphinx_bootstrap_theme 24 | 25 | sys.path.insert(0, os.path.abspath("../../")) 26 | 27 | # import your package to obtain the version info to display on the docs website 28 | import splot # noqa E402 29 | 30 | # -- General configuration ------------------------------------------------ 31 | 32 | # If your documentation needs a minimal Sphinx version, state it here. 33 | # 34 | # needs_sphinx = '1.0' 35 | # Add any Sphinx extension module names here, as strings. They can be 36 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 37 | # ones. 
38 | extensions = [ # 'sphinx_gallery.gen_gallery', 39 | "sphinx.ext.autodoc", 40 | "sphinx.ext.autosummary", 41 | "sphinx.ext.viewcode", 42 | "sphinxcontrib.bibtex", 43 | "sphinx.ext.mathjax", 44 | "sphinx.ext.doctest", 45 | "sphinx.ext.intersphinx", 46 | "numpydoc", 47 | "matplotlib.sphinxext.plot_directive", 48 | ] 49 | 50 | bibtex_bibfiles = ["_static/references.bib"] 51 | 52 | # Add any paths that contain templates here, relative to this directory. 53 | templates_path = ["_templates"] 54 | 55 | # The suffix(es) of source filenames. 56 | # You can specify multiple suffix as a list of string: 57 | # 58 | # source_suffix = ['.rst', '.md'] 59 | source_suffix = ".rst" 60 | 61 | # The master toctree document. 62 | master_doc = "index" 63 | 64 | # General information about the project. 65 | project = "splot" 66 | copyright = "2018, pysal developers" 67 | author = "pysal developers" 68 | 69 | # The version info for the project you're documenting, acts as replacement for 70 | # |version| and |release|, also used in various other places throughout the 71 | # built documents. 72 | # 73 | # The full version. 74 | version = splot.__version__ # should replace it with your PACKAGE_NAME 75 | release = splot.__version__ # should replace it with your PACKAGE_NAME 76 | 77 | # The language for content autogenerated by Sphinx. Refer to documentation 78 | # for a list of supported languages. 79 | # 80 | # This is also used if you do content translation via gettext catalogs. 81 | # Usually you set "language" from the command line for these cases. 82 | language = None 83 | 84 | # List of patterns, relative to source directory, that match files and 85 | # directories to ignore when looking for source files. 86 | # This patterns also effect to html_static_path and html_extra_path 87 | exclude_patterns = ["_build", "Thumbs.db", ".DS_Store", "tests/*"] 88 | 89 | # The name of the Pygments (syntax highlighting) style to use. 
90 | pygments_style = "sphinx" 91 | 92 | # If true, `todo` and `todoList` produce output, else they produce nothing. 93 | todo_include_todos = False 94 | 95 | # -- Options for HTML output ---------------------------------------------- 96 | 97 | # The theme to use for HTML and HTML Help pages. See the documentation for 98 | # a list of builtin themes. 99 | # 100 | # html_theme = 'alabaster' 101 | html_theme = "bootstrap" 102 | html_theme_path = sphinx_bootstrap_theme.get_html_theme_path() 103 | html_title = "%s v%s Manual" % (project, version) 104 | 105 | # (Optional) Logo of your package. 106 | # Should be small enough to fit the navbar (ideally 24x24). 107 | # Path should be relative to the ``_static`` files directory. 108 | # html_logo = "_static/images/package_logo.jpg" 109 | 110 | # (Optional) PySAL favicon 111 | html_favicon = "_static/images/pysal_favicon.ico" 112 | 113 | 114 | # Theme options are theme-specific and customize the look and feel of a theme 115 | # further. For a list of options available for each theme, see the 116 | # documentation. 117 | # 118 | html_theme_options = { 119 | # Navigation bar title. (Default: ``project`` value) 120 | "navbar_title": "splot", # string of your project name, for example, 'giddy' 121 | # Render the next and previous page links in navbar. (Default: true) 122 | "navbar_sidebarrel": False, 123 | # Render the current pages TOC in the navbar. (Default: true) 124 | # 'navbar_pagenav': True, 125 | # 'navbar_pagenav': False, 126 | # No sidebar 127 | "nosidebar": True, 128 | # Tab name for the current pages TOC. (Default: "Page") 129 | # 'navbar_pagenav_name': "Page", 130 | # Global TOC depth for "site" navbar tab. (Default: 1) 131 | # Switching to -1 shows all levels. 132 | "globaltoc_depth": 2, 133 | # Include hidden TOCs in Site navbar? 134 | # 135 | # Note: If this is "false", you cannot have mixed ``:hidden:`` and 136 | # non-hidden ``toctree`` directives in the same page, or else the build 137 | # will break. 
138 | # 139 | # Values: "true" (default) or "false" 140 | "globaltoc_includehidden": "true", 141 | # HTML navbar class (Default: "navbar") to attach to <div>
element. 142 | # For black navbar, do "navbar navbar-inverse" 143 | # 'navbar_class': "navbar navbar-inverse", 144 | # Fix navigation bar to top of page? 145 | # Values: "true" (default) or "false" 146 | "navbar_fixed_top": "true", 147 | # Location of link to source. 148 | # Options are "nav" (default), "footer" or anything else to exclude. 149 | "source_link_position": "footer", 150 | # Bootswatch (http://bootswatch.com/) theme. 151 | # 152 | # Options are nothing (default) or the name of a valid theme 153 | # such as "amelia" or "cosmo", "yeti", "flatly". 154 | "bootswatch_theme": "yeti", 155 | # Choose Bootstrap version. 156 | # Values: "3" (default) or "2" (in quotes) 157 | "bootstrap_version": "3", 158 | # Navigation bar menu 159 | "navbar_links": [ 160 | ("Installation", "installation"), 161 | ("API", "api"), 162 | ("References", "references"), 163 | ], 164 | } 165 | 166 | # Add any paths that contain custom static files (such as style sheets) here, 167 | # relative to this directory. They are copied after the builtin static files, 168 | # so a file named "default.css" will overwrite the builtin "default.css". 169 | html_static_path = ["_static"] 170 | 171 | # Custom sidebar templates, maps document names to template names. 172 | # html_sidebars = {} 173 | # html_sidebars = {'sidebar': ['localtoc.html', 'sourcelink.html', 'searchbox.html']} 174 | 175 | # -- Options for HTMLHelp output ------------------------------------------ 176 | 177 | # Output file base name for HTML help builder. 178 | htmlhelp_basename = "splot" + "doc" 179 | 180 | 181 | # -- Options for LaTeX output --------------------------------------------- 182 | 183 | latex_elements = { 184 | # The paper size ('letterpaper' or 'a4paper'). 185 | # 186 | # 'papersize': 'letterpaper', 187 | # The font size ('10pt', '11pt' or '12pt'). 188 | # 189 | # 'pointsize': '10pt', 190 | # Additional stuff for the LaTeX preamble. 
191 | # 192 | # 'preamble': '', 193 | # Latex figure (float) alignment 194 | # 195 | # 'figure_align': 'htbp', 196 | } 197 | 198 | # Grouping the document tree into LaTeX files. List of tuples 199 | # (source start file, target name, title, 200 | # author, documentclass [howto, manual, or own class]). 201 | latex_documents = [ 202 | (master_doc, "splot.tex", "splot Documentation", "pysal developers", "manual"), 203 | ] 204 | 205 | 206 | # -- Options for manual page output --------------------------------------- 207 | 208 | # One entry per manual page. List of tuples 209 | # (source start file, name, description, authors, manual section). 210 | man_pages = [(master_doc, "splot", "splot Documentation", [author], 1)] 211 | 212 | 213 | # -- Options for Texinfo output ------------------------------------------- 214 | 215 | # Grouping the document tree into Texinfo files. List of tuples 216 | # (source start file, target name, title, author, 217 | # dir menu entry, description, category) 218 | texinfo_documents = [ 219 | ( 220 | master_doc, 221 | "splot", 222 | "splot Documentation", 223 | author, 224 | "splot", 225 | "One line description of project.", 226 | "Miscellaneous", 227 | ), 228 | ] 229 | 230 | 231 | # ----------------------------------------------------------------------------- 232 | # Autosummary 233 | # ----------------------------------------------------------------------------- 234 | 235 | # Generate the API documentation when building 236 | autosummary_generate = True 237 | numpydoc_show_class_members = True 238 | class_members_toctree = True 239 | numpydoc_show_inherited_class_members = True 240 | numpydoc_use_plots = True 241 | 242 | # display the source code for Plot directive 243 | plot_include_source = True 244 | 245 | 246 | def setup(app): 247 | app.add_css_file("pysal-styles.css") 248 | 249 | 250 | # Example configuration for intersphinx: refer to the Python standard library. 
251 | intersphinx_mapping = {"https://docs.python.org/3.6/": None} 252 | -------------------------------------------------------------------------------- /paper/paper.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: '`splot` - visual analytics for spatial statistics' 3 | tags: 4 | - Python 5 | - visualization 6 | - spatial analysis 7 | - spatial statistics 8 | authors: 9 | - name: Stefanie Lumnitz 10 | orcid: 0000-0002-7007-5812 11 | affiliation: "1, 2" # (Multiple affiliations must be quoted) 12 | - name: Dani Arribas-Bel 13 | orcid: 0000-0002-6274-1619 14 | affiliation: 3 15 | - name: Renan X. Cortes 16 | orcid: 0000-0002-1889-5282 17 | affiliation: 2 18 | - name: James D. Gaboardi 19 | orcid: 0000-0002-4776-6826 20 | affiliation: 4 21 | - name: Verena Griess 22 | orcid: 0000-0002-3856-3736 23 | affiliation: 1 24 | - name: Wei Kang 25 | orcid: 0000-0002-1073-7781 26 | affiliation: 2 27 | - name: Taylor M. Oshan 28 | orcid: 0000-0002-0537-2941 29 | affiliation: 7 30 | - name: Levi Wolf 31 | orcid: 0000-0003-0274-599X 32 | affiliation: "5,6" 33 | - name: Sergio Rey 34 | orcid: 0000-0001-5857-9762 35 | affiliation: 2 36 | affiliations: 37 | - name: Department of Forest Resource Management, University of British Columbia 38 | index: 1 39 | - name: Center for Geospatial Sciences, University of California Riverside 40 | index: 2 41 | - name: Geographic Data Science Lab, Department of Geography & Planning, University of Liverpool 42 | index: 3 43 | - name: Department of Geography, Pennsylvania State University 44 | index: 4 45 | - name: School of Geographical Sciences, University of Bristol 46 | index: 5 47 | - name: Alan Turing Institute 48 | index: 6 49 | - name: Department of Geographical Sciences, University of Maryland, College Park 50 | index: 7 51 | date: 25 October 2019 52 | bibliography: paper.bib 53 | --- 54 | 55 | # Summary 56 | 57 | Geography is an intensely visual domain. 
Its longstanding dependence on visualization and cartography shows as much, with John Snow's cholera map serving as one of the first instances of geovisual analytics in science [@johnson2007ghost;@arribas-bel2017looking], and the perennial presence of maps as statistical displays in seminal works on visualization [@tufte2001visual]. As such, the existence and continued focus on maps in geographical analysis demands serious, dedicated attention in scientific computing. However, existing methods in Python, specifically for *statistical* visualization of spatial data, are lacking. General-purpose mapping provided by `geopandas` is not fine-tuned enough for statistical analysis [@kelsey_jordahl_2019_3333010]. The more analytically-oriented views offered by `geoplot`, while useful, are limited in their statistical applications [@aleksey_bilogur_2019_3475569]. Thus, the need remains for a strong, analytically-oriented toolbox for visual geographical analysis. 58 | 59 | This need is heightened by the fact that the collection and generation of geographical data is becoming more pervasive [@goodchild2007citizen;@arribas-bel2014accidental]. With the proliferation of high-accuracy GPS data, many datasets are now *becoming* spatial datasets; their analysis and visualization increasingly requires explicitly spatial methods that account for the various special structures in geographical data [@anselin1988spatial]. Geographical questions about dependence, endogeneity, heterogeneity, and non-stationarity require special statistical tools to diagnose, and spatial analytic software to visualize [@anselin2014modern]. Further, with the increasing importance of code and computation in geographical curricula [@rey2009show;@rey2018code;@ucgis2019geographic], it has become critical for both pedagogical and research reasons to support geographical analyses with robust visualization tools. 
To date there are few toolkits for geovisualization developed in the scientific Python stack to fill this need and none for visualization of the process and outcome of spatial analytics. It is this niche that `splot` is designed to fill. 60 | 61 | Implemented in Python, `splot` extends both *spatial analytical methods* like that found in the Python Spatial Analysis Library (`PySAL`) and *general purpose visualization* functionality provided by popular packages such as `matplotlib`, in order to simplify visualizing spatial analysis workflows and results. The `splot` package was developed in parallel to the ecosystem of tools to store, manage, and analyze spatial data, which evolved in ways that gave more relevance to integrated command-line oriented environments such as `Jupyter`; and less to disconnected, one-purpose point-and-click tools such as traditional desktop GIS packages. In this context, visual analytics done with `splot` allows for more general scientific workflows via the integration of spatial analytics with the rest of the Python data science ecosystem. 62 | 63 | As a visual steering tool, `splot` facilitates analyses and interpretation of results, and streamlines the process of model and method selection for many spatial applications. Our high-level API allows quick access to visualizing popular `PySAL` objects generated through spatial statistical analysis. The `PySAL` ecosystem can hereby be understood as a library, integrating many spatial analytical packages (called *sub-modules*) under one umbrella. These sub-modules range in purpose from exploratory data analysis to explanatory statistical models of spatial relationships. As a separate standing package within the ecosystem, `splot` implements a multitude of views for different spatial analysis workflows to give users the opportunity to assess a problem from different perspectives. 64 | 65 | Building on top of our users' feedback, `splot`'s functionality can be accessed in two main ways. 
First, basic `splot` visualization is exposed as `.plot` methods on objects found in various packages in the `PySAL` ecosystem. Integrating simple `splot` visualizations in other `PySAL` packages ensures that users have the quickest possible access to visualizations. This is especially useful for an instantaneous sanity check to determine if the spatial analysis done in `PySAL` is correct, or if there are any errors present in the data used. 66 | 67 | Second, all visualizations can be found and called using a `splot.'PySAL_submodule'` name space, depending on the previously analysed object that needs to be visualized (e.g. `splot.giddy`). Directly calling visualizations through `splot` has the advantage to extend users' spatial analysis workflows with more general cartographic and visual methods in `splot.mapping`. One example of this is a Value-by-Alpha [@roth2010vba] (vba) map, a multivariate choropleth mapping method useful to visualize geographic data with uncertainty or visually compare characteristics of populations with varying sizes. A conventional workflow could look like this: after cleaning and preparing data, a `PySAL` Local Moran object is created that estimates whether crimes tend to cluster around one another or disperse far from one another. In order to assess whether the occurrences of crime in the neighborhood of Columbus, Ohio USA, are clustered (or, *spatially autocorrelated*), Local Indicators of Spatial Autocorrelation (LISA) hot and cold spots, Moran I scatterplots and a choropleth map can quickly be created to provide visual analysis (see fig. 1). 
68 | 69 | ```python 70 | from splot.esda import plot_local_autocorrelation 71 | plot_local_autocorrelation(moran_loc, gdf, 'Crime') 72 | plt.show() 73 | ``` 74 | 75 | 76 | ![Local spatial autocorrelation (see https://github.com/pysal/splot/blob/f0d9e12ab0588595ac6557add44e71c194d585ae/notebooks/esda_morans_viz.ipynb).](figs/local_autocorrelation.png) 77 | 78 | The user can now further visually assess whether there is dependency between high crime rates (fig. 2, rgb variable) and high income in this neighborhood (fig. 2, alpha variable). Darker shades of the colormap correspond to higher crime and income values, displayed through a static Value-by-Alpha Choropleth using `splot.mapping.vba_choropleth`. 79 | 80 | 81 | ```python 82 | fig = plt.figure(figsize=(10,10)) 83 | ax = fig.add_subplot(111) 84 | vba_choropleth(x, y, gdf, 85 | alpha_mapclassify=dict(classifier='quantiles', k=5), 86 | rgb_mapclassify=dict(classifier='quantiles', k=5), 87 | cmap='Blues', 88 | legend=True, divergent=True, ax=ax) 89 | plt.show() 90 | ``` 91 | 92 | 93 | ![Value-by-alpha mapping (see https://github.com/pysal/splot/blob/f0d9e12ab0588595ac6557add44e71c194d585ae/notebooks/mapping_vba.ipynb).](figs/vba_choropleth.png) 94 | 95 | Ultimately, the `splot` package is designed to facilitate the creation of both static plots ready for publication, and interactive visualizations for quick iteration and spatial data exploration. Although most of `splot` is currently implemented with a `matplotlib` backend, `splot` is framework independent. In that sense, `splot` offers a "grammar" of views that are important and useful in spatial analyses and geographic data science. The `splot` package is not restricted or limited to the current `matplotlib` implementation and can be advanced by integrating emerging or succeeding interactive visualization toolkits, such as `altair` or `bokeh`. 
96 | 97 | In conclusion, `splot` tightly connects visual analytics with statistical analysis and facilitates the integration of spatial analytics into more general Python workflows through its compatibility with integrated code-based environments like Jupyter. From spatial autocorrelation analysis to value by alpha choropleths, `splot` is designed as a grammar of views that can be applied to a multitude of spatial analysis workflows. As `splot` developers, we strive to expand `splot`'s grammar of views through new functionality (e.g. in flow mapping methods), as well as provide different backend implementations, including interactive backends, such as `bokeh`, in the future. 98 | 99 | # Acknowledgements 100 | 101 | We acknowledge contributions from, and thank, all our users for reporting bugs, raising issues and suggesting changes to `splot`'s API. Thank you, Joris Van den Bossche and the `geopandas` team for timing releases in accordance with `splot` developments. Thank you, Rebecca Bilbro and Benjamin Bengfort for sharing your insights in how to structure and build APIs for visualizations. Thank you, Ralf Gommers for guidance on how to design library code for easy maintainability. 
102 | 103 | ### References 104 | -------------------------------------------------------------------------------- /splot/tests/test_viz_esda_mpl.py: -------------------------------------------------------------------------------- 1 | import geopandas as gpd 2 | import libpysal as lp 3 | import matplotlib.pyplot as plt 4 | import numpy as np 5 | import pytest 6 | from esda.moran import Moran, Moran_BV, Moran_BV_matrix, Moran_Local, Moran_Local_BV 7 | from libpysal import examples 8 | from libpysal.weights.contiguity import Queen 9 | 10 | from splot._viz_esda_mpl import ( 11 | _moran_bv_scatterplot, 12 | _moran_global_scatterplot, 13 | _moran_loc_bv_scatterplot, 14 | _moran_loc_scatterplot, 15 | ) 16 | from splot.esda import ( 17 | lisa_cluster, 18 | moran_facet, 19 | moran_scatterplot, 20 | plot_local_autocorrelation, 21 | plot_moran, 22 | plot_moran_bv, 23 | plot_moran_bv_simulation, 24 | plot_moran_simulation, 25 | ) 26 | 27 | 28 | def _test_data(): 29 | guerry = examples.load_example("Guerry") 30 | link_to_data = guerry.get_path("guerry.shp") 31 | gdf = gpd.read_file(link_to_data) 32 | return gdf 33 | 34 | 35 | def _test_data_columbus(): 36 | columbus = examples.load_example("Columbus") 37 | link_to_data = columbus.get_path("columbus.shp") 38 | df = gpd.read_file(link_to_data) 39 | return df 40 | 41 | 42 | def _test_LineString(): 43 | link_to_data = examples.get_path("streets.shp") 44 | gdf = gpd.read_file(link_to_data) 45 | return gdf 46 | 47 | 48 | def test_moran_scatterplot(): 49 | gdf = _test_data() 50 | x = gdf["Suicids"].values 51 | y = gdf["Donatns"].values 52 | w = Queen.from_dataframe(gdf) 53 | w.transform = "r" 54 | 55 | # Calculate `esda.moran` Objects 56 | moran = Moran(y, w) 57 | moran_bv = Moran_BV(y, x, w) 58 | moran_loc = Moran_Local(y, w) 59 | moran_loc_bv = Moran_Local_BV(y, x, w) 60 | 61 | # try with p value so points are colored or warnings apply 62 | with pytest.warns(UserWarning, match="`p` is only used for plotting"): 63 | fig, _ = 
moran_scatterplot(moran, p=0.05, aspect_equal=False) 64 | plt.close(fig) 65 | 66 | fig, _ = moran_scatterplot(moran_loc, p=0.05) 67 | plt.close(fig) 68 | 69 | with pytest.warns(UserWarning, match="`p` is only used for plotting"): 70 | fig, _ = moran_scatterplot(moran_bv, p=0.05) 71 | plt.close(fig) 72 | 73 | fig, _ = moran_scatterplot(moran_loc_bv, p=0.05) 74 | plt.close(fig) 75 | 76 | 77 | def test_moran_global_scatterplot(): 78 | # Load data and apply statistical analysis 79 | gdf = _test_data() 80 | y = gdf["Donatns"].values 81 | w = Queen.from_dataframe(gdf) 82 | w.transform = "r" 83 | # Calc Global Moran 84 | w = Queen.from_dataframe(gdf) 85 | moran = Moran(y, w) 86 | # plot 87 | fig, _ = _moran_global_scatterplot(moran) 88 | plt.close(fig) 89 | # customize 90 | fig, _ = _moran_global_scatterplot( 91 | moran, zstandard=False, aspect_equal=False, fitline_kwds=dict(color="#4393c3") 92 | ) 93 | plt.close(fig) 94 | 95 | 96 | def test_plot_moran_simulation(): 97 | # Load data and apply statistical analysis 98 | gdf = _test_data() 99 | y = gdf["Donatns"].values 100 | w = Queen.from_dataframe(gdf) 101 | w.transform = "r" 102 | # Calc Global Moran 103 | w = Queen.from_dataframe(gdf) 104 | moran = Moran(y, w) 105 | # plot 106 | fig, _ = plot_moran_simulation(moran) 107 | plt.close(fig) 108 | # customize 109 | fig, _ = plot_moran_simulation(moran, fitline_kwds=dict(color="#4393c3")) 110 | plt.close(fig) 111 | 112 | 113 | def test_plot_moran(): 114 | # Load data and apply statistical analysis 115 | gdf = _test_data() 116 | y = gdf["Donatns"].values 117 | w = Queen.from_dataframe(gdf) 118 | w.transform = "r" 119 | # Calc Global Moran 120 | w = Queen.from_dataframe(gdf) 121 | moran = Moran(y, w) 122 | # plot 123 | fig, _ = plot_moran(moran) 124 | plt.close(fig) 125 | # customize 126 | fig, _ = plot_moran( 127 | moran, zstandard=False, aspect_equal=False, fitline_kwds=dict(color="#4393c3") 128 | ) 129 | plt.close(fig) 130 | 131 | 132 | def test_moran_bv_scatterplot(): 133 | 
gdf = _test_data() 134 | x = gdf["Suicids"].values 135 | y = gdf["Donatns"].values 136 | w = Queen.from_dataframe(gdf) 137 | w.transform = "r" 138 | # Calculate Bivariate Moran 139 | moran_bv = Moran_BV(x, y, w) 140 | # plot 141 | fig, _ = _moran_bv_scatterplot(moran_bv) 142 | plt.close(fig) 143 | # customize plot 144 | fig, _ = _moran_bv_scatterplot( 145 | moran_bv, aspect_equal=False, fitline_kwds=dict(color="#4393c3") 146 | ) 147 | plt.close(fig) 148 | 149 | 150 | def test_plot_moran_bv_simulation(): 151 | # Load data and calculate weights 152 | gdf = _test_data() 153 | x = gdf["Suicids"].values 154 | y = gdf["Donatns"].values 155 | w = Queen.from_dataframe(gdf) 156 | w.transform = "r" 157 | # Calculate Bivariate Moran 158 | moran_bv = Moran_BV(x, y, w) 159 | # plot 160 | fig, _ = plot_moran_bv_simulation(moran_bv) 161 | plt.close(fig) 162 | # customize plot 163 | fig, _ = plot_moran_bv_simulation( 164 | moran_bv, aspect_equal=False, fitline_kwds=dict(color="#4393c3") 165 | ) 166 | plt.close(fig) 167 | 168 | 169 | def test_plot_moran_bv(): 170 | # Load data and calculate weights 171 | gdf = _test_data() 172 | x = gdf["Suicids"].values 173 | y = gdf["Donatns"].values 174 | w = Queen.from_dataframe(gdf) 175 | w.transform = "r" 176 | # Calculate Bivariate Moran 177 | moran_bv = Moran_BV(x, y, w) 178 | # plot 179 | fig, _ = plot_moran_bv(moran_bv) 180 | plt.close(fig) 181 | # customize plot 182 | fig, _ = plot_moran_bv( 183 | moran_bv, aspect_equal=False, fitline_kwds=dict(color="#4393c3") 184 | ) 185 | plt.close(fig) 186 | 187 | 188 | def test_moran_loc_scatterplot(): 189 | df = _test_data_columbus() 190 | 191 | x = df["INC"].values 192 | y = df["HOVAL"].values 193 | w = Queen.from_dataframe(df) 194 | w.transform = "r" 195 | 196 | moran_loc = Moran_Local(y, w) 197 | moran_bv = Moran_BV(x, y, w) 198 | 199 | # try without p value 200 | fig, _ = _moran_loc_scatterplot(moran_loc) 201 | plt.close(fig) 202 | 203 | # try with p value and different figure size 204 | fig, _ 
= _moran_loc_scatterplot( 205 | moran_loc, p=0.05, aspect_equal=False, fitline_kwds=dict(color="#4393c3") 206 | ) 207 | plt.close(fig) 208 | 209 | # try with p value and zstandard=False 210 | fig, _ = _moran_loc_scatterplot( 211 | moran_loc, p=0.05, zstandard=False, fitline_kwds=dict(color="#4393c3") 212 | ) 213 | plt.close(fig) 214 | 215 | # try without p value and zstandard=False 216 | fig, _ = _moran_loc_scatterplot( 217 | moran_loc, zstandard=False, fitline_kwds=dict(color="#4393c3") 218 | ) 219 | plt.close(fig) 220 | 221 | pytest.raises(ValueError, _moran_loc_scatterplot, moran_bv, p=0.5) 222 | pytest.warns( 223 | UserWarning, 224 | _moran_loc_scatterplot, 225 | moran_loc, 226 | p=0.5, 227 | scatter_kwds=dict(c="#4393c3"), 228 | ) 229 | 230 | 231 | def _test_calc_moran_loc(gdf, var="HOVAL"): 232 | y = gdf[var].values 233 | w = Queen.from_dataframe(gdf) 234 | w.transform = "r" 235 | 236 | moran_loc = Moran_Local(y, w) 237 | return moran_loc 238 | 239 | 240 | def test_lisa_cluster(): 241 | df = _test_data_columbus() 242 | moran_loc = _test_calc_moran_loc(df) 243 | 244 | fig, _ = lisa_cluster(moran_loc, df) 245 | plt.close(fig) 246 | 247 | # test LineStrings 248 | df_line = _test_LineString() 249 | moran_loc = _test_calc_moran_loc(df_line, var="Length") 250 | 251 | fig, _ = lisa_cluster(moran_loc, df_line) 252 | plt.close(fig) 253 | 254 | 255 | def test_plot_local_autocorrelation(): 256 | df = _test_data_columbus() 257 | moran_loc = _test_calc_moran_loc(df) 258 | 259 | fig, _ = plot_local_autocorrelation(moran_loc, df, "HOVAL", p=0.05) 260 | plt.close(fig) 261 | 262 | # also test with quadrant and mask 263 | with pytest.warns(UserWarning, match="Values in `mask` are not the same dtype"): 264 | fig, _ = plot_local_autocorrelation( 265 | moran_loc, 266 | df, 267 | "HOVAL", 268 | p=0.05, 269 | region_column="POLYID", 270 | aspect_equal=False, 271 | mask=["1", "2", "3"], 272 | quadrant=1, 273 | ) 274 | plt.close(fig) 275 | 276 | # also test with quadrant and mask 277 
| with pytest.warns(UserWarning, match="Values in `mask` are not the same dtype"): 278 | pytest.raises( 279 | ValueError, 280 | plot_local_autocorrelation, 281 | moran_loc, 282 | df, 283 | "HOVAL", 284 | p=0.05, 285 | region_column="POLYID", 286 | mask=["100", "200", "300"], 287 | quadrant=1, 288 | ) 289 | 290 | 291 | def test_moran_loc_bv_scatterplot(): 292 | gdf = _test_data() 293 | x = gdf["Suicids"].values 294 | y = gdf["Donatns"].values 295 | w = Queen.from_dataframe(gdf) 296 | w.transform = "r" 297 | # Calculate Univariate and Bivariate Moran 298 | moran_loc = Moran_Local(y, w) 299 | moran_loc_bv = Moran_Local_BV(x, y, w) 300 | # try with p value so points are colored 301 | fig, _ = _moran_loc_bv_scatterplot(moran_loc_bv) 302 | plt.close(fig) 303 | 304 | # try with p value and different figure size 305 | fig, _ = _moran_loc_bv_scatterplot(moran_loc_bv, p=0.05, aspect_equal=False) 306 | plt.close(fig) 307 | 308 | pytest.raises(ValueError, _moran_loc_bv_scatterplot, moran_loc, p=0.5) 309 | pytest.warns( 310 | UserWarning, 311 | _moran_loc_bv_scatterplot, 312 | moran_loc_bv, 313 | p=0.5, 314 | scatter_kwds=dict(c="r"), 315 | ) 316 | 317 | 318 | def test_moran_facet(): 319 | sids2 = examples.load_example("sids2") 320 | f = lp.io.open(sids2.get_path("sids2.dbf")) 321 | varnames = ["SIDR74", "SIDR79", "NWR74", "NWR79"] 322 | vars = [np.array(f.by_col[var]) for var in varnames] 323 | w = lp.io.open(examples.get_path("sids2.gal")).read() 324 | # calculate moran matrix 325 | moran_matrix = Moran_BV_matrix(vars, w, varnames=varnames) 326 | # plot 327 | fig, axarr = moran_facet(moran_matrix) 328 | plt.close(fig) 329 | # customize 330 | fig, axarr = moran_facet( 331 | moran_matrix, scatter_glob_kwds=dict(color="r"), fitline_bv_kwds=dict(color="y") 332 | ) 333 | plt.close(fig) 334 | -------------------------------------------------------------------------------- /paper/paper.bib: -------------------------------------------------------------------------------- 1 | 
@book{johnson2007ghost, 2 | address = {London}, 3 | edition = {Reprint edition}, 4 | title = {The {{Ghost Map}}: {{The Story}} of {{London}}'s {{Most Terrifying Epidemic}}--and {{How It Changed Science}}, {{Cities}}, and the {{Modern World}}}, 5 | isbn = {978-1-59448-269-4}, 6 | shorttitle = {The {{Ghost Map}}}, 7 | abstract = {A National Bestseller, a New York Times Notable Book, and an Entertainment Weekly Best Book of the Year It's the summer of 1854, and London is just emerging as one of the first modern cities in the world. But lacking the infrastructure-garbage removal, clean water, sewers-necessary to support its rapidly expanding population, the city has become the perfect breeding ground for a terrifying disease no one knows how to cure. As the cholera outbreak takes hold, a physician and a local curate are spurred to action-and ultimately solve the most pressing medical riddle of their time. In a triumph of multidisciplinary thinking, Johnson illuminates the intertwined histories of the spread of disease, the rise of cities, and the nature of scientific inquiry, offering both a riveting history and a powerful explanation of how it has shaped the world we live in.}, 8 | language = {English}, 9 | publisher = {{Riverhead Books}}, 10 | author = {Johnson, Steven}, 11 | month = oct, 12 | year = {2007}, 13 | doi = {10.1080/01944360802146329} 14 | } 15 | 16 | @incollection{arribas-bel2017looking, 17 | address = {Cham}, 18 | series = {Advances in {{Spatial Science}}}, 19 | title = {Looking at {{John Snow}}'s {{Cholera Map}} from the {{Twenty First Century}}: {{A Practical Primer}} on {{Reproducibility}} and {{Open Science}}}, 20 | isbn = {978-3-319-50590-9}, 21 | shorttitle = {Looking at {{John Snow}}'s {{Cholera Map}} from the {{Twenty First Century}}}, 22 | abstract = {This chapter (This manuscript is a chapter version of the original document, which is a reproducible online notebook. 
The entire, version-controlled project can be found online at: https://bitbucket.org/darribas/reproducible\_john\_snow.) presents an entirely reproducible spatial analysis of the classic John Snow's map of the 1854 cholera epidemic in London. The analysis draws on many of the techniques most commonly used by regional scientists, such as choropleth mapping, spatial autocorrelation, and point pattern analysis. In doing so, the chapter presents a practical roadmap for performing a completely open and reproducible analysis in regional science. In particular, we deal with the automation of (1) synchronizing code and text, (2) presenting results in figures and tables, and (3) generating reference lists. In addition, we discuss the significant added value of version control systems and their role in enhancing transparency through public, open repositories. With this chapter, we aim to practically illustrate a set of principles and techniques that facilitate transparency and reproducibility in empirical research, both keys to the health and credibility of regional science in the next 50 years to come.}, 23 | language = {en}, 24 | booktitle = {Regional {{Research Frontiers}} - {{Vol}}. 2: {{Methodological Advances}}, {{Regional Systems Modeling}} and {{Open Sciences}}}, 25 | publisher = {{Springer International Publishing}}, 26 | author = {{Arribas-Bel}, Daniel and {de Graaff}, Thomas and Rey, Sergio J.}, 27 | editor = {Jackson, Randall and Schaeffer, Peter}, 28 | year = {2017}, 29 | keywords = {Regional Science,Spatial Autocorrelation,Spatial Outlier,Spatial Weight Matrix,Street Segment}, 30 | pages = {283-306}, 31 | doi = {10.1007/978-3-319-50590-9_17} 32 | } 33 | 34 | @book{tufte2001visual, 35 | title = {The Visual Display of Quantitative Information}, 36 | publisher = {{Graphics Press Cheshire, CT, USA}}, 37 | author = {Tufte, E. 
R.}, 38 | year = {2001} 39 | } 40 | 41 | @article{goodchild2007citizen, 42 | title = {Citizen as Sensors: The World of Volunteered Geography}, 43 | volume = {69}, 44 | journal = {GeoJournal}, 45 | author = {Goodchild, Michael F}, 46 | year = {2007}, 47 | keywords = {volunteer geographic information}, 48 | pages = {211--221} 49 | } 50 | 51 | @article{arribas-bel2014accidental, 52 | title = {Accidental, Open and Everywhere: {{Emerging}} Data Sources for the Understanding of Cities}, 53 | volume = {49}, 54 | issn = {01436228}, 55 | shorttitle = {Accidental, Open and Everywhere}, 56 | abstract = {In this paper, I review the recent emergence of three groups of data sources and assess some of the opportunities and challenges they pose for the understanding of cities, particularly in the context of the Regional Science and urban research agenda. These are data collected from mobile sensors carried by individuals, data derived from businesses moving their activity online and government data released in an open format. Although very different from each other, they are all becoming available as a side-effect since they were created with different purposes but their degree of popularity, pervasiveness and ease of access is turning them into interesting alternatives for researchers. Existing projects and initiatives that conform to each class are featured as illustrative examples of these new potential sources of knowledge. \'O 2013 Elsevier Ltd. 
All rights reserved.}, 57 | language = {en}, 58 | journal = {Applied Geography}, 59 | doi = {10.1016/j.apgeog.2013.09.012}, 60 | author = {{Arribas-Bel}, Daniel}, 61 | month = may, 62 | year = {2014}, 63 | pages = {45-53}, 64 | file = {/home/lw17329/Zotero/storage/I4QFYAPM/Arribas-Bel - 2014 - Accidental, open and everywhere Emerging data sou.pdf} 65 | } 66 | 67 | @article{anselin1988spatial, 68 | title = {Do Spatial Effects Really Matter in Regression Analysis?}, 69 | volume = {65}, 70 | journal = {Papers Regional Science}, 71 | author = {Anselin, L. and Griffith, Daniel A}, 72 | year = {1988}, 73 | keywords = {Spatial dependence}, 74 | pages = {11--34}, 75 | doi = {10.1111/j.1435-5597.1988.tb01155.x} 76 | } 77 | 78 | @book{anselin2014modern, 79 | address = {Chicago, IL}, 80 | title = {Modern {{Spatial Econometrics}} in {{Practice}}, a {{Guide}} to {{GeoDa}}, {{GeoDaSpace}}, and {{PySAL}}}, 81 | publisher = {{GeoDa Press}}, 82 | author = {Anselin, Luc and Rey, Sergio J.}, 83 | year = {2014} 84 | } 85 | 86 | @article{rey2009show, 87 | title = {Show Me the Code: Spatial Analysis and Open Source}, 88 | volume = {11}, 89 | issn = {1435-5930, 1435-5949}, 90 | shorttitle = {Show Me the Code}, 91 | language = {en}, 92 | number = {2}, 93 | journal = {Journal of Geographical Systems}, 94 | doi = {10.1007/s10109-009-0086-8}, 95 | author = {Rey, Sergio J.}, 96 | month = jun, 97 | year = {2009}, 98 | pages = {191--207}, 99 | file = {/home/lw17329/Dropbox/literature/Rey - 2009 - Show me the code spatial analysis and open source.pdf;/home/lw17329/Zotero/storage/E6UEYX4F/art\%3A10.1007\%2Fs10109-009-0086-8.pdf;/home/lw17329/Zotero/storage/K9JRHXCG/art\%3A10.1007\%2Fs10109-009-0086-8.pdf} 100 | } 101 | 102 | @incollection{rey2018code, 103 | address = {Cham}, 104 | series = {Advances in {{Geographic Information Science}}}, 105 | title = {Code as {{Text}}: {{Open Source Lessons}} for {{Geospatial Research}} and {{Education}}}, 106 | isbn = {978-3-319-59511-5}, 107 | shorttitle = 
{Code as {{Text}}}, 108 | abstract = {This chapter examines the potential opportunities that open source offers for research and education in spatial analysis. Drawing on lessons learned in the development of PySAL: Python Library for Spatial Analysis, it touches on the opportunities and challenges related to the adoption of open source practices and culture. While open source has had major impacts on pedagogy and research in spatial analysis, these are somewhat under-appreciated and at times seen as separate spheres. A central argument is that a mind shift is required that comes to see code not just as a tool for doing research, but rather to view code as text in the sense it becomes an object of research. The chapter reconsiders open source spatial analysis teaching and research from this lens of code as text.}, 109 | language = {en}, 110 | booktitle = {{{GeoComputational Analysis}} and {{Modeling}} of {{Regional Systems}}}, 111 | publisher = {{Springer International Publishing}}, 112 | author = {Rey, Sergio J.}, 113 | editor = {Thill, Jean-Claude and Dragicevic, Suzana}, 114 | year = {2018}, 115 | pages = {7-21}, 116 | doi = {10.1007/978-3-319-59511-5_2} 117 | } 118 | 119 | @techreport{ucgis2019geographic, 120 | title = {Geographic {{Information Science}} and {{Technology Body}} of {{Knowledge}}}, 121 | author = {{University Consortium of Geographic Information Science}}, 122 | year = {2019}, 123 | file = {/home/lw17329/Zotero/storage/9INJ4PZ7/gistbok.ucgis.org.html} 124 | } 125 | 126 | @misc{kelsey_jordahl_2019_3333010, 127 | author = {Kelsey Jordahl and 128 | Joris Van den Bossche and 129 | Jacob Wasserman and 130 | James McBride and 131 | Jeffrey Gerard and 132 | Jeff Tratner and 133 | Matthew Perry and 134 | Carson Farmer and 135 | Sean Gillies and 136 | Micah Cochran and 137 | Matt Bartos and 138 | Martin Fleischmann and 139 | Lucas Culbertson and 140 | Nick Eubank and 141 | maxalbert and 142 | Aleksey Bilogur and 143 | Geir Arne Hjelle and 144 | Dani 
import mapclassify as classify
import matplotlib
import matplotlib as mpl
import numpy as np
from packaging.version import Version

# isolate MPL version - GH#162
MPL_36 = Version(matplotlib.__version__) >= Version("3.6")
if MPL_36:
    from matplotlib import colormaps as cm
else:
    import matplotlib.cm as cm
import matplotlib.pyplot as plt


"""
Utility functions for lightweight visualizations in splot
"""

__author__ = "Stefanie Lumnitz "


def moran_hot_cold_spots(moran_loc, p=0.05):
    """
    Encode significant Local Moran quadrants as integer cluster codes.

    Parameters
    ----------
    moran_loc : esda.moran.Moran_Local instance
        Fitted Local Moran statistic; its ``.p_sim`` and ``.q``
        attributes are read.
    p : float, optional
        Significance threshold compared against ``moran_loc.p_sim``.
        Default =0.05.

    Returns
    -------
    cluster : array of int
        One code per observation: 0 = not significant, 1 = HH,
        2 = LH, 3 = LL, 4 = HL.
    """
    sig = 1 * (moran_loc.p_sim < p)
    # multiplying by `sig` zeroes out quadrants of non-significant observations
    HH = 1 * (sig * moran_loc.q == 1)
    LL = 3 * (sig * moran_loc.q == 3)
    LH = 2 * (sig * moran_loc.q == 2)
    HL = 4 * (sig * moran_loc.q == 4)
    cluster = HH + LL + LH + HL
    return cluster


def mask_local_auto(moran_loc, p=0.5):
    """
    Create Mask for coloration and labeling of local spatial autocorrelation

    Parameters
    ----------
    moran_loc : esda.moran.Moran_Local instance
        values of Moran's I Global Autocorrelation Statistic
    p : float
        The p-value threshold for significance. Points will
        be colored by significance.
        NOTE(review): default is 0.5, unlike the 0.05 used by
        `moran_hot_cold_spots` — confirm this is intentional before
        relying on the default.

    Returns
    -------
    cluster_labels : list of str
        List of labels - ['ns', 'HH', 'LH', 'LL', 'HL']
    colors5 : list of str
        List of colours - ['#d7191c', '#fdae61', '#abd9e9',
        '#2c7bb6', 'lightgrey']
    colors : array of str
        Array containing coloration for each input value/ shape.
    labels : list of str
        List of label for each attribute value/ polygon.
    """
    # create a mask for local spatial autocorrelation
    cluster = moran_hot_cold_spots(moran_loc, p)

    cluster_labels = ["ns", "HH", "LH", "LL", "HL"]
    labels = [cluster_labels[i] for i in cluster]

    colors5 = {0: "lightgrey", 1: "#d7191c", 2: "#abd9e9", 3: "#2c7bb6", 4: "#fdae61"}
    colors = [colors5[i] for i in cluster]  # for Bokeh
    # for MPL, keeps colors even if clusters are missing:
    x = np.array(labels)
    y = np.unique(x)
    colors5_mpl = {
        "HH": "#d7191c",
        "LH": "#abd9e9",
        "LL": "#2c7bb6",
        "HL": "#fdae61",
        "ns": "lightgrey",
    }
    colors5 = [colors5_mpl[i] for i in y]  # for mpl

    # HACK need this, because MPL sorts these labels while Bokeh does not
    cluster_labels.sort()
    return cluster_labels, colors5, colors, labels


# registry mapping user-facing method names to mapclassify classifiers
_classifiers = {
    "box_plot": classify.BoxPlot,
    "equal_interval": classify.EqualInterval,
    "fisher_jenks": classify.FisherJenks,
    "headtail_breaks": classify.HeadTailBreaks,
    "jenks_caspall": classify.JenksCaspall,
    "jenks_caspall_forced": classify.JenksCaspallForced,
    "max_p_classifier": classify.MaxP,
    "maximum_breaks": classify.MaximumBreaks,
    "natural_breaks": classify.NaturalBreaks,
    "quantiles": classify.Quantiles,
    "percentiles": classify.Percentiles,
    "std_mean": classify.StdMean,
    "user_defined": classify.UserDefined,
}


def bin_values_choropleth(attribute_values, method="quantiles", k=5):
    """
    Create bins based on different classification methods.
    Needed for legend labels and Choropleth coloring.

    Parameters
    ----------
    attribute_values : array or geopandas.series instance
        Array containing relevant attribute values.
    method : str
        Classification method to be used. Options supported:
        * 'quantiles' (default)
        * 'fisher_jenks'
        * 'equal_interval'
    k : int
        Number of bins, assigning values to. Default k=5

    Returns
    -------
    bin_values : mapclassify instance
        Object containing bin ids for each observation (.yb),
        upper bounds of each class (.bins), number of classes (.k)
        and number of observations falling in each class (.counts)

    Raises
    ------
    ValueError
        If `method` is not one of the three supported names.
    """
    if method not in ["quantiles", "fisher_jenks", "equal_interval"]:
        raise ValueError("Method {} not supported".format(method))

    bin_values = _classifiers[method](attribute_values, k)
    return bin_values


def bin_labels_choropleth(gdf, attribute_values, method="quantiles", k=5):
    """
    Create labels for each bin in the legend

    Parameters
    ----------
    gdf : Geopandas dataframe
        Dataframe containing relevant shapes and attribute values.
        A 'labels_choro' column is added as a side effect.
    attribute_values : array or geopandas.series instance
        Array containing relevant attribute values.
    method : str, optional
        Classification method to be used. Options supported:
        * 'quantiles' (default)
        * 'fisher_jenks'
        * 'equal_interval'
    k : int, optional
        Number of bins, assigning values to. Default k=5

    Returns
    -------
    bin_labels : list of str
        List of label for each bin.
    """
    # Retrieve bin values from bin_values_choropleth()
    bin_values = bin_values_choropleth(attribute_values, method=method, k=k)

    # Extract bin ids (.yb) and upper bounds for each class (.bins)
    yb = bin_values.yb
    bins = bin_values.bins

    # Create bin labels (smaller version)
    bin_edges = bins.tolist()
    bin_labels = []
    for i in range(k):
        bin_labels.append("<{:1.1f}".format(bin_edges[i]))

    # Add labels (which are the labels printed in the legend) to each row of gdf
    labels = np.array([bin_labels[c] for c in yb])
    gdf["labels_choro"] = [str(l_) for l_ in labels]
    return bin_labels


def add_legend(fig, labels, colors):
    """
    Add a legend to a figure given legend labels & colors.

    Parameters
    ----------
    fig : Bokeh Figure instance
        Figure instance labels should be generated for.
    labels : list of str
        Labels to use as legend entries.
    colors : Bokeh Palette instance
        Palette instance containing colours of choice.

    Returns
    -------
    legend : Bokeh Legend instance
        The legend that was attached below the figure.
    """
    from bokeh.models import Legend

    # add labels to figure (workaround,
    # legend with geojsondatasource doesn't work,
    # see https://github.com/bokeh/bokeh/issues/5904)
    items = []
    for label, color in zip(labels, colors):
        patch = fig.patches(xs=[], ys=[], fill_color=color)
        items.append((label, [patch]))

    legend = Legend(
        items=items, location="top_left", margin=0, orientation="horizontal"
    )
    # possibility to define glyph_width=10, glyph_height=10)
    legend.label_text_font_size = "8pt"
    fig.add_layout(legend, "below")
    return legend


def format_legend(values):
    """
    Helper to return sensible legend values

    Parameters
    ----------
    values: array
        Values plotted in legend.

    Returns
    -------
    values : array
        Input values, divided by 1000 if any exceeded 1000.
    in_thousand : bool
        True if the values were rescaled.
    """
    in_thousand = False
    if np.any(values > 1000):
        in_thousand = True
        values = values / 1000
    return values, in_thousand


def calc_data_aspect(plot_height, plot_width, bounds):
    """
    Expand data `bounds` so a meter in x and a meter in y span the same
    number of pixels in a `plot_height` x `plot_width` Bokeh figure,
    then add a 3% margin on each axis.

    Returns (xmin, xmax, ymin, ymax).
    """
    # Deal with data ranges in Bokeh:
    # make a meter in x and a meter in y the same in pixel lengths
    aspect_box = plot_height / plot_width  # 2 / 1 = 2
    xmin, ymin, xmax, ymax = bounds
    x_range = xmax - xmin  # 1 = 1 - 0
    y_range = ymax - ymin  # 3 = 3 - 0
    aspect_data = y_range / x_range  # 3 / 1 = 3
    if aspect_data > aspect_box:
        # we need to increase x_range,
        # such that aspect_data becomes equal to aspect_box
        halfrange = 0.5 * x_range * (aspect_data / aspect_box - 1)
        # 0.5 * 1 * (3 / 2 - 1) = 0.25
        xmin -= halfrange  # 0 - 0.25 = -0.25
        xmax += halfrange  # 1 + 0.25 = 1.25
    else:
        # we need to increase y_range
        halfrange = 0.5 * y_range * (aspect_box / aspect_data - 1)
        ymin -= halfrange
        ymax += halfrange

    # Add a bit of margin to both x and y
    margin = 0.03
    xmin -= (xmax - xmin) / 2 * margin
    xmax += (xmax - xmin) / 2 * margin
    ymin -= (ymax - ymin) / 2 * margin
    ymax += (ymax - ymin) / 2 * margin
    return xmin, xmax, ymin, ymax


# Utility functions for colormaps
# Color design
splot_colors = dict(moran_base="#bababa", moran_fit="#d6604d")


# Utility function #1 - forces continuous diverging colormap to be centered at zero
def shift_colormap(cmap, start=0, midpoint=0.5, stop=1.0, name="shiftedcmap"):
    """
    Function to offset the "center" of a colormap. Useful for
    data with a negative min and positive max and you want the
    middle of the colormap's dynamic range to be at zero

    Parameters
    ----------
    cmap : str or matplotlib.cm instance
        colormap to be altered
    start : float, optional
        Offset from lowest point in the colormap's range.
        Should be between 0.0 and `midpoint`.
        Default =0.0 (no lower offset).
    midpoint : float, optional
        The new center of the colormap.Should be between 0.0 and
        1.0. In general, this should be 1 - vmax/(vmax + abs(vmin)).
        For example if your data range from -15.0 to +5.0 and
        you want the center of the colormap at 0.0, `midpoint`
        should be set to 1 - 5/(5 + 15)) or 0.75.
        Default =0.5 (no shift).
    stop : float, optional
        Offset from highest point in the colormap's range.
        Should be between `midpoint` and 1.0.
        Default =1.0 (no upper offset).
    name : str, optional
        Name of the new colormap.

    Returns
    -------
    new_cmap : A new colormap that has been shifted.
    """
    if isinstance(cmap, str):
        cmap = cm.get_cmap(cmap)

    cdict = {"red": [], "green": [], "blue": [], "alpha": []}

    # regular index to compute the colors
    reg_index = np.linspace(start, stop, 257)

    # shifted index to match the data
    shift_index = np.hstack(
        [
            np.linspace(0.0, midpoint, 128, endpoint=False),
            np.linspace(midpoint, 1.0, 129, endpoint=True),
        ]
    )

    for ri, si in zip(reg_index, shift_index):
        r, g, b, a = cmap(ri)

        cdict["red"].append((si, r, r))
        cdict["green"].append((si, g, g))
        cdict["blue"].append((si, b, b))
        cdict["alpha"].append((si, a, a))

    new_cmap = mpl.colors.LinearSegmentedColormap(name, cdict)
    # registration API changed in matplotlib 3.6 - GH#162
    if MPL_36:
        cm.register(new_cmap)
    else:
        plt.register_cmap(cmap=new_cmap)
    return new_cmap


# Utility #2 - truncate colormap in order to grab only positive or negative portion
def truncate_colormap(cmap, minval=0.0, maxval=1.0, n=100):
    """
    Function to truncate a colormap by selecting a subset of
    the original colormap's values

    Parameters
    ----------
    cmap : str or matplotlib.cm instance
        Colormap to be altered
    minval : float, optional
        Minimum value of the original colormap to include
        in the truncated colormap. Default =0.0.
    maxval : Maximum value of the original colormap to
        include in the truncated colormap. Default =1.0.
    n : int, optional
        Number of intervals between the min and max values
        for the gradient of the truncated colormap. Default =100.

    Returns
    -------
    new_cmap : A new colormap that has been shifted.
    """

    if isinstance(cmap, str):
        cmap = cm.get_cmap(cmap)

    new_cmap = mpl.colors.LinearSegmentedColormap.from_list(
        "trunc({n},{a:.2f},{b:.2f})".format(n=cmap.name, a=minval, b=maxval),
        cmap(np.linspace(minval, maxval, n)),
    )
    return new_cmap
20 | 21 | ## Issues Closed 22 | 23 | - Streamline & upgrade CI (#135) 24 | - update conf.py (#134) 25 | - Migrating testing & coverage services (#124) 26 | - [MAINT] rename 'master' to 'main' (#121) 27 | - ipywidgets dependency (#130) 28 | - REF: make ipywidgets optional dependency (#132) 29 | - [WIP] update testing procedure with new datasets (#133) 30 | - MatplotlibDeprecationWarning from ax.spines[label].set_smart_bounds() (#115) 31 | - [DOC] include libpysal.example api changes & reinstall splot for testing (#128) 32 | - [MAINT] remove `.set_smart_bounds()` (#125) 33 | - Gha testing (#126) 34 | - GitHub Actions for continuous integration (#111) 35 | - [MAINT] change in`pandas.isin()` affecting `plot_local_autocorrelation` (#123) 36 | - [BUG] enforce dtype in `mask` in `plot_local_autocorrelation()` (#122) 37 | - [MAINT] AttributeError: 'NoneType' object has no attribute 'startswith' in all Moran plots (#117) 38 | - [BUG] 'color' and 'c' in `test_viz_giddy_mpl.test_dynamic_lisa_vectors` (#116) 39 | - [MAINT] update links to Guerry dataset in `_test_data()` (#119) 40 | - [BUG] Build failing due to change in Seaborn (#110) 41 | - [BUG] pin seaborn to v0.10.0 for testing new functionality (#114) 42 | - Topological colouring (#94) 43 | - vba_choropleth --> ValueError: Invalid RGBA argument: (#100) 44 | - Pyviz affiliation (#75) 45 | - BUG: Bokeh needed for testing (#107) 46 | - [JOSS] add Joss badge to README.md (#106) 47 | - [JOSS] doi reference correction (#105) 48 | - Fixing BibTeX entry pages. 
(#104) 49 | - Release1.1.3 (#103) 50 | 51 | ## Pull Requests 52 | 53 | - Streamline & upgrade CI (#135) 54 | - REF: make ipywidgets optional dependency (#132) 55 | - [DOC] include libpysal.example api changes & reinstall splot for testing (#128) 56 | - [MAINT] remove `.set_smart_bounds()` (#125) 57 | - Gha testing (#126) 58 | - [BUG] enforce dtype in `mask` in `plot_local_autocorrelation()` (#122) 59 | - [MAINT] update links to Guerry dataset in `_test_data()` (#119) 60 | - BUG: Bokeh needed for testing (#107) 61 | - [JOSS] add Joss badge to README.md (#106) 62 | - [JOSS] doi reference correction (#105) 63 | - Fixing BibTeX entry pages. (#104) 64 | - Release1.1.3 (#103) 65 | 66 | The following individuals contributed to this release: 67 | 68 | - Stefanie Lumnitz 69 | - James Gaboardi 70 | - Martin Fleischmann 71 | - Dani Arribas-Bel 72 | - Serge Rey 73 | - Arfon Smith 74 | 75 | # Version 1.1.3 (2020-03-18) 76 | 77 | We closed a total of 15 issues (enhancements and bug fixes) through 6 pull requests, since our last release on 2020-01-18. 
78 | 79 | ## Issues Closed 80 | 81 | - add permanent links to current version of no's to joss paper (#102) 82 | - [BUG] set colors as list in _plot_choropleth_fig() (#101) 83 | - Remove the links around figures in the JOSS paper (#99) 84 | - Release prep for 1.1.2 (#98) 85 | - Installation instructions; pip install fails on macOS (#88) 86 | - Usage in readme is a fragment (#90) 87 | - JOSS: missing figure captions (#92) 88 | - [DOC] update installation instruction (#96) 89 | - [DOC] add example links to README.md & figure captions in joss article (#97) 90 | 91 | ## Pull Requests 92 | 93 | - add permanent links to current version of no's to joss paper (#102) 94 | - [BUG] set colors as list in _plot_choropleth_fig() (#101) 95 | - Remove the links around figures in the JOSS paper (#99) 96 | - Release prep for 1.1.2 (#98) 97 | - [DOC] update installation instruction (#96) 98 | - [DOC] add example links to README.md & figure captions in joss article (#97) 99 | 100 | The following individuals contributed to this release: 101 | 102 | - Stefanie Lumnitz 103 | - Levi John Wolf 104 | - Leonardo Uieda 105 | - Serge Rey 106 | 107 | # Version 1.1.2 (2020-01-18) 108 | 109 | We closed a total of 33 issues (enhancements and bug fixes) through 13 pull requests, since our last release on 2019-07-13. 
110 | 111 | ## Issues Closed 112 | 113 | - Installation instructions; pip install fails on macOS (#88) 114 | - Usage in readme is a fragment (#90) 115 | - JOSS: missing figure captions (#92) 116 | - [DOC] update installation instruction (#96) 117 | - [DOC] add example links to README.md & figure captions in joss article (#97) 118 | - [BUG] vba_choropleth failure (#83) 119 | - BUG: Fix breakage due to mapclassify deprecation (#95) 120 | - addressing pysal/pysal#1145 & adapting testing examples (#93) 121 | - Fix docstring for plot_spatial_weights (#89) 122 | - JOSS paper submission (#59) 123 | - Fix format for multiple citations in JOSS paper (#87) 124 | - Joss paper, finalise title (#86) 125 | - [JOSS] work on `paper.md` (#62) 126 | - [ENH] change doc badge to latest doc (#85) 127 | - [BUG] require geopandas>=0.4.0,<=0.6.0rc1 for vba_choropleth testing (#84) 128 | - `plot_moran_simulation` weird dimensions (#82) 129 | - Colors are not fixed is LISA maps (#80) 130 | - Release 1.1.1 (#79) 131 | - add ipywidgets to requirements_dev.txt (#78) 132 | - add descartes to `requirements.txt` (#77) 133 | 134 | ## Pull Requests 135 | 136 | - [DOC] update installation instruction (#96) 137 | - [DOC] add example links to README.md & figure captions in joss article (#97) 138 | - BUG: Fix breakage due to mapclassify deprecation (#95) 139 | - addressing pysal/pysal#1145 & adapting testing examples (#93) 140 | - Fix docstring for plot_spatial_weights (#89) 141 | - Fix format for multiple citations in JOSS paper (#87) 142 | - Joss paper, finalise title (#86) 143 | - [JOSS] work on `paper.md` (#62) 144 | - [ENH] change doc badge to latest doc (#85) 145 | - [BUG] require geopandas>=0.4.0,<=0.6.0rc1 for vba_choropleth testing (#84) 146 | - Release 1.1.1 (#79) 147 | - add ipywidgets to requirements_dev.txt (#78) 148 | - add descartes to `requirements.txt` (#77) 149 | 150 | The following individuals contributed to this release: 151 | 152 | - Stefanie Lumnitz 153 | - Serge Rey 154 | - James 
Gaboardi 155 | - Martin Fleischmann 156 | - Leonardo Uieda 157 | - Levi John Wolf 158 | - Wei Kang 159 | 160 | # Version 1.1.1 (2019-07-13) 161 | 162 | We closed a total of 8 issues (enhancements and bug fixes) through 4 pull requests, since our last release on 2019-06-27. 163 | 164 | ## Issues Closed 165 | 166 | - add ipywidgets to requirements_dev.txt (#78) 167 | - add descartes to `requirements.txt` (#77) 168 | - [ENH] read long_description from README.md (#76) 169 | - Rel1.1.0 (#74) 170 | 171 | ## Pull Requests 172 | 173 | - add ipywidgets to requirements_dev.txt (#78) 174 | - add descartes to `requirements.txt` (#77) 175 | - [ENH] read long_description from README.md (#76) 176 | - Rel1.1.0 (#74) 177 | 178 | The following individuals contributed to this release: 179 | 180 | - Stefanie Lumnitz 181 | - Levi John Wolf 182 | 183 | # Version 1.1.0 (2019-06-27) 184 | 185 | We closed a total of 54 issues (enhancements and bug fixes) through 21 pull requests, since our last release on 2018-11-13. 186 | 187 | ## Issues Closed 188 | 189 | - LISA cluster map colours mixed when cluster value not present (#72) 190 | - [ENH] select colour by presence of value in list in `mask_local_auto` (#73) 191 | - Moran Scatterplots with equal bounds on X and Y axes? 
(#51) 192 | - Add aspect_equal argument to Moran functionality (#70) 193 | - set up dual travis tests for pysal dependencies (pip and github) (#69) 194 | - API changes of mapclassify propagate to splot (#65) 195 | - [DOC] include rtree and descartes in `requirements_dev.txt` (#68) 196 | - Readme update (#67) 197 | - docs building using readthedocs.yml version 2 (#64) 198 | - [DOC] add test for missing code cove % (#57) 199 | - Add tests for warnings and ValueErrors (#61) 200 | - Update travis for testing (#1) 201 | - travis ci testing: migrate from 3.5 and 3.6 to 3.6 and 3.7 (#63) 202 | - create paper directory (#58) 203 | - clean and rerun notebooks (#56) 204 | - `vba_choropleth` API (#45) 205 | - allow string (default) in vba_choropleth function of tests (#52) 206 | - migrating to readthedocs II (#54) 207 | - migration to readthedocs (#53) 208 | - Make docs (#46) 209 | - Segmentation fault in running tests on TravisCI (#47) 210 | - code 139 memory segmentation fault: RESOLVED (#48) 211 | - pip install on linux fails on pyproj (#41) 212 | - update archaic Miniconda build (#44) 213 | - adjusting markdown font (#43) 214 | - add `moran_facette` functionality and merge `esda.moran` plots to `moran_scatterplot` (#27) 215 | - (ENH) speed up plot_spatial_weights for plotting spatial weights (#42) 216 | - Travis testing against esda and giddy master branch (#31) 217 | - 1.0.0 Release (#40) 218 | - merge Sprint with master branch (#39) 219 | - Change documentation style (#38) 220 | - add travis build badge to README.md (#37) 221 | - fix current documentation for sprint (#36) 222 | 223 | ## Pull Requests 224 | 225 | - [ENH] select colour by presence of value in list in `mask_local_auto` (#73) 226 | - Add aspect_equal argument to Moran functionality (#70) 227 | - set up dual travis tests for pysal dependencies (pip and github) (#69) 228 | - Readme update (#67) 229 | - docs building using readthedocs.yml version 2 (#64) 230 | - Add tests for warnings and ValueErrors (#61) 231 
| - travis ci testing: migrate from 3.5 and 3.6 to 3.6 and 3.7 (#63) 232 | - create paper directory (#58) 233 | - clean and rerun notebooks (#56) 234 | - allow string (default) in vba_choropleth function of tests (#52) 235 | - migrating to readthedocs II (#54) 236 | - migration to readthedocs (#53) 237 | - Make docs (#46) 238 | - code 139 memory segmentation fault: RESOLVED (#48) 239 | - update archaic Miniconda build (#44) 240 | - adjusting markdown font (#43) 241 | - (ENH) speed up plot_spatial_weights for plotting spatial weights (#42) 242 | - 1.0.0 Release (#40) 243 | - merge Sprint with master branch (#39) 244 | - Change documentation style (#38) 245 | - fix current documentation for sprint (#36) 246 | 247 | The following individuals contributed to this release: 248 | 249 | - Stefanie Lumnitz 250 | - Wei Kang 251 | - James Gaboardi 252 | - Renanxcortes 253 | - Dani Arribas-Bel 254 | 255 | # Version 1.0.0 (2018-11-30) 256 | 257 | We closed a total of 52 issues (enhancements and bug fixes) through 23 pull requests, since our last release on 2017-05-09. 
258 | 259 | ## Issues Closed 260 | 261 | - merge Sprint with master branch (#39) 262 | - Change documentation style (#38) 263 | - add travis build badge to README.md (#37) 264 | - fix current documentation for sprint (#36) 265 | - `value_by_alpha` prototype (#28) 266 | - Clean up of current code base (#30) 267 | - Value By Alpha specification (#24) 268 | - nonplanar example update (#33) 269 | - add README.md (#29) 270 | - issues in some docstrings for giddy (#26) 271 | - debug `splot` documentation (#25) 272 | - collection of cleanups for`splot.giddy` (#23) 273 | - created `esda.moran.Moran_Local_BV` visualisations (#20) 274 | - add `esda.moran.Moran_BV` visualizations to `splot.esda` (#18) 275 | - add `seaborn` and `matplotlib` to `install_requirements` in `setup.py` (#19) 276 | - prototype `moran_scatterplot()`, `plot_moran_simulation()` and `plot_moran()` for `esda` (#17) 277 | - include utility functions `shift_colormap` and `truncate_colormap` (#15) 278 | - fix setup.py so files are installed with "pip install ." 
(#16) 279 | - `plot_spatial_weights` including network joins for `non_planar_joins` (#14) 280 | - adapting existing `esda` functionality to `splot.esda` namespace and allow `.plot()` method (#13) 281 | - adding license (#4) 282 | - add `giddy` dynamic LISA functionality under `splot.giddy` (#11) 283 | - start sphinx html documentation (#12) 284 | - add visualization option with significance to mplot (#7) 285 | - Visualising Local Autocorrelation (#8) 286 | - Copy new changes made to viz module into split (#5) 287 | - run 2to3 for splot (#6) 288 | - Fix for Pysal#930 (#3) 289 | - Add a Gitter chat badge to README.md (#2) 290 | 291 | ## Pull Requests 292 | 293 | - merge Sprint with master branch (#39) 294 | - Change documentation style (#38) 295 | - fix current documentation for sprint (#36) 296 | - `value_by_alpha` prototype (#28) 297 | - Clean up of current code base (#30) 298 | - add README.md (#29) 299 | - debug `splot` documentation (#25) 300 | - collection of cleanups for`splot.giddy` (#23) 301 | - created `esda.moran.Moran_Local_BV` visualisations (#20) 302 | - add `esda.moran.Moran_BV` visualizations to `splot.esda` (#18) 303 | - add `seaborn` and `matplotlib` to `install_requirements` in `setup.py` (#19) 304 | - prototype `moran_scatterplot()`, `plot_moran_simulation()` and `plot_moran()` for `esda` (#17) 305 | - include utility functions `shift_colormap` and `truncate_colormap` (#15) 306 | - fix setup.py so files are installed with "pip install ." 
(#16) 307 | - `plot_spatial_weights` including network joins for `non_planar_joins` (#14) 308 | - adapting existing `esda` functionality to `splot.esda` namespace and allow `.plot()` method (#13) 309 | - add `giddy` dynamic LISA functionality under `splot.giddy` (#11) 310 | - start sphinx html documentation (#12) 311 | - add visualization option with significance to mplot (#7) 312 | - Visualising Local Autocorrelation (#8) 313 | - run 2to3 for splot (#6) 314 | - Fix for Pysal#930 (#3) 315 | - Add a Gitter chat badge to README.md (#2) 316 | 317 | The following individuals contributed to this release: 318 | 319 | - Stefanie Lumnitz 320 | - Dani Arribas-Bel 321 | - Levi John Wolf 322 | - Serge Rey 323 | - Thequackdaddy 324 | - Jsignell 325 | - Serge 326 | -------------------------------------------------------------------------------- /splot/_viz_value_by_alpha_mpl.py: -------------------------------------------------------------------------------- 1 | import collections.abc 2 | 3 | import matplotlib 4 | import matplotlib.pyplot as plt 5 | import numpy as np 6 | from matplotlib import colors, patches 7 | from packaging.version import Version 8 | 9 | from ._viz_utils import _classifiers, format_legend 10 | 11 | # isolate MPL version - GH#162 12 | MPL_36 = Version(matplotlib.__version__) >= Version("3.6") 13 | if MPL_36: 14 | from matplotlib import colormaps as cm 15 | else: 16 | import matplotlib.cm as cm 17 | 18 | 19 | """ 20 | Creating Maps with splot 21 | * Value-by-Alpha maps 22 | * Mapclassify wrapper 23 | * Color utilities 24 | 25 | TODO: 26 | * add Choropleth functionality with one input variable 27 | * merge all alpha keywords in one keyword dictionary 28 | for vba_choropleth 29 | 30 | """ 31 | 32 | __author__ = "Stefanie Lumnitz " 33 | 34 | 35 | def value_by_alpha_cmap(x, y, cmap="GnBu", revert_alpha=False, divergent=False): 36 | """ 37 | Calculates Value by Alpha rgba values 38 | 39 | Parameters 40 | ---------- 41 | x : array 42 | Variable determined by color 
43 | y : array 44 | Variable determining alpha value 45 | cmap : str or list of str 46 | Matplotlib Colormap or list of colors used 47 | to create vba_layer 48 | revert_alpha : bool, optional 49 | If True, high y values will have a 50 | low alpha and low values will be transparent. 51 | Default =False. 52 | divergent : bool, optional 53 | Creates a divergent alpha array with high values 54 | at the extremes and low, transparent values 55 | in the middle of the input values. 56 | 57 | Returns 58 | ------- 59 | rgba : ndarray (n,4) 60 | RGBA colormap, where the alpha channel represents one 61 | attribute (x) and the rgb color the other attribute (y) 62 | cmap : str or list of str 63 | Original Matplotlib Colormap or list of colors used 64 | to create vba_layer 65 | 66 | Examples 67 | -------- 68 | 69 | Imports 70 | 71 | >>> from libpysal import examples 72 | >>> import geopandas as gpd 73 | >>> import matplotlib.pyplot as plt 74 | >>> import matplotlib 75 | >>> import numpy as np 76 | >>> from splot.mapping import value_by_alpha_cmap 77 | 78 | Load Example Data 79 | 80 | >>> link_to_data = examples.get_path('columbus.shp') 81 | >>> gdf = gpd.read_file(link_to_data) 82 | >>> x = gdf['HOVAL'].values 83 | >>> y = gdf['CRIME'].values 84 | 85 | Create rgba values 86 | 87 | >>> rgba, _ = value_by_alpha_cmap(x, y) 88 | 89 | Create divergent rgba and change Colormap 90 | 91 | >>> div_rgba, _ = value_by_alpha_cmap(x, y, cmap='seismic', divergent=True) 92 | 93 | Create rgba values with reverted alpha values 94 | 95 | >>> rev_rgba, _ = value_by_alpha_cmap(x, y, cmap='RdBu', revert_alpha=True) 96 | 97 | """ 98 | # option for cmap or colorlist input 99 | if isinstance(cmap, str): 100 | cmap = cm.get_cmap(cmap) 101 | elif isinstance(cmap, collections.abc.Sequence): 102 | cmap = colors.LinearSegmentedColormap.from_list("newmap", cmap) 103 | 104 | rgba = cmap((x - x.min()) / (x.max() - x.min())) 105 | if revert_alpha: 106 | rgba[:, 3] = 1 - ((y - y.min()) / (y.max() - y.min())) 107 
| else: 108 | rgba[:, 3] = (y - y.min()) / (y.max() - y.min()) 109 | if divergent is not False: 110 | a_under_0p5 = rgba[:, 3] < 0.5 111 | rgba[a_under_0p5, 3] = 1 - rgba[a_under_0p5, 3] 112 | rgba[:, 3] = (rgba[:, 3] - 0.5) * 2 113 | return rgba, cmap 114 | 115 | 116 | def vba_choropleth( 117 | x_var, 118 | y_var, 119 | gdf, 120 | cmap="GnBu", 121 | divergent=False, 122 | revert_alpha=False, 123 | alpha_mapclassify=None, 124 | rgb_mapclassify=None, 125 | ax=None, 126 | legend=False, 127 | ): 128 | """ 129 | Value by Alpha Choropleth 130 | 131 | Parameters 132 | ---------- 133 | x_var : string or array 134 | The name of variable in gdf determined by color or an array 135 | of values determined by color. 136 | y_var : string or array 137 | The name of variable in gdf determining alpha value or an array 138 | of values determined by color. 139 | gdf : geopandas dataframe instance 140 | The Dataframe containing information to plot. 141 | cmap : str or list of str 142 | Matplotlib Colormap or list of colors used 143 | to create vba_layer 144 | divergent : bool, optional 145 | Creates a divergent alpha array with high values at 146 | the extremes and low, transparent values in the 147 | middle of the input values. 148 | revert_alpha : bool, optional 149 | If True, high y values will have a 150 | low alpha and low values will be transparent. 151 | Default = False. 152 | alpha_mapclassify : dict 153 | Keywords used for binning input values and 154 | classifying alpha values with `mapclassify`. 155 | Note: valid keywords are eg. dict(classifier='quantiles', k=5, 156 | hinge=1.5). For other options check `splot.mapping.mapclassify_bin`. 157 | rgb_mapclassify : dict 158 | Keywords used for binning input values and 159 | classifying rgb values with `mapclassify`. 160 | Note: valid keywords are eg.g dict(classifier='quantiles', k=5, 161 | hinge=1.5).For other options check `splot.mapping.mapclassify_bin`. 
def _bins_from_mapclassify(values, mapclassify_kwds):
    """
    Classify ``values`` via `mapclassify_bin` driven by a keyword dict.

    Fills in the defaults `mapclassify_bin` expects without mutating the
    caller's dictionary (the previous implementation wrote defaults back
    into the user-provided dict via ``setdefault``).
    """
    kwds = dict(mapclassify_kwds)  # copy: do not mutate the caller's dict
    kwds.setdefault("k", 5)
    kwds.setdefault("hinge", 1.5)
    kwds.setdefault("multiples", [-2, -1, 1, 2])
    kwds.setdefault("mindiff", 0)
    kwds.setdefault("initial", 100)
    kwds.setdefault("bins", [20, max(values)])
    return mapclassify_bin(
        values,
        kwds["classifier"],
        k=kwds["k"],
        hinge=kwds["hinge"],
        multiples=kwds["multiples"],
        mindiff=kwds["mindiff"],
        initial=kwds["initial"],
        bins=kwds["bins"],
    )


def vba_choropleth(
    x_var,
    y_var,
    gdf,
    cmap="GnBu",
    divergent=False,
    revert_alpha=False,
    alpha_mapclassify=None,
    rgb_mapclassify=None,
    ax=None,
    legend=False,
):
    """
    Value by Alpha Choropleth

    Parameters
    ----------
    x_var : string or array
        The name of variable in gdf determined by color or an array
        of values determined by color.
    y_var : string or array
        The name of variable in gdf determining alpha value or an array
        of values determined by color.
    gdf : geopandas dataframe instance
        The Dataframe containing information to plot.
    cmap : str or list of str
        Matplotlib Colormap or list of colors used
        to create vba_layer
    divergent : bool, optional
        Creates a divergent alpha array with high values at
        the extremes and low, transparent values in the
        middle of the input values.
    revert_alpha : bool, optional
        If True, high y values will have a
        low alpha and low values will be transparent.
        Default = False.
    alpha_mapclassify : dict
        Keywords used for binning input values and
        classifying alpha values with `mapclassify`.
        Note: valid keywords are e.g. dict(classifier='quantiles', k=5,
        hinge=1.5). For other options check `splot.mapping.mapclassify_bin`.
    rgb_mapclassify : dict
        Keywords used for binning input values and
        classifying rgb values with `mapclassify`.
        Note: valid keywords are e.g. dict(classifier='quantiles', k=5,
        hinge=1.5). For other options check `splot.mapping.mapclassify_bin`.
    ax : matplotlib Axes instance, optional
        Axes in which to plot the figure in multiple Axes layout.
        Default = None
    legend : bool, optional
        Adds a legend.
        Note: currently only available if data is classified,
        hence `alpha_mapclassify` and `rgb_mapclassify` are required.

    Returns
    -------
    fig : matplotlib Figure instance
        Figure of Value by Alpha choropleth
    ax : matplotlib Axes instance
        Axes in which the figure is plotted

    Raises
    ------
    ValueError
        If ``legend=True`` but ``rgb_mapclassify`` or
        ``alpha_mapclassify`` is missing (the legend needs
        classified bins to draw).

    Examples
    --------

    Imports

    >>> from libpysal import examples
    >>> import geopandas as gpd
    >>> import matplotlib.pyplot as plt
    >>> import matplotlib
    >>> import numpy as np
    >>> from splot.mapping import vba_choropleth

    Load Example Data

    >>> link_to_data = examples.get_path('columbus.shp')
    >>> gdf = gpd.read_file(link_to_data)

    Plot a Value-by-Alpha map

    >>> fig, _ = vba_choropleth('HOVAL', 'CRIME', gdf)
    >>> plt.show()

    Plot a Value-by-Alpha map with reverted alpha values

    >>> fig, _ = vba_choropleth('HOVAL', 'CRIME', gdf, cmap='RdBu',
    ...                         revert_alpha=True)
    >>> plt.show()

    Plot a Value-by-Alpha map with classified alpha and rgb values

    >>> fig, axs = plt.subplots(2,2, figsize=(20,10))
    >>> vba_choropleth('HOVAL', 'CRIME', gdf, cmap='viridis', ax = axs[0,0],
    ...                rgb_mapclassify=dict(classifier='quantiles', k=3),
    ...                alpha_mapclassify=dict(classifier='quantiles', k=3))
    >>> vba_choropleth('HOVAL', 'CRIME', gdf, cmap='viridis', ax = axs[0,1],
    ...                rgb_mapclassify=dict(classifier='natural_breaks'),
    ...                alpha_mapclassify=dict(classifier='natural_breaks'))
    >>> vba_choropleth('HOVAL', 'CRIME', gdf, cmap='viridis', ax = axs[1,0],
    ...                rgb_mapclassify=dict(classifier='std_mean'),
    ...                alpha_mapclassify=dict(classifier='std_mean'))
    >>> vba_choropleth('HOVAL', 'CRIME', gdf, cmap='viridis', ax = axs[1,1],
    ...                rgb_mapclassify=dict(classifier='fisher_jenks', k=3),
    ...                alpha_mapclassify=dict(classifier='fisher_jenks', k=3))
    >>> plt.show()

    Pass in a list of colors instead of a cmap

    >>> color_list = ['#a1dab4','#41b6c4','#225ea8']
    >>> vba_choropleth('HOVAL', 'CRIME', gdf, cmap=color_list,
    ...                rgb_mapclassify=dict(classifier='quantiles', k=3),
    ...                alpha_mapclassify=dict(classifier='quantiles'))
    >>> plt.show()

    Add a legend and use divergent alpha values

    >>> fig = plt.figure(figsize=(15,10))
    >>> ax = fig.add_subplot(111)
    >>> vba_choropleth('HOVAL', 'CRIME', gdf, divergent=True,
    ...                alpha_mapclassify=dict(classifier='quantiles', k=5),
    ...                rgb_mapclassify=dict(classifier='quantiles', k=5),
    ...                legend=True, ax=ax)
    >>> plt.show()

    """
    # accept either a column name or a raw array for both variables
    x = np.array(gdf[x_var]) if isinstance(x_var, str) else x_var
    y = np.array(gdf[y_var]) if isinstance(y_var, str) else y_var

    if ax is None:
        fig = plt.figure()
        ax = fig.add_subplot(111)
    else:
        fig = ax.get_figure()

    # BUG FIX: previously a legend request without classified bins crashed
    # later with an opaque NameError on `rgb_bins`; fail early and clearly.
    if legend and (rgb_mapclassify is None or alpha_mapclassify is None):
        raise ValueError(
            "legend=True requires classified values: pass both "
            "`rgb_mapclassify` and `alpha_mapclassify`."
        )

    if rgb_mapclassify is not None:
        rgb_bins = _bins_from_mapclassify(x, rgb_mapclassify)
        x = rgb_bins.yb

    if alpha_mapclassify is not None:
        # TODO: use the pct keyword here
        alpha_bins = _bins_from_mapclassify(y, alpha_mapclassify)
        y = alpha_bins.yb

    rgba, vba_cmap = value_by_alpha_cmap(
        x=x, y=y, cmap=cmap, divergent=divergent, revert_alpha=revert_alpha
    )
    gdf.plot(color=rgba, ax=ax)
    ax.set_axis_off()
    ax.set_aspect("equal")

    if legend:
        # small inset axes on the left for the 2-D (color x alpha) legend
        left, bottom, width, height = [0, 0.5, 0.2, 0.2]
        ax2 = fig.add_axes([left, bottom, width, height])
        vba_legend(rgb_bins, alpha_bins, vba_cmap, ax=ax2)
    return fig, ax
def vba_legend(rgb_bins, alpha_bins, cmap, ax=None):
    """
    Creates Value by Alpha heatmap used as choropleth legend.

    Parameters
    ----------
    rgb_bins : pysal.mapclassify instance
        Object of classified values used for rgb.
        Can be created with `mapclassify_bin()`
        or `pysal.mapclassify`.
    alpha_bins : pysal.mapclassify instance
        Object of classified values used for alpha.
        Can be created with `mapclassify_bin()`
        or `pysal.mapclassify`.
    cmap : str or list of str
        Matplotlib Colormap or list of colors used to build the legend.
    ax : matplotlib Axes instance, optional
        Axes in which to plot the figure in multiple Axes layout.
        Default = None

    Returns
    -------
    fig : matplotlib Figure instance
        Figure of Value by Alpha heatmap
    ax : matplotlib Axes instance
        Axes in which the figure is plotted

    Examples
    --------
    Imports

    >>> from libpysal import examples
    >>> import geopandas as gpd
    >>> import matplotlib.pyplot as plt
    >>> import matplotlib
    >>> import numpy as np
    >>> from splot.mapping import vba_legend, mapclassify_bin

    Load Example Data

    >>> link_to_data = examples.get_path('columbus.shp')
    >>> gdf = gpd.read_file(link_to_data)
    >>> x = gdf['HOVAL'].values
    >>> y = gdf['CRIME'].values

    Classify your data

    >>> rgb_bins = mapclassify_bin(x, 'quantiles')
    >>> alpha_bins = mapclassify_bin(y, 'quantiles')

    Plot your legend

    >>> fig, _ = vba_legend(rgb_bins, alpha_bins, cmap='RdBu')
    >>> plt.show()

    """
    # VALUES
    rgba, legend_cmap = value_by_alpha_cmap(rgb_bins.yb, alpha_bins.yb, cmap=cmap)
    # separate rgb and alpha values
    alpha = rgba[:, 3]
    # extract unique values for alpha and rgb
    alpha_vals = np.unique(alpha)
    rgb_vals = legend_cmap(
        (rgb_bins.bins - rgb_bins.bins.min())
        / (rgb_bins.bins.max() - rgb_bins.bins.min())
    )[:, 0:3]

    # PLOTTING
    if ax is None:
        fig = plt.figure()
        ax = fig.add_subplot(111)
    else:
        fig = ax.get_figure()

    # draw one unit square per (alpha, rgb) class combination:
    # x axis walks the alpha classes, y axis walks the rgb classes
    for irow, alpha_val in enumerate(alpha_vals):
        for icol, rgb_val in enumerate(rgb_vals):
            rect = patches.Rectangle(
                (irow, icol),
                1,
                1,
                linewidth=3,
                edgecolor="none",
                facecolor=rgb_val,
                alpha=alpha_val,
            )
            ax.add_patch(rect)

    values_alpha, x_in_thousand = format_legend(alpha_bins.bins)
    values_rgb, y_in_thousand = format_legend(rgb_bins.bins)
    ax.plot([], [])
    n_alpha = len(alpha_vals)
    n_rgb = len(rgb_vals)
    ax.set_xlim([0, n_alpha])
    ax.set_ylim([0, n_rgb])
    ax.set_xticks(np.arange(n_alpha) + 0.5)
    ax.set_yticks(np.arange(n_rgb) + 0.5)
    ax.set_xticklabels(
        ["< %1.1f" % val for val in values_alpha],
        rotation=30,
        horizontalalignment="right",
    )
    ax.set_yticklabels(["$<$%1.1f" % val for val in values_rgb])
    # BUG FIX: label each axis independently. The previous if/if/else
    # overwrote the x label when only x was in thousands, and never set
    # the x label when only y was in thousands.
    if x_in_thousand:
        ax.set_xlabel("alpha variable ($10^3$)")
    else:
        ax.set_xlabel("alpha variable")
    if y_in_thousand:
        ax.set_ylabel("rgb variable ($10^3$)")
    else:
        ax.set_ylabel("rgb variable")
    for side in ("left", "right", "bottom", "top"):
        ax.spines[side].set_visible(False)
    return fig, ax
def mapclassify_bin(
    y,
    classifier,
    k=5,
    pct=None,
    hinge=1.5,
    multiples=None,
    mindiff=0,
    initial=100,
    bins=None,
):
    """
    Classify your data with `pysal.mapclassify`
    Note: Input parameters are dependent on classifier used.

    Parameters
    ----------
    y : array
        (n,1), values to classify
    classifier : str
        pysal.mapclassify classification scheme
    k : int, optional
        The number of classes. Default=5.
    pct : array, optional
        Percentiles used for classification with `percentiles`.
        Default=[1,10,50,90,99,100]
    hinge : float, optional
        Multiplier for IQR when `BoxPlot` classifier used.
        Default=1.5.
    multiples : array, optional
        The multiples of the standard deviation to add/subtract from
        the sample mean to define the bins using `std_mean`.
        Default=[-2,-1,1,2].
    mindiff : float, optional
        The minimum difference between class breaks
        if using `maximum_breaks` classifier. Default=0.
    initial : int
        Number of initial solutions to generate or number of runs
        when using `natural_breaks` or `max_p_classifier`.
        Default=100.
        Note: setting initial to 0 will result in the quickest
        calculation of bins.
    bins : array, optional
        (k,1), upper bounds of classes (have to be monotonically
        increasing) if using `user_defined` classifier.
        Default=None, Example=[20, max(y)].

    Returns
    -------
    bins : pysal.mapclassify instance
        Object containing bin ids for each observation (.yb),
        upper bounds of each class (.bins), number of classes (.k)
        and number of observations falling in each class (.counts)

    Raises
    ------
    ValueError
        If ``classifier`` is not a supported classification scheme.

    Note: Supported classifiers include: quantiles, box_plot, equal_interval,
    fisher_jenks, headtail_breaks, jenks_caspall, jenks_caspall_forced,
    max_p_classifier, maximum_breaks, natural_breaks, percentiles, std_mean,
    user_defined

    Examples
    --------

    Imports

    >>> from libpysal import examples
    >>> import geopandas as gpd
    >>> from splot.mapping import mapclassify_bin

    Load Example Data

    >>> link_to_data = examples.get_path('columbus.shp')
    >>> gdf = gpd.read_file(link_to_data)
    >>> x = gdf['HOVAL'].values

    Classify values by quantiles

    >>> quantiles = mapclassify_bin(x, 'quantiles')

    Classify values by box_plot and set hinge to 2

    >>> box_plot = mapclassify_bin(x, 'box_plot', hinge=2)

    """
    # Materialise documented defaults here instead of using mutable
    # default arguments in the signature.
    if pct is None:
        pct = [1, 10, 50, 90, 99, 100]
    if multiples is None:
        multiples = [-2, -1, 1, 2]

    classifier = classifier.lower()
    if classifier not in _classifiers:
        raise ValueError(
            "Invalid scheme. Scheme must be in the" " set: %r" % _classifiers.keys()
        )
    # dispatch on the classifier: each scheme takes different positional args
    elif classifier == "box_plot":
        bins = _classifiers[classifier](y, hinge)
    elif classifier == "headtail_breaks":
        bins = _classifiers[classifier](y)
    elif classifier == "percentiles":
        bins = _classifiers[classifier](y, pct)
    elif classifier == "std_mean":
        bins = _classifiers[classifier](y, multiples)
    elif classifier == "maximum_breaks":
        bins = _classifiers[classifier](y, k, mindiff)
    elif classifier in ["natural_breaks", "max_p_classifier"]:
        bins = _classifiers[classifier](y, k, initial)
    elif classifier == "user_defined":
        bins = _classifiers[classifier](y, bins)
    else:
        bins = _classifiers[classifier](y, k)
    return bins
31 | """ 32 | 33 | __author__ = "Stefanie Lumnitz " 34 | 35 | 36 | def plot_choropleth( 37 | df, 38 | attribute, 39 | title=None, 40 | plot_width=500, 41 | plot_height=500, 42 | method="quantiles", 43 | k=5, 44 | reverse_colors=False, 45 | tools="", 46 | region_column="", 47 | ): 48 | """ 49 | Plot Choropleth colored according to attribute 50 | 51 | Parameters 52 | ---------- 53 | df : Geopandas dataframe 54 | Dataframe containign relevant shapes and attribute values. 55 | attribute : str 56 | Name of column containing attribute values of interest. 57 | title : str, optional 58 | Title of map. Default title=None 59 | plot_width : int, optional 60 | Width dimension of the figure in screen units/ pixels. 61 | Default = 500 62 | plot_height : int, optional 63 | Height dimension of the figure in screen units/ pixels. 64 | Default = 500 65 | method : str, optional 66 | Classification method to be used. Options supported: 67 | * 'quantiles' (default) 68 | * 'fisher-jenks' 69 | * 'equal-interval' 70 | k : int, optional 71 | Number of bins, assigning values to. Default k=5 72 | reverse_colors: boolean 73 | Reverses the color palette to show lightest colors for 74 | lowest values. Default reverse_colors=False 75 | tools : str, optional 76 | Tools used for bokeh plotting. Default = '' 77 | region_column : str, optional 78 | Column name containing region descpriptions/ names or polygone ids. 79 | Default = ''. 
80 | 81 | Returns 82 | ------- 83 | fig : Bokeh Figure instance 84 | Figure of Choropleth 85 | 86 | Examples 87 | -------- 88 | >>> import libpysal.api as lp 89 | >>> from libpysal import examples 90 | >>> import geopandas as gpd 91 | >>> import esda 92 | >>> from splot.bk import plot_choropleth 93 | >>> from bokeh.io import show 94 | 95 | >>> link = examples.get_path('columbus.shp') 96 | >>> df = gpd.read_file(link) 97 | >>> w = lp.Queen.from_dataframe(df) 98 | >>> w.transform = 'r' 99 | 100 | >>> TOOLS = "tap,help" 101 | >>> fig = plot_choropleth(df, 'HOVAL', title='columbus', 102 | ... reverse_colors=True, tools=TOOLS) 103 | >>> show(fig) 104 | """ 105 | # We're adding columns, do that on a copy rather than on the users' input 106 | df = df.copy() 107 | 108 | # Extract attribute values from df 109 | attribute_values = df[attribute].values 110 | 111 | # Create bin labels with bin_labels_choropleth() 112 | bin_labels = bin_labels_choropleth(df, attribute_values, method, k) 113 | 114 | # Initialize GeoJSONDataSource 115 | geo_source = GeoJSONDataSource(geojson=df.to_json()) 116 | 117 | fig = _plot_choropleth_fig( 118 | geo_source, 119 | attribute, 120 | bin_labels, 121 | bounds=df.total_bounds, 122 | region_column=region_column, 123 | title=title, 124 | plot_width=plot_width, 125 | plot_height=plot_height, 126 | method=method, 127 | k=k, 128 | reverse_colors=reverse_colors, 129 | tools=tools, 130 | ) 131 | return fig 132 | 133 | 134 | def _plot_choropleth_fig( 135 | geo_source, 136 | attribute, 137 | bin_labels, 138 | bounds, 139 | region_column="", 140 | title=None, 141 | plot_width=500, 142 | plot_height=500, 143 | method="quantiles", 144 | k=5, 145 | reverse_colors=False, 146 | tools="", 147 | ): 148 | colors = list(palettes.YlGnBu[k]) 149 | if reverse_colors is True: 150 | colors.reverse() # lightest color for lowest values 151 | 152 | # make data aspect ration match the figure aspect ratio 153 | # to avoid map distortion (1km=1km) 154 | x_min, x_max, y_min, 
y_max = calc_data_aspect(plot_height, plot_width, bounds) 155 | 156 | # Create figure 157 | fig = figure( 158 | title=title, 159 | plot_width=plot_width, 160 | plot_height=plot_height, 161 | tools=tools, 162 | x_range=(x_min, x_max), 163 | y_range=(y_min, y_max), 164 | ) 165 | # The use of `nonselection_fill_*` shouldn't be necessary, 166 | # but currently it is. This looks like a bug in Bokeh 167 | # where gridplot plus taptool chooses the underlay from the figure 168 | # that is clicked and applies it to the other figure as well. 169 | fill_color = { 170 | "field": "labels_choro", 171 | "transform": CategoricalColorMapper(palette=colors, factors=bin_labels), 172 | } 173 | fig.patches( 174 | "xs", 175 | "ys", 176 | fill_alpha=0.7, 177 | fill_color=fill_color, 178 | line_color="white", 179 | nonselection_fill_alpha=0.2, 180 | nonselection_fill_color=fill_color, 181 | selection_line_color="firebrick", 182 | selection_fill_color=fill_color, 183 | line_width=0.5, 184 | source=geo_source, 185 | ) 186 | 187 | # add hover tool 188 | if "hover" in tools: 189 | hover = fig.select_one(HoverTool) 190 | hover.point_policy = "follow_mouse" 191 | hover.tooltips = [ 192 | ("Region", "@" + region_column), 193 | ("Attribute", "@" + attribute + "{0.0}"), 194 | ] 195 | 196 | # add legend with add_legend() 197 | add_legend(fig, bin_labels, colors) 198 | 199 | # change layout 200 | fig.xgrid.grid_line_color = None 201 | fig.ygrid.grid_line_color = None 202 | fig.axis.visible = None 203 | return fig 204 | 205 | 206 | def lisa_cluster( 207 | moran_loc, 208 | df, 209 | p=0.05, 210 | region_column="", 211 | title=None, 212 | plot_width=500, 213 | plot_height=500, 214 | tools="", 215 | ): 216 | """ 217 | Lisa Cluster map, coloured by local spatial autocorrelation 218 | 219 | Parameters 220 | ---------- 221 | moran_loc : esda.moran.Moran_Local instance 222 | values of Moran's Local Autocorrelation Statistic 223 | df : geopandas dataframe instance 224 | In mask_local_auto(), assign 
def lisa_cluster(
    moran_loc,
    df,
    p=0.05,
    region_column="",
    title=None,
    plot_width=500,
    plot_height=500,
    tools="",
):
    """
    Lisa Cluster map, coloured by local spatial autocorrelation

    Parameters
    ----------
    moran_loc : esda.moran.Moran_Local instance
        values of Moran's Local Autocorrelation Statistic
    df : geopandas dataframe instance
        In mask_local_auto(), assign df['labels'] per row. Note that
        ``df`` will be modified, so calling functions uses a copy of
        the user provided ``df``.
    p : float, optional
        The p-value threshold for significance. Points will
        be colored by significance. Default = 0.05
    region_column : str, optional
        Column name containing region descriptions/ names or polygon
        ids, shown by the hover tool. Default = ''
    title : str, optional
        Title of map. Default title=None
    plot_width : int, optional
        Width dimension of the figure in screen units/ pixels.
        Default = 500
    plot_height : int, optional
        Height dimension of the figure in screen units/ pixels.
        Default = 500
    tools : str, optional
        Tools used for bokeh plotting. Default = ''

    Returns
    -------
    fig : Bokeh figure instance
        Figure of LISA cluster map, colored by local spatial autocorrelation

    Examples
    --------
    >>> import libpysal.api as lp
    >>> from libpysal import examples
    >>> import geopandas as gpd
    >>> from esda.moran import Moran_Local
    >>> from splot.bk import lisa_cluster
    >>> from bokeh.io import show

    >>> link = examples.get_path('columbus.shp')
    >>> df = gpd.read_file(link)
    >>> y = df['HOVAL'].values
    >>> w = lp.Queen.from_dataframe(df)
    >>> w.transform = 'r'
    >>> moran_loc = Moran_Local(y, w)

    >>> TOOLS = "tap,reset,help"
    >>> fig = lisa_cluster(moran_loc, df, p=0.05, tools=TOOLS)
    >>> show(fig)
    """
    # We're adding columns, do that on a copy rather than on the users' input
    df = df.copy()

    # BUG FIX: honour the `p` argument; the significance threshold was
    # previously hard-coded to 0.05 regardless of the value passed in.
    cluster_labels, colors5, _, labels = mask_local_auto(moran_loc, p=p)
    df["labels_lisa"] = labels
    df["moranloc_psim"] = moran_loc.p_sim
    df["moranloc_q"] = moran_loc.q

    # load df into bokeh data source
    geo_source = GeoJSONDataSource(geojson=df.to_json())

    fig = _lisa_cluster_fig(
        geo_source,
        moran_loc,
        cluster_labels,
        colors5,
        bounds=df.total_bounds,
        region_column=region_column,
        title=title,
        plot_width=plot_width,
        plot_height=plot_height,
        tools=tools,
    )
    return fig


def _lisa_cluster_fig(
    geo_source,
    moran_loc,
    cluster_labels,
    colors5,
    bounds,
    region_column="",
    title=None,
    plot_width=500,
    plot_height=500,
    tools="",
):
    """Build the bokeh LISA cluster figure from a prepared GeoJSON source."""
    # make data aspect ratio match the figure aspect ratio
    # to avoid map distortion (1km=1km)
    x_min, x_max, y_min, y_max = calc_data_aspect(plot_height, plot_width, bounds)

    # Create figure
    fig = figure(
        title=title,
        toolbar_location="right",
        plot_width=plot_width,
        plot_height=plot_height,
        x_range=(x_min, x_max),
        y_range=(y_min, y_max),
        tools=tools,
    )
    # color each polygon by its LISA cluster label
    fill_color = {
        "field": "labels_lisa",
        "transform": CategoricalColorMapper(palette=colors5, factors=cluster_labels),
    }
    fig.patches(
        "xs",
        "ys",
        fill_color=fill_color,
        fill_alpha=0.8,
        nonselection_fill_alpha=0.2,
        nonselection_fill_color=fill_color,
        line_color="white",
        selection_line_color="firebrick",
        selection_fill_color=fill_color,
        line_width=0.5,
        source=geo_source,
    )

    if "hover" in tools:
        # hover tooltip shows region, pseudo p-value and Moran quadrant
        hover = fig.select_one(HoverTool)
        hover.point_policy = "follow_mouse"
        hover.tooltips = [
            ("Region", "@" + region_column),
            ("Significance", "@moranloc_psim{0.00}"),
            ("Quadrant", "@moranloc_q{0}"),
        ]

    # add legend with add_legend()
    add_legend(fig, cluster_labels, colors5)

    # change layout
    fig.xgrid.grid_line_color = None
    fig.ygrid.grid_line_color = None
    fig.axis.visible = None
    return fig
def _moran_scatterplot_calc(moran_loc, p):
    """
    Assemble the columns backing a Moran scatterplot.

    Returns a dict with the standardized attribute (``moran_z``), its
    spatial lag, per-point colors, the OLS fit line and the local Moran
    significance/quadrant values, ready to feed into a Bokeh data source.
    """
    lag = spreg.lag_spatial(moran_loc.w, moran_loc.z)
    fit = spreg.OLS(moran_loc.z[:, None], lag[:, None])

    if p is None:
        # no significance threshold requested: uniform point color
        colors = "black"
    else:
        if not isinstance(moran_loc, Moran_Local):
            raise ValueError("`moran_loc` is not a esda.moran.Moran_Local instance")
        # color points by significance / quadrant
        _, _, colors, _ = mask_local_auto(moran_loc, p=p)

    return {
        "moran_z": moran_loc.z,
        "lag": lag,
        "colors": colors,
        "fit_y": fit.predy.flatten(),
        "moranloc_psim": moran_loc.p_sim,
        "moranloc_q": moran_loc.q,
    }
def plot_local_autocorrelation(
    moran_loc,
    df,
    attribute,
    p=0.05,
    region_column="",
    plot_width=350,
    plot_height=400,
    method="quantiles",
    k=5,
    reverse_colors=False,
):
    """
    Plot Moran Scatterplot, LISA cluster and Choropleth
    for Local Spatial Autocorrelation Analysis

    Parameters
    ----------
    moran_loc : esda.moran.Moran_Local instance
        values of Moran's Local Autocorrelation Statistic
    df : Geopandas dataframe
        Dataframe containing relevant polygon and attribute values.
        Note: ``df`` is not modified; an internal copy is used.
    attribute : str
        Name of column containing attribute values of interest.
    p : float, optional
        The p-value threshold for significance. Points and polygons
        will be colored by significance. Default =0.05
    region_column : str, optional
        Name of the column shown as region label by the hover tool.
        Default =''
    plot_width : int, optional
        Width dimension of each figure in screen units/ pixels.
        Default = 350
    plot_height : int, optional
        Height dimension of each figure in screen units/ pixels.
        Default = 400
    method : str, optional
        Classification method to be used. Options supported:
        * 'quantiles' (default)
        * 'fisher-jenks'
        * 'equal-interval'
    k : int, optional
        Number of bins, assigning values to. Default k=5
    reverse_colors: boolean
        Reverses the color palette to show lightest colors for
        lowest values in Choropleth map. Default reverse_colors=False

    Returns
    -------
    fig : Bokeh Figure instance
        Gridplot of Moran scatterplot, LISA cluster map and Choropleth

    Examples
    --------
    >>> import libpysal.api as lp
    >>> from libpysal import examples
    >>> import geopandas as gpd
    >>> from esda.moran import Moran_Local
    >>> from splot.bk import plot_local_autocorrelation
    >>> from bokeh.io import show

    >>> link = examples.get_path('columbus.shp')
    >>> df = gpd.read_file(link)
    >>> y = df['HOVAL'].values
    >>> w = lp.Queen.from_dataframe(df)
    >>> w.transform = 'r'
    >>> moran_loc = Moran_Local(y, w)

    >>> fig = plot_local_autocorrelation(moran_loc, df, 'HOVAL',
                                         reverse_colors=True)
    >>> show(fig)
    """
    # We're adding columns, do that on a copy rather than on the users' input
    df = df.copy()

    # Add relevant results for moran_scatterplot as columns to geodataframe
    # (this already includes moranloc_psim and moranloc_q)
    moran_scatterplot_data = _moran_scatterplot_calc(moran_loc, p)
    for key, values in moran_scatterplot_data.items():
        df[key] = values

    # add cluster_labels and colors5 in mask_local_auto.
    # BUGFIX: use the caller-supplied `p` here; it was previously hard-coded
    # to 0.05, so the cluster map ignored any other threshold and could
    # disagree with the scatterplot coloring.
    cluster_labels, colors5, _, labels = mask_local_auto(moran_loc, p=p)
    df["labels_lisa"] = labels

    # Extract attribute values from df
    attribute_values = df[attribute].values
    # Create bin labels with bin_labels_choropleth()
    bin_labels = bin_labels_choropleth(df, attribute_values, method, k)

    # load df into bokeh data source
    geo_source = GeoJSONDataSource(geojson=df.to_json())

    TOOLS = "tap,reset,help,hover"

    scatter = _moran_scatterplot_fig(
        geo_source,
        p=p,
        region_column=region_column,
        title="Local Spatial Autocorrelation",
        # slightly wider scatterplot to compensate for its axis labels
        plot_width=int(plot_width * 1.15),
        plot_height=plot_height,
        tools=TOOLS,
    )
    LISA = _lisa_cluster_fig(
        geo_source,
        moran_loc,
        cluster_labels,
        colors5,
        bounds=df.total_bounds,
        region_column=region_column,
        plot_width=plot_width,
        plot_height=plot_height,
        tools=TOOLS,
    )
    choro = _plot_choropleth_fig(
        geo_source,
        attribute,
        bin_labels,
        bounds=df.total_bounds,
        region_column=region_column,
        reverse_colors=reverse_colors,
        plot_width=plot_width,
        plot_height=plot_height,
        tools=TOOLS,
    )

    fig = gridplot([[scatter, LISA, choro]], sizing_mode="scale_width")
    return fig
def _moran_loc_from_rose_calc(rose):
    """
    Calculate esda.moran.Moran_Local values from giddy.rose object
    """
    # Run both local Moran computations from an identical RNG state so the
    # permutation-based inference is comparable across the two time points,
    # and leave the global RNG exactly where it started.
    saved_state = np.random.get_state()
    loc_start = Moran_Local(rose.Y[:, 0], rose.w)
    np.random.set_state(saved_state)
    loc_end = Moran_Local(rose.Y[:, 1], rose.w)
    np.random.set_state(saved_state)
    return loc_start, loc_end
def _dynamic_lisa_heatmap(moran_locy, moran_locx, p, ax, **kwargs):
    """
    Create dynamic_lisa_heatmap figure from esda.moran.Moran_local values
    """
    heatmap_data, diagonal_mask = _dynamic_lisa_heatmap_data(moran_locy, moran_locx, p)

    # fill in plot-style defaults without clobbering caller-supplied kwargs;
    # the diagonal (no transition) is masked out so transitions stand out
    defaults = {
        "annot": True,
        "cmap": "YlGnBu",
        "mask": diagonal_mask,
        "cbar": False,
        "square": True,
        "xticklabels": ["ns", "HH", "HL", "LH", "LL"],
        "yticklabels": ["ns", "HH", "HL", "LH", "LL"],
    }
    for key, value in defaults.items():
        kwargs.setdefault(key, value)

    ax = sns.heatmap(heatmap_data, ax=ax, **kwargs)
    ax.set_xlabel("End time")
    ax.set_ylabel("Start time")
    return ax.get_figure(), ax
196 | 197 | Returns 198 | ------- 199 | fig : Matplotlib Figure instance 200 | LISA rose plot figure 201 | ax : matplotlib Axes instance 202 | Axes in which the figure is plotted 203 | 204 | Examples 205 | -------- 206 | >>> import geopandas as gpd 207 | >>> import pandas as pd 208 | >>> from libpysal.weights.contiguity import Queen 209 | >>> from libpysal import examples 210 | >>> import numpy as np 211 | >>> import matplotlib.pyplot as plt 212 | >>> from giddy.directional import Rose 213 | >>> from splot.giddy import dynamic_lisa_rose 214 | 215 | get csv and shp files 216 | 217 | >>> shp_link = examples.get_path('us48.shp') 218 | >>> df = gpd.read_file(shp_link) 219 | >>> income_table = pd.read_csv(examples.get_path("usjoin.csv")) 220 | 221 | calculate relative values 222 | 223 | >>> for year in range(1969, 2010): 224 | ... income_table[str(year) + '_rel'] = ( 225 | ... income_table[str(year)] / income_table[str(year)].mean()) 226 | 227 | merge to one gdf 228 | 229 | >>> gdf = df.merge(income_table,left_on='STATE_NAME',right_on='Name') 230 | 231 | retrieve spatial weights and data for two points in time 232 | 233 | >>> w = Queen.from_dataframe(gdf) 234 | >>> w.transform = 'r' 235 | >>> y1 = gdf['1969_rel'].values 236 | >>> y2 = gdf['2000_rel'].values 237 | 238 | calculate rose Object 239 | 240 | >>> Y = np.array([y1, y2]).T 241 | >>> rose = Rose(Y, w, k=5) 242 | 243 | plot 244 | 245 | >>> dynamic_lisa_rose(rose, attribute=y1) 246 | >>> plt.show() 247 | 248 | customize plot 249 | 250 | >>> dynamic_lisa_rose(rose, c='r') 251 | >>> plt.show() 252 | 253 | """ 254 | # save_old default values 255 | old_gridcolor = mpl.rcParams["grid.color"] 256 | old_facecolor = mpl.rcParams["axes.facecolor"] 257 | old_edgecolor = mpl.rcParams["axes.edgecolor"] 258 | # define plotting style 259 | mpl.rcParams["grid.color"] = "w" 260 | mpl.rcParams["axes.edgecolor"] = "w" 261 | mpl.rcParams["axes.facecolor"] = "#E5E5E5" 262 | alpha = kwargs.pop("alpha", 0.9) 263 | cmap = 
def _add_arrow(line, position=None, direction="right", size=15, color=None):
    """
    add an arrow to a line.

    Parameters
    ----------
    line:
        Line2D object
    position: float
        x-position of the arrow. If None, mean of xdata is taken
    direction: str
        'left' or 'right'
    size: int
        size of the arrow in fontsize points
    color: str
        if None, line color is taken.

    """
    if color is None:
        color = line.get_color()

    xdata = np.asarray(line.get_xdata())
    ydata = np.asarray(line.get_ydata())

    # BUGFIX: `position` and `direction` were documented but previously
    # ignored (the arrow was always drawn from the first to the second
    # vertex). Honor them now; for the default arguments on the two-point
    # vector lines drawn by dynamic_lisa_vectors() the result is unchanged.
    if position is None:
        position = xdata.mean()
    # start at the vertex closest to the requested x-position
    start_ind = int(np.argmin(np.abs(xdata - position)))
    if direction == "right":
        end_ind = min(start_ind + 1, len(xdata) - 1)
    else:
        end_ind = max(start_ind - 1, 0)

    line.axes.annotate(
        "",
        xytext=(xdata[start_ind], ydata[start_ind]),
        xy=(xdata[end_ind], ydata[end_ind]),
        arrowprops=dict(arrowstyle="->", color=color),
        size=size,
    )
def dynamic_lisa_composite(rose, gdf, p=0.05, figsize=(13, 10)):
    """
    Composite visualisation for dynamic LISA values over two points in time.
    Includes dynamic lisa heatmap, dynamic lisa rose plot,
    and LISA cluster plots for both, compared points in time.

    Parameters
    ----------
    rose : giddy.directional.Rose instance
        A ``Rose`` object, which contains (among other attributes) LISA
        values at two points in time, and a method
        to perform inference on those.
    gdf : geopandas dataframe instance
        The GeoDataFrame containing information and polygons to plot.
    p : float, optional
        The p-value threshold for significance. Default =0.05.
    figsize: tuple, optional
        W, h of figure. Default =(13,10)

    Returns
    -------
    fig : Matplotlib Figure instance
        Dynamic lisa composite figure.
    axs : matplotlib Axes instance
        Axes in which the figure is plotted.

    Examples
    --------
    >>> import geopandas as gpd
    >>> import pandas as pd
    >>> from libpysal.weights.contiguity import Queen
    >>> from libpysal import examples
    >>> import numpy as np
    >>> import matplotlib.pyplot as plt
    >>> from giddy.directional import Rose
    >>> from splot.giddy import dynamic_lisa_composite

    get csv and shp files

    >>> shp_link = examples.get_path('us48.shp')
    >>> df = gpd.read_file(shp_link)
    >>> income_table = pd.read_csv(examples.get_path("usjoin.csv"))

    calculate relative values

    >>> for year in range(1969, 2010):
    ...     income_table[str(year) + '_rel'] = (
    ...         income_table[str(year)] / income_table[str(year)].mean())

    merge to one gdf

    >>> gdf = df.merge(income_table,left_on='STATE_NAME',right_on='Name')

    retrieve spatial weights and data for two points in time

    >>> w = Queen.from_dataframe(gdf)
    >>> w.transform = 'r'
    >>> y1 = gdf['1969_rel'].values
    >>> y2 = gdf['2000_rel'].values

    calculate rose Object

    >>> Y = np.array([y1, y2]).T
    >>> rose = Rose(Y, w, k=5)

    plot

    >>> dynamic_lisa_composite(rose, gdf)
    >>> plt.show()

    customize plot

    >>> fig, axs = dynamic_lisa_composite(rose, gdf)
    >>> axs[0].set_ylabel('1996')
    >>> axs[0].set_xlabel('2009')
    >>> axs[1].set_title('LISA cluster for 1996')
    >>> axs[3].set_title('LISA cluster for 2009')
    >>> plt.show()

    """
    # Moran_Local uses random numbers,
    # which we cannot change between the two years!
    # (both Moran_Local runs start from the same RNG state)
    moran_locy, moran_locx = _moran_loc_from_rose_calc(rose)

    # initialize figure: 2x2 grid of
    #   [heatmap, LISA map (start)]
    #   [rose,    LISA map (end)  ]
    fig = plt.figure(figsize=figsize)
    fig.suptitle("Space-time autocorrelation", fontsize=20)
    axs = []
    axs.append(plt.subplot(221))
    axs.append(plt.subplot(222))
    # save_old default values
    old_gridcolor = mpl.rcParams["grid.color"]
    old_facecolor = mpl.rcParams["axes.facecolor"]
    old_edgecolor = mpl.rcParams["axes.edgecolor"]
    # define plotting style
    mpl.rcParams["grid.color"] = "w"
    mpl.rcParams["axes.edgecolor"] = "w"
    mpl.rcParams["axes.facecolor"] = "#E5E5E5"
    # define axs[2]
    # NOTE: the polar axes must be created while the temporary rcParams are
    # active, so it picks up the custom grid/face colors at creation time
    axs.append(plt.subplot(223, projection="polar"))
    # reset style to old default values
    mpl.rcParams["grid.color"] = old_gridcolor
    mpl.rcParams["axes.facecolor"] = old_facecolor
    mpl.rcParams["axes.edgecolor"] = old_edgecolor
    # define axs[3]
    axs.append(plt.subplot(224))

    # space_time_heatmap (transition counts between Moran quadrants)
    _dynamic_lisa_heatmap(moran_locy, moran_locx, p=p, ax=axs[0])
    axs[0].xaxis.set_ticks_position("top")
    axs[0].xaxis.set_label_position("top")

    # Lisa_cluster maps for the two points in time
    lisa_cluster(
        moran_locy,
        gdf,
        p=p,
        ax=axs[1],
        legend=True,
        legend_kwds={"loc": "upper left", "bbox_to_anchor": (0.92, 1.05)},
    )
    axs[1].set_title("Start time")
    lisa_cluster(
        moran_locx,
        gdf,
        p=p,
        ax=axs[3],
        legend=True,
        legend_kwds={"loc": "upper left", "bbox_to_anchor": (0.92, 1.05)},
    )
    axs[3].set_title("End time")

    # Rose diagram: Moran movement vectors:
    dynamic_lisa_rose(rose, ax=axs[2])
    return fig, axs
616 | p : float, optional 617 | The p-value threshold for significance. Default =0.05 618 | figsize: tuple, optional 619 | W, h of figure. Default =(13,10) 620 | 621 | Returns 622 | ------- 623 | None 624 | 625 | Examples 626 | -------- 627 | **Note**: this function creates Jupyter notebook widgets, so is meant only 628 | to run in a notebook. 629 | 630 | >>> import geopandas as gpd 631 | >>> import pandas as pd 632 | >>> from libpysal.weights.contiguity import Queen 633 | >>> from libpysal import examples 634 | >>> import numpy as np 635 | >>> import matplotlib.pyplot as plt 636 | 637 | If you want to see figures embedded inline in a Jupyter notebook, 638 | add a line ``%matplotlib inline`` at the top of your notebook. 639 | 640 | >>> from giddy.directional import Rose 641 | >>> from splot.giddy import dynamic_lisa_composite_explore 642 | 643 | get csv and shp files 644 | 645 | >>> shp_link = examples.get_path('us48.shp') 646 | >>> df = gpd.read_file(shp_link) 647 | >>> income_table = pd.read_csv(examples.get_path("usjoin.csv")) 648 | 649 | calculate relative values 650 | 651 | >>> for year in range(1969, 2010): 652 | ... income_table[str(year) + '_rel'] = ( 653 | ... 
income_table[str(year)] / income_table[str(year)].mean()) 654 | 655 | merge to one gdf 656 | 657 | >>> gdf = df.merge(income_table,left_on='STATE_NAME',right_on='Name') 658 | 659 | retrieve spatial weights and data for two points in time 660 | 661 | >>> w = Queen.from_dataframe(gdf) 662 | >>> w.transform = 'r' 663 | >>> y1 = gdf['1969_rel'].values 664 | >>> y2 = gdf['2000_rel'].values 665 | 666 | calculate rose Object 667 | 668 | >>> Y = np.array([y1, y2]).T 669 | >>> rose = Rose(Y, w, k=5) 670 | 671 | plot 672 | 673 | >>> fig = dynamic_lisa_composite_explore(rose, gdf, pattern='rel') 674 | >>> # plt.show() 675 | 676 | """ 677 | try: 678 | from ipywidgets import fixed, interact 679 | except (ImportError, ModuleNotFoundError): 680 | raise ImportError( 681 | "`ipywidgets` package is required to use " 682 | "dynamic_lisa_composite_explore." 683 | "You can install it using `conda install ipywidgets` " 684 | "or `pip install ipywidgets`." 685 | ) 686 | coldict = {col: col for col in gdf.columns if col.endswith(pattern)} 687 | interact( 688 | _dynamic_lisa_widget_update, 689 | start_time=coldict, 690 | end_time=coldict, 691 | rose=fixed(rose), 692 | gdf=fixed(gdf), 693 | p=fixed(p), 694 | figsize=fixed(figsize), 695 | ) 696 | -------------------------------------------------------------------------------- /splot/_version.py: -------------------------------------------------------------------------------- 1 | # This file helps to compute a version number in source trees obtained from 2 | # git-archive tarball (such as those provided by githubs download-from-tag 3 | # feature). Distribution tarballs (built by setup.py sdist) and build 4 | # directories (produced by setup.py build) will contain a much shorter file 5 | # that just contains the computed version number. 6 | 7 | # This file is released into the public domain. 
def get_keywords():
    """Get the keywords needed to look up the version information."""
    # these strings will be replaced by git during git-archive.
    # setup.py/versioneer.py will grep for the variable names, so they must
    # each be defined on a line of their own. _version.py will just call
    # get_keywords().
    git_refnames = " (HEAD -> main)"
    git_full = "b8361cb5f4685d0945e08cbf9172ba701ce57c44"
    git_date = "2025-06-19 20:45:36 -0400"
    return {
        "refnames": git_refnames,
        "full": git_full,
        "date": git_date,
    }
run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): 73 | """Call the given command(s).""" 74 | assert isinstance(commands, list) 75 | process = None 76 | for command in commands: 77 | try: 78 | dispcmd = str([command] + args) 79 | # remember shell=False, so use git.cmd on windows, not just git 80 | process = subprocess.Popen( 81 | [command] + args, 82 | cwd=cwd, 83 | env=env, 84 | stdout=subprocess.PIPE, 85 | stderr=(subprocess.PIPE if hide_stderr else None), 86 | ) 87 | break 88 | except EnvironmentError: 89 | e = sys.exc_info()[1] 90 | if e.errno == errno.ENOENT: 91 | continue 92 | if verbose: 93 | print("unable to run %s" % dispcmd) 94 | print(e) 95 | return None, None 96 | else: 97 | if verbose: 98 | print("unable to find command, tried %s" % (commands,)) 99 | return None, None 100 | stdout = process.communicate()[0].strip().decode() 101 | if process.returncode != 0: 102 | if verbose: 103 | print("unable to run %s (error)" % dispcmd) 104 | print("stdout was %s" % stdout) 105 | return None, process.returncode 106 | return stdout, process.returncode 107 | 108 | 109 | def versions_from_parentdir(parentdir_prefix, root, verbose): 110 | """Try to determine the version from the parent directory name. 111 | 112 | Source tarballs conventionally unpack into a directory that includes both 113 | the project name and a version string. 
We will also support searching up 114 | two directory levels for an appropriately named parent directory 115 | """ 116 | rootdirs = [] 117 | 118 | for _ in range(3): 119 | dirname = os.path.basename(root) 120 | if dirname.startswith(parentdir_prefix): 121 | return { 122 | "version": dirname[len(parentdir_prefix) :], 123 | "full-revisionid": None, 124 | "dirty": False, 125 | "error": None, 126 | "date": None, 127 | } 128 | rootdirs.append(root) 129 | root = os.path.dirname(root) # up a level 130 | 131 | if verbose: 132 | print( 133 | "Tried directories %s but none started with prefix %s" 134 | % (str(rootdirs), parentdir_prefix) 135 | ) 136 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix") 137 | 138 | 139 | @register_vcs_handler("git", "get_keywords") 140 | def git_get_keywords(versionfile_abs): 141 | """Extract version information from the given file.""" 142 | # the code embedded in _version.py can just fetch the value of these 143 | # keywords. When used from setup.py, we don't want to import _version.py, 144 | # so we do it with a regexp instead. This function is not used from 145 | # _version.py. 
146 | keywords = {} 147 | try: 148 | with open(versionfile_abs, "r") as fobj: 149 | for line in fobj: 150 | if line.strip().startswith("git_refnames ="): 151 | mo = re.search(r'=\s*"(.*)"', line) 152 | if mo: 153 | keywords["refnames"] = mo.group(1) 154 | if line.strip().startswith("git_full ="): 155 | mo = re.search(r'=\s*"(.*)"', line) 156 | if mo: 157 | keywords["full"] = mo.group(1) 158 | if line.strip().startswith("git_date ="): 159 | mo = re.search(r'=\s*"(.*)"', line) 160 | if mo: 161 | keywords["date"] = mo.group(1) 162 | except EnvironmentError: 163 | pass 164 | return keywords 165 | 166 | 167 | @register_vcs_handler("git", "keywords") 168 | def git_versions_from_keywords(keywords, tag_prefix, verbose): 169 | """Get version information from git keywords.""" 170 | if "refnames" not in keywords: 171 | raise NotThisMethod("Short version file found") 172 | date = keywords.get("date") 173 | if date is not None: 174 | # Use only the last line. Previous lines may contain GPG signature 175 | # information. 176 | date = date.splitlines()[-1] 177 | 178 | # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant 179 | # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 180 | # -like" string, which we must then edit to make compliant), because 181 | # it's been around since git-1.5.3, and it's too difficult to 182 | # discover which version we're using, or to work around using an 183 | # older one. 184 | date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 185 | refnames = keywords["refnames"].strip() 186 | if refnames.startswith("$Format"): 187 | if verbose: 188 | print("keywords are unexpanded, not using") 189 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball") 190 | refs = {r.strip() for r in refnames.strip("()").split(",")} 191 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of 192 | # just "foo-1.0". If we see a "tag: " prefix, prefer those. 
193 | TAG = "tag: " 194 | tags = {r[len(TAG) :] for r in refs if r.startswith(TAG)} 195 | if not tags: 196 | # Either we're using git < 1.8.3, or there really are no tags. We use 197 | # a heuristic: assume all version tags have a digit. The old git %d 198 | # expansion behaves like git log --decorate=short and strips out the 199 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish 200 | # between branches and tags. By ignoring refnames without digits, we 201 | # filter out many common branch names like "release" and 202 | # "stabilization", as well as "HEAD" and "master". 203 | tags = {r for r in refs if re.search(r"\d", r)} 204 | if verbose: 205 | print("discarding '%s', no digits" % ",".join(refs - tags)) 206 | if verbose: 207 | print("likely tags: %s" % ",".join(sorted(tags))) 208 | for ref in sorted(tags): 209 | # sorting will prefer e.g. "2.0" over "2.0rc1" 210 | if ref.startswith(tag_prefix): 211 | r = ref[len(tag_prefix) :] 212 | # Filter out refs that exactly match prefix or that don't start 213 | # with a number once the prefix is stripped (mostly a concern 214 | # when prefix is '') 215 | if not re.match(r"\d", r): 216 | continue 217 | if verbose: 218 | print("picking %s" % r) 219 | return { 220 | "version": r, 221 | "full-revisionid": keywords["full"].strip(), 222 | "dirty": False, 223 | "error": None, 224 | "date": date, 225 | } 226 | # no suitable tags, so version is "0+unknown", but full hex is still there 227 | if verbose: 228 | print("no suitable tags, using unknown + full revision id") 229 | return { 230 | "version": "0+unknown", 231 | "full-revisionid": keywords["full"].strip(), 232 | "dirty": False, 233 | "error": "no suitable tags", 234 | "date": None, 235 | } 236 | 237 | 238 | @register_vcs_handler("git", "pieces_from_vcs") 239 | def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): 240 | """Get version from 'git describe' in the root of the source tree. 
241 | 242 | This only gets called if the git-archive 'subst' keywords were *not* 243 | expanded, and _version.py hasn't already been rewritten with a short 244 | version string, meaning we're inside a checked out source tree. 245 | """ 246 | GITS = ["git"] 247 | if sys.platform == "win32": 248 | GITS = ["git.cmd", "git.exe"] 249 | 250 | _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) 251 | if rc != 0: 252 | if verbose: 253 | print("Directory %s not under git control" % root) 254 | raise NotThisMethod("'git rev-parse --git-dir' returned error") 255 | 256 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] 257 | # if there isn't one, this yields HEX[-dirty] (no NUM) 258 | describe_out, rc = runner( 259 | GITS, 260 | [ 261 | "describe", 262 | "--tags", 263 | "--dirty", 264 | "--always", 265 | "--long", 266 | "--match", 267 | "%s*" % tag_prefix, 268 | ], 269 | cwd=root, 270 | ) 271 | # --long was added in git-1.5.5 272 | if describe_out is None: 273 | raise NotThisMethod("'git describe' failed") 274 | describe_out = describe_out.strip() 275 | full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) 276 | if full_out is None: 277 | raise NotThisMethod("'git rev-parse' failed") 278 | full_out = full_out.strip() 279 | 280 | pieces = {} 281 | pieces["long"] = full_out 282 | pieces["short"] = full_out[:7] # maybe improved later 283 | pieces["error"] = None 284 | 285 | branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], cwd=root) 286 | # --abbrev-ref was added in git-1.6.3 287 | if rc != 0 or branch_name is None: 288 | raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") 289 | branch_name = branch_name.strip() 290 | 291 | if branch_name == "HEAD": 292 | # If we aren't exactly on a branch, pick a branch which represents 293 | # the current commit. If all else fails, we are on a branchless 294 | # commit. 
295 | branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) 296 | # --contains was added in git-1.5.4 297 | if rc != 0 or branches is None: 298 | raise NotThisMethod("'git branch --contains' returned error") 299 | branches = branches.split("\n") 300 | 301 | # Remove the first line if we're running detached 302 | if "(" in branches[0]: 303 | branches.pop(0) 304 | 305 | # Strip off the leading "* " from the list of branches. 306 | branches = [branch[2:] for branch in branches] 307 | if "master" in branches: 308 | branch_name = "master" 309 | elif not branches: 310 | branch_name = None 311 | else: 312 | # Pick the first branch that is returned. Good or bad. 313 | branch_name = branches[0] 314 | 315 | pieces["branch"] = branch_name 316 | 317 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] 318 | # TAG might have hyphens. 319 | git_describe = describe_out 320 | 321 | # look for -dirty suffix 322 | dirty = git_describe.endswith("-dirty") 323 | pieces["dirty"] = dirty 324 | if dirty: 325 | git_describe = git_describe[: git_describe.rindex("-dirty")] 326 | 327 | # now we have TAG-NUM-gHEX or HEX 328 | 329 | if "-" in git_describe: 330 | # TAG-NUM-gHEX 331 | mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe) 332 | if not mo: 333 | # unparseable. Maybe git-describe is misbehaving? 
334 | pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out 335 | return pieces 336 | 337 | # tag 338 | full_tag = mo.group(1) 339 | if not full_tag.startswith(tag_prefix): 340 | if verbose: 341 | fmt = "tag '%s' doesn't start with prefix '%s'" 342 | print(fmt % (full_tag, tag_prefix)) 343 | pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % ( 344 | full_tag, 345 | tag_prefix, 346 | ) 347 | return pieces 348 | pieces["closest-tag"] = full_tag[len(tag_prefix) :] 349 | 350 | # distance: number of commits since tag 351 | pieces["distance"] = int(mo.group(2)) 352 | 353 | # commit: short hex revision ID 354 | pieces["short"] = mo.group(3) 355 | 356 | else: 357 | # HEX: no tags 358 | pieces["closest-tag"] = None 359 | count_out, rc = runner(GITS, ["rev-list", "HEAD", "--count"], cwd=root) 360 | pieces["distance"] = int(count_out) # total number of commits 361 | 362 | # commit date: see ISO-8601 comment in git_versions_from_keywords() 363 | date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() 364 | # Use only the last line. Previous lines may contain GPG signature 365 | # information. 366 | date = date.splitlines()[-1] 367 | pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 368 | 369 | return pieces 370 | 371 | 372 | def plus_or_dot(pieces): 373 | """Return a + if we don't already have one, else return a .""" 374 | if "+" in pieces.get("closest-tag", ""): 375 | return "." 376 | return "+" 377 | 378 | 379 | def render_pep440(pieces): 380 | """Build up version string, with post-release "local version identifier". 381 | 382 | Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you 383 | get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty 384 | 385 | Exceptions: 386 | 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] 387 | """ 388 | if pieces["closest-tag"]: 389 | rendered = pieces["closest-tag"] 390 | if pieces["distance"] or pieces["dirty"]: 391 | rendered += plus_or_dot(pieces) 392 | rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) 393 | if pieces["dirty"]: 394 | rendered += ".dirty" 395 | else: 396 | # exception #1 397 | rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) 398 | if pieces["dirty"]: 399 | rendered += ".dirty" 400 | return rendered 401 | 402 | 403 | def render_pep440_branch(pieces): 404 | """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . 405 | 406 | The ".dev0" means not master branch. Note that .dev0 sorts backwards 407 | (a feature branch will appear "older" than the master branch). 408 | 409 | Exceptions: 410 | 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty] 411 | """ 412 | if pieces["closest-tag"]: 413 | rendered = pieces["closest-tag"] 414 | if pieces["distance"] or pieces["dirty"]: 415 | if pieces["branch"] != "master": 416 | rendered += ".dev0" 417 | rendered += plus_or_dot(pieces) 418 | rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) 419 | if pieces["dirty"]: 420 | rendered += ".dirty" 421 | else: 422 | # exception #1 423 | rendered = "0" 424 | if pieces["branch"] != "master": 425 | rendered += ".dev0" 426 | rendered += "+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) 427 | if pieces["dirty"]: 428 | rendered += ".dirty" 429 | return rendered 430 | 431 | 432 | def render_pep440_pre(pieces): 433 | """TAG[.post0.devDISTANCE] -- No -dirty. 434 | 435 | Exceptions: 436 | 1: no tags. 0.post0.devDISTANCE 437 | """ 438 | if pieces["closest-tag"]: 439 | rendered = pieces["closest-tag"] 440 | if pieces["distance"]: 441 | rendered += ".post0.dev%d" % pieces["distance"] 442 | else: 443 | # exception #1 444 | rendered = "0.post0.dev%d" % pieces["distance"] 445 | return rendered 446 | 447 | 448 | def render_pep440_post(pieces): 449 | """TAG[.postDISTANCE[.dev0]+gHEX] . 
450 | 451 | The ".dev0" means dirty. Note that .dev0 sorts backwards 452 | (a dirty tree will appear "older" than the corresponding clean one), 453 | but you shouldn't be releasing software with -dirty anyways. 454 | 455 | Exceptions: 456 | 1: no tags. 0.postDISTANCE[.dev0] 457 | """ 458 | if pieces["closest-tag"]: 459 | rendered = pieces["closest-tag"] 460 | if pieces["distance"] or pieces["dirty"]: 461 | rendered += ".post%d" % pieces["distance"] 462 | if pieces["dirty"]: 463 | rendered += ".dev0" 464 | rendered += plus_or_dot(pieces) 465 | rendered += "g%s" % pieces["short"] 466 | else: 467 | # exception #1 468 | rendered = "0.post%d" % pieces["distance"] 469 | if pieces["dirty"]: 470 | rendered += ".dev0" 471 | rendered += "+g%s" % pieces["short"] 472 | return rendered 473 | 474 | 475 | def render_pep440_post_branch(pieces): 476 | """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . 477 | 478 | The ".dev0" means not master branch. 479 | 480 | Exceptions: 481 | 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty] 482 | """ 483 | if pieces["closest-tag"]: 484 | rendered = pieces["closest-tag"] 485 | if pieces["distance"] or pieces["dirty"]: 486 | rendered += ".post%d" % pieces["distance"] 487 | if pieces["branch"] != "master": 488 | rendered += ".dev0" 489 | rendered += plus_or_dot(pieces) 490 | rendered += "g%s" % pieces["short"] 491 | if pieces["dirty"]: 492 | rendered += ".dirty" 493 | else: 494 | # exception #1 495 | rendered = "0.post%d" % pieces["distance"] 496 | if pieces["branch"] != "master": 497 | rendered += ".dev0" 498 | rendered += "+g%s" % pieces["short"] 499 | if pieces["dirty"]: 500 | rendered += ".dirty" 501 | return rendered 502 | 503 | 504 | def render_pep440_old(pieces): 505 | """TAG[.postDISTANCE[.dev0]] . 506 | 507 | The ".dev0" means dirty. 508 | 509 | Exceptions: 510 | 1: no tags. 
0.postDISTANCE[.dev0] 511 | """ 512 | if pieces["closest-tag"]: 513 | rendered = pieces["closest-tag"] 514 | if pieces["distance"] or pieces["dirty"]: 515 | rendered += ".post%d" % pieces["distance"] 516 | if pieces["dirty"]: 517 | rendered += ".dev0" 518 | else: 519 | # exception #1 520 | rendered = "0.post%d" % pieces["distance"] 521 | if pieces["dirty"]: 522 | rendered += ".dev0" 523 | return rendered 524 | 525 | 526 | def render_git_describe(pieces): 527 | """TAG[-DISTANCE-gHEX][-dirty]. 528 | 529 | Like 'git describe --tags --dirty --always'. 530 | 531 | Exceptions: 532 | 1: no tags. HEX[-dirty] (note: no 'g' prefix) 533 | """ 534 | if pieces["closest-tag"]: 535 | rendered = pieces["closest-tag"] 536 | if pieces["distance"]: 537 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 538 | else: 539 | # exception #1 540 | rendered = pieces["short"] 541 | if pieces["dirty"]: 542 | rendered += "-dirty" 543 | return rendered 544 | 545 | 546 | def render_git_describe_long(pieces): 547 | """TAG-DISTANCE-gHEX[-dirty]. 548 | 549 | Like 'git describe --tags --dirty --always -long'. 550 | The distance/hash is unconditional. 551 | 552 | Exceptions: 553 | 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) 554 | """ 555 | if pieces["closest-tag"]: 556 | rendered = pieces["closest-tag"] 557 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 558 | else: 559 | # exception #1 560 | rendered = pieces["short"] 561 | if pieces["dirty"]: 562 | rendered += "-dirty" 563 | return rendered 564 | 565 | 566 | def render(pieces, style): 567 | """Render the given version pieces into the requested style.""" 568 | if pieces["error"]: 569 | return { 570 | "version": "unknown", 571 | "full-revisionid": pieces.get("long"), 572 | "dirty": None, 573 | "error": pieces["error"], 574 | "date": None, 575 | } 576 | 577 | if not style or style == "default": 578 | style = "pep440" # the default 579 | 580 | if style == "pep440": 581 | rendered = render_pep440(pieces) 582 | elif style == "pep440-branch": 583 | rendered = render_pep440_branch(pieces) 584 | elif style == "pep440-pre": 585 | rendered = render_pep440_pre(pieces) 586 | elif style == "pep440-post": 587 | rendered = render_pep440_post(pieces) 588 | elif style == "pep440-post-branch": 589 | rendered = render_pep440_post_branch(pieces) 590 | elif style == "pep440-old": 591 | rendered = render_pep440_old(pieces) 592 | elif style == "git-describe": 593 | rendered = render_git_describe(pieces) 594 | elif style == "git-describe-long": 595 | rendered = render_git_describe_long(pieces) 596 | else: 597 | raise ValueError("unknown style '%s'" % style) 598 | 599 | return { 600 | "version": rendered, 601 | "full-revisionid": pieces["long"], 602 | "dirty": pieces["dirty"], 603 | "error": None, 604 | "date": pieces.get("date"), 605 | } 606 | 607 | 608 | def get_versions(): 609 | """Get version information or return default if unable to do so.""" 610 | # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have 611 | # __file__, we can work backwards from there to the root. 
Some 612 | # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which 613 | # case we can only use expanded keywords. 614 | 615 | cfg = get_config() 616 | verbose = cfg.verbose 617 | 618 | try: 619 | return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose) 620 | except NotThisMethod: 621 | pass 622 | 623 | try: 624 | root = os.path.realpath(__file__) 625 | # versionfile_source is the relative path from the top of the source 626 | # tree (where the .git directory might live) to this file. Invert 627 | # this to find the root from __file__. 628 | for _ in cfg.versionfile_source.split("/"): 629 | root = os.path.dirname(root) 630 | except NameError: 631 | return { 632 | "version": "0+unknown", 633 | "full-revisionid": None, 634 | "dirty": None, 635 | "error": "unable to find root of source tree", 636 | "date": None, 637 | } 638 | 639 | try: 640 | pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) 641 | return render(pieces, cfg.style) 642 | except NotThisMethod: 643 | pass 644 | 645 | try: 646 | if cfg.parentdir_prefix: 647 | return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) 648 | except NotThisMethod: 649 | pass 650 | 651 | return { 652 | "version": "0+unknown", 653 | "full-revisionid": None, 654 | "dirty": None, 655 | "error": "unable to compute version", 656 | "date": None, 657 | } 658 | --------------------------------------------------------------------------------