├── .gitattributes ├── .github ├── CONTRIBUTING.md ├── ISSUE_TEMPLATE │ ├── bug_report.md │ └── feature_request.md ├── PULL_REQUEST_TEMPLATE.md ├── dependabot.yml └── workflows │ ├── build_docs.yml │ ├── prerelease.yml │ ├── publish.yml │ ├── release_and_publish.yml │ └── testing.yml ├── .gitignore ├── .pre-commit-config.yaml ├── CHANGELOG.md ├── LICENSE.txt ├── README.md ├── ci ├── 310-latest.yaml ├── 310-oldest.yaml ├── 311-latest.yaml ├── 312-dev.yaml └── 312-latest.yaml ├── docs ├── .nojekyll ├── Makefile ├── _static │ ├── auto │ │ └── references.el │ ├── images │ │ ├── nonplanar.png │ │ ├── prices.png │ │ ├── pysal_favicon.ico │ │ ├── pysal_favicon.png │ │ ├── pysal_logo.png │ │ ├── pysal_logo_light.png │ │ └── rose_conditional.png │ ├── pysal-styles.css │ ├── references.bib │ └── references_all.bib ├── api.rst ├── conf.py ├── index.rst ├── installation.rst ├── libpysal-inv.txt ├── references.rst └── releases.rst ├── environment.yml ├── figs ├── lisamaps.png ├── lisamaps7.png └── pysal_logo.png ├── pyproject.toml ├── pysal ├── __init__.py ├── base.py ├── explore │ ├── __init__.py │ ├── esda │ │ └── __init__.py │ ├── giddy │ │ └── __init__.py │ ├── inequality │ │ └── __init__.py │ ├── momepy │ │ └── __init__.py │ ├── pointpats │ │ └── __init__.py │ ├── segregation │ │ └── __init__.py │ └── spaghetti │ │ └── __init__.py ├── lib │ ├── __init__.py │ ├── common.py │ └── examples │ │ └── __init__.py ├── model │ ├── __init__.py │ ├── access │ │ └── __init__.py │ ├── mgwr │ │ └── __init__.py │ ├── spglm │ │ └── __init__.py │ ├── spint │ │ └── __init__.py │ ├── spopt │ │ └── __init__.py │ ├── spreg │ │ └── __init__.py │ └── tobler │ │ └── __init__.py ├── tests │ └── test_imports.py └── viz │ ├── __init__.py │ └── mapclassify │ └── __init__.py └── tools ├── Makefile ├── README.md ├── change_log.py ├── changelog.md ├── frozen.py ├── gitcount.py ├── release.yaml └── release_info.py /.gitattributes: -------------------------------------------------------------------------------- 1 | pysal/_version.py export-subst 2 | -------------------------------------------------------------------------------- /.github/CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | Thank you for your interest in contributing! We work primarily on Github. Please 2 | review the [contributing procedures](https://github.com/pysal/pysal/wiki/GitHub-Standard-Operating-Procedures) so that we can accept your contributions! Alternatively, contact someone in [PySAL's Discord channel](https://discord.gg/BxFTEPFFZn). 3 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | A clear and concise description of what the bug is. 12 | 13 | **To Reproduce** 14 | Steps to reproduce the behavior: 15 | 1. Go to '...' 16 | 2. Click on '....' 17 | 3. Scroll down to '....' 18 | 4. See error 19 | 20 | **Expected behavior** 21 | A clear and concise description of what you expected to happen. 22 | 23 | **Screenshots** 24 | If applicable, add screenshots to help explain your problem. 25 | 26 | **Desktop (please complete the following information):** 27 | - OS: [e.g. iOS] 28 | - Browser [e.g. chrome, safari] 29 | - Version [e.g. 
22] 30 | 31 | **Smartphone (please complete the following information):** 32 | - Device: [e.g. iPhone6] 33 | - OS: [e.g. iOS8.1] 34 | - Browser [e.g. stock browser, safari] 35 | - Version [e.g. 22] 36 | 37 | **Additional context** 38 | Add any other context about the problem here. 39 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Is your feature request related to a problem? Please describe.** 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 12 | 13 | **Describe the solution you'd like** 14 | A clear and concise description of what you want to happen. 15 | 16 | **Describe alternatives you've considered** 17 | A clear and concise description of any alternative solutions or features you've considered. 18 | 19 | **Additional context** 20 | Add any other context or screenshots about the feature request here. 21 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | Hello! Please make sure to check all these boxes before submitting a Pull Request 2 | (PR). Once you have checked the boxes, feel free to remove all text except the 3 | justification in point 6. 4 | 5 | 1. [ ] You have run tests on this submission locally with [`pytest`](https://docs.pytest.org/en/8.2.x/) (see example command in [`.github/workflows/testing.yml`](https://github.com/pysal/pysal/blob/12ae3c1a59b02e3142d9f9e083c211e8b2fdc314/.github/workflows/testing.yml#L74-L82)). 6 | 2. [ ] This submission is [formatted](https://docs.astral.sh/ruff/formatter/) and [linted](https://docs.astral.sh/ruff/linter/) with [`ruff`](https://docs.astral.sh/ruff/). Formatting & linting are done automatically if you have [installed](https://pre-commit.com/#3-install-the-git-hook-scripts) the [`.pre-commit-config.yaml`](https://github.com/pysal/pysal/blob/main/.pre-commit-config.yaml) properly in a local environment. 7 | 3. [ ] If this PR adds or updates the codebase, appropriate adjustments are made to corresponding [docstrings](https://numpydoc.readthedocs.io/en/latest/format.html#overview) and changes are covered (see (1.) above). 8 | 4. [ ] This pull request is directed to the `pysal/main` branch. **This is important, as any PRs submitted against any other branches will be delayed.** 9 | 5. [ ] You have [assigned a 10 | reviewer](https://help.github.com/articles/assigning-issues-and-pull-requests-to-other-github-users/) and added relevant [labels](https://help.github.com/articles/applying-labels-to-issues-and-pull-requests/) (if possible) 11 | 6. 
[ ] The justification for this PR is: 12 | * *Add justification here* 13 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "github-actions" 4 | directory: "/" 5 | schedule: 6 | interval: "weekly" 7 | -------------------------------------------------------------------------------- /.github/workflows/build_docs.yml: -------------------------------------------------------------------------------- 1 | name: Build Docs 2 | on: 3 | push: 4 | # Sequence of patterns matched against refs/tags 5 | tags: 6 | - 'v*' # Push events to matching v*, i.e. v1.0, v20.15.10 7 | workflow_dispatch: 8 | inputs: 9 | version: 10 | description: Manual Doc Build Reason 11 | default: test 12 | required: false 13 | 14 | jobs: 15 | docs: 16 | name: Build & Push Docs 17 | runs-on: ${{ matrix.os }} 18 | timeout-minutes: 90 19 | strategy: 20 | matrix: 21 | os: ['ubuntu-latest'] 22 | environment-file: [ci/312-latest.yaml] 23 | experimental: [false] 24 | defaults: 25 | run: 26 | shell: bash -l {0} 27 | 28 | steps: 29 | - name: Checkout repo 30 | uses: actions/checkout@v4 31 | with: 32 | fetch-depth: 0 # Fetch all history for all branches and tags. 33 | 34 | - name: Setup micromamba 35 | uses: mamba-org/setup-micromamba@v2 36 | with: 37 | environment-file: ${{ matrix.environment-file }} 38 | micromamba-version: 'latest' 39 | 40 | - name: Install 41 | run: 42 | pip install -e . --no-deps --force-reinstall 43 | 44 | - name: Make Docs 45 | run: cd docs; make html 46 | 47 | - name: Commit Docs 48 | run: | 49 | git clone https://github.com/ammaraskar/sphinx-action-test.git --branch gh-pages --single-branch gh-pages 50 | cp -r docs/_build/html/* gh-pages/ 51 | cd gh-pages 52 | git config --local user.email "action@github.com" 53 | git config --local user.name "GitHub Action" 54 | git add . 55 | git commit -m "Update documentation" -a || true 56 | # The above command will fail if no changes were present, 57 | # so we ignore the return code. 
58 | 59 | - name: Push to gh-pages 60 | uses: ad-m/github-push-action@master 61 | with: 62 | branch: gh-pages 63 | directory: gh-pages 64 | github_token: ${{ secrets.GITHUB_TOKEN }} 65 | force: true 66 | -------------------------------------------------------------------------------- /.github/workflows/prerelease.yml: -------------------------------------------------------------------------------- 1 | name: Create Pre-release 2 | 3 | on: 4 | push: 5 | tags: 6 | - '*' 7 | 8 | jobs: 9 | create-pre-release: 10 | runs-on: ubuntu-latest 11 | 12 | steps: 13 | - name: Checkout repository 14 | uses: actions/checkout@v4 15 | 16 | - name: Check if the tag contains 'rc' 17 | id: check_tag 18 | run: | 19 | if [[ "${GITHUB_REF##*/}" == *rc* ]]; then 20 | echo "Tag contains rc" 21 | echo "::set-output name=contains_rc::true" 22 | else 23 | echo "Tag does not contain rc" 24 | echo "::set-output name=contains_rc::false" 25 | fi 26 | 27 | - name: Create pre-release 28 | if: steps.check_tag.outputs.contains_rc == 'true' 29 | env: 30 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 31 | run: | 32 | TAG_NAME=${GITHUB_REF##*/} 33 | RESPONSE=$(curl -X POST \ 34 | -H "Authorization: token $GITHUB_TOKEN" \ 35 | -H "Accept: application/vnd.github.v3+json" \ 36 | https://api.github.com/repos/${GITHUB_REPOSITORY}/releases \ 37 | -d @- << EOF 38 | { 39 | "tag_name": "${TAG_NAME}", 40 | "name": "${TAG_NAME}", 41 | "body": "Pre-release ${TAG_NAME}", 42 | "draft": false, 43 | "prerelease": true 44 | } 45 | EOF 46 | ) 47 | 48 | echo "$RESPONSE" 49 | -------------------------------------------------------------------------------- /.github/workflows/publish.yml: -------------------------------------------------------------------------------- 1 | name: Publish to PyPI 2 | 3 | on: 4 | workflow_dispatch: 5 | inputs: 6 | tag: 7 | description: "Git tag to build from" 8 | required: true 9 | 10 | jobs: 11 | deploy: 12 | name: Publish Python package 13 | runs-on: ubuntu-latest 14 | 15 | steps: 16 | - name: Check out the code 17 | uses: actions/checkout@v4 18 | with: 19 | ref: ${{ github.event.inputs.tag }} 20 | 21 | - name: Set up Python 22 | uses: actions/setup-python@v5 23 | with: 24 | python-version: '3.x' 25 | 26 | - name: Install build tools 27 | run: | 28 | python -m pip install --upgrade pip 29 | pip install build twine 30 | 31 | - name: Build the package 32 | run: python -m build 33 | 34 | - name: Publish distribution to PyPI 35 | uses: pypa/gh-action-pypi-publish@master 36 | with: 37 | user: __token__ 38 | password: ${{ secrets.PYPI_PASSWORD }} 39 | -------------------------------------------------------------------------------- /.github/workflows/release_and_publish.yml: -------------------------------------------------------------------------------- 1 | name: Release & Publish 2 | 3 | on: 4 | push: 5 | # Sequence of patterns matched against refs/tags 6 | tags: 7 | - "v*" # Push events to matching v*, i.e. 
v1.0, v20.15.10 8 | workflow_dispatch: # Enables manual triggering from GH UI 9 | inputs: 10 | version: 11 | description: Manual Release & Publish 12 | default: release+publish 13 | required: false 14 | 15 | permissions: 16 | contents: read 17 | 18 | jobs: 19 | build: 20 | permissions: 21 | contents: write # for softprops/action-gh-release to create GitHub release 22 | name: Create release & publish to PyPI 23 | runs-on: ubuntu-latest 24 | 25 | steps: 26 | - name: Checkout repo 27 | uses: actions/checkout@v4 28 | 29 | - name: Set up python 30 | uses: actions/setup-python@v5 31 | with: 32 | python-version: "3.x" 33 | 34 | - name: Install Dependencies 35 | run: | 36 | python -m pip install --upgrade pip 37 | pip install build twine jupyter urllib3 pandas pyyaml 38 | python -m build 39 | twine check --strict dist/* 40 | 41 | - name: Get the tag name 42 | run: echo "TAG=${GITHUB_REF/refs\/tags\//}" >> $GITHUB_ENV 43 | 44 | - name: Determine draft status 45 | id: draft_status 46 | run: | 47 | if [[ "${GITHUB_REF}" == *"rc"* ]]; then 48 | echo "draft=true" >> $GITHUB_ENV 49 | else 50 | echo "draft=false" >> $GITHUB_ENV 51 | fi 52 | 53 | - name: Create Release 54 | id: create_release 55 | uses: actions/create-release@v1 56 | env: 57 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # This token is provided by Actions, you do not need to create your own token 58 | with: 59 | tag_name: ${{ github.ref }} 60 | release_name: Release ${{ github.ref }} 61 | draft: ${{ env.draft }} 62 | prerelease: ${{ env.draft }} 63 | body_path: tools/changelog.md 64 | 65 | - name: Publish distribution � to PyPI 66 | uses: pypa/gh-action-pypi-publish@master 67 | with: 68 | user: __token__ 69 | password: ${{ secrets.PYPI_PASSWORD }} 70 | -------------------------------------------------------------------------------- /.github/workflows/testing.yml: -------------------------------------------------------------------------------- 1 | name: Continuous Integration 2 | 3 | on: 4 | push: 5 | branches: 6 | - '*' 7 | pull_request: 8 | branches: 9 | - '*' 10 | schedule: 11 | - cron: '59 23 * * *' 12 | workflow_dispatch: 13 | inputs: 14 | version: 15 | description: Manual CI Run 16 | default: test 17 | required: false 18 | 19 | jobs: 20 | tests: 21 | name: ${{ matrix.os }}, ${{ matrix.environment-file }} 22 | runs-on: ${{ matrix.os }} 23 | timeout-minutes: 30 24 | strategy: 25 | matrix: 26 | os: [ubuntu-latest] 27 | environment-file: [ 28 | ci/310-oldest.yaml, 29 | ci/310-latest.yaml, 30 | ci/311-latest.yaml, 31 | ci/312-latest.yaml, 32 | ci/312-dev.yaml, 33 | ] 34 | include: 35 | - environment-file: ci/312-latest.yaml 36 | os: macos-13 # Intel 37 | - environment-file: ci/312-latest.yaml 38 | os: macos-14 # Apple Silicon 39 | - environment-file: ci/312-latest.yaml 40 | os: windows-latest 41 | fail-fast: false 42 | 43 | defaults: 44 | run: 45 | shell: bash -l {0} 46 | 47 | steps: 48 | - name: checkout repo 49 | uses: actions/checkout@v4 50 | with: 51 | fetch-depth: 0 # Fetch all history for all branches and tags. 52 | 53 | - name: setup micromamba 54 | uses: mamba-org/setup-micromamba@v2 55 | with: 56 | environment-file: ${{ matrix.environment-file }} 57 | micromamba-version: 'latest' 58 | 59 | 60 | - name: environment info 61 | run: | 62 | micromamba info 63 | micromamba list 64 | 65 | - name: spatial versions 66 | run: 'python -c "import geopandas; geopandas.show_versions();"' 67 | 68 | - name: install pysal 69 | run: | 70 | pip install -e . 
71 | 72 | - name: run tests 73 | run: | 74 | pytest pysal \ 75 | -v \ 76 | -r a \ 77 | -n logical \ 78 | --color yes \ 79 | --cov pysal \ 80 | --cov-append \ 81 | --cov-report term-missing \ 82 | --cov-report xml . 83 | 84 | - name: codecov 85 | uses: codecov/codecov-action@v5 86 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .cache/ 2 | .#failures 3 | .python_history 4 | develop.sh 5 | docscrc/giddy.inv 6 | .conda* 7 | .local* 8 | .bash_history 9 | docsrc/_bui* 10 | failures 11 | newtags.json 12 | notebooks/lib/io.ipynb 13 | package_info.py 14 | *.py[cod] 15 | *.bak 16 | .ipynb_checkpoints/ 17 | # C extensions 18 | *.so 19 | 20 | # Packages 21 | *.egg 22 | *.egg-info 23 | dist 24 | build 25 | eggs 26 | parts 27 | bin 28 | var 29 | sdist 30 | develop-eggs 31 | .installed.cfg 32 | __pycache__ 33 | 34 | tmp/ 35 | 36 | # virtual environment 37 | venv/ 38 | 39 | # Installer logs 40 | pip-log.txt 41 | 42 | # Unit test / coverage reports 43 | .coverage 44 | .tox 45 | nosetests.xml 46 | 47 | # Translations 48 | *.mo 49 | 50 | # Mr Developer 51 | .mr.developer.cfg 52 | .project 53 | .pydevproject 54 | 55 | # OS generated files # 56 | ###################### 57 | .DS_Store 58 | .DS_Store? 59 | ._* 60 | .Spotlight-V100 61 | .Trashes 62 | Icon? 63 | ehthumbs.db 64 | Thumbs.db 65 | 66 | 67 | # pysal 68 | # 69 | lattice.* 70 | .vagrant/ 71 | pysal/contrib/viz/.ipynb_checkpoints/ 72 | pysal/contrib/viz/bp.png 73 | pysal/contrib/viz/fj.png 74 | pysal/contrib/viz/fj_classless.png 75 | pysal/contrib/viz/lmet.tex 76 | pysal/contrib/viz/lmp.tex 77 | pysal/contrib/viz/lmplot.png 78 | pysal/contrib/viz/lmss.tex 79 | pysal/contrib/viz/lmt.tex 80 | pysal/contrib/viz/out.png 81 | pysal/contrib/viz/p.tex 82 | pysal/contrib/viz/quantiles.png 83 | pysal/contrib/viz/quantiles_HR60.png 84 | pysal/contrib/viz/quantiles_HR70.png 85 | pysal/contrib/viz/quantiles_HR80.png 86 | pysal/contrib/viz/quantiles_HR90.png 87 | pysal/contrib/viz/quatiles.png 88 | pysal/contrib/viz/region.ipynb 89 | pysal/contrib/viz/south_base.html 90 | pysal/contrib/viz/sp.tex 91 | pysal/contrib/viz/sss.tex 92 | pysal/examples/south.prj 93 | 94 | #Vi 95 | *.swp 96 | .ropeproject/ 97 | .eggs/ 98 | pysal/contrib/planar/ 99 | pysal/esda/.ropeproject/ 100 | pysal/esda/jenks_nb.ipynb 101 | pysal/examples/snow_maps/fake.dbf 102 | pysal/examples/snow_maps/fake.prj 103 | pysal/examples/snow_maps/fake.qpj 104 | pysal/examples/snow_maps/fake.shp 105 | pysal/examples/snow_maps/fake.shx 106 | pysal/examples/snow_maps/fixed.dbf 107 | pysal/examples/snow_maps/fixed.prj 108 | pysal/examples/snow_maps/fixed.qgs 109 | pysal/examples/snow_maps/fixed.qgs~ 110 | pysal/examples/snow_maps/fixed.qpj 111 | pysal/examples/snow_maps/fixed.shp 112 | pysal/examples/snow_maps/fixed.shx 113 | pysal/examples/snow_maps/snow.qgs 114 | pysal/examples/snow_maps/snow.qgs~ 115 | pysal/examples/snow_maps/soho_graph.dbf 116 | pysal/examples/snow_maps/soho_graph.prj 117 | pysal/examples/snow_maps/soho_graph.qpj 118 | pysal/examples/snow_maps/soho_graph.shp 119 | pysal/examples/snow_maps/soho_graph.shx 120 | 121 | 122 | .chglog/ 123 | convert.py 124 | docsrc/giddy.inv 125 | newlog.html 126 | newlog.md 127 | notebooks/access.ipynb 128 | notebooks/chi_med_data/ 129 | pysal/explore/giddy/markov/ 130 | pysal/lazy.py 131 | pysal/tests/__init__.py 132 | tools/README.html 133 | tools/changes.md 134 | tools/commit_table.html 135 | tools/commits_by_person.html 136 | 
tools/contributor_table.html 137 | tools/ghapi.ipynb 138 | tools/githelper.py 139 | tools/github_auth.sh 140 | tools/issue_details.p 141 | tools/issues_closed.p 142 | tools/package_versions.txt 143 | tools/packages.ipynb 144 | tools/pull_details.p 145 | tools/pulls_closed.p 146 | tools/pypireleases.py 147 | tools/requirements.txt 148 | tools/tarballs.json 149 | tools/test.md 150 | tools/token -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | files: "pysal\/" 2 | repos: 3 | - repo: https://github.com/astral-sh/ruff-pre-commit 4 | rev: "v0.5.0" 5 | hooks: 6 | - id: ruff 7 | - id: ruff-format 8 | 9 | ci: 10 | autofix_prs: false 11 | autoupdate_schedule: quarterly 12 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | Copyright (c) 2007-, PySAL Developers 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without 5 | modification, are permitted provided that the following conditions are met: 6 | 7 | * Redistributions of source code must retain the above copyright notice, this 8 | list of conditions and the following disclaimer. 9 | 10 | * Redistributions in binary form must reproduce the above copyright 11 | notice, this list of conditions and the following disclaimer in the 12 | documentation and/or other materials provided with the distribution. 13 | 14 | * Neither the name of the PySAL Developers nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. 15 | 16 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND 17 | CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, 18 | INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF 19 | MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 20 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR 21 | CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 22 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 23 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF 24 | USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND 25 | ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT 26 | LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN 27 | ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 28 | POSSIBILITY OF SUCH DAMAGE. 
29 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Python Spatial Analysis Library 2 | 3 | [![Continuous Integration](https://github.com/pysal/pysal/actions/workflows/testing.yml/badge.svg)](https://github.com/pysal/pysal/actions/workflows/testing.yml) 4 | [![PyPI version](https://badge.fury.io/py/pysal.svg)](https://badge.fury.io/py/pysal) 5 | [![Anaconda-Server Badge](https://anaconda.org/conda-forge/pysal/badges/version.svg)](https://anaconda.org/conda-forge/pysal) 6 | [![Discord](https://img.shields.io/badge/Discord-join%20chat-7289da?style=flat&logo=discord&logoColor=cccccc&link=https://discord.gg/BxFTEPFFZn)](https://discord.gg/BxFTEPFFZn) 7 | [![Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff) 8 | [![DOI](https://zenodo.org/badge/8295380.svg)](https://zenodo.org/badge/latestdoi/8295380) 9 | 10 |

11 | 12 |

13 | 14 | PySAL, the Python spatial analysis library, is an open source, cross-platform library for geospatial data science written in Python, with an emphasis on geospatial vector data. It supports the development of high-level applications for spatial analysis, such as 15 | 16 | - detection of spatial clusters, hot-spots, and outliers 17 | - construction of graphs from spatial data 18 | - spatial regression and statistical modeling on geographically embedded networks 19 | - spatial econometrics 20 | - exploratory spatio-temporal data analysis 21 | 22 | ## PySAL Components 23 | 24 | PySAL is a family of packages for spatial data science and is divided into four major components: 25 | 26 | ### Lib 27 | 28 | The `lib` layer solves a wide variety of computational geometry problems, including graph construction from polygonal lattices, lines, and points; construction and interactive editing of spatial weights matrices & graphs; computation of alpha shapes, spatial indices, and spatial-topological relationships; and reading and writing of sparse graph data, as well as pure Python readers of spatial vector data. Unlike other PySAL modules, these functions are exposed together as a single package. 29 | 30 | - [libpysal](https://pysal.org/libpysal) : `libpysal` provides foundational algorithms and data structures that support the rest of the library. This currently includes the following modules: input/output (`io`), which provides readers and writers for common geospatial file formats; weights (`weights`), which provides the main class to store spatial weights matrices, as well as several utilities to manipulate and operate on them; computational geometry (`cg`), with several algorithms, such as Voronoi tessellations or alpha shapes that efficiently process geometric shapes; and an additional module with example data sets (`examples`). 31 | 32 | ### Explore 33 | 34 | The `explore` layer includes modules to conduct exploratory analysis of spatial and spatio-temporal data. At a high level, packages in `explore` are focused on enabling the user to better understand patterns in the data and suggest new interesting questions rather than answer existing ones. They include methods to characterize the structure of spatial distributions (either on networks, in continuous space, or on polygonal lattices). In addition, this domain offers methods to examine the *dynamics* of these distributions, such as how their composition or spatial extent changes over time. 35 | 36 | - [esda](https://pysal.org/esda/) : `esda` implements methods for the analysis of both global (map-wide) and local (focal) spatial autocorrelation, for both continuous and binary data. In addition, the package increasingly offers cutting-edge statistics about boundary strength and measures of aggregation error in statistical analyses. 37 | 38 | - [giddy](https://pysal.org/giddy/) : `giddy` is an extension of `esda` to spatio-temporal data. The package hosts state-of-the-art methods that explicitly consider the role of space in the dynamics of distributions over time. 39 | 40 | - [inequality](https://pysal.org/inequality/) : `inequality` provides indices for measuring inequality over space and time. These comprise classic measures such as the Theil *T* information index and the Gini index in mean deviation form, but also spatially-explicit measures that incorporate the location and spatial configuration of observations in the calculation of inequality measures.
41 | 42 | - [momepy](https://docs.momepy.org) : `momepy` is a library for quantitative analysis of urban form (urban morphometrics). It aims to provide a wide range of tools for a systematic and exhaustive analysis of urban form. It can work with a wide range of elements, with a focus on building footprints and street networks. Its name stands for Morphological Measuring in Python. 43 | 44 | - [pointpats](https://pysal.org/pointpats/) : `pointpats` supports the statistical analysis of point data, including methods to characterize the spatial structure of an observed point pattern: a collection of locations where some phenomena of interest have been recorded. This includes measures of centrography, which provide overall geometric summaries of the point pattern, including central tendency, dispersion, intensity, and extent. 45 | 46 | - [segregation](https://pysal.org/segregation/) : the `segregation` package calculates over 40 different segregation indices and provides a suite of additional features for measurement, visualization, and hypothesis testing that together represent the state-of-the-art in quantitative segregation analysis. 47 | 48 | - [spaghetti](https://pysal.org/spaghetti) : `spaghetti` supports the spatial analysis of graphs, networks, topology, and inference. It includes functionality for the statistical testing of clusters on networks, a robust all-to-all Dijkstra shortest path algorithm with multiprocessing functionality, high-performance geometric and spatial computations using `geopandas` that are necessary for high-resolution interpolation along networks, and the ability to connect near-network observations onto the network. 49 | 50 | ### Model 51 | 52 | In contrast to `explore`, the `model` layer focuses on confirmatory analysis. In particular, its packages focus on the estimation of spatial relationships in data with a variety of linear, generalized-linear, generalized-additive, nonlinear, multi-level, and local regression models. 53 | 54 | - [mgwr](https://mgwr.readthedocs.io/en/latest/) : `mgwr` provides scalable algorithms for estimation, inference, and prediction using single- and multi-scale geographically-weighted regression models in a variety of generalized linear model frameworks, as well as model diagnostic tools. 55 | 56 | - [spglm](https://pysal.org/spglm/) : `spglm` implements a set of generalized linear regression techniques, including Gaussian, Poisson, and Logistic regression, that allow for sparse matrix operations in their computation and estimation to lower memory overhead and decrease computation time. 57 | 58 | - [spint](https://github.com/pysal/spint) : `spint` provides a collection of tools to study spatial interaction processes and analyze spatial interaction data. It includes functionality to facilitate the calibration and interpretation of a family of gravity-type spatial interaction models, including those with *production* constraints, *attraction* constraints, or a combination of the two. 59 | 60 | - [spreg](https://pysal.org/spreg/) : `spreg` supports the estimation of classic and spatial econometric models. Currently it contains methods for estimating standard Ordinary Least Squares (OLS), Two Stage Least Squares (2SLS) and Seemingly Unrelated Regressions (SUR), in addition to various tests of homoskedasticity, normality, spatial randomness, and different types of spatial autocorrelation. It also includes a suite of tests for spatial dependence in models with binary dependent variables.
61 | 62 | - [tobler](http://pysal.org/tobler/) : `tobler` provides functionality for areal interpolation and dasymetric mapping. Its name is an homage to the legendary geographer Waldo Tobler, a pioneer of dozens of spatial analytical methods. `tobler` includes functionality for interpolating data using area-weighted approaches, regression model-based approaches that leverage remotely-sensed raster data as auxiliary information, and hybrid approaches. 63 | 64 | - [access](https://pysal.org/access/) : `access` aims to make it easy for analysts to calculate measures of spatial accessibility. This work has traditionally had two challenges: [1] to calculate accurate travel time matrices at scale and [2] to derive measures of access using the travel times and supply and demand locations. `access` implements classic spatial access models, allowing easy comparison of methodologies and assumptions. 65 | 66 | - [spopt](https://pysal.org/spopt/) : `spopt` is an open-source Python library for solving optimization problems with spatial data. Originating 67 | from the original `region` module in PySAL, it is under active development for the inclusion of newly proposed models and methods for regionalization, facility location, and transportation-oriented solutions. 68 | 69 | ### Viz 70 | 71 | The `viz` layer provides functionality to support the creation of geovisualisations and visual representations of outputs from a variety of spatial analyses. Visualization plays a central role in modern spatial/geographic data science. Current packages provide classification methods for choropleth mapping and a common API for linking PySAL outputs to visualization tool-kits in the Python ecosystem. 72 | 73 | - [legendgram](https://github.com/pysal/legendgram) : `legendgram` is a small package that provides "legendgrams", legends that visualize the distribution of observations by color in a given map. These distributional visualizations for map classification schemes assist in analytical cartography and spatial data visualization. 74 | 75 | - [mapclassify](https://pysal.org/mapclassify) : `mapclassify` provides functionality for choropleth map classification. Currently, fifteen different classification schemes are available, including a highly-optimized implementation of Fisher-Jenks optimal classification. Each scheme inherits a common structure that ensures computations are scalable and supports applications in streaming contexts. 76 | 77 | - [splot](https://splot.readthedocs.io/en/latest/) : `splot` provides statistical visualizations for spatial analysis. It provides methods for visualizing global and local spatial autocorrelation (through Moran scatterplots and cluster maps), temporal analysis of cluster dynamics (through heatmaps and rose diagrams), and multivariate choropleth mapping (through value-by-alpha maps). A high-level API supports the creation of publication-ready visualizations. 78 | 79 | # Installation 80 | 81 | PySAL is available through [Anaconda](https://www.continuum.io/downloads) (in the defaults or conda-forge channel). We recommend installing PySAL from conda-forge: 82 | 83 | ``` {.sourceCode .bash} 84 | conda config --add channels conda-forge 85 | conda install pysal 86 | ``` 87 | 88 | PySAL can also be installed using pip: 89 | 90 | ``` {.sourceCode .bash} 91 | pip install pysal 92 | ``` 93 | 94 | As of version 2.0.0, PySAL has shifted to Python 3 only.
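A quick way to confirm the installation is to import the meta-package together with a few of the layers described above and exercise one of the bundled tools. The snippet below is a minimal sketch: the import paths mirror the `pysal/explore`, `pysal/model`, `pysal/viz`, and `pysal/lib` subpackages shown in the repository tree, the `__version__` attribute is assumed to be set by the meta-package, and `Quantiles` is taken from `mapclassify`'s documented classifiers.

``` {.sourceCode .python}
# Minimal smoke test for an installed PySAL meta-package.
import numpy as np

import pysal
from pysal.explore import esda        # exploratory spatial analysis
from pysal.lib import examples        # example data sets shipped with libpysal
from pysal.model import spreg         # spatial econometric models
from pysal.viz import mapclassify     # choropleth classification schemes

print(pysal.__version__)              # assumed to report the meta-package release

# Classify a synthetic attribute into five quantile classes.
y = np.random.default_rng(0).lognormal(size=100)
print(mapclassify.Quantiles(y, k=5))
```

If the imports above succeed and the classifier prints a class-interval table, the meta-package and its component layers are wired up correctly.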
95 | 96 | Users who need an older stable version of PySAL that is Python 2 compatible can install version 1.14.3 through pip or conda: 97 | 98 | ``` {.sourceCode .bash} 99 | conda install pysal==1.14.3 100 | ``` 101 | 102 | # Documentation 103 | 104 | For help on using PySAL, check out the following resources: 105 | 106 | - [Project Home](https://pysal.org) 107 | - [Users](http://pysal.org/docs/users) 108 | - [Developers](http://pysal.org/docs/devs/) 109 | 110 | # Development 111 | 112 | As of version 2.0.0, PySAL is now a collection of affiliated geographic data science packages. Changes to the code for any of the subpackages should be directed at the respective [upstream repositories](http://github.com/pysal/help), and not made here. Infrastructural changes for the meta-package, like those for tooling, building the package, and code standards, will be considered. 113 | 114 | Development is hosted on [GitHub](https://github.com/pysal/pysal). 115 | 116 | Discussions of development as well as help for users occur on the [developer list](http://groups.google.com/group/pysal-dev) as well as in [PySAL's Discord channel](https://discord.gg/BxFTEPFFZn). 117 | 118 | # Getting Involved 119 | 120 | If you are interested in contributing to PySAL, please see our [development guidelines](https://github.com/pysal/pysal/wiki). 121 | 122 | # Bug reports 123 | 124 | To search for or report bugs, please see PySAL's [issues](http://github.com/pysal/pysal/issues). 125 | 126 | # Build Instructions 127 | 128 | To build the meta-package `pysal`, see [tools/README.md](tools/README.md). 129 | 130 | # License information 131 | 132 | See the file "LICENSE.txt" for information on the history of this software, terms & conditions for usage, and a DISCLAIMER OF ALL WARRANTIES. 133 | -------------------------------------------------------------------------------- /ci/310-latest.yaml: -------------------------------------------------------------------------------- 1 | name: test 2 | channels: 3 | - conda-forge 4 | dependencies: 5 | - python=3.10 6 | # ecosystem 7 | - access 8 | - esda 9 | - giddy 10 | - inequality 11 | - libpysal 12 | - mapclassify 13 | - mgwr 14 | - momepy 15 | - pointpats 16 | - segregation 17 | - spaghetti 18 | - splot 19 | - spint 20 | - spopt 21 | - spreg 22 | - spvcm 23 | - tobler>=0.2.1 24 | # external & optional 25 | - bokeh 26 | - deprecated 27 | - dill 28 | - geopandas 29 | - ipywidgets 30 | - matplotlib 31 | - numba 32 | - pyyaml 33 | - quantecon 34 | - quilt3 35 | - rasterio 36 | - rasterstats 37 | - rtree 38 | - scikit-learn 39 | - seaborn 40 | - six 41 | - tqdm 42 | - twine 43 | - urbanaccess 44 | - urllib3>=1.26 45 | - wheel 46 | # testing 47 | - codecov 48 | - coverage 49 | - pytest 50 | - pytest-cov 51 | - pytest-xdist 52 | -------------------------------------------------------------------------------- /ci/310-oldest.yaml: -------------------------------------------------------------------------------- 1 | name: test 2 | channels: 3 | - conda-forge 4 | dependencies: 5 | - python=3.10 6 | # ecosystem 7 | - access 8 | - esda 9 | - giddy 10 | - inequality 11 | - libpysal 12 | - mapclassify 13 | - mgwr 14 | - momepy 15 | - pointpats 16 | - segregation 17 | - spaghetti 18 | - splot 19 | - spint 20 | - spopt 21 | - spreg 22 | - spvcm 23 | - tobler>=0.2.1 24 | # external & optional 25 | - bokeh 26 | - deprecated 27 | - dill 28 | - fiona<1.10 29 | - geopandas=0.12 30 | - ipywidgets 31 | - matplotlib 32 | - numba 33 | - pyyaml 34 | - quantecon 35 | - quilt3 36 | - rasterio 37 | - rasterstats
38 | - rtree 39 | - scikit-learn 40 | - seaborn 41 | - six 42 | - tqdm 43 | - twine 44 | - urbanaccess 45 | - urllib3>=1.26 46 | - wheel 47 | # testing 48 | - codecov 49 | - coverage 50 | - pytest 51 | - pytest-cov 52 | - pytest-xdist 53 | -------------------------------------------------------------------------------- /ci/311-latest.yaml: -------------------------------------------------------------------------------- 1 | name: test 2 | channels: 3 | - conda-forge 4 | dependencies: 5 | - python=3.11 6 | # ecosystem 7 | - access 8 | - esda 9 | - giddy 10 | - inequality 11 | - libpysal 12 | - mapclassify 13 | - mgwr 14 | - momepy 15 | - pointpats 16 | - segregation 17 | - spaghetti 18 | - splot 19 | - spint 20 | - spopt 21 | - spreg 22 | - spvcm 23 | - tobler>=0.2.1 24 | # external & optiopnal 25 | - bokeh 26 | - deprecated 27 | - dill 28 | - geopandas 29 | - ipywidgets 30 | - matplotlib 31 | - numba 32 | - pyyaml 33 | - quantecon 34 | - quilt3 35 | - rasterio 36 | - rasterstats 37 | - rtree 38 | - scikit-learn 39 | - seaborn 40 | - six 41 | - tqdm 42 | - twine 43 | - urbanaccess 44 | - urllib3>=1.26 45 | - wheel 46 | # testing 47 | - codecov 48 | - coverage 49 | - pytest 50 | - pytest-cov 51 | - pytest-xdist 52 | -------------------------------------------------------------------------------- /ci/312-dev.yaml: -------------------------------------------------------------------------------- 1 | name: test 2 | channels: 3 | - conda-forge 4 | dependencies: 5 | - python=3.12 6 | - folium # for geopandas.explore() 7 | - geos # for shapely 8 | - matplotlib # for geopandas.explore() 9 | - tqdm 10 | # testing 11 | - codecov 12 | - coverage 13 | - pytest 14 | - pytest-cov 15 | - pytest-xdist 16 | # with pip 17 | - pip 18 | - pip: 19 | # dev versions of packages 20 | - --pre \ 21 | --index-url https://pypi.anaconda.org/scientific-python-nightly-wheels/simple \ 22 | --extra-index-url https://pypi.org/simple 23 | # external 24 | - numpy 25 | - pandas 26 | - pulp 27 | - scikit-learn 28 | - scipy 29 | - git+https://github.com/geopandas/geopandas.git 30 | - git+https://github.com/shapely/shapely.git 31 | - git+https://github.com/networkx/networkx.git 32 | # ecosystem 33 | - git+https://github.com/pysal/access.git 34 | - git+https://github.com/pysal/esda.git 35 | - git+https://github.com/pysal/libpysal.git 36 | - git+https://github.com/pysal/giddy.git 37 | - git+https://github.com/pysal/inequality.git 38 | - git+https://github.com/pysal/mapclassify.git 39 | - git+https://github.com/pysal/mgwr.git 40 | - git+https://github.com/pysal/momepy.git 41 | - git+https://github.com/pysal/pointpats.git 42 | - git+https://github.com/pysal/segregation.git 43 | - git+https://github.com/pysal/spaghetti.git 44 | - git+https://github.com/pysal/spglm.git 45 | - git+https://github.com/pysal/spint.git 46 | - git+https://github.com/pysal/splot.git 47 | - git+https://github.com/pysal/spopt.git 48 | - git+https://github.com/pysal/spreg.git 49 | - git+https://github.com/pysal/tobler.git 50 | -------------------------------------------------------------------------------- /ci/312-latest.yaml: -------------------------------------------------------------------------------- 1 | name: test 2 | channels: 3 | - conda-forge 4 | dependencies: 5 | - python=3.12 6 | # ecosystem 7 | - access 8 | - esda 9 | - giddy 10 | - inequality 11 | - libpysal 12 | - mapclassify 13 | - mgwr 14 | - momepy 15 | - pointpats 16 | - segregation 17 | - spaghetti 18 | - splot 19 | - spint 20 | - spopt 21 | - spreg 22 | - spvcm 23 | - tobler>=0.2.1 24 | # 
external & optiopnal 25 | - bokeh 26 | - deprecated 27 | - dill 28 | - geopandas 29 | - ipywidgets 30 | - matplotlib 31 | - numba 32 | - pyyaml 33 | - quantecon 34 | - quilt3 35 | - rasterio 36 | - rasterstats 37 | - rtree 38 | - scikit-learn 39 | - seaborn 40 | - six 41 | - tqdm 42 | - twine 43 | - urbanaccess 44 | - urllib3>=1.26 45 | - wheel 46 | # testing 47 | - codecov 48 | - coverage 49 | - pytest 50 | - pytest-cov 51 | - pytest-xdist 52 | # docs 53 | - nbsphinx 54 | - numpydoc 55 | - sphinx 56 | - sphinxcontrib-bibtex 57 | - pydata-sphinx-theme 58 | - sphinx-design 59 | -------------------------------------------------------------------------------- /docs/.nojekyll: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sjsrey/pysal/e6db2d7dcd340e242459e8289578bfb4a4830298/docs/.nojekyll -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | SPHINXPROJ = pysal 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | 22 | github: 23 | @make html 24 | 25 | sync: 26 | @rsync -avh --exclude '.nojekyll' _build/html/ ../docs/ --delete 27 | @make clean 28 | 29 | clean: 30 | rm -rf $(BUILDDIR)/* 31 | rm -rf auto_examples/ 32 | -------------------------------------------------------------------------------- /docs/_static/auto/references.el: -------------------------------------------------------------------------------- 1 | (TeX-add-style-hook 2 | "references" 3 | (lambda () 4 | (LaTeX-add-bibitems 5 | "saxon2021OpenSoftware" 6 | "fotheringham2017multiscale" 7 | "cortes2019OpensourceFramework" 8 | "rey_2021a" 9 | "rey2020VisualAnalytics" 10 | "rey2023GeographicData" 11 | "Akaike1974" 12 | "Anselin1988" 13 | "Anselin1996a" 14 | "Anselin1997" 15 | "Anselin2011" 16 | "Gaboardi2021" 17 | "Lumnitz2020" 18 | "Anselin95" 19 | "spopt2022" 20 | "Arraiz2010" 21 | "Assun_o_1999" 22 | "Belsley1980" 23 | "Benjamini:2001" 24 | "Bickenbach2003" 25 | "Breusch1979" 26 | "Castro:2006tz" 27 | "Christensen2005" 28 | "Dijkstra1959a" 29 | "Drukker2013" 30 | "Formby:2004fk" 31 | "Fotheringham2016" 32 | "Fotheringham:2017" 33 | "Getis_2010" 34 | "Greene2003" 35 | "Hubert_1981" 36 | "Ibe2009" 37 | "Jarque1980" 38 | "Jiang_2013" 39 | "Kang2018" 40 | "Kelejian1998" 41 | "Kelejian1999" 42 | "Kemeny1967" 43 | "Koenker1982" 44 | "Kullback1962" 45 | "Lee2001" 46 | "Okabe2001" 47 | "Ord_2010" 48 | "Press2007" 49 | "Reardon2008" 50 | "Rey2001" 51 | "Rey2004a" 52 | "Rey2011" 53 | "Rey2014" 54 | "Rey2014a" 55 | "Rey2016" 56 | "Rey2016a" 57 | "Rey_2013_sea" 58 | "Rey_2016" 59 | "Schwarz1978" 60 | "Silva:2016" 61 | "Slocum_2009" 62 | "VanLieshout1996" 63 | "White1980" 64 | "allen2015more" 65 | "anselin2006geoda" 66 | "anselin2014ModernSpatial" 67 | "article-minimal" 68 | "belsey_regression_1980" 69 | "brunsdon2008geographically" 70 | "brunsdon:1999" 71 | 
"carrington1997measuring" 72 | "cliff81" 73 | "duque18" 74 | "fleischmann_2019" 75 | "folch2016centralization" 76 | "fotheringham1999local" 77 | "fotheringham_geographically_2002" 78 | "harris_use_2010" 79 | "hong2014implementing" 80 | "hong2014measuring" 81 | "isard1967methods" 82 | "massey1988dimensions" 83 | "morgan1983distance" 84 | "morrill1991measure" 85 | "nakaya2005geographically" 86 | "oshan_comparison_2017" 87 | "osullivanwong2007surface" 88 | "pysal2007" 89 | "reardon1998measures" 90 | "reardon2002measures" 91 | "reardon2004measures" 92 | "rey2014PythonSpatial" 93 | "rey2019pysal" 94 | "rey2022PySALEcosystem" 95 | "rey_comparative_2015" 96 | "rey_interregional_2010" 97 | "roberto2015divergence" 98 | "sakoda1981generalized" 99 | "simpson1949measurement" 100 | "theil1972statistical" 101 | "tivadar2019oasisr" 102 | "wheeler_diagnostic_2007" 103 | "wong1993spatial" 104 | "yu:2019")) 105 | '(or :bibtex :latex)) 106 | 107 | -------------------------------------------------------------------------------- /docs/_static/images/nonplanar.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sjsrey/pysal/e6db2d7dcd340e242459e8289578bfb4a4830298/docs/_static/images/nonplanar.png -------------------------------------------------------------------------------- /docs/_static/images/prices.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sjsrey/pysal/e6db2d7dcd340e242459e8289578bfb4a4830298/docs/_static/images/prices.png -------------------------------------------------------------------------------- /docs/_static/images/pysal_favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sjsrey/pysal/e6db2d7dcd340e242459e8289578bfb4a4830298/docs/_static/images/pysal_favicon.ico -------------------------------------------------------------------------------- /docs/_static/images/pysal_favicon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sjsrey/pysal/e6db2d7dcd340e242459e8289578bfb4a4830298/docs/_static/images/pysal_favicon.png -------------------------------------------------------------------------------- /docs/_static/images/pysal_logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sjsrey/pysal/e6db2d7dcd340e242459e8289578bfb4a4830298/docs/_static/images/pysal_logo.png -------------------------------------------------------------------------------- /docs/_static/images/pysal_logo_light.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sjsrey/pysal/e6db2d7dcd340e242459e8289578bfb4a4830298/docs/_static/images/pysal_logo_light.png -------------------------------------------------------------------------------- /docs/_static/images/rose_conditional.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sjsrey/pysal/e6db2d7dcd340e242459e8289578bfb4a4830298/docs/_static/images/rose_conditional.png -------------------------------------------------------------------------------- /docs/_static/pysal-styles.css: -------------------------------------------------------------------------------- 1 | /* Make thumbnails with equal heights */ 2 | @media only screen and (min-width : 481px) { 3 | .row.equal-height { 4 | display: flex; 
5 | flex-wrap: wrap; 6 | } 7 | .row.equal-height > [class*='col-'] { 8 | display: flex; 9 | flex-direction: column; 10 | } 11 | .row.equal-height.row:after, 12 | .row.equal-height.row:before { 13 | display: flex; 14 | } 15 | 16 | .row.equal-height > [class*='col-'] > .thumbnail, 17 | .row.equal-height > [class*='col-'] > .thumbnail > .caption { 18 | display: flex; 19 | flex: 1 0 auto; 20 | flex-direction: column; 21 | } 22 | .row.equal-height > [class*='col-'] > .thumbnail > .caption > .flex-text { 23 | flex-grow: 1; 24 | } 25 | .row.equal-height > [class*='col-'] > .thumbnail > img { 26 | width: 100%; 27 | height: 200px; /* force image's height */ 28 | 29 | /* force image fit inside it's "box" */ 30 | -webkit-object-fit: cover; 31 | -moz-object-fit: cover; 32 | -ms-object-fit: cover; 33 | -o-object-fit: cover; 34 | object-fit: cover; 35 | } 36 | } 37 | 38 | .row.extra-bottom-padding{ 39 | margin-bottom: 20px; 40 | } 41 | 42 | 43 | .topnavicons { 44 | margin-left: 10% !important; 45 | } 46 | 47 | .topnavicons li { 48 | margin-left: 0px !important; 49 | min-width: 100px; 50 | text-align: center; 51 | } 52 | 53 | .topnavicons .thumbnail { 54 | margin-right: 10px; 55 | border: none; 56 | box-shadow: none; 57 | text-align: center; 58 | font-size: 85%; 59 | font-weight: bold; 60 | line-height: 10px; 61 | height: 100px; 62 | } 63 | 64 | .topnavicons .thumbnail img { 65 | display: block; 66 | margin-left: auto; 67 | margin-right: auto; 68 | } 69 | 70 | 71 | /* Table with a scrollbar */ 72 | .bodycontainer { max-height: 600px; width: 100%; margin: 0; overflow-y: auto; } 73 | .table-scrollable { margin: 0; padding: 0; } 74 | 75 | .label { 76 | color: #ff0000; 77 | /*font-size: 100%;*/ 78 | } 79 | 80 | div.body { 81 | max-width: 1080px; 82 | } 83 | -------------------------------------------------------------------------------- /docs/_static/references_all.bib: -------------------------------------------------------------------------------- 1 | @article{article-minimal, 2 | author = "Leslie B. Lamport", 3 | title = "blah blah blah", 4 | journal = "Some outlet", 5 | year = "1986" 6 | } 7 | 8 | @Article{pysal2007, 9 | author = "Rey, Sergio J. and Anselin, Luc", 10 | title = "{PySAL: A Python Library of Spatial Analytical Methods}", 11 | journal = "The Review of Regional Studies", 12 | year = "2007", 13 | volume = "37", 14 | number = "1", 15 | pages = "5-27", 16 | keywords = "Open Source; Software; Spatial" 17 | } 18 | 19 | @Article{Lee2001, 20 | author = "Lee, Sang-Il", 21 | title = "Developing a bivariate spatial association measure: An integration of {Pearson's r and Moran's I}", 22 | journal = "Journal of Geographical Systems", 23 | year = "2001", 24 | month = "Dec", 25 | day = "01", 26 | volume = "3", 27 | number = "4", 28 | pages = "369--385", 29 | abstract = "{\enspace}This research is concerned with developing a bivariate spatial association measure or spatial correlation coefficient, which is intended to capture spatial association among observations in terms of their point-to-point relationships across two spatial patterns. The need for parameterization of the bivariate spatial dependence is precipitated by the realization that aspatial bivariate association measures, such as Pearson's correlation coefficient, do not recognize spatial distributional aspects of data sets. This study devises an L statistic by integrating Pearson's r as an aspatial bivariate association measure and Moran's I as a univariate spatial association measure. 
The concept of a spatial smoothing scalar (SSS) plays a pivotal role in this task.", 30 | issn = "1435-5930", 31 | doi = "10.1007/s101090100064", 32 | url = "https://doi.org/10.1007/s101090100064" 33 | } 34 | 35 | @article{Benjamini:2001, 36 | Author = "Benjamini, Yoav and Yekutieli, Daniel", 37 | Abstract = "Benjamini and Hochberg suggest that the false discovery rate may be the appropriate error rate to control in many applied multiple testing problems. A simple procedure was given there as an FDR controlling procedure for independent test statistics and was shown to be much more powerful than comparable procedures which control the traditional familywise error rate. We prove that this same procedure also controls the false discovery rate when the test statistics have positive regression dependency on each of the test statistics corresponding to the true null hypotheses. This condition for positive dependency is general enough to cover many problems of practical interest, including the comparisons of many treatments with a single control, multivariate normal test statistics with positive correlation matrix and multivariate t. Furthermore, the test statistics may be discrete, and the tested hypotheses composite without posing special difficulties. For all other forms of dependency, a simple conservative modification of the procedure controls the false discovery rate. Thus the range of problems for which a procedure with proven FDR control can be offered is greatly increased.", 38 | Date-Added = "2019-03-28 22:46:12 -0700", 39 | Date-Modified = "2019-03-28 22:46:28 -0700", 40 | Issn = "00905364", 41 | Journal = "The Annals of Statistics", 42 | Number = "4", 43 | Pages = "1165--1188", 44 | Publisher = "Institute of Mathematical Statistics", 45 | Title = "The Control of the False Discovery Rate in Multiple Testing under Dependency", 46 | Url = "http://www.jstor.org/stable/2674075", 47 | Volume = "29", 48 | Year = "2001", 49 | Bdsk-File-1 = "YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxAhLi4vLi4vLi4vLi4vLi4vcGFwZXJzLzI2NzQwNzUucGRmTxEBUAAAAAABUAACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAAAAAAEJEAAH/////CzI2NzQwNzUucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////8AAAAAAAAAAAAAAAAABQACAAAKIGN1AAAAAAAAAAAAAAAAAAZwYXBlcnMAAgAvLzpVc2Vyczp3ZWlrYW5nOkdvb2dsZSBEcml2ZTpwYXBlcnM6MjY3NDA3NS5wZGYAAA4AGAALADIANgA3ADQAMAA3ADUALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASAC1Vc2Vycy93ZWlrYW5nL0dvb2dsZSBEcml2ZS9wYXBlcnMvMjY3NDA3NS5wZGYAABMAAS8AABUAAgAO//8AAAAIAA0AGgAkAEgAAAAAAAACAQAAAAAAAAAFAAAAAAAAAAAAAAAAAAABnA==", 50 | Bdsk-Url-1 = "http://www.jstor.org/stable/2674075" 51 | } 52 | 53 | @article{Castro:2006tz, 54 | Author = "de Castro, Marcia Caldas and Singer, Burton H.", 55 | Citeulike-Article-Id = "647229", 56 | Date-Added = "2019-03-28 22:46:02 -0700", 57 | Date-Modified = "2019-03-28 22:46:02 -0700", 58 | Doi = "10.1111/j.0016-7363.2006.00682.x", 59 | Issn = "0016-7363", 60 | Journal = "Geographical Analysis", 61 | Keywords = "local statistics, multiple comparisons, false discovery rate", 62 | Month = "April", 63 | Number = "2", 64 | Pages = "180-208", 65 | Publisher = "Blackwell Publishing", 66 | Title = "Controlling the False Discovery Rate: A New Application to Account for Multiple and Dependent Tests in Local Statistics of Spatial Association", 67 | Url = "http://dx.doi.org/10.1111/j.0016-7363.2006.00682.x", 68 | Volume = "38", 69 | Year = "2006", 70 | Bdsk-File-1 = 
"YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxAlLi4vLi4vLi4vLi4vLi4vcGFwZXJzL0Nhc3Ryby8yMDA2LnBkZk8RAVIAAAAAAVIAAgAADE1hY2ludG9zaCBIRAAAAAAAAAAAAAAAAAAAAAAAAABCRAAB/////wgyMDA2LnBkZgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////AAAAAAAAAAAAAAAAAAUAAwAACiBjdQAAAAAAAAAAAAAAAAAGQ2FzdHJvAAIAMy86VXNlcnM6d2Vpa2FuZzpHb29nbGUgRHJpdmU6cGFwZXJzOkNhc3RybzoyMDA2LnBkZgAADgASAAgAMgAwADAANgAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAMVVzZXJzL3dlaWthbmcvR29vZ2xlIERyaXZlL3BhcGVycy9DYXN0cm8vMjAwNi5wZGYAABMAAS8AABUAAgAO//8AAAAIAA0AGgAkAEwAAAAAAAACAQAAAAAAAAAFAAAAAAAAAAAAAAAAAAABog==", 71 | Bdsk-Url-1 = "http://dx.doi.org/10.1111/j.0016-7363.2006.00682.x" 72 | } 73 | 74 | @article{Anselin95, 75 | Author = "Anselin, Luc", 76 | Doi = "10.1111/j.1538-4632.1995.tb00338.x", 77 | Issn = "0016-7363", 78 | Journal = "Geographical Analysis", 79 | Month = "Sep", 80 | Number = "2", 81 | Pages = "93--115", 82 | Publisher = "Wiley", 83 | Title = "Local Indicators of Spatial Association-{LISA}", 84 | Url = "http://dx.doi.org/10.1111/j.1538-4632.1995.tb00338.x", 85 | Volume = "27", 86 | Year = "1995", 87 | Bdsk-Url-1 = "http://dx.doi.org/10.1111/j.1538-4632.1995.tb00338.x" 88 | } 89 | 90 | @article{Assun_o_1999, 91 | Author = "Assuncao, Renato M. and Reis, Edna A.", 92 | Doi = "10.1002/(sici)1097-0258(19990830)18:16<2147::aid-sim179>3.0.co;2-i", 93 | Issn = "1097-0258", 94 | Journal = "Statistics in Medicine", 95 | Month = "Aug", 96 | Number = "16", 97 | Pages = "2147--2162", 98 | Publisher = "Wiley", 99 | Title = "A new proposal to adjust {Moran's I} for population density", 100 | Url = "http://dx.doi.org/10.1002/(sici)1097-0258(19990830)18:16<2147::aid-sim179>3.0.co;2-i", 101 | Volume = "18", 102 | Year = "1999", 103 | Bdsk-Url-1 = "http://dx.doi.org/10.1002/(sici)1097-0258(19990830)18:16\%3C2147::aid-sim179\%3E3.0.co;2-i" 104 | } 105 | 106 | @book{cliff81, 107 | Author = "Cliff, A.D. and Ord, J.K.", 108 | Address = "London", 109 | Publisher = "Pion", 110 | Title = "Spatial Processes: Models and Applications", 111 | Year = "1981" 112 | } 113 | 114 | @article{duque18, 115 | author = "Duque, Juan C. and Laniado, H. and Polo, A.", 116 | journal = "PLOS ONE", 117 | publisher = "Public Library of Science", 118 | title = "S-maup: Statistical Test to Measure the Sensitivity to the Modifiable Areal Unit Problem", 119 | year = "2018", 120 | month = "11", 121 | volume = "13", 122 | url = "https://journals.plos.org/plosone/article?id=10.1371/journal.pone.0207377", 123 | pages = "1-25", 124 | abstract = "This work presents a nonparametric statistical test, S-maup, to measure the sensitivity of a spatially intensive variable to the effects of the Modifiable Areal Unit Problem (MAUP). To the best of our knowledge, S-maup is the first statistic of its type and focuses on determining how much the distribution of the variable, at its highest level of spatial disaggregation, will change when it is spatially aggregated. Through a computational experiment, we obtain the basis for the design of the statistical test under the null hypothesis of non-sensitivity to MAUP. We performed an exhaustive simulation study for approaching the empirical distribution of the statistical test, obtaining its critical values, and computing its power and size. The results indicate that, in general, both the statistical size and power improve with increasing sample size. 
Finally, for illustrative purposes, an empirical application is made using the Mincer equation in South Africa, where starting from 206 municipalities, the S-maup statistic is used to find the maximum level of spatial aggregation that avoids the negative consequences of the MAUP.", 125 | number = "11", 126 | doi = "https://doi.org/10.1371/journal.pone.0207377" 127 | } 128 | 129 | @Article{Getis_2010, 130 | author = "Getis, Arthur and Ord, J. K.", 131 | title = "The Analysis of Spatial Association by Use of Distance Statistics", 132 | year = "2010", 133 | volume = "24", 134 | number = "3", 135 | month = "Sep", 136 | pages = "189–206", 137 | issn = "0016-7363", 138 | doi = "10.1111/j.1538-4632.1992.tb00261.x", 139 | url = "http://dx.doi.org/10.1111/j.1538-4632.1992.tb00261.x", 140 | journal = "Geographical Analysis", 141 | publisher = "Wiley" 142 | } 143 | 144 | @article{Ord_2010, 145 | Author = "Ord, J. K. and Getis, Arthur", 146 | Doi = "10.1111/j.1538-4632.1995.tb00912.x", 147 | Issn = "0016-7363", 148 | Journal = "Geographical Analysis", 149 | Month = "Sep", 150 | Number = "4", 151 | Pages = "286--306", 152 | Publisher = "Wiley", 153 | Title = "Local Spatial Autocorrelation Statistics: Distributional Issues and an Application", 154 | Url = "http://dx.doi.org/10.1111/j.1538-4632.1995.tb00912.x", 155 | Volume = "27", 156 | Year = "2010", 157 | Bdsk-Url-1 = "http://dx.doi.org/10.1111/j.1538-4632.1995.tb00912.x" 158 | } 159 | 160 | @article{Hubert_1981, 161 | Author = "Hubert, L. J. and Golledge, R. G. and Costanzo, C. M.", 162 | Doi = "10.1111/j.1538-4632.1981.tb00731.x", 163 | Issn = "0016-7363", 164 | Journal = "Geographical Analysis", 165 | Month = "Sep", 166 | Number = "3", 167 | Pages = "224--233", 168 | Publisher = "Wiley", 169 | Title = "Generalized Procedures for Evaluating Spatial Autocorrelation", 170 | Url = "http://dx.doi.org/10.1111/j.1538-4632.1981.tb00731.x", 171 | Volume = "13", 172 | Year = "1981", 173 | Bdsk-Url-1 = "http://dx.doi.org/10.1111/j.1538-4632.1981.tb00731.x" 174 | } 175 | 176 | @article{Kang2018, 177 | Author = "Kang, Wei and Rey, Sergio J.", 178 | Abstract = "Spatial effects have been recognized to play an important role in transitional dynamics of regional incomes. Detection and evaluation of both spatial heterogeneity and spatial dependence in discrete Markov chain models, which have been widely applied to the study of regional income distribution dynamics and convergence, are vital, but under-explored issues. Indeed, in this spatiotemporal setting, spatial effects can take much more complex forms than that in a pure cross-sectional setting. In this paper, we address two test frameworks. The first is a conditional spatial Markov chains test framework, which can be used to detect spatial heterogeneity and temporally lagged spatial dependence; the second is a joint spatial Markov chains test framework, which tests for contemporaneous spatial dependence. A series of Monte Carlo experiments are designed to examine size, power and robustness properties of these tests for a range of sample sizes (spatial {\$}{\$}{\backslash}times {\$}{\$}{\texttimes}temporal dimensions), for different levels of discretization granularity and for different number of regimes. Results indicate that all tests display good size property except when sample size is fairly small. All tests for spatial dependence are similar in almost all aspects---size, power and robustness. Conditional spatial Markov tests for spatial heterogeneity have highest power for detecting spatial heterogeneity. 
Granularity of discretization has a major impact on the size properties of the tests when sample size is fairly small.", 179 | Date-Added = "2018-12-24 21:05:56 -0800", 180 | Date-Modified = "2018-12-24 21:05:56 -0800", 181 | Day = "01", 182 | Doi = "10.1007/s00168-017-0859-9", 183 | Issn = "1432-0592", 184 | Journal = "The Annals of Regional Science", 185 | Month = "Jul", 186 | Number = "1", 187 | Pages = "73--93", 188 | Title = "Conditional and joint tests for spatial effects in discrete Markov chain models of regional income distribution dynamics", 189 | Url = "https://doi.org/10.1007/s00168-017-0859-9", 190 | Volume = "61", 191 | Year = "2018", 192 | Bdsk-Url-1 = "https://doi.org/10.1007/s00168-017-0859-9" 193 | } 194 | 195 | @book{Ibe2009, 196 | Author = "Ibe, Oliver", 197 | Address = "Amsterdam", 198 | Date-Added = "2018-09-10 17:28:24 -0700", 199 | Date-Modified = "2018-09-10 17:38:32 -0700", 200 | Keywords = "markov", 201 | Publisher = "Elsevier Academic Press", 202 | Title = "Markov processes for stochastic modeling", 203 | Year = "2009" 204 | } 205 | 206 | @article{Rey2014a, 207 | Author = "Rey, Sergio J.", 208 | Date-Added = "2018-09-10 14:49:27 -0700", 209 | Date-Modified = "2018-09-10 14:50:05 -0700", 210 | Journal = "Journal of Geographical Systems", 211 | Keywords = "markov, space-time", 212 | Number = "2", 213 | Pages = "115-137", 214 | Title = "Rank-based {Markov} chains for regional income distribution dynamics", 215 | Volume = "16", 216 | Year = "2014" 217 | } 218 | 219 | @article{Formby:2004fk, 220 | Author = "Formby, John P. and Smith, W. James and Zheng, Buhong", 221 | Date-Added = "2018-07-06 22:21:00 +0000", 222 | Date-Modified = "2018-07-06 22:21:00 +0000", 223 | Doi = "https://doi.org/10.1016/S0304-4076(03)00211-2", 224 | Journal = "Journal of Econometrics", 225 | Keywords = "mobility, distributional dynamics", 226 | Number = "1", 227 | Pages = "181--205", 228 | Publisher = "Elsevier", 229 | Title = "Mobility measurement, transition matrices and statistical inference", 230 | Url = "http://www.sciencedirect.com/science/article/pii/S0304407603002112", 231 | Volume = "120", 232 | Year = "2004", 233 | Bdsk-Url-1 = "http://www.sciencedirect.com/science/article/pii/S0304407603002112", 234 | Bdsk-Url-2 = "https://doi.org/10.1016/S0304-4076(03)00211-2" 235 | } 236 | 237 | @article{Rey2011, 238 | Author = "Rey, Sergio J. and Murray, Alan T. and Anselin, Luc", 239 | Date-Added = "2018-07-03 17:53:40 +0000", 240 | Date-Modified = "2018-07-03 17:53:45 +0000", 241 | Doi = "10.1007/s12076-010-0048-2", 242 | Journal = "Letters in Spatial and Resource Sciences", 243 | Keywords = "directional.py", 244 | Number = "1", 245 | Pages = "81--90", 246 | Publisher = "Springer", 247 | Title = "Visualizing regional income distribution dynamics", 248 | Url = "https://doi.org/10.1007/s12076-010-0048-2", 249 | Volume = "4", 250 | Year = "2011", 251 | Bdsk-Url-1 = "https://doi.org/10.1007/s12076-010-0048-2" 252 | } 253 | 254 | @article{Rey2016a, 255 | Author = "Rey, Sergio J. and Kang, Wei and Wolf, Levi", 256 | Abstract = "Discrete Markov chain models (DMCs) have been widely applied to the study of regional income distribution dynamics and convergence. This popularity reflects the rich body of DMC theory on the one hand and the ability of this framework to provide insights on the internal and external properties of regional income distribution dynamics on the other. In this paper we examine the properties of tests for spatial effects in DMC models of regional distribution dynamics. 
We do so through a series of Monte Carlo simulations designed to examine the size, power and robustness of tests for spatial heterogeneity and spatial dependence in transitional dynamics. This requires that we specify a data generating process for not only the null, but also alternatives when spatial heterogeneity or spatial dependence is present in the transitional dynamics. We are not aware of any work which has examined these types of data generating processes in the spatial distribution dynamics literature. Results indicate that tests for spatial heterogeneity and spatial dependence display good power for the presence of spatial effects. However, tests for spatial heterogeneity are not robust to the presence of strong spatial dependence, while tests for spatial dependence are sensitive to the spatial configuration of heterogeneity. When the spatial configuration can be considered random, dependence tests are robust to the dynamic spatial heterogeneity, but not so to the process mean heterogeneity when the difference in process means is large relative to the variance of the time series.", 257 | Date-Added = "2018-06-20 17:13:29 +0000", 258 | Date-Modified = "2018-06-20 17:13:38 +0000", 259 | Doi = "10.1007/s10109-016-0234-x", 260 | Issn = "1435-5949", 261 | Journal = "Journal of Geographical Systems", 262 | Number = "4", 263 | Pages = "377--398", 264 | Title = "The properties of tests for spatial effects in discrete {Markov} chain models of regional income distribution dynamics", 265 | Url = "http://dx.doi.org/10.1007/s10109-016-0234-x", 266 | Volume = "18", 267 | Year = "2016", 268 | Bdsk-Url-1 = "http://dx.doi.org/10.1007/s10109-016-0234-x" 269 | } 270 | 271 | @article{Rey2016, 272 | Author = "Rey, Sergio J.", 273 | Abstract = "In the study of income inequality dynamics, the concept of exchange mobility plays a central role. Applications of classical rank correlation statistics have been used to assess the degree to which individual economies swap positions in the income distribution over time. These classic measures ignore the underlying geographical pattern of rank changes. Rey (2004) introduced a spatial concordance statistic as an extension of Kendall's rank correlation statistic, a commonly employed measure of exchange mobility. This article suggests local forms of the global spatial concordance statistic: local indicators of mobility association (LIMA). The LIMA statistics allow for the decomposition of the global measure into the contributions associated with individual locations. They do so by considering the degree of concordance (stability) or discordance (exchange mobility) reflected within an economy's local spatial context. Different forms of the LIMAs derive from alternative expressions of the neighborhood and neighbor set. Additionally, the additive decomposition of the LIMAs permits the development of a mesolevel analytic to examine whether the overall space--time concordance is driven by either interregional or intraregional concordance. 
The measures are illustrated in a case study that examines regional income dynamics in Mexico.", 274 | Date-Added = "2018-06-12 22:59:18 +0000", 275 | Date-Modified = "2018-06-12 22:59:33 +0000", 276 | Doi = "10.1080/24694452.2016.1151336", 277 | Journal = "Annals of the American Association of Geographers", 278 | Keywords = "LIMA", 279 | Number = "4", 280 | Pages = "788-803", 281 | Title = "Space--Time Patterns of Rank Concordance: Local Indicators of Mobility Association with Application to Spatial Income Inequality Dynamics", 282 | Url = "https://doi.org/10.1080/24694452.2016.1151336", 283 | Volume = "106", 284 | Year = "2016", 285 | Bdsk-Url-1 = "https://doi.org/10.1080/24694452.2016.1151336" 286 | } 287 | 288 | @incollection{Rey2004a, 289 | Author = "Rey, Sergio J.", 290 | Editor = "Getis, A. and M{\'{u}}r, J. and Zoeller, H.", 291 | Abstract = "This chapter discusses the importance of distributional dynamics in the evolution of regional incomes. Serge starts with spatial Markov stuff, but then extends it to include the spatial tau decomposition and the measures of inter-intra regional cohesion.", 292 | Address = "Hampshire", 293 | Booktitle = "Spatial econometrics and spatial statistics", 294 | Date-Added = "2018-06-12 22:57:36 +0000", 295 | Date-Modified = "2018-06-12 22:57:51 +0000", 296 | Keywords = "convergence, rank.py", 297 | Pages = "193-213", 298 | Publisher = "Palgrave", 299 | Title = "Spatial dependence in the evolution of regional income distributions", 300 | Year = "2004" 301 | } 302 | 303 | @article{Rey2014, 304 | Author = "Rey, Sergio J.", 305 | Date-Added = "2018-06-12 22:54:57 +0000", 306 | Date-Modified = "2018-06-12 22:55:04 +0000", 307 | Doi = "10.1007/s00180-013-0461-2", 308 | Journal = "Computational Statistics", 309 | Keywords = "concordance, rank.py, LIMA", 310 | Number = "3-4", 311 | Pages = "799-811", 312 | Publisher = "Springer", 313 | Title = "Fast algorithms for a space-time concordance measure", 314 | Url = "https://doi.org/10.1007/s00180-013-0461-2", 315 | Volume = "29", 316 | Year = "2014", 317 | Bdsk-Url-1 = "https://doi.org/10.1007/s00180-013-0461-2" 318 | } 319 | 320 | @book{Press2007, 321 | Author = "Press, William H and Teukolsky, Saul A and Vetterling, William T and Flannery, Brian P", 322 | Address = "Cambridge", 323 | Date-Added = "2018-06-12 22:53:57 +0000", 324 | Date-Modified = "2018-06-12 22:54:05 +0000", 325 | Edition = "3rd", 326 | Keywords = "rank.py", 327 | Publisher = "Cambridge Univ Pr", 328 | Title = "Numerical recipes: the art of scientific computing", 329 | Year = "2007" 330 | } 331 | 332 | @article{Christensen2005, 333 | Author = "Christensen, David", 334 | Abstract = "Traditional algorithms for the calculation of Kendall's $\tau$ between two datasets of n samples have a calculation time of O(n2). This paper presents a suite of algorithms with expected calculation time of O(n log n) or better using a combination of sorting and balanced tree data structures. The literature, e.g. Dwork et al. (2001), has alluded to the existence of O(n log n) algorithms without any analysis: this paper gives an explicit descriptions of such algorithms for general use both for the case with and without duplicate values in the data. 
Execution times for sample data are reduced from 3.8 hours to around 1--2 seconds for one million data pairs.", 335 | Date-Added = "2018-06-12 22:50:55 +0000", 336 | Date-Modified = "2018-06-12 22:51:11 +0000", 337 | Doi = "10.1007/BF02736122", 338 | Journal = "Computational Statistics", 339 | Month = "Mar", 340 | Number = "1", 341 | Pages = "51--62", 342 | Title = "Fast algorithms for the calculation of Kendall's $\tau$", 343 | Url = "https://doi.org/10.1007/BF02736122", 344 | Volume = "20", 345 | Year = "2005", 346 | Bdsk-Url-1 = "https://doi.org/10.1007/BF02736122", 347 | Bdsk-Url-2 = "https://doi.org/10.1007/BF02736122" 348 | } 349 | 350 | @article{Kullback1962, 351 | Author = "Kullback, S. and Kupperman, M. and Ku, H. H.", 352 | Date-Added = "2018-06-12 22:49:01 +0000", 353 | Date-Modified = "2018-06-12 22:49:07 +0000", 354 | Doi = "10.2307/1266291", 355 | Journal = "Technometrics", 356 | Keywords = "markov, markov.py", 357 | Number = "4", 358 | Pages = "573--608", 359 | Publisher = "JSTOR", 360 | Title = "Tests for contingency tables and {Markov} chains", 361 | Url = "http://www.jstor.org/stable/1266291", 362 | Volume = "4", 363 | Year = "1962", 364 | Bdsk-Url-1 = "http://www.jstor.org/stable/1266291", 365 | Bdsk-Url-2 = "https://doi.org/10.2307/1266291" 366 | } 367 | 368 | @article{Bickenbach2003, 369 | Author = "Bickenbach, Frank and Bode, Eckhardt", 370 | Date-Added = "2018-06-12 22:47:20 +0000", 371 | Date-Modified = "2018-06-12 22:47:31 +0000", 372 | Doi = "10.1177/0160017603253789", 373 | Journal = "International Regional Science Review", 374 | Number = "3", 375 | Pages = "363--392", 376 | Title = "Evaluating the {Markov} property in studies of economic convergence", 377 | Url = "https://doi.org/10.1177/0160017603253789", 378 | Volume = "26", 379 | Year = "2003", 380 | Bdsk-Url-1 = "https://doi.org/10.1177/0160017603253789" 381 | } 382 | 383 | @book{Kemeny1967, 384 | Author = "Kemeny, John G.
and Snell, James Laurie", 385 | Date-Added = "2018-06-12 22:44:29 +0000", 386 | Date-Modified = "2018-06-12 22:44:40 +0000", 387 | Keywords = "markov chain, ergodic.py, mobility", 388 | Publisher = "Van Nostrand", 389 | Title = "Finite markov chains", 390 | Year = "1967", 391 | Bdsk-File-1 = "YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxA2Li4vLi4vLi4vLi4vLi4vLi4vRHJvcGJveCAoQVNVKS9wYXBlcnMvS2VtZW55LzE5NjcucGRmTxEBUgAAAAABUgACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAAAAAAEJEAAH/////CDE5NjcucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////8AAAAAAAAAAAAAAAAABgAEAAAKIGN1AAAAAAAAAAAAAAAAAAZLZW1lbnkAAgA0LzpVc2Vyczp3ZWlrYW5nOkRyb3Bib3ggKEFTVSk6cGFwZXJzOktlbWVueToxOTY3LnBkZgAOABIACAAxADkANgA3AC4AcABkAGYADwAaAAwATQBhAGMAaQBuAHQAbwBzAGgAIABIAEQAEgAyVXNlcnMvd2Vpa2FuZy9Ecm9wYm94IChBU1UpL3BhcGVycy9LZW1lbnkvMTk2Ny5wZGYAEwABLwAAFQACAA7//wAAAAgADQAaACQAXQAAAAAAAAIBAAAAAAAAAAUAAAAAAAAAAAAAAAAAAAGz" 392 | } 393 | 394 | @article{Rey2001, 395 | Author = "Rey, Sergio J.", 396 | Date-Added = "2018-06-12 21:11:59 +0000", 397 | Date-Modified = "2018-06-12 21:13:29 +0000", 398 | Doi = "10.1111/j.1538-4632.2001.tb00444.x", 399 | Journal = "Geographical Analysis", 400 | Number = "3", 401 | Pages = "195--214", 402 | Publisher = "Wiley Online Library", 403 | Title = "Spatial empirics for economic growth and convergence", 404 | Url = "https://onlinelibrary.wiley.com/doi/abs/10.1111/j.1538-4632.2001.tb00444.x", 405 | Volume = "33", 406 | Year = "2001", 407 | Bdsk-Url-1 = "https://onlinelibrary.wiley.com/doi/abs/10.1111/j.1538-4632.2001.tb00444.x", 408 | Bdsk-Url-2 = "https://doi.org/10.1111/j.1538-4632.2001.tb00444.x" 409 | } 410 | 411 | @Article{rey_interregional_2010, 412 | Author = "Rey, Sergio J. and Sastr\'e-Guti\'errez, Myrna L", 413 | Title = "Interregional inequality dynamics in {Mexico}", 414 | Journal = "Spatial Economic Analysis", 415 | Year = "2010", 416 | Number = "3", 417 | Pages = "277--298", 418 | Volume = "5" 419 | } 420 | 421 | @Article{rey_comparative_2015, 422 | Author = "Rey, Sergio J. and Sastr\'e Guti\'errez, Myrna L.", 423 | Title = "Comparative spatial inequality dynamics: The case of {Mexico and the United States}", 424 | Journal = "Applied Geography", 425 | Year = "2015", 426 | Month = "Jul", 427 | Pages = "70--80", 428 | Volume = "61", 429 | Bdsk-url-1 = "http://dx.doi.org/10.1016/j.apgeog.2015.01.012", 430 | Doi = "10.1016/j.apgeog.2015.01.012", 431 | ISSN = "0143-6228", 432 | Publisher = "Elsevier BV", 433 | Url = "http://dx.doi.org/10.1016/j.apgeog.2015.01.012" 434 | } 435 | 436 | @Article{Rey_2013_sea, 437 | Author = "Rey, Sergio J and Smith, Richard J", 438 | Title = "A spatial decomposition of the {Gini} coefficient", 439 | Journal = "Letters in Spatial and Resource Sciences", 440 | Year = "2013", 441 | Pages = "55--70", 442 | Volume = "6", 443 | Keywords = "gini, inequality, space-time" 444 | } 445 | 446 | @article{VanLieshout1996, 447 | Author = "Lieshout, M. N. M. and Baddeley, A. J.", 448 | Abstract = "The strength and range of interpoint interactions in a spatial point process can be quantified by the function J = (1 - G)/(1 - F), where G is the nearest-neighbour distance distribution function and F the empty space function of the process. J(r) is identically equal to 1 for a Poisson process; values of J(r) smaller or larger than 1 indicate clustering or regularity, respectively. We show that, for a large class of point processes, J(r) is constant for distances r greater than the range of spatial interaction. 
Hence both the range and type of interaction can be inferred from J without parametric model assumptions. It is also possible to evaluate J(r) explicitly for many point process models, so that J is also useful for parameter estimation. Various properties are derived, including the fact that the J function of the superposition of independent point processes is a weighted mean of the J functions of the individual processes. Estimators of J can be constructed from standard estimators of F and G. We compute estimates of J for several standard point pattern datasets and implement a Monte Carlo test for complete spatial randomness.", 449 | Date-Added = "2018-11-12 17:23:48 -0800", 450 | Date-Modified = "2018-11-12 17:24:22 -0800", 451 | Doi = "10.1111/j.1467-9574.1996.tb01501.x", 452 | Journal = "Statistica Neerlandica", 453 | Keywords = "clustering, empty space function, J-statistic, Monte Carlo inference, nearest-neighbour distance distribution, Nguyen-Zessin formula, point process, spatial interaction, spatial statistics, regularity", 454 | Number = "3", 455 | Pages = "344-361", 456 | Title = "A nonparametric measure of spatial interaction in point patterns", 457 | Volume = "50", 458 | Year = "1996", 459 | Bdsk-Url-1 = "https://onlinelibrary.wiley.com/doi/abs/10.1111/j.1467-9574.1996.tb01501.x", 460 | Bdsk-Url-2 = "https://doi.org/10.1111/j.1467-9574.1996.tb01501.x" 461 | } 462 | 463 | @article{Dijkstra1959a, 464 | author = "Dijkstra, E. W.", 465 | abstract = "We consider a graph with n vertices, all pairs of which are connected by an edge; each edge is of given positive length. The following two basic problems are solved. Problem 1: construct the tree of minimal total length between the n vertices. (A tree is a graph with one and only one path between any two vertices.) 
Problem 2: find the path of minimal total length between two given vertices.", 466 | doi = "10.1007/BF01386390", 467 | file = ":Users/jgaboardi/Library/Application Support/Mendeley Desktop/Downloaded/Dijkstra - 1959 - A Note on Two Problems in Connexion with Graphs.pdf:pdf", 468 | isbn = "0029-599X (Print) 0945-3245 (Online)", 469 | issn = "0029-599X", 470 | journal = "Numerische Mathematik", 471 | number = "1", 472 | pages = "269--271", 473 | pmid = "18215627", 474 | title = "{A Note on Two Problems in Connexion with Graphs}", 475 | volume = "1", 476 | year = "1959" 477 | } 478 | 479 | @article{Okabe2001, 480 | author = "Okabe, Atsuyuki and Yamada, Ikuho", 481 | file = ":Users/jgaboardi/Library/Application Support/Mendeley Desktop/Downloaded/Okabe, Yamada - 2001 - The K-Function Method on a Network and Its Computational Implementation.pdf:pdf", 482 | journal = "Geographical Analysis", 483 | number = "3", 484 | pages = "271--290", 485 | title = "{The K-Function Method on a Network and Its Computational Implementation}", 486 | volume = "33", 487 | year = "2001" 488 | } 489 | 490 | @article{massey1988dimensions, 491 | author = "Massey, Douglas S and Denton, Nancy A", 492 | title = "The dimensions of residential segregation", 493 | journal = "Social forces", 494 | volume = "67", 495 | number = "2", 496 | pages = "281--315", 497 | year = "1988", 498 | publisher = "Oxford University Press" 499 | } 500 | 501 | @article{hong2014measuring, 502 | author = "Hong, Seong-Yun and Sadahiro, Yukio", 503 | title = "Measuring geographic segregation: a graph-based approach", 504 | journal = "Journal of Geographical Systems", 505 | volume = "16", 506 | number = "2", 507 | pages = "211--231", 508 | year = "2014", 509 | publisher = "Springer" 510 | } 511 | 512 | @article{carrington1997measuring, 513 | author = "Carrington, William J and Troske, Kenneth R", 514 | title = "On measuring segregation in samples with small units", 515 | journal = "Journal of Business \\& Economic Statistics", 516 | volume = "15", 517 | number = "4", 518 | pages = "402--409", 519 | year = "1997", 520 | publisher = "Taylor \\& Francis" 521 | } 522 | 523 | @article{allen2015more, 524 | author = "Allen, Rebecca and Burgess, Simon and Davidson, Russell and Windmeijer, Frank", 525 | title = "More reliable inference for the dissimilarity index of segregation", 526 | journal = "The econometrics journal", 527 | volume = "18", 528 | number = "1", 529 | pages = "40--66", 530 | year = "2015", 531 | publisher = "Wiley Online Library" 532 | } 533 | 534 | @article{sakoda1981generalized, 535 | author = "Sakoda, James M", 536 | title = "A generalized index of dissimilarity", 537 | journal = "Demography", 538 | volume = "18", 539 | number = "2", 540 | pages = "245--250", 541 | year = "1981", 542 | publisher = "Springer" 543 | } 544 | 545 | @article{reardon2002measures, 546 | author = "Reardon, Sean F and Firebaugh, Glenn", 547 | title = "Measures of multigroup segregation", 548 | journal = "Sociological methodology", 549 | volume = "32", 550 | number = "1", 551 | pages = "33--67", 552 | year = "2002", 553 | publisher = "Wiley Online Library" 554 | } 555 | 556 | @inproceedings{reardon1998measures, 557 | author = "Reardon, Sean F", 558 | title = "Measures of racial diversity and segregation in multigroup and hierarchically structured populations", 559 | booktitle = "annual meeting of the Eastern Sociological Society, Philadelphia, PA", 560 | year = "1998" 561 | } 562 | 563 | @article{simpson1949measurement, 564 | author = "Simpson, Edward H", 565 | 
title = "Measurement of diversity", 566 | journal = "nature", 567 | volume = "163", 568 | number = "4148", 569 | pages = "688", 570 | year = "1949", 571 | publisher = "Nature Publishing Group" 572 | } 573 | 574 | @article{roberto2015divergence, 575 | author = "Roberto, Elizabeth", 576 | title = "The Divergence Index: A Decomposable Measure of Segregation and Inequality", 577 | journal = "arXiv preprint arXiv:1508.01167", 578 | year = "2015" 579 | } 580 | 581 | @inproceedings{morrill1991measure, 582 | author = "Morrill, Richard L", 583 | title = "On the measure of geographic segregation", 584 | booktitle = "Geography research forum", 585 | volume = "11", 586 | number = "1", 587 | pages = "25--36", 588 | year = "1991" 589 | } 590 | 591 | @article{hong2014implementing, 592 | author = "Hong, Seong-Yun and O'Sullivan, David and Sadahiro, Yukio", 593 | title = "Implementing spatial segregation measures in R", 594 | journal = "PloS one", 595 | volume = "9", 596 | number = "11", 597 | pages = "e113767", 598 | year = "2014", 599 | publisher = "Public Library of Science" 600 | } 601 | 602 | @article{wong1993spatial, 603 | author = "Wong, David WS", 604 | title = "Spatial indices of segregation", 605 | journal = "Urban studies", 606 | volume = "30", 607 | number = "3", 608 | pages = "559--572", 609 | year = "1993", 610 | publisher = "Sage Publications Sage UK: London, England" 611 | } 612 | 613 | @article{tivadar2019oasisr, 614 | author = "Tivadar, Mihai", 615 | title = "OasisR: An R Package to Bring Some Order to the World of Segregation Measurement", 616 | journal = "Journal of Statistical Software", 617 | volume = "89", 618 | number = "7", 619 | pages = "1--39", 620 | year = "2019", 621 | publisher = "JOURNAL STATISTICAL SOFTWARE UCLA DEPT STATISTICS, 8130 MATH SCIENCES BLDG\textasciitilde …" 622 | } 623 | 624 | @article{morgan1983distance, 625 | author = "Morgan, Barrie S", 626 | title = "A distance-decay based interaction index to measure residential segregation", 627 | journal = "Area", 628 | pages = "211--217", 629 | year = "1983", 630 | publisher = "JSTOR" 631 | } 632 | 633 | @book{isard1967methods, 634 | author = "Isard, Walter", 635 | title = "Methods of regional analysis", 636 | volume = "4", 637 | year = "1967", 638 | publisher = "Рипол Классик" 639 | } 640 | 641 | @book{theil1972statistical, 642 | author = "Theil, Henry", 643 | title = "Statistical decomposition analysis; with applications in the social and administrative sciences", 644 | year = "1972", 645 | publisher = "London North-Holland" 646 | } 647 | 648 | @article{reardon2004measures, 649 | author = "Reardon, Sean F and O’Sullivan, David", 650 | title = "Measures of spatial segregation", 651 | journal = "Sociological methodology", 652 | volume = "34", 653 | number = "1", 654 | pages = "121--162", 655 | year = "2004", 656 | publisher = "Wiley Online Library" 657 | } 658 | 659 | @article{folch2016centralization, 660 | author = "Folch, David C and Rey, Sergio J", 661 | title = "The centralization index: A measure of local spatial segregation", 662 | journal = "Papers in Regional Science", 663 | volume = "95", 664 | number = "3", 665 | pages = "555--576", 666 | year = "2016", 667 | publisher = "Wiley Online Library" 668 | } 669 | 670 | @article{Reardon2008, 671 | author = "{Sean F. Reardon} and {Stephen A. Matthews} and {David O'Sullivan} and {Barrett A. Lee} and {Glenn Firebaugh} and {Chad R. 
Farrell} and {Kendra Bischoff}", 672 | doi = "10.1353/dem.0.0019", 673 | issn = "1533-7790", 674 | journal = "Demography", 675 | number = "3", 676 | pages = "489--514", 677 | title = "{The Geographic Scale of Metropolitan Racial Segregation}", 678 | url = "http://link.springer.com/10.1353/dem.0.0019", 679 | volume = "45", 680 | year = "2008" 681 | } 682 | 683 | @article{osullivanwong2007surface, 684 | author = "O'Sullivan, David and Wong, David W. S.", 685 | title = "A Surface-Based Approach to Measuring Spatial Segregation", 686 | journal = "Geographical Analysis", 687 | volume = "39", 688 | number = "2", 689 | pages = "147-168", 690 | doi = "10.1111/j.1538-4632.2007.00699.x", 691 | url = "https://onlinelibrary.wiley.com/doi/abs/10.1111/j.1538-4632.2007.00699.x", 692 | year = "2007" 693 | } 694 | 695 | @article{Jiang_2013, 696 | Author = "Jiang, Bin", 697 | Doi = "10.1080/00330124.2012.700499", 698 | Issn = "1467-9272", 699 | Journal = "The Professional Geographer", 700 | Month = "Aug", 701 | Number = "3", 702 | Pages = "482--494", 703 | Publisher = "Informa UK Limited", 704 | Title = "Head/Tail Breaks: A New Classification Scheme for Data with a Heavy-Tailed Distribution", 705 | Url = "http://dx.doi.org/10.1080/00330124.2012.700499", 706 | Volume = "65", 707 | Year = "2013", 708 | Bdsk-Url-1 = "http://dx.doi.org/10.1080/00330124.2012.700499" 709 | } 710 | 711 | @article{Rey_2016, 712 | Author = "Rey, Sergio J. and Stephens, Philip and Laura, Jason", 713 | Doi = "10.1111/tgis.12236", 714 | Issn = "1361-1682", 715 | Journal = "Transactions in GIS", 716 | Month = "Oct", 717 | Number = "4", 718 | Pages = "796--810", 719 | Publisher = "Wiley", 720 | Title = "An evaluation of sampling and full enumeration strategies for {Fisher Jenks} classification in big data settings", 721 | Url = "http://dx.doi.org/10.1111/tgis.12236", 722 | Volume = "21", 723 | Year = "2016", 724 | Bdsk-Url-1 = "http://dx.doi.org/10.1111/tgis.12236" 725 | } 726 | 727 | @book{Slocum_2009, 728 | Author = "Slocum, Terry A. and McMaster, Robert B. and Kessler, Fritz C. 
and Howard, Hugh H.", 729 | Publisher = "Pearson Prentice Hall, Upper Saddle River", 730 | Title = "Thematic cartography and geovisualization", 731 | Year = "2009" 732 | } 733 | 734 | @Article{anselin2006geoda, 735 | author = "Anselin, Luc and Syabri, Ibnu and Kho, Youngihn", 736 | title = "GeoDa: an introduction to spatial data analysis", 737 | journal = "Geographical analysis", 738 | volume = "38", 739 | number = "1", 740 | pages = "5--22", 741 | year = "2006", 742 | publisher = "Wiley Online Library" 743 | } 744 | 745 | @article{Anselin1996a, 746 | Author = "Anselin, Luc and Bera, Anil K and Florax, Raymond and Yoon, Mann J", 747 | Date-Added = "2019-01-14 14:18:39 -0800", 748 | Date-Modified = "2019-01-14 14:18:45 -0800", 749 | Journal = "Regional science and urban economics", 750 | Number = "1", 751 | Pages = "77--104", 752 | Publisher = "Elsevier", 753 | Title = "Simple diagnostic tests for spatial dependence", 754 | Volume = "26", 755 | Year = "1996" 756 | } 757 | 758 | @article{Anselin1997, 759 | Author = "Anselin, Luc and Kelejian, Harry H", 760 | Date-Added = "2019-01-14 14:17:25 -0800", 761 | Date-Modified = "2019-01-14 14:17:31 -0800", 762 | Journal = "International Regional Science Review", 763 | Number = "1-2", 764 | Pages = "153--182", 765 | Publisher = "Sage Publications Sage CA: Thousand Oaks, CA", 766 | Title = "Testing for spatial error autocorrelation in the presence of endogenous regressors", 767 | Volume = "20", 768 | Year = "1997" 769 | } 770 | 771 | @article{White1980, 772 | Author = "White, Halbert", 773 | Date-Added = "2019-01-14 14:15:59 -0800", 774 | Date-Modified = "2019-01-14 14:16:08 -0800", 775 | Journal = "Econometrica: Journal of the Econometric Society", 776 | Pages = "817--838", 777 | Publisher = "JSTOR", 778 | Title = "A heteroskedasticity-consistent covariance matrix estimator and a direct test for heteroskedasticity", 779 | Year = "1980" 780 | } 781 | 782 | @article{Schwarz1978, 783 | Author = "Schwarz, Gideon and others", 784 | Date-Added = "2019-01-14 14:15:10 -0800", 785 | Date-Modified = "2019-01-14 14:15:15 -0800", 786 | Journal = "The annals of statistics", 787 | Number = "2", 788 | Pages = "461--464", 789 | Publisher = "Institute of Mathematical Statistics", 790 | Title = "Estimating the dimension of a model", 791 | Volume = "6", 792 | Year = "1978" 793 | } 794 | 795 | @article{Koenker1982, 796 | Author = "Koenker, Roger and Bassett Jr, Gilbert", 797 | Date-Added = "2019-01-14 14:14:06 -0800", 798 | Date-Modified = "2019-01-14 14:14:17 -0800", 799 | Journal = "Econometrica: Journal of the Econometric Society", 800 | Pages = "43--61", 801 | Publisher = "JSTOR", 802 | Title = "Robust tests for heteroscedasticity based on regression quantiles", 803 | Year = "1982" 804 | } 805 | 806 | @article{Jarque1980, 807 | Author = "Jarque, Carlos M and Bera, Anil K", 808 | Date-Added = "2019-01-14 14:13:04 -0800", 809 | Date-Modified = "2019-01-14 14:13:10 -0800", 810 | Journal = "Economics letters", 811 | Number = "3", 812 | Pages = "255--259", 813 | Publisher = "Elsevier", 814 | Title = "Efficient tests for normality, homoscedasticity and serial independence of regression residuals", 815 | Volume = "6", 816 | Year = "1980" 817 | } 818 | 819 | @article{Breusch1979, 820 | Author = "Breusch, Trevor S and Pagan, Adrian R", 821 | Date-Added = "2019-01-14 14:12:25 -0800", 822 | Date-Modified = "2019-01-14 14:12:38 -0800", 823 | Journal = "Econometrica: Journal of the Econometric Society", 824 | Pages = "1287--1294", 825 | Publisher = "JSTOR", 826 | Title = "A 
simple test for heteroscedasticity and random coefficient variation", 827 | Year = "1979" 828 | } 829 | 830 | @article{Akaike1974, 831 | Author = "Akaike, Hirotugu", 832 | Date-Added = "2019-01-14 14:11:32 -0800", 833 | Date-Modified = "2019-01-14 14:11:39 -0800", 834 | Journal = "IEEE transactions on automatic control", 835 | Number = "6", 836 | Pages = "716--723", 837 | Publisher = "Ieee", 838 | Title = "A new look at the statistical model identification", 839 | Volume = "19", 840 | Year = "1974" 841 | } 842 | 843 | @book{Anselin1988, 844 | Author = "Anselin, Luc", 845 | Address = "Dordrecht", 846 | Keywords = "spatial econometrics", 847 | Publisher = "{Kluwer}", 848 | Title = "Spatial {{Econometrics}}: {{Methods}} and {{Models}}", 849 | Year = "1988" 850 | } 851 | 852 | @techreport{Anselin2011, 853 | Author = "Anselin, Luc", 854 | Institution = "{GeoDa} Center for Geospatial Analysis and Computation", 855 | Title = "{GMM} Estimation of Spatial Error Autocorrelation with and without Heteroskedasticity", 856 | Year = "2011" 857 | } 858 | 859 | @article{Arraiz2010, 860 | Author = "Arraiz, Irani and Drukker, David M. and Kelejian, Harry H. and Prucha, Ingmar R.", 861 | Doi = "10.1111/j.1467-9787.2009.00618.x", 862 | Issn = "1467-9787", 863 | Journal = "Journal of Regional Science", 864 | Keywords = "spatial econometrics,sigma convergence", 865 | Number = "2", 866 | Pages = "592--614", 867 | Title = "A Spatial {{Cliff}}-{{Ord}}-Type Model with Heteroskedastic Innovations: {{Small}} and Large Sample Results", 868 | Volume = "50", 869 | Year = "2010", 870 | Bdsk-Url-1 = "https://doi.org/10.1111/j.1467-9787.2009.00618.x" 871 | } 872 | 873 | @article{Drukker2013, 874 | Author = "Drukker, David M and Egger, Peter and Prucha, Ingmar R", 875 | Journal = "Econometric Reviews", 876 | Number = "5-6", 877 | Pages = "686--733", 878 | Title = "On Two-Step Estimation of a Spatial Autoregressive Model with Autoregressive Disturbances and Endogenous Regressors", 879 | Volume = "32", 880 | Year = "2013" 881 | } 882 | 883 | @article{Kelejian1998, 884 | Author = "Kelejian, Harry H and Prucha, Ingmar R", 885 | Journal = "J. Real Estate Fin. Econ.", 886 | Number = "1", 887 | Pages = "99--121", 888 | Title = "A Generalized Spatial Two-Stage Least Squares Procedure for Estimating a Spatial Autoregressive Model with Autoregressive Disturbances", 889 | Volume = "17", 890 | Year = "1998" 891 | } 892 | 893 | @article{Kelejian1999, 894 | Author = "Kelejian, H H and Prucha, I R", 895 | Journal = "Int. Econ. 
Rev.", 896 | Pages = "509--534", 897 | Title = "A Generalized Moments Estimator for the Autoregressive Parameter in a Spatial Model", 898 | Volume = "40", 899 | Year = "1999" 900 | } 901 | 902 | @book{Greene2003, 903 | Author = "Greene, William H", 904 | Publisher = "Pearson Education India", 905 | Title = "Econometric analysis", 906 | Year = "2003" 907 | } 908 | 909 | @book{Belsley1980, 910 | Author = "Belsley, David A and Kuh, Edwin and Welsch, Roy E", 911 | Publisher = "John Wiley \\& Sons", 912 | Title = "Regression diagnostics: Identifying influential data and sources of collinearity", 913 | Volume = "571", 914 | Year = "2005" 915 | } 916 | 917 | @article{yu:2019, 918 | Author = "Yu, Hanchen and Fotheringham, Alexander Stewart and Li, Ziqi and Oshan, Taylor and Kang, Wei and Wolf, Levi John", 919 | Abstract = "A recent paper expands the well-known geographically weighted regression (GWR) framework significantly by allowing the bandwidth or smoothing factor in GWR to be derived separately for each covariate in the model---a framework referred to as multiscale GWR (MGWR). However, one limitation of the MGWR framework is that, until now, no inference about the local parameter estimates was possible. Formally, the so-called ``hat matrix,'' which projects the observed response vector into the predicted response vector, was available in GWR but not in MGWR. This paper addresses this limitation by reframing GWR as a Generalized Additive Model, extending this framework to MGWR and then deriving standard errors for the local parameters in MGWR. In addition, we also demonstrate how the effective number of parameters can be obtained for the overall fit of an MGWR model and for each of the covariates within the model. This statistic is essential for comparing model fit between MGWR, GWR, and traditional global models, as well as for adjusting multiple hypothesis tests. 
We demonstrate these advances to the MGWR framework with both a simulated data set and a real-world data set and provide a link to new software for MGWR (MGWR1.0) which includes the novel inferential framework for MGWR described here.", 920 | Date-Added = "2019-06-29 12:32:28 -0700", 921 | Date-Modified = "2019-06-29 12:34:19 -0700", 922 | Doi = "10.1111/gean.12189", 923 | Eprint = "https://onlinelibrary.wiley.com/doi/pdf/10.1111/gean.12189", 924 | Journal = "Geographical Analysis", 925 | Number = "0", 926 | Title = "Inference in Multiscale Geographically Weighted Regression", 927 | Url = "https://onlinelibrary.wiley.com/doi/abs/10.1111/gean.12189", 928 | Volume = "0", 929 | Year = "2019", 930 | Bdsk-File-1 = "YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxBXLi4vLi4vLi4vLi4vLi4vcGFwZXJzL1l1L0luZmVyZW5jZSBpbiBNdWx0aXNjYWxlIEdlb2dyYXBoaWNhbGx5IFdlaWdodGVkIFJlZ3Jlc3Npb24ucGRmTxECHgAAAAACHgACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAAAAAAEJEAAH/////H0luZmVyZW5jZSBpbiBNdWx0aSNGRkZGRkZGRi5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////8AAAAAAAAAAAAAAAAABQADAAAKIGN1AAAAAAAAAAAAAAAAAAJZdQACAGUvOlVzZXJzOndlaWthbmc6R29vZ2xlIERyaXZlOnBhcGVyczpZdTpJbmZlcmVuY2UgaW4gTXVsdGlzY2FsZSBHZW9ncmFwaGljYWxseSBXZWlnaHRlZCBSZWdyZXNzaW9uLnBkZgAADgB+AD4ASQBuAGYAZQByAGUAbgBjAGUAIABpAG4AIABNAHUAbAB0AGkAcwBjAGEAbABlACAARwBlAG8AZwByAGEAcABoAGkAYwBhAGwAbAB5ACAAVwBlAGkAZwBoAHQAZQBkACAAUgBlAGcAcgBlAHMAcwBpAG8AbgAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAY1VzZXJzL3dlaWthbmcvR29vZ2xlIERyaXZlL3BhcGVycy9ZdS9JbmZlcmVuY2UgaW4gTXVsdGlzY2FsZSBHZW9ncmFwaGljYWxseSBXZWlnaHRlZCBSZWdyZXNzaW9uLnBkZgAAEwABLwAAFQACAA7//wAAAAgADQAaACQAfgAAAAAAAAIBAAAAAAAAAAUAAAAAAAAAAAAAAAAAAAKg", 931 | Bdsk-Url-1 = "https://doi.org/10.1111/gean.12189" 932 | } 933 | 934 | @article{harris_use_2010, 935 | Author = "Harris, P. and Fotheringham, A. S. and Crespo, R. and Charlton, M.", 936 | Abstract = "Increasingly, the geographically weighted regression (GWR) model is being used for spatial prediction rather than for inference. Our study compares GWR as a predictor to (a) its global counterpart of multiple linear regression (MLR); (b) traditional geostatistical models such as ordinary kriging (OK) and universal kriging (UK), with MLR as a mean component; and (c) hybrids, where kriging models are specified with GWR as a mean component. For this purpose, we test the performance of each model on data simulated with differing levels of spatial heterogeneity (with respect to data relationships in the mean process) and spatial autocorrelation (in the residual process). Our results demonstrate that kriging (in a UK form) should be the preferred predictor, reflecting its optimal statistical properties. However the GWR-kriging hybrids perform with merit and, as such, a predictor of this form may provide a worthy alternative to UK for particular (non-stationary relationship) situations when UK models cannot be reliably calibrated. GWR predictors tend to perform more poorly than their more complex GWR-kriging counterparts, but both GWR-based models are useful in that they provide extra information on the spatial processes generating the data that are being predicted.", 937 | Date-Added = "2018-09-26 11:37:20 -0700", 938 | Date-Modified = "2018-09-26 11:37:20 -0700", 939 | Doi = "10.1007/s11004-010-9284-7", 940 | File = "Full Text PDF:/Users/toshan/Library/Application Support/Zotero/Profiles/hc1axs85.default/zotero/storage/TI8VUNW5/Harris et al. 
- 2010 - The Use of Geographically Weighted Regression for .pdf:application/pdf;Snapshot:/Users/toshan/Library/Application Support/Zotero/Profiles/hc1axs85.default/zotero/storage/BD5EGNCN/10.html:text/html", 941 | Issn = "1874-8961, 1874-8953", 942 | Journal = "Mathematical Geosciences", 943 | Keywords = "Geotechnical Engineering, GWR, Hydrogeology, Kriging, Mathematical Applications in Earth Sciences, Relationship heterogeneity, Relationship nonstationarity, Spatial interpolation, Statistics for Engineering, Physics, Computer Science, Chemistry and Earth Sciences", 944 | Language = "en", 945 | Month = "June", 946 | Number = "6", 947 | Pages = "657--680", 948 | Shorttitle = "The {Use} of {Geographically} {Weighted} {Regression} for {Spatial} {Prediction}", 949 | Title = "The {Use} of {Geographically} {Weighted} {Regression} for {Spatial} {Prediction}: {An} {Evaluation} of {Models} {Using} {Simulated} {Data} {Sets}", 950 | Url = "http://link.springer.com/article/10.1007/s11004-010-9284-7", 951 | Urldate = "2015-12-14", 952 | Volume = "42", 953 | Year = "2010", 954 | Bdsk-Url-1 = "http://link.springer.com/article/10.1007/s11004-010-9284-7", 955 | Bdsk-Url-2 = "https://doi.org/10.1007/s11004-010-9284-7" 956 | } 957 | 958 | @book{belsey_regression_1980, 959 | Author = "Belsey, D. A. and Kuh, E. and Welsch, R. E.", 960 | Address = "New York", 961 | Date-Added = "2018-09-26 11:37:05 -0700", 962 | Date-Modified = "2018-09-26 11:37:05 -0700", 963 | Publisher = "Wiley", 964 | Title = "Regression {Diagnostics}: {Identifying} {Influential} {Data} and {Sources} of {Collinearity}", 965 | Year = "1980" 966 | } 967 | 968 | @book{fotheringham_geographically_2002, 969 | Author = "Fotheringham, A. Stewart and Brunsdon, Chris and Charlton, Martin", 970 | Abstract = "Geographical Weighted Regression (GWR) is a new local modelling technique for analysing spatial analysis. This technique allows local as opposed to global models of relationships to be measured and mapped. This is the first and only book on this technique, offering comprehensive coverage on this new 'hot' topic in spatial analysis. * Provides step-by-step examples of how to use the GWR model using data sets and examples on issues such as house price determinants, educational attainment levels and school performance statistics * Contains a broad discussion of and basic concepts on GWR through to ideas on statistical inference for GWR models * uniquely features accompanying author-written software that allows users to undertake sophisticated and complex forms of GWR within a user-friendly, Windows-based, front-end (see book for details).", 971 | Date-Added = "2018-09-26 11:36:59 -0700", 972 | Date-Modified = "2018-09-26 11:36:59 -0700", 973 | Isbn = "978-0-470-85525-6", 974 | Keywords = "Science / Earth Sciences / Geography", 975 | Language = "en", 976 | Month = "February", 977 | Publisher = "John Wiley \\& Sons", 978 | Shorttitle = "Geographically {Weighted} {Regression}", 979 | Title = "Geographically {Weighted} {Regression}: {The} {Analysis} of {Spatially} {Varying} {Relationships}", 980 | Year = "2002" 981 | } 982 | 983 | @article{wheeler_diagnostic_2007, 984 | Author = "Wheeler, David C.", 985 | Abstract = "Geographically weighted regression (GWR) is drawing attention as a statistical method to estimate regression models with spatially varying relationships between explanatory variables and a response variable. 
Local collinearity in weighted explanatory variables leads to GWR coefficient estimates that are correlated locally and across space, have inflated variances, and are at times counterintuitive and contradictory in sign to the global regression estimates. The presence of local collinearity in the absence of global collinearity necessitates the use of diagnostic tools in the local regression model building process to highlight areas in which the results are not reliable for statistical inference. The method of ridge regression can also be integrated into the GWR framework to constrain and stabilize regression coefficients and lower prediction error. This paper presents numerous diagnostic tools and ridge regression in GWR and demonstrates the utility of these techniques with an example using the Columbus crime dataset.", 986 | Date-Added = "2018-09-26 11:36:47 -0700", 987 | Date-Modified = "2018-09-26 11:36:47 -0700", 988 | Doi = "10.1068/a38325", 989 | File = "Full Text PDF:/Users/toshan/Library/Application Support/Zotero/Profiles/hc1axs85.default/zotero/storage/AMG2X653/Wheeler - 2007 - Diagnostic Tools and a Remedial Method for Colline.pdf:application/pdf;Snapshot:/Users/toshan/Library/Application Support/Zotero/Profiles/hc1axs85.default/zotero/storage/DTMFIAI7/2464.html:text/html", 990 | Issn = "0308-518X, 1472-3409", 991 | Journal = "Environment and Planning A", 992 | Language = "en", 993 | Month = "October", 994 | Number = "10", 995 | Pages = "2464--2481", 996 | Title = "Diagnostic {Tools} and a {Remedial} {Method} for {Collinearity} in {Geographically} {Weighted} {Regression}", 997 | Url = "http://epn.sagepub.com/content/39/10/2464", 998 | Urldate = "2015-10-27", 999 | Volume = "39", 1000 | Year = "2007", 1001 | Bdsk-Url-1 = "http://epn.sagepub.com/content/39/10/2464", 1002 | Bdsk-Url-2 = "https://doi.org/10.1068/a38325" 1003 | } 1004 | 1005 | @article{oshan_comparison_2017, 1006 | Author = "Oshan, Taylor M. and Fotheringham, A. 
Stewart", 1007 | Date-Added = "2018-09-26 11:36:35 -0700", 1008 | Date-Modified = "2018-09-26 11:36:35 -0700", 1009 | Doi = "10.1111/gean.12133", 1010 | File = "Oshan\_et\_al-2017-Geographical\_Analysis.pdf:/Users/toshan/Library/Application Support/Zotero/Profiles/hc1axs85.default/zotero/storage/I5JFAIZQ/Oshan\_et\_al-2017-Geographical\_Analysis.pdf:application/pdf", 1011 | Issn = "00167363", 1012 | Journal = "Geographical Analysis", 1013 | Language = "en", 1014 | Month = "June", 1015 | Shorttitle = "A {Comparison} of {Spatially} {Varying} {Regression} {Coefficient} {Estimates} {Using} {Geographically} {Weighted} and {Spatial}-{Filter}-{Based} {Techniques}", 1016 | Title = "A {Comparison} of {Spatially} {Varying} {Regression} {Coefficient} {Estimates} {Using} {Geographically} {Weighted} and {Spatial}-{Filter}-{Based} {Techniques}: {A} {Comparison} of {Spatially} {Varying} {Regression}", 1017 | Url = "http://doi.wiley.com/10.1111/gean.12133", 1018 | Urldate = "2017-07-01", 1019 | Year = "2017", 1020 | Bdsk-Url-1 = "http://doi.wiley.com/10.1111/gean.12133", 1021 | Bdsk-Url-2 = "https://doi.org/10.1111/gean.12133" 1022 | } 1023 | 1024 | @article{nakaya2005geographically, 1025 | Author = "Nakaya, T and Fotheringham, AS and Brunsdon, Chris and Charlton, Martin", 1026 | Date-Added = "2018-09-26 11:04:53 -0700", 1027 | Date-Modified = "2018-09-26 11:04:53 -0700", 1028 | Journal = "Statistics in Medicine", 1029 | Number = "17", 1030 | Pages = "2695--2717", 1031 | Publisher = "Wiley Online Library", 1032 | Title = "Geographically weighted Poisson regression for disease association mapping", 1033 | Volume = "24", 1034 | Year = "2005", 1035 | Bdsk-File-1 = "YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxBfLi4vLi4vLi4vLi4vLi4vcGFwZXJzL2xpYnJhcnkvTmFrYXlhL0dlb2dyYXBoaWNhbGx5IHdlaWdodGVkIFBvaXNzb24gcmVncmVzc2lvbiBmb3IgZGlzZWFzZS5wZGZPEQIqAAAAAAIqAAIAAAxNYWNpbnRvc2ggSEQAAAAAAAAAAAAAAAAAAAAAAAAAQkQAAf////8fR2VvZ3JhcGhpY2FsbHkgd2VpI0ZGRkZGRkZGLnBkZgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////wAAAAAAAAAAAAAAAAAFAAQAAAogY3UAAAAAAAAAAAAAAAAABk5ha2F5YQACAG0vOlVzZXJzOndlaWthbmc6R29vZ2xlIERyaXZlOnBhcGVyczpsaWJyYXJ5Ok5ha2F5YTpHZW9ncmFwaGljYWxseSB3ZWlnaHRlZCBQb2lzc29uIHJlZ3Jlc3Npb24gZm9yIGRpc2Vhc2UucGRmAAAOAHYAOgBHAGUAbwBnAHIAYQBwAGgAaQBjAGEAbABsAHkAIAB3AGUAaQBnAGgAdABlAGQAIABQAG8AaQBzAHMAbwBuACAAcgBlAGcAcgBlAHMAcwBpAG8AbgAgAGYAbwByACAAZABpAHMAZQBhAHMAZQAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAa1VzZXJzL3dlaWthbmcvR29vZ2xlIERyaXZlL3BhcGVycy9saWJyYXJ5L05ha2F5YS9HZW9ncmFwaGljYWxseSB3ZWlnaHRlZCBQb2lzc29uIHJlZ3Jlc3Npb24gZm9yIGRpc2Vhc2UucGRmAAATAAEvAAAVAAIADv//AAAACAANABoAJACGAAAAAAAAAgEAAAAAAAAABQAAAAAAAAAAAAAAAAAAArQ=" 1036 | } 1037 | 1038 | @article{brunsdon2008geographically, 1039 | Author = "Brunsdon, Chris and Fotheringham, A Stewart and Charlton, Martin", 1040 | Date-Added = "2018-09-26 11:04:50 -0700", 1041 | Date-Modified = "2018-09-26 11:04:50 -0700", 1042 | Journal = "Encyclopedia of Geographic Information Science", 1043 | Pages = "558", 1044 | Publisher = "SAGE Publications", 1045 | Title = "Geographically weighted regression: a method for exploring spatial nonstationarity", 1046 | Year = "2008", 1047 | Bdsk-File-1 = 
"YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxBlLi4vLi4vLi4vLi4vLi4vcGFwZXJzL2xpYnJhcnkvQnJ1bnNkb24vR2VvZ3JhcGhpY2FsbHkgd2VpZ2h0ZWQgcmVncmVzc2lvbiBhIG1ldGhvZCBmb3IgZXhwbG9yaW5nMC5wZGZPEQJAAAAAAAJAAAIAAAxNYWNpbnRvc2ggSEQAAAAAAAAAAAAAAAAAAAAAAAAAQkQAAf////8fR2VvZ3JhcGhpY2FsbHkgd2VpI0ZGRkZGRkZGLnBkZgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////wAAAAAAAAAAAAAAAAAFAAQAAAogY3UAAAAAAAAAAAAAAAAACEJydW5zZG9uAAIAcy86VXNlcnM6d2Vpa2FuZzpHb29nbGUgRHJpdmU6cGFwZXJzOmxpYnJhcnk6QnJ1bnNkb246R2VvZ3JhcGhpY2FsbHkgd2VpZ2h0ZWQgcmVncmVzc2lvbiBhIG1ldGhvZCBmb3IgZXhwbG9yaW5nMC5wZGYAAA4AfgA+AEcAZQBvAGcAcgBhAHAAaABpAGMAYQBsAGwAeQAgAHcAZQBpAGcAaAB0AGUAZAAgAHIAZQBnAHIAZQBzAHMAaQBvAG4AIABhACAAbQBlAHQAaABvAGQAIABmAG8AcgAgAGUAeABwAGwAbwByAGkAbgBnADAALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASAHFVc2Vycy93ZWlrYW5nL0dvb2dsZSBEcml2ZS9wYXBlcnMvbGlicmFyeS9CcnVuc2Rvbi9HZW9ncmFwaGljYWxseSB3ZWlnaHRlZCByZWdyZXNzaW9uIGEgbWV0aG9kIGZvciBleHBsb3JpbmcwLnBkZgAAEwABLwAAFQACAA7//wAAAAgADQAaACQAjAAAAAAAAAIBAAAAAAAAAAUAAAAAAAAAAAAAAAAAAALQ" 1048 | } 1049 | 1050 | @article{Fotheringham2016, 1051 | Author = "Fotheringham, A. Stewart and Oshan, Taylor M.", 1052 | Abstract = "Geographically weighted regression (GWR) extends the familiar regression framework by estimating a set of parameters for any number of locations within a study area, rather than producing a single parameter estimate for each relationship specified in the model. Recent literature has suggested that GWR is highly susceptible to the effects of multicollinearity between explanatory variables and has proposed a series of local measures of multicollinearity as an indicator of potential problems. In this paper, we employ a controlled simulation to demonstrate that GWR is in fact very robust to the effects of multicollinearity. 
Consequently, the contention that GWR is highly susceptible to multicollinearity issues needs rethinking.", 1053 | Date-Added = "2018-09-26 11:04:45 -0700", 1054 | Date-Modified = "2018-09-26 11:04:45 -0700", 1055 | Doi = "10.1007/s10109-016-0239-5", 1056 | Issn = "1435-5949", 1057 | Journal = "Journal of Geographical Systems", 1058 | Keywords = "GWR", 1059 | Number = "4", 1060 | Pages = "303--329", 1061 | Title = "Geographically weighted regression and multicollinearity: dispelling the myth", 1062 | Url = "http://dx.doi.org/10.1007/s10109-016-0239-5", 1063 | Volume = "18", 1064 | Year = "2016", 1065 | Bdsk-File-1 = "YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxArLi4vLi4vLi4vLi4vLi4vcGFwZXJzL0ZvdGhlcmluZ2hhbS8yMDE2LnBkZk8RAWQAAAAAAWQAAgAADE1hY2ludG9zaCBIRAAAAAAAAAAAAAAAAAAAAAAAAABCRAAB/////wgyMDE2LnBkZgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////AAAAAAAAAAAAAAAAAAUAAwAACiBjdQAAAAAAAAAAAAAAAAAMRm90aGVyaW5naGFtAAIAOS86VXNlcnM6d2Vpa2FuZzpHb29nbGUgRHJpdmU6cGFwZXJzOkZvdGhlcmluZ2hhbToyMDE2LnBkZgAADgASAAgAMgAwADEANgAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAN1VzZXJzL3dlaWthbmcvR29vZ2xlIERyaXZlL3BhcGVycy9Gb3RoZXJpbmdoYW0vMjAxNi5wZGYAABMAAS8AABUAAgAO//8AAAAIAA0AGgAkAFIAAAAAAAACAQAAAAAAAAAFAAAAAAAAAAAAAAAAAAABug==", 1066 | Bdsk-Url-1 = "http://dx.doi.org/10.1007/s10109-016-0239-5" 1067 | } 1068 | 1069 | @article{fotheringham1999local, 1070 | Author = "Fotheringham, A Stewart and Brunsdon, Chris", 1071 | Date-Added = "2018-09-26 11:04:32 -0700", 1072 | Date-Modified = "2018-09-26 11:04:32 -0700", 1073 | Journal = "Geographical Analysis", 1074 | Number = "4", 1075 | Pages = "340--358", 1076 | Publisher = "Wiley Online Library", 1077 | Title = "Local forms of spatial analysis", 1078 | Volume = "31", 1079 | Year = "1999", 1080 | Bdsk-File-1 = "YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxBOLi4vLi4vLi4vLi4vLi4vcGFwZXJzL2xpYnJhcnkvRm90aGVyaW5naGFtL0xvY2FsIGZvcm1zIG9mIHNwYXRpYWwgYW5hbHlzaXMucGRmTxEB3gAAAAAB3gACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAAAAAAEJEAAH/////H0xvY2FsIGZvcm1zIG9mIHNwYSNGRkZGRkZGRi5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////8AAAAAAAAAAAAAAAAABQAEAAAKIGN1AAAAAAAAAAAAAAAAAAxGb3RoZXJpbmdoYW0AAgBcLzpVc2Vyczp3ZWlrYW5nOkdvb2dsZSBEcml2ZTpwYXBlcnM6bGlicmFyeTpGb3RoZXJpbmdoYW06TG9jYWwgZm9ybXMgb2Ygc3BhdGlhbCBhbmFseXNpcy5wZGYADgBIACMATABvAGMAYQBsACAAZgBvAHIAbQBzACAAbwBmACAAcwBwAGEAdABpAGEAbAAgAGEAbgBhAGwAeQBzAGkAcwAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAWlVzZXJzL3dlaWthbmcvR29vZ2xlIERyaXZlL3BhcGVycy9saWJyYXJ5L0ZvdGhlcmluZ2hhbS9Mb2NhbCBmb3JtcyBvZiBzcGF0aWFsIGFuYWx5c2lzLnBkZgATAAEvAAAVAAIADv//AAAACAANABoAJAB1AAAAAAAAAgEAAAAAAAAABQAAAAAAAAAAAAAAAAAAAlc=" 1081 | } 1082 | 1083 | @article{Fotheringham:2017, 1084 | Author = "Fotheringham, A. 
Stewart and Yang, Wenbai and Kang, Wei", 1085 | Date-Added = "2018-09-26 11:04:20 -0700", 1086 | Date-Modified = "2018-09-26 11:04:20 -0700", 1087 | Doi = "10.1080/24694452.2017.1352480", 1088 | Eprint = "http://dx.doi.org/10.1080/24694452.2017.1352480", 1089 | Journal = "Annals of the American Association of Geographers", 1090 | Number = "6", 1091 | Pages = "1247-1265", 1092 | Publisher = "Taylor \& Francis", 1093 | Title = "Multiscale Geographically Weighted Regression (MGWR)", 1094 | Url = "http://dx.doi.org/10.1080/24694452.2017.1352480", 1095 | Volume = "107", 1096 | Year = "2017", 1097 | Bdsk-File-1 = "YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxBbLi4vLi4vLi4vLi4vLi4vcGFwZXJzL0ZvdGhlcmluZ2hhbS9NdWx0aXNjYWxlIEdlb2dyYXBoaWNhbGx5IFdlaWdodGVkIFJlZ3Jlc3Npb24gKE1HV1IpLnBkZk8RAiQAAAAAAiQAAgAADE1hY2ludG9zaCBIRAAAAAAAAAAAAAAAAAAAAAAAAABCRAAB/////x9NdWx0aXNjYWxlIEdlb2dyYXAjRkZGRkZGRkYucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////AAAAAAAAAAAAAAAAAAUAAwAACiBjdQAAAAAAAAAAAAAAAAAMRm90aGVyaW5naGFtAAIAaS86VXNlcnM6d2Vpa2FuZzpHb29nbGUgRHJpdmU6cGFwZXJzOkZvdGhlcmluZ2hhbTpNdWx0aXNjYWxlIEdlb2dyYXBoaWNhbGx5IFdlaWdodGVkIFJlZ3Jlc3Npb24gKE1HV1IpLnBkZgAADgByADgATQB1AGwAdABpAHMAYwBhAGwAZQAgAEcAZQBvAGcAcgBhAHAAaABpAGMAYQBsAGwAeQAgAFcAZQBpAGcAaAB0AGUAZAAgAFIAZQBnAHIAZQBzAHMAaQBvAG4AIAAoAE0ARwBXAFIAKQAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAZ1VzZXJzL3dlaWthbmcvR29vZ2xlIERyaXZlL3BhcGVycy9Gb3RoZXJpbmdoYW0vTXVsdGlzY2FsZSBHZW9ncmFwaGljYWxseSBXZWlnaHRlZCBSZWdyZXNzaW9uIChNR1dSKS5wZGYAABMAAS8AABUAAgAO//8AAAAIAA0AGgAkAIIAAAAAAAACAQAAAAAAAAAFAAAAAAAAAAAAAAAAAAACqg==" 1098 | } 1099 | 1100 | @article{brunsdon:1999, 1101 | Author = "Brunsdon, Chris and Fotheringham, A Stewart and Charlton, Martin", 1102 | Date-Added = "2018-09-26 11:04:11 -0700", 1103 | Date-Modified = "2018-09-26 11:04:11 -0700", 1104 | Journal = "Journal of Regional Science", 1105 | Number = "3", 1106 | Pages = "497--524", 1107 | Publisher = "Wiley Online Library", 1108 | Title = "Some notes on parametric significance tests for geographically weighted regression", 1109 | Volume = "39", 1110 | Year = "1999", 1111 | Bdsk-File-1 = "YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxBWLi4vLi4vLi4vLi4vLi4vcGFwZXJzL2xpYnJhcnkvQnJ1bnNkb24vU29tZSBub3RlcyBvbiBwYXJhbWV0cmljIHNpZ25pZmljYW5jZSB0ZXN0cy5wZGZPEQICAAAAAAICAAIAAAxNYWNpbnRvc2ggSEQAAAAAAAAAAAAAAAAAAAAAAAAAQkQAAf////8fU29tZSBub3RlcyBvbiBwYXJhI0ZGRkZGRkZGLnBkZgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////wAAAAAAAAAAAAAAAAAFAAQAAAogY3UAAAAAAAAAAAAAAAAACEJydW5zZG9uAAIAZC86VXNlcnM6d2Vpa2FuZzpHb29nbGUgRHJpdmU6cGFwZXJzOmxpYnJhcnk6QnJ1bnNkb246U29tZSBub3RlcyBvbiBwYXJhbWV0cmljIHNpZ25pZmljYW5jZSB0ZXN0cy5wZGYADgBgAC8AUwBvAG0AZQAgAG4AbwB0AGUAcwAgAG8AbgAgAHAAYQByAGEAbQBlAHQAcgBpAGMAIABzAGkAZwBuAGkAZgBpAGMAYQBuAGMAZQAgAHQAZQBzAHQAcwAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAYlVzZXJzL3dlaWthbmcvR29vZ2xlIERyaXZlL3BhcGVycy9saWJyYXJ5L0JydW5zZG9uL1NvbWUgbm90ZXMgb24gcGFyYW1ldHJpYyBzaWduaWZpY2FuY2UgdGVzdHMucGRmABMAAS8AABUAAgAO//8AAAAIAA0AGgAkAH0AAAAAAAACAQAAAAAAAAAFAAAAAAAAAAAAAAAAAAACgw==" 1112 | } 1113 | 1114 | @article{Silva:2016, 1115 | Author = "da Silva, Alan Ricardo and Fotheringham, A. Stewart", 1116 | Abstract = "This article describes the problem of multiple testing within a Geographically Weighted Regression framework and presents a possible solution to the problem which is based on a family-wise error rate for dependent processes. 
We compare the solution presented here to other solutions such as the Bonferroni correction and the Byrne, Charlton, and Fotheringham proposal which is based on the Benjamini and Hochberg False Discovery Rate. We conclude that our proposed correction is superior to others and that generally some correction in the conventional t-test is necessary to avoid false positives in GWR.", 1117 | Date-Added = "2018-09-26 11:02:38 -0700", 1118 | Date-Modified = "2018-09-26 11:03:28 -0700", 1119 | Doi = "10.1111/gean.12084", 1120 | Eprint = "https://onlinelibrary.wiley.com/doi/pdf/10.1111/gean.12084", 1121 | Journal = "Geographical Analysis", 1122 | Number = "3", 1123 | Pages = "233-247", 1124 | Title = "The Multiple Testing Issue in Geographically Weighted Regression", 1125 | Url = "https://onlinelibrary.wiley.com/doi/abs/10.1111/gean.12084", 1126 | Volume = "48", 1127 | Year = "2016", 1128 | Bdsk-Url-1 = "https://onlinelibrary.wiley.com/doi/abs/10.1111/gean.12084", 1129 | Bdsk-Url-2 = "https://doi.org/10.1111/gean.12084" 1130 | } 1131 | -------------------------------------------------------------------------------- /docs/api.rst: -------------------------------------------------------------------------------- 1 | .. _api_ref: 2 | 3 | ============= 4 | API Reference 5 | ============= 6 | 7 | 8 | :mod:`pysal.lib`: PySAL Core 9 | ============================= 10 | 11 | * `cg: Computational Geometry `_ 12 | * `examples: Example datasets `_ 13 | * `graph: Graph class encoding spatial weights matrices `_ 14 | * `io: Input-Output `_ 15 | * `weights: Spatial Weights `_ 16 | 17 | :mod:`pysal.explore`: Exploratory Spatial Data Analysis 18 | ======================================================= 19 | * `esda: Spatial Autocorrelation Analysis `_ 20 | * `giddy: Geospatial Distribution Dynamics `_ 21 | * `inequality: Spatial Inequality Analysis `_ 22 | * `momepy: Urban Morphology Measuring Toolkit `_ 23 | * `pointpats: Planar Point Pattern Analysis `_ 24 | * `segregation: Segregation Analysis `_ 25 | * `spaghetti: Spatial Analysis on Networks `_ 26 | 27 | 28 | :mod:`pysal.model`: Spatial Statistical Models 29 | ============================================== 30 | * `access: Spatial Accessibility `_ 31 | * `mgwr: Multiscale Geographically Weighted Regression `_ 32 | * `spglm: Sparse Generalized Linear Models `_ 33 | * `spint: Spatial Interaction Modeling `_ 34 | * `spopt: Spatial Optimization `_ 35 | * `spreg: Spatial Regression and Econometrics `_ 36 | * `tobler: Areal Interpolation and Dasymetric Mapping `_ 37 | 38 | :mod:`pysal.viz`: Geovisualization 39 | ================================== 40 | * `mapclassify: Choropleth Map Classification Schemes `_ 41 | * `splot: Lightweight Visualization Interface for PySAL Analytics `_ 42 | 43 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # pysal documentation build configuration file, created by 4 | # sphinx-quickstart on Wed Jun 6 15:54:22 2018. 5 | # 6 | # This file is execfile()d with the current directory set to its 7 | # containing dir. 8 | # 9 | # Note that not all possible configuration values are present in this 10 | # autogenerated file. 11 | # 12 | # All configuration values have a default; values that are commented out 13 | # serve to show the default. 14 | 15 | # If extensions (or modules to document with autodoc) are in another directory, 16 | # add these directories to sys.path here. 
If the directory is relative to the 17 | # documentation root, use os.path.abspath to make it absolute, like shown here. 18 | # 19 | import sys, os 20 | import pysal 21 | from datetime import datetime 22 | 23 | 24 | sys.path.insert(0, os.path.abspath("../")) 25 | 26 | # import your package to obtain the version info to display on the docs website 27 | import pysal 28 | 29 | 30 | # -- General configuration ------------------------------------------------ 31 | 32 | # If your documentation needs a minimal Sphinx version, state it here. 33 | # 34 | # needs_sphinx = '1.0' 35 | # Add any Sphinx extension module names here, as strings. They can be 36 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 37 | # ones. 38 | extensions = [ #'sphinx_gallery.gen_gallery', 39 | "sphinx.ext.autodoc", 40 | "sphinx.ext.autosummary", 41 | "sphinx.ext.viewcode", 42 | "sphinxcontrib.bibtex", 43 | "sphinx.ext.mathjax", 44 | "sphinx.ext.doctest", 45 | "sphinx_design", 46 | "sphinx.ext.intersphinx", 47 | "numpydoc", 48 | #'sphinx.ext.napoleon', 49 | "matplotlib.sphinxext.plot_directive", 50 | ] 51 | 52 | bibtex_bibfiles = ["_static/references.bib"] 53 | 54 | intersphinx_mapping = { 55 | 'libpysal': ('https://pysal.org/libpysal/', (None, 'libpysal-inv.txt')) 56 | } 57 | 58 | # Add any paths that contain templates here, relative to this directory. 59 | templates_path = ["_templates"] 60 | 61 | # The suffix(es) of source filenames. 62 | # You can specify multiple suffix as a list of string: 63 | # 64 | # source_suffix = ['.rst', '.md'] 65 | source_suffix = ".rst" 66 | 67 | # The master toctree document. 68 | master_doc = "index" 69 | 70 | # General information about the project. 71 | project = "pysal" # string of your project name, for example, 'giddy' 72 | 73 | copyright = f"2007 - {datetime.now().year}, pysal developers (BSD License)" 74 | 75 | author = "pysal developers" 76 | 77 | # The version info for the project you're documenting, acts as replacement for 78 | # |version| and |release|, also used in various other places throughout the 79 | # built documents. 80 | # 81 | # The full version. 82 | version = pysal.__version__ # should replace it with your pysal 83 | release = pysal.__version__ # should replace it with your pysal 84 | 85 | # The language for content autogenerated by Sphinx. Refer to documentation 86 | # for a list of supported languages. 87 | # 88 | # This is also used if you do content translation via gettext catalogs. 89 | # Usually you set "language" from the command line for these cases. 90 | language = None 91 | 92 | # List of patterns, relative to source directory, that match files and 93 | # directories to ignore when looking for source files. 94 | # This patterns also effect to html_static_path and html_extra_path 95 | exclude_patterns = ["_build", "Thumbs.db", ".DS_Store", "tests/*"] 96 | 97 | # The name of the Pygments (syntax highlighting) style to use. 98 | pygments_style = "sphinx" 99 | 100 | # If true, `todo` and `todoList` produce output, else they produce nothing. 101 | todo_include_todos = False 102 | 103 | # -- Options for HTML output ---------------------------------------------- 104 | 105 | # The theme to use for HTML and HTML Help pages. See the documentation for 106 | # a list of builtin themes. 107 | # 108 | # html_theme = 'alabaster' 109 | html_theme = "pydata_sphinx_theme" 110 | html_title = "%s v%s Manual" % (project, version) 111 | 112 | # (Optional) Logo of your package. Should be small enough to fit the navbar (ideally 24x24). 
113 | # Path should be relative to the ``_static`` files directory. 114 | # html_logo = "_static/images/package_logo.jpg" 115 | 116 | # (Optional) PySAL favicon 117 | html_favicon = "_static/images/pysal_favicon.ico" 118 | html_logo = '_static/images/pysal_logo.png' 119 | icon_links_label = 'pysal' 120 | # Theme options are theme-specific and customize the look and feel of a theme 121 | # further. For a list of options available for each theme, see the 122 | # documentation. 123 | # 124 | html_theme_options = { 125 | # Navigation bar title. (Default: ``project`` value) 126 | "navbar_title": "pysal", # string of your project name, for example, 'giddy' 127 | # Render the next and previous page links in navbar. (Default: true) 128 | "navbar_sidebarrel": False, 129 | # Render the current pages TOC in the navbar. (Default: true) 130 | #'navbar_pagenav': True, 131 | #'navbar_pagenav': False, 132 | # No sidebar 133 | "nosidebar": True, 134 | # Tab name for the current pages TOC. (Default: "Page") 135 | #'navbar_pagenav_name': "Page", 136 | # Global TOC depth for "site" navbar tab. (Default: 1) 137 | # Switching to -1 shows all levels. 138 | "globaltoc_depth": 2, 139 | # Include hidden TOCs in Site navbar? 140 | # 141 | # Note: If this is "false", you cannot have mixed ``:hidden:`` and 142 | # non-hidden ``toctree`` directives in the same page, or else the build 143 | # will break. 144 | # 145 | # Values: "true" (default) or "false" 146 | "globaltoc_includehidden": "true", 147 | # HTML navbar class (Default: "navbar") to attach to
element. 148 | # For black navbar, do "navbar navbar-inverse" 149 | #'navbar_class': "navbar navbar-inverse", 150 | # Fix navigation bar to top of page? 151 | # Values: "true" (default) or "false" 152 | "navbar_fixed_top": "true", 153 | # Location of link to source. 154 | # Options are "nav" (default), "footer" or anything else to exclude. 155 | "source_link_position": "footer", 156 | # Bootswatch (http://bootswatch.com/) theme. 157 | # 158 | # Options are nothing (default) or the name of a valid theme 159 | # such as "amelia" or "cosmo", "yeti", "flatly". 160 | "bootswatch_theme": "yeti", 161 | # Choose Bootstrap version. 162 | # Values: "3" (default) or "2" (in quotes) 163 | "bootstrap_version": "3", 164 | # Navigation bar menu 165 | "navbar_links": [ 166 | ("Installation", "installation"), 167 | ("API", "api"), 168 | ("References", "references"), 169 | ("Release Notes", "releases") 170 | ], 171 | "logo": { 172 | "text": "PySAL", 173 | "image_light": "_static/images/pysal_logo_light.png", 174 | "image_dark": "_static/images/pysal_logo.png", 175 | } 176 | } 177 | 178 | # Add any paths that contain custom static files (such as style sheets) here, 179 | # relative to this directory. They are copied after the builtin static files, 180 | # so a file named "default.css" will overwrite the builtin "default.css". 181 | html_static_path = ["_static"] 182 | 183 | # Custom sidebar templates, maps document names to template names. 184 | # html_sidebars = {} 185 | #html_sidebars = {'sidebar': ['localtoc.html', 'sourcelink.html', 'searchbox.html']} 186 | 187 | 188 | html_sidebars = { 189 | "installation": [], 190 | "api": [], 191 | "references": [], 192 | } 193 | 194 | # -- Options for HTMLHelp output ------------------------------------------ 195 | 196 | # Output file base name for HTML help builder. 197 | htmlhelp_basename = "pysal" + "doc" 198 | 199 | 200 | # -- Options for LaTeX output --------------------------------------------- 201 | 202 | latex_elements = { 203 | # The paper size ('letterpaper' or 'a4paper'). 204 | # 205 | # 'papersize': 'letterpaper', 206 | # The font size ('10pt', '11pt' or '12pt'). 207 | # 208 | # 'pointsize': '10pt', 209 | # Additional stuff for the LaTeX preamble. 210 | # 211 | # 'preamble': '', 212 | # Latex figure (float) alignment 213 | # 214 | # 'figure_align': 'htbp', 215 | } 216 | 217 | # Grouping the document tree into LaTeX files. List of tuples 218 | # (source start file, target name, title, 219 | # author, documentclass [howto, manual, or own class]). 220 | latex_documents = [ 221 | (master_doc, "pysal.tex", u"pysal Documentation", u"pysal developers", "manual") 222 | ] 223 | 224 | 225 | # -- Options for manual page output --------------------------------------- 226 | 227 | # One entry per manual page. List of tuples 228 | # (source start file, name, description, authors, manual section). 229 | man_pages = [(master_doc, "pysal", u"pysal Documentation", [author], 1)] 230 | 231 | 232 | # -- Options for Texinfo output ------------------------------------------- 233 | 234 | # Grouping the document tree into Texinfo files. 
List of tuples 235 | # (source start file, target name, title, author, 236 | # dir menu entry, description, category) 237 | texinfo_documents = [ 238 | ( 239 | master_doc, 240 | "pysal", 241 | u"pysal Documentation", 242 | author, 243 | "pysal", 244 | "One line description of project.", 245 | "Miscellaneous", 246 | ) 247 | ] 248 | 249 | 250 | # ----------------------------------------------------------------------------- 251 | # Autosummary 252 | # ----------------------------------------------------------------------------- 253 | 254 | # Generate the API documentation when building 255 | autosummary_generate = True 256 | numpydoc_show_class_members = True 257 | class_members_toctree = True 258 | numpydoc_show_inherited_class_members = True 259 | numpydoc_use_plots = True 260 | 261 | # display the source code for Plot directive 262 | plot_include_source = True 263 | 264 | 265 | def setup(app): 266 | app.add_css_file("pysal-styles.css") 267 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. documentation master file 2 | 3 | PySAL: Python Spatial Analysis Library 4 | ====================================== 5 | PySAL is an open source 6 | cross-platform library for geospatial data science with an emphasis on 7 | geospatial vector data written in Python. 8 | 9 | .. raw:: html 10 | 11 | 45 | 46 | PySAL supports the development of 47 | high level applications for spatial analysis, such as 48 | 49 | - detection of spatial clusters, hot-spots, and outliers 50 | - construction of graphs from spatial data 51 | - spatial regression and statistical modeling on geographically 52 | embedded networks 53 | - spatial econometrics 54 | - exploratory spatio-temporal data analysis 55 | 56 | PySAL Components 57 | ================ 58 | 59 | - **explore** - modules to conduct exploratory analysis of spatial and spatio-temporal data, including statistical testing on points, networks, and 60 | polygonal lattices. Also includes methods for spatial inequality, distributional dynamics, and segregation. 61 | - **viz** - visualize patterns in spatial data to detect clusters, 62 | outliers, and hot-spots. 63 | - **model** - model spatial relationships in data with a variety of 64 | linear, generalized-linear, generalized-additive, and nonlinear 65 | models. 66 | - **lib** - solve a wide variety of computational geometry problems: 67 | 68 | - graph construction from polygonal lattices, lines, and points. 69 | - construction and interactive editing of spatial weights matrices 70 | & graphs 71 | - computation of alpha shapes, spatial indices, and 72 | spatial-topological relationships 73 | - reading and writing of sparse graph data, as well as pure python 74 | readers of spatial vector data. 75 | 76 | 77 | .. toctree:: 78 | :hidden: 79 | :maxdepth: 3 80 | :caption: Contents: 81 | 82 | Installation 83 | API 84 | References 85 | 86 | Details are available in the `PySAL api `_. 87 | 88 | For background information see :cite:`pysal2007, rey2014PythonSpatial, anselin2014ModernSpatial, rey2019pysal, fotheringham2017multiscale, fleischmann_2019, cortes2019OpensourceFramework, wolf2019GeosilhouettesGeographical, yu:2019, rey2020VisualAnalytics, Lumnitz2020, saxon2021OpenSoftware, rey_2021a, Gaboardi2021,rey2022PySALEcosystem, spopt2022, rey2023GeographicData`. 
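As a quick illustration of how these layers fit together, a minimal sketch (assuming the bundled ``columbus`` example dataset, with its ``CRIME`` attribute, and the required ``geopandas`` dependency are available) builds a contiguity graph with the ``lib`` layer and computes a global Moran's I with the ``explore`` layer::

    import geopandas

    from pysal.lib import examples, weights
    from pysal.explore import esda

    # read the classic Columbus example shipped with libpysal
    gdf = geopandas.read_file(examples.get_path("columbus.shp"))

    # queen-contiguity spatial weights, row-standardized
    w = weights.Queen.from_dataframe(gdf)
    w.transform = "r"

    # global spatial autocorrelation of the CRIME attribute
    mi = esda.Moran(gdf["CRIME"], w)
    print(mi.I, mi.p_sim)

Any of the subpackages used above can also be installed and imported on their own.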
89 | 90 | *********** 91 | Development 92 | *********** 93 | 94 | As of version 2.0.0, PySAL is now a collection of affiliated geographic 95 | data science packages. Changes to the code for any of the subpackages 96 | should be directed at the respective `upstream repositories `_ and not made 97 | here. Infrastructural changes for the meta-package, like those for 98 | tooling, building the package, and code standards, will be considered. 99 | 100 | 101 | PySAL development is hosted on github_. 102 | 103 | .. _github : https://github.com/pysal/PySAL 104 | 105 | 106 | 107 | Discussions of development occurs on the 108 | `developer list `_ 109 | as well as discord_. 110 | 111 | .. _discord : https://discord.gg/BxFTEPFFZn 112 | 113 | **************** 114 | Getting Involved 115 | **************** 116 | 117 | If you are interested in contributing to PySAL please see our 118 | `development guidelines `_. 119 | 120 | 121 | *********** 122 | Bug reports 123 | *********** 124 | 125 | To search for or report bugs, please see PySAL's issues_. 126 | 127 | .. _issues : http://github.com/pysal/pysal/issues 128 | 129 | 130 | *************** 131 | Citing PySAL 132 | *************** 133 | 134 | If you use PySAL in a scientific publication, we would appreciate citations to the following paper: 135 | 136 | `PySAL: A Python Library of Spatial Analytical Methods `_, *Rey, S.J. and L. Anselin*, Review of Regional Studies 37, 5-27 2007. 137 | 138 | Bibtex entry:: 139 | 140 | @Article{pysal2007, 141 | author={Rey, Sergio J. and Anselin, Luc}, 142 | title={{PySAL: A Python Library of Spatial Analytical Methods}}, 143 | journal={The Review of Regional Studies}, 144 | year=2007, 145 | volume={37}, 146 | number={1}, 147 | pages={5-27}, 148 | keywords={Open Source; Software; Spatial} 149 | } 150 | 151 | 152 | 153 | ******************* 154 | License information 155 | ******************* 156 | 157 | See the file "LICENSE.txt" for information on the history of this 158 | software, terms & conditions for usage, and a DISCLAIMER OF ALL 159 | WARRANTIES. 160 | 161 | 162 | .. _PySAL: https://github.com/pysal/pysal 163 | -------------------------------------------------------------------------------- /docs/installation.rst: -------------------------------------------------------------------------------- 1 | .. Installation 2 | 3 | Installation 4 | ============ 5 | 6 | 7 | You can install the meta package `pysal` which results in all the packages in 8 | the `pysal` ecosystem being installed. 9 | 10 | Alternatively, individual packages can be installed a la carte. 11 | 12 | 13 | Installing the pysal meta package 14 | --------------------------------- 15 | .. tab-set-code:: 16 | 17 | .. code-block:: pip 18 | 19 | pip install pysal 20 | 21 | .. code-block:: conda 22 | 23 | conda install --channel conda-forge pysal 24 | 25 | Installing individual pysal packages 26 | ------------------------------------ 27 | A similar approach can be used to select one, or a subset, of pysal packages 28 | without having to install the entire ecosystem. For example, to install `esda` 29 | and `mapclassify` 30 | 31 | .. tab-set-code:: 32 | 33 | .. code-block:: pip 34 | 35 | pip install esda mapclassify 36 | 37 | .. 
code-block:: conda 38 | 39 | conda install --channel conda-forge esda mapclassify 40 | 41 | 42 | 43 | Installing the development version 44 | ---------------------------------- 45 | 46 | You may want to use the newest features in the development 47 | version of PySAL on GitHub - `pysal/pysal`_ - which have not yet been incorporated 48 | in the PyPI release. You can install `pysal/pysal`_ 49 | by running the following from a command shell:: 50 | 51 | pip install git+https://github.com/pysal/pysal.git 52 | 53 | You can also `fork`_ the `pysal/pysal`_ repo and create a local clone of 54 | your fork. By making changes 55 | to your local clone and submitting a pull request to `pysal/PySAL`_, you can 56 | contribute to PySAL development. 57 | 58 | 59 | .. _dependencies: 60 | 61 | Dependencies 62 | ------------ 63 | 64 | Required 65 | ++++++++ 66 | - `geopandas`_ 67 | - `seaborn`_ 68 | - `descartes`_ 69 | - `palettable`_ 70 | - `scikit-learn`_ 71 | - `rtree`_ 72 | - `tqdm`_ 73 | - `statsmodels`_ 74 | - `deprecated`_ 75 | 76 | 77 | Optional 78 | ++++++++ 79 | - `urbanaccess`_ 80 | - `pandana`_ 81 | - `numba`_ 82 | - `numexpr`_ 83 | - `bokeh`_ 84 | 85 | 86 | 87 | 88 | Installing versions supporting Python 2 89 | --------------------------------------- 90 | 91 | Users requiring Python 2 support can install the legacy version of PySAL, 1.14.4, via pip:: 92 | 93 | pip install pysal==1.14.4.post2 94 | 95 | Note that this version is only receiving bug fixes. All new enhancements (post 2019-01) to PySAL are Python 3+ only, and are not available in 1.14.4. 96 | 97 | .. _3.7: https://docs.python.org/3.7/ 98 | .. _3.6: https://docs.python.org/3.6/ 99 | .. _Python Package Index: https://pypi.org/project/PySAL/ 100 | .. _pysal/PySAL: https://github.com/pysal/PySAL 101 | .. _conda: https://docs.conda.io/en/latest/ 102 | .. _Anaconda Distribution: https://docs.continuum.io/anaconda/ 103 | .. _fork: https://help.github.com/articles/fork-a-repo/ 104 | .. _geopandas: http://geopandas.org/install.html 105 | .. _seaborn: https://seaborn.pydata.org/installing.html 106 | .. _descartes: https://pypi.org/project/descartes/ 107 | .. _palettable: https://jiffyclub.github.io/palettable/ 108 | .. _scikit-learn: https://scikit-learn.org/stable/install.html 109 | .. _rtree: http://toblerity.org/rtree/install.html 110 | .. _tqdm: https://pypi.org/project/tqdm/ 111 | .. _statsmodels: https://www.statsmodels.org/stable/install.html 112 | .. _deprecated: https://pypi.org/project/Deprecated/ 113 | .. _urbanaccess: https://github.com/UDST/urbanaccess 114 | .. _pandana: https://pypi.org/project/pandana/ 115 | .. _numba: https://numba.pydata.org/numba-doc/dev/user/installing.html 116 | .. _numexpr: https://pypi.org/project/numexpr/ 117 | .. _bokeh: https://bokeh.pydata.org/en/latest/docs/installation.html 118 | -------------------------------------------------------------------------------- /docs/references.rst: -------------------------------------------------------------------------------- 1 | .. reference for the docs 2 | 3 | References 4 | ========== 5 | 6 | .. bibliography:: _static/references.bib 7 | :cited: 8 | -------------------------------------------------------------------------------- /docs/releases.rst: -------------------------------------------------------------------------------- 1 | PySAL Release Notes 2 | =================== 3 | 4 | This is the list of changes to PySAL for each release.
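Each release also pins minimum versions of the ecosystem packages. To see which versions are installed locally, a small sketch (assuming the full meta package is installed; it uses the ``versions`` helper defined in ``pysal/base.py``, and ``versions.released`` additionally expects the generated ``frozen`` module)::

    from pysal.base import versions

    # mapping of ecosystem package name to the locally installed version string
    for package, version in sorted(versions.installed.items()):
        print(f"{package:>12} {version}")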
5 | 6 | - `PySAL 2.6.0 `_ 7 | - `PySAL 2.5.0 `_ 8 | - `PySAL 2.4.0 `_ 9 | - `PySAL 2.3.0 `_ 10 | - `PySAL 2.2.0 `_ 11 | - `PySAL 2.1.0 `_ 12 | - `PySAL 2.0.0 `_ 13 | - `PySAL 1.14.4 `_ 14 | - `PySAL 1.14.3 `_ 15 | - `PySAL 1.14.2 `_ 16 | - `PySAL 1.13.0 `_ 17 | - `PySAL 1.12.0 `_ 18 | - `PySAL 1.11.2 `_ 19 | - `PySAL 1.11.1 `_ 20 | - `PySAL 1.11.0 `_ 21 | - `PySAL 1.10 `_ 22 | -------------------------------------------------------------------------------- /environment.yml: -------------------------------------------------------------------------------- 1 | name: pysal 2 | channels: 3 | - conda-forge 4 | dependencies: 5 | - python>=3.10 6 | # ecosystem 7 | - access>=1.1.9 8 | - esda>=2.3.5 9 | - giddy>=2.3.5 10 | - inequality>=1.0.1 11 | - libpysal>=4.12.0 12 | - mapclassify>=2.7.0 13 | - mgwr>=2.2.1 14 | - momepy>=0.8.0 15 | - pointpats>=2.5.0 16 | - segregation>=2.5 17 | - spaghetti>=1.7.6 18 | - spglm>=1.1.0 19 | - spint>=1.0.7 20 | - splot>=1.1.5.post1 21 | - spopt>=0.6.1 22 | - spreg>=1.5.0 23 | - tobler>=0.11.3 24 | # externel 25 | - urllib3 >=1.26 26 | -------------------------------------------------------------------------------- /figs/lisamaps.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sjsrey/pysal/e6db2d7dcd340e242459e8289578bfb4a4830298/figs/lisamaps.png -------------------------------------------------------------------------------- /figs/lisamaps7.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sjsrey/pysal/e6db2d7dcd340e242459e8289578bfb4a4830298/figs/lisamaps7.png -------------------------------------------------------------------------------- /figs/pysal_logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sjsrey/pysal/e6db2d7dcd340e242459e8289578bfb4a4830298/figs/pysal_logo.png -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools>=61.0", "setuptools_scm[toml]>=6.2"] 3 | build-backend = "setuptools.build_meta" 4 | 5 | [tool.setuptools_scm] 6 | 7 | [project] 8 | name = "pysal" 9 | dynamic = ["version"] 10 | authors = [ # in alphabetical order 11 | { name = "Serge Rey", email = "sjsrey@gmail.com" }, 12 | { name = "Levi Wolf", email = "levi.john.wolf@gmail.com" }, 13 | ] 14 | maintainers = [{ name = "PySAL Developers" }] 15 | license = { text = "BSD 3-Clause" } 16 | description = "Meta Package for PySAL - A library of spatial analysis functions" 17 | keywords = ["spatial statistics", "spatial graphs"] 18 | readme = "README.md" 19 | classifiers = [ 20 | "Programming Language :: Python :: 3", 21 | "License :: OSI Approved :: BSD License", 22 | "Operating System :: OS Independent", 23 | "Intended Audience :: Science/Research", 24 | "Topic :: Scientific/Engineering :: GIS", 25 | ] 26 | requires-python = ">=3.10" 27 | dependencies = [ 28 | "beautifulsoup4>=4.10", 29 | "geopandas>=0.10.0", 30 | "numpy>=1.22", 31 | "packaging>=22", 32 | "pandas>=1.4", 33 | "platformdirs>=2.0.2", 34 | "requests>=2.27", 35 | "scipy>=1.8", 36 | "shapely>=2.0.1", 37 | "scikit-learn>=1.1", 38 | "libpysal>=4.12.1", 39 | "access>=1.1.9", 40 | "esda>=2.6.0", 41 | "giddy>=2.3.6", 42 | "inequality>=1.1.1", 43 | "pointpats>=2.5.1", 44 | "segregation>=2.5.1", 45 | "spaghetti>=1.7.6", 46 | "mgwr>=2.2.1", 47 | "momepy>=0.9.1", 
48 | "spglm>=1.1.0", 49 | "spint>=1.0.7", 50 | "spreg>=1.8.1", 51 | "tobler>=0.12.1", 52 | "mapclassify>=2.8.1", 53 | "splot>=1.1.7", 54 | "spopt>=0.6.1", 55 | ] 56 | 57 | [project.urls] 58 | Home = "https://github.com/pysal/pysal/" 59 | Repository = "https://github.com/pysal/pysal" 60 | 61 | [project.optional-dependencies] 62 | plus = [ 63 | "joblib>=1.2", 64 | "networkx>=2.7", 65 | "numba>=0.55", 66 | "pyarrow>=7.0", 67 | "sqlalchemy>=2.0", 68 | "xarray>=2022.3", 69 | "zstd", 70 | ] 71 | dev = [ 72 | "pre-commit", 73 | "ruff", 74 | "watermark", 75 | ] 76 | docs = [ 77 | "mkdocs-jupyter", 78 | "myst-parser", 79 | "nbsphinx", 80 | "numpydoc", 81 | "pandoc", 82 | "sphinx", 83 | 'sphinx_design', 84 | "sphinxcontrib-bibtex", 85 | "sphinx_pydata_theme", 86 | ] 87 | tests = [ 88 | "codecov", 89 | "geodatasets>=2023.3.0", 90 | "matplotlib>=3.6", 91 | "pytest", 92 | "pytest-mpl", 93 | "pytest-cov", 94 | "pytest-xdist", 95 | ] 96 | 97 | [tool.setuptools.packages.find] 98 | include = ["pysal", "pysal.*"] 99 | 100 | [tool.ruff] 101 | line-length = 88 102 | lint.select = ["E", "F", "W", "I", "UP", "N", "B", "A", "C4", "SIM", "ARG"] 103 | target-version = "py310" 104 | exclude = ["pysal/tests/*", "docs/*"] 105 | [tool.ruff.lint.per-file-ignores] 106 | "*__init__.py" = [ 107 | "F401", # imported but unused 108 | "F403", # star import; unable to detect undefined names 109 | ] 110 | 111 | 112 | [tool.coverage.run] 113 | source = ["./pysal"] 114 | 115 | [tool.coverage.report] 116 | exclude_lines = [ 117 | "raise NotImplementedError", 118 | "except ModuleNotFoundError:", 119 | "except ImportError", 120 | ] 121 | ignore_errors = true 122 | omit = ["pysal/tests/*", "docs/conf.py"] 123 | 124 | [tool.pytest.ini_options] 125 | filterwarnings = [ 126 | "ignore:The numba package is used", 127 | "ignore:numba cannot be imported", 128 | "ignore:Numba not imported", 129 | "ignore:The weights matrix is not fully connected", 130 | "ignore:You are trying to build a full W object from", 131 | "ignore:Multiple layers detected. Using first layer as default", 132 | "ignore:Geometry is in a geographic CRS", 133 | "ignore:`use_index` defaults to False", 134 | "ignore:Objects based on the `Geometry` class will deprecated", 135 | "ignore:PolygonLocator is deprecated", 136 | "ignore:SegmentGrid is deprecated", 137 | "ignore:In the next version of pysal, observations with no neighbors", 138 | "ignore:divide by zero encountered", 139 | "ignore:invalid value encountered", 140 | "ignore:Passing a SingleBlockManager", # https://github.com/geopandas/geopandas/issues/3060 141 | "ignore:Passing a BlockManager", # https://github.com/geopandas/geopandas/issues/3060 142 | ] 143 | -------------------------------------------------------------------------------- /pysal/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | PySAL: Python Spatial Analysis Library 3 | ====================================== 4 | 5 | A federation of packages for spatial data science. 6 | 7 | 8 | Layers and Subpackages 9 | ---------------------- 10 | PySAL is organized into four layers (lib, explore, model, and viz), each of which contains subpackages for a particular type of spatial data analysis. 11 | 12 | 13 | Use of any of these layers requires an explicit import. 
For example, 14 | ``from pysal.explore import esda`` 15 | 16 | lib: core algorithms, weights, and spatial data structures 17 | ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 18 | 19 | cg -- Computational Geometry 20 | examples -- Example data sets 21 | graph -– Graph class encoding spatial weights matrices 22 | io -- Input/Output 23 | weights -- Spatial Weights 24 | 25 | 26 | explore: exploratory spatial data analysis 27 | ++++++++++++++++++++++++++++++++++++++++++ 28 | 29 | esda -- Global and local spatial autocorrelation 30 | giddy -- Spatial distribution dynamics 31 | inequality -- Spatial inequality measures 32 | pointpats -- Planar point pattern analysis 33 | segregation -- Segregation analytics 34 | spaghetti -- Spatial analysis on networks 35 | 36 | 37 | model: spatial statistical models 38 | +++++++++++++++++++++++++++++++++ 39 | 40 | access -- Measures of spatial accessibility 41 | mgwr -- Multi-scale geographically weighted regression 42 | spint -- Spatial interaction modeling 43 | spglm -- Spatial general linear modeling 44 | spopt -- Spatial optimization 45 | spreg -- Spatial econometrics 46 | tobler -- Spatial areal interpolation models 47 | 48 | 49 | viz: geovisualization 50 | +++++++++++++++++++++ 51 | 52 | mapclassify -- Classification schemes for choropleth maps 53 | splot -- Geovisualization for pysal 54 | 55 | """ 56 | from .base import memberships, federation_hierarchy 57 | 58 | import contextlib 59 | from importlib.metadata import PackageNotFoundError, version 60 | 61 | 62 | with contextlib.suppress(PackageNotFoundError): 63 | __version__ = version("pysal") 64 | -------------------------------------------------------------------------------- /pysal/base.py: -------------------------------------------------------------------------------- 1 | """ 2 | Base information for pysal meta package 3 | """ 4 | 5 | 6 | federation_hierarchy = { 7 | 'explore': ['esda', 'giddy', 'segregation', 8 | 'pointpats', 'inequality', 9 | 'spaghetti', 'access', 'momepy'], 10 | 'model': ['spreg', 'spglm', 'tobler', 'spint', 11 | 'mgwr', 'access', 'spopt'], 12 | 'viz': ['splot', 'mapclassify'], 13 | 'lib': ['libpysal'] 14 | } 15 | 16 | memberships = {} 17 | for key in federation_hierarchy: 18 | for package in federation_hierarchy[key]: 19 | memberships[package] = key 20 | 21 | 22 | 23 | class cached_property(object): 24 | """ A property that is only computed once per instance and then replaces 25 | itself with an ordinary attribute. Deleting the attribute resets the 26 | property. 
27 | 28 | Source: https://github.com/bottlepy/bottle/commit/fa7733e075da0d790d809aa3d2f53071897e6f76 29 | """ 30 | 31 | def __init__(self, func): 32 | self.__doc__ = getattr(func, '__doc__') 33 | self.func = func 34 | 35 | def __get__(self, obj, cls): 36 | if obj is None: 37 | return self 38 | value = obj.__dict__[self.func.__name__] = self.func(obj) 39 | return value 40 | 41 | def _installed_version(package): 42 | try: 43 | exec(f'import {package}') 44 | except ModuleNotFoundError: 45 | v = 'NA' 46 | try: 47 | v = eval(f'{package}.__version__') 48 | except AttributeError: 49 | v = 'NA' 50 | return v 51 | 52 | def _installed_versions(): 53 | ver = {} 54 | for package in memberships.keys(): 55 | ver[package] = _installed_version(package) 56 | return ver 57 | 58 | def _released_versions(): 59 | from .frozen import frozen_packages 60 | return frozen_packages 61 | 62 | 63 | class Versions: 64 | @cached_property 65 | def installed(self): 66 | """ 67 | Inventory versions of pysal packages that are installed 68 | 69 | Attributes 70 | ---------- 71 | installed: dict 72 | key is package name, value is version string 73 | """ 74 | return _installed_versions() 75 | 76 | @cached_property 77 | def released(self): 78 | """ 79 | Inventory versions of pysal packages that are released in the meta 80 | package. 81 | 82 | Attributes 83 | ---------- 84 | released: dict 85 | key is package name, value is version string 86 | """ 87 | 88 | return _released_versions() 89 | 90 | def check(self): 91 | """ 92 | Print a tabular string that reports installed and released versions of 93 | PySAL packages. 94 | """ 95 | table = [] 96 | package = "Package" 97 | installed = "Installed" 98 | released = "Released" 99 | match = "Match" 100 | s = f'{package:>12} | {installed:>15} | {released:>15} | {match:>5}' 101 | table.append(s) 102 | table.append("-"*len(s)) 103 | for package in self.installed: 104 | installed = self.installed[package] 105 | released = self.released[package] 106 | match = installed == released 107 | s = f'{package:>12} | {installed:>15} | {released:>15} | {match:>5}' 108 | table.append(s) 109 | print("\n".join(table)) 110 | 111 | 112 | versions = Versions() 113 | 114 | -------------------------------------------------------------------------------- /pysal/explore/__init__.py: -------------------------------------------------------------------------------- 1 | import esda 2 | import giddy 3 | import inequality 4 | import pointpats 5 | import spaghetti 6 | import segregation 7 | -------------------------------------------------------------------------------- /pysal/explore/esda/__init__.py: -------------------------------------------------------------------------------- 1 | from esda.moran import ( 2 | Moran, 3 | Moran_BV, 4 | Moran_BV_matrix, 5 | Moran_Local, 6 | Moran_Local_BV, 7 | Moran_Rate, 8 | Moran_Local_Rate, 9 | ) 10 | from esda.getisord import G, G_Local 11 | from esda.geary import Geary 12 | from esda.join_counts import Join_Counts 13 | from esda.gamma import Gamma 14 | from esda.util import fdr 15 | from esda.smaup import Smaup 16 | from esda.lee import Spatial_Pearson, Local_Spatial_Pearson 17 | from esda.silhouettes import (path_silhouette, boundary_silhouette, 18 | silhouette_alist, nearest_label) 19 | -------------------------------------------------------------------------------- /pysal/explore/giddy/__init__.py: -------------------------------------------------------------------------------- 1 | from giddy import directional 2 | from giddy import ergodic 3 | from giddy import markov 4 | 
from giddy import mobility 5 | from giddy import rank 6 | from giddy import util 7 | from giddy import sequence 8 | -------------------------------------------------------------------------------- /pysal/explore/inequality/__init__.py: -------------------------------------------------------------------------------- 1 | from inequality import theil 2 | from inequality import gini 3 | -------------------------------------------------------------------------------- /pysal/explore/momepy/__init__.py: -------------------------------------------------------------------------------- 1 | from momepy import * 2 | -------------------------------------------------------------------------------- /pysal/explore/pointpats/__init__.py: -------------------------------------------------------------------------------- 1 | from pointpats.pointpattern import PointPattern 2 | from pointpats.window import as_window, poly_from_bbox, to_ccf, Window 3 | from pointpats.centrography import * 4 | from pointpats.process import * 5 | from pointpats.quadrat_statistics import * 6 | from pointpats.distance_statistics import * 7 | -------------------------------------------------------------------------------- /pysal/explore/segregation/__init__.py: -------------------------------------------------------------------------------- 1 | from segregation import aspatial 2 | from segregation import spatial 3 | from segregation import inference 4 | from segregation import decomposition 5 | from segregation import util 6 | from segregation import network 7 | from segregation import local 8 | from segregation import compute_all 9 | -------------------------------------------------------------------------------- /pysal/explore/spaghetti/__init__.py: -------------------------------------------------------------------------------- 1 | from spaghetti.network import Network, PointPattern, SimulatedPointPattern 2 | from spaghetti.network import element_as_gdf 3 | -------------------------------------------------------------------------------- /pysal/lib/__init__.py: -------------------------------------------------------------------------------- 1 | __version__ = "4.2.1" 2 | 3 | # __version__ has to be define in the first line 4 | 5 | """ 6 | pysal.lib: Python Spatial Analysis Library (core) 7 | ================================================ 8 | 9 | 10 | Documentation 11 | ------------- 12 | PySAL documentation is available in two forms: python docstrings and an html \ 13 | webpage at http://pysal.org/ 14 | 15 | Available sub-packages 16 | ---------------------- 17 | 18 | cg 19 | Basic data structures and tools for Computational Geometry 20 | examples 21 | Example data sets for testing and documentation 22 | io 23 | Basic functions used by several sub-packages 24 | weights 25 | Tools for creating and manipulating weights 26 | """ 27 | 28 | from libpysal import cg 29 | from libpysal import io 30 | from libpysal import weights 31 | from libpysal import examples 32 | 33 | -------------------------------------------------------------------------------- /pysal/lib/common.py: -------------------------------------------------------------------------------- 1 | import copy 2 | import sys 3 | import time 4 | 5 | # external imports 6 | import numpy as np 7 | import numpy.linalg as la 8 | 9 | import scipy as sp 10 | import scipy.stats as stats 11 | from libpysal.cg.kdtree import KDTree 12 | from scipy.spatial.distance import pdist, cdist 13 | 14 | import pandas 15 | 16 | try: 17 | from patsy import PatsyError 18 | except ImportError: 19 | 
PatsyError = Exception 20 | 21 | RTOL = .00001 22 | ATOL = 1e-7 23 | MISSINGVALUE = None 24 | 25 | ###################### 26 | # Decorators/Utils # 27 | ###################### 28 | 29 | # import numba.jit OR create mimic decorator and set existence flag 30 | try: 31 | from numba import jit 32 | HAS_JIT = True 33 | except ImportError: 34 | def jit(function=None, **kwargs): 35 | """Mimic numba.jit() with synthetic wrapper 36 | """ 37 | if function is not None: 38 | def wrapped(*original_args, **original_kw): 39 | """Case 1 - structure of a standard decorator 40 | i.e., jit(function)(*args, **kwargs) 41 | """ 42 | return function(*original_args, **original_kw) 43 | return wrapped 44 | else: 45 | def partial_inner(func): 46 | """Case 2 - returns Case 1 47 | i.e., jit()(function)(*args, **kwargs) 48 | """ 49 | return jit(func) 50 | return partial_inner 51 | HAS_JIT = False 52 | 53 | def simport(modname): 54 | """ 55 | Safely import a module without raising an error. 56 | 57 | Parameters 58 | ----------- 59 | modname : str 60 | module name needed to import 61 | 62 | Returns 63 | -------- 64 | tuple of (True, Module) or (False, None) depending on whether the import 65 | succeeded. 66 | 67 | Notes 68 | ------ 69 | Wrapping this function around an iterative context or a with context would 70 | allow the module to be used without necessarily attaching it permanently in 71 | the global namespace: 72 | 73 | 74 | for t,mod in simport('pandas'): 75 | if t: 76 | mod.DataFrame() 77 | else: 78 | #do alternative behavior here 79 | del mod #or don't del, your call 80 | 81 | instead of: 82 | 83 | t, mod = simport('pandas') 84 | if t: 85 | mod.DataFrame() 86 | else: 87 | #do alternative behavior here 88 | 89 | The first idiom makes it work kind of a like a with statement. 90 | """ 91 | try: 92 | exec('import {}'.format(modname)) 93 | return True, eval(modname) 94 | except: 95 | return False, None 96 | 97 | def requires(*args, **kwargs): 98 | """ 99 | Decorator to wrap functions with extra dependencies: 100 | 101 | Arguments 102 | --------- 103 | args : list 104 | list of strings containing module to import 105 | verbose : bool 106 | boolean describing whether to print a warning message on import 107 | failure 108 | Returns 109 | ------- 110 | Original function is all arg in args are importable, otherwise returns a 111 | function that passes. 
112 | """ 113 | v = kwargs.pop('verbose', True) 114 | wanted = copy.deepcopy(args) 115 | def inner(function): 116 | available = [simport(arg)[0] for arg in args] 117 | if all(available): 118 | return function 119 | else: 120 | def passer(*args,**kwargs): 121 | if v: 122 | missing = [arg for i, arg in enumerate(wanted) if not available[i]] 123 | print(('missing dependencies: {d}'.format(d=missing))) 124 | print(('not running {}'.format(function.__name__))) 125 | else: 126 | pass 127 | return passer 128 | return inner 129 | -------------------------------------------------------------------------------- /pysal/lib/examples/__init__.py: -------------------------------------------------------------------------------- 1 | from libpysal.examples import (get_path, available, 2 | load_example, explain) 3 | -------------------------------------------------------------------------------- /pysal/model/__init__.py: -------------------------------------------------------------------------------- 1 | import access 2 | import mgwr 3 | import spglm 4 | import spint 5 | import spreg 6 | import tobler 7 | import spopt 8 | -------------------------------------------------------------------------------- /pysal/model/access/__init__.py: -------------------------------------------------------------------------------- 1 | from access import fca 2 | from access import raam 3 | from access import weights 4 | from access import helpers 5 | from access.datasets import datasets 6 | from access import access 7 | -------------------------------------------------------------------------------- /pysal/model/mgwr/__init__.py: -------------------------------------------------------------------------------- 1 | from mgwr import gwr 2 | from mgwr import sel_bw 3 | from mgwr import diagnostics 4 | from mgwr import kernels 5 | -------------------------------------------------------------------------------- /pysal/model/spglm/__init__.py: -------------------------------------------------------------------------------- 1 | from spglm import glm 2 | from spglm import family 3 | from spglm import utils 4 | from spglm import iwls 5 | -------------------------------------------------------------------------------- /pysal/model/spint/__init__.py: -------------------------------------------------------------------------------- 1 | from spint.gravity import Gravity, Production, Attraction, Doubly 2 | from spint.utils import CPC, sorensen, srmse 3 | from spint.vec_SA import VecMoran as Moran_Vector 4 | from spint.dispersion import phi_disp, alpha_disp 5 | -------------------------------------------------------------------------------- /pysal/model/spopt/__init__.py: -------------------------------------------------------------------------------- 1 | from spopt import region 2 | -------------------------------------------------------------------------------- /pysal/model/spreg/__init__.py: -------------------------------------------------------------------------------- 1 | from spreg.ols import * 2 | from spreg.diagnostics import * 3 | from spreg.diagnostics_sp import * 4 | from spreg.diagnostics_sur import * 5 | from spreg.diagnostics_tsls import * 6 | from spreg.user_output import * 7 | from spreg.twosls import * 8 | from spreg.twosls_sp import * 9 | from spreg.error_sp import * 10 | from spreg.error_sp_het import * 11 | from spreg.error_sp_hom import * 12 | from spreg.ols_regimes import * 13 | from spreg.twosls_regimes import * 14 | from spreg.twosls_sp_regimes import * 15 | from spreg.error_sp_regimes import * 16 | from 
spreg.error_sp_het_regimes import * 17 | from spreg.error_sp_hom_regimes import * 18 | from spreg.probit import * 19 | from spreg.ml_lag import * 20 | from spreg.ml_lag_regimes import * 21 | from spreg.ml_error import * 22 | from spreg.ml_error_regimes import * 23 | from spreg.sur import * 24 | from spreg.sur_error import * 25 | from spreg.sur_lag import * 26 | from spreg.sur_utils import * 27 | from spreg.utils import * 28 | from spreg.regimes import * 29 | from spreg.sputils import * 30 | -------------------------------------------------------------------------------- /pysal/model/tobler/__init__.py: -------------------------------------------------------------------------------- 1 | from tobler import area_weighted 2 | from tobler import dasymetric 3 | from tobler import model 4 | -------------------------------------------------------------------------------- /pysal/tests/test_imports.py: -------------------------------------------------------------------------------- 1 | from pkgutil import iter_modules 2 | from pysal import federation_hierarchy 3 | 4 | 5 | def module_exists(module_name): 6 | return module_name in (name for loader, name, ispkg in iter_modules()) 7 | 8 | def test_imports(): 9 | for layer in federation_hierarchy: 10 | packages = federation_hierarchy[layer] 11 | for package in packages: 12 | assert module_exists(package), f"{package} not installed." 13 | -------------------------------------------------------------------------------- /pysal/viz/__init__.py: -------------------------------------------------------------------------------- 1 | import mapclassify 2 | import splot 3 | -------------------------------------------------------------------------------- /pysal/viz/mapclassify/__init__.py: -------------------------------------------------------------------------------- 1 | from mapclassify.classifiers import ( 2 | BoxPlot, 3 | EqualInterval, 4 | FisherJenks, 5 | FisherJenksSampled, 6 | HeadTailBreaks, 7 | JenksCaspall, 8 | JenksCaspallForced, 9 | JenksCaspallSampled, 10 | MaxP, 11 | MaximumBreaks, 12 | NaturalBreaks, 13 | Quantiles, 14 | Percentiles, 15 | StdMean, 16 | UserDefined, 17 | load_example, 18 | gadf, 19 | KClassifiers, 20 | CLASSIFIERS, 21 | ) 22 | -------------------------------------------------------------------------------- /tools/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile 2 | 3 | # Define the default target that runs all commands in sequence 4 | .PHONY: all 5 | all: frozen gitcount changelog 6 | 7 | # Define individual commands 8 | .PHONY: frozen 9 | frozen: 10 | @echo "Running frozen.py..." 11 | python frozen.py 12 | 13 | .PHONY: gitcount 14 | gitcount: 15 | @echo "Running gitcount.py..." 16 | python gitcount.py 17 | 18 | .PHONY: changelog 19 | changelog: 20 | @echo "Running change_log.py..." 
21 | python change_log.py 22 | -------------------------------------------------------------------------------- /tools/README.md: -------------------------------------------------------------------------------- 1 | # Tooling to Build PySAL Meta Package 2 | 3 | ## Dependencies 4 | 5 | - [personal github token](https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line 6 | ): store it in the file `token` 7 | 8 | ## Instructions 9 | 10 | ### Updating package information 11 | - If any new packages have been added to the ecosystem, update the `packages` list in `release.yaml` 12 | - Update relevant data on `start_date` (day after last release), `release_date` (day 13 | of this release), `version`, and `user` in `release.yaml` 14 | 15 | If this is a release candidate, do not start the `version` string with `v` but 16 | do add `rcX` at the end of the string, where `X` is the number for the current 17 | release candidate. 18 | 19 | If this is a production release, the first character in `version` needs to be `v` 20 | to ensure the publish and release workflow is run in the CI. 21 | 22 | 23 | 24 | ### Updating the changelog 25 | - `make` will run all the steps required to build the new change log 26 | 27 | For debugging purposes, the individual steps can be run using: 28 | - `make frozen` will get information about latest package releases 29 | - `make gitcount` will get issues and pulls closed for each package 30 | - `make changelog` will update the change log and write to `changes.md` 31 | 32 | These require `release_info.py`. 33 | 34 | ### Add and Commit 35 | - `git add ../pyproject.toml` 36 | - `git add release.yaml` 37 | - `git commit -m "REL: "` 38 | 39 | 40 | ### Create a tag and push upstream 41 | - `git tag ` 42 | - `git push upstream ` 43 | 44 | 45 | ### Updating meta package release notes 46 | - edit the file `changelog.md` and incorporate it into the release notes on GitHub 47 | -------------------------------------------------------------------------------- /tools/change_log.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding: utf-8 3 | 4 | # ## PySAL Change Log Statistics: Table Generation 5 | 6 | from __future__ import print_function 7 | from collections import Counter 8 | from datetime import date, datetime, time 9 | from datetime import datetime 10 | from release_info import get_pypi_info, get_github_info, clone_masters 11 | import pickle 12 | import release_info 13 | from release_info import release_date, start_date, PYSALVER 14 | import os 15 | import json 16 | import re 17 | import sys 18 | import pandas 19 | import subprocess 20 | from subprocess import check_output 21 | 22 | # import yaml 23 | from datetime import datetime, timedelta, time 24 | 25 | from dateutil.parser import parse 26 | import pytz 27 | 28 | utc = pytz.UTC 29 | 30 | try: 31 | from urllib import urlopen 32 | except: 33 | from urllib.request import urlopen 34 | 35 | since = datetime.combine(start_date, time(0, 0)) 36 | CWD = os.path.abspath(os.path.curdir) 37 | 38 | with open('frozen.txt', 'r') as package_list: 39 | packages = package_list.readlines() 40 | packages = dict([package.strip().split(">=") for package in packages]) 41 | 42 | packages['pysal'] = release_info.PYSALVER 43 | issues_closed = pickle.load(open("issues_closed.p", 'rb')) 44 | pulls_closed = pickle.load(open('pulls_closed.p', 'rb')) 45 | github_releases = pickle.load(open("releases.p", 'rb')) 46 | ISO8601 = "%Y-%m-%dT%H:%M:%SZ"
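# GitHub reports issue and pull request close times as ISO-8601 UTC strings,
# e.g. "2024-07-31T12:34:56Z" (timestamp shown only for illustration). They are
# parsed with the format above, e.g.
#     closed = datetime.strptime("2024-07-31T12:34:56Z", ISO8601)
# and the filtering loops below keep an item only when
# since < closed <= the release date of its package.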
47 | pysal_date = release_date 48 | 49 | # Create a date object 50 | date_obj = release_date 51 | # Create a time object (optional, default is midnight if not specified) 52 | time_obj = time(0, 0) 53 | # Combine date and time to create a datetime object 54 | datetime_obj = datetime.combine(date_obj, time_obj) 55 | pysal_rel = {'version': f'v{PYSALVER}', 56 | 'release_date': datetime_obj} 57 | github_releases['pysal'] = pysal_rel 58 | 59 | 60 | final_pulls = {} 61 | final_issues = {} 62 | for package in packages: 63 | filtered_issues = [] 64 | filtered_pulls = [] 65 | released = github_releases[package]['release_date'] 66 | package_pulls = pulls_closed[package] 67 | package_issues = issues_closed[package] 68 | for issue in package_issues: 69 | # print(issue['number'], issue['title'], issue['closed_at']) 70 | closed = datetime.strptime(issue['closed_at'], ISO8601) 71 | if closed <= released and closed > since: 72 | filtered_issues.append(issue) 73 | final_issues[package] = filtered_issues 74 | for pull in package_pulls: 75 | # print(pull['number'], pull['title'], pull['closed_at']) 76 | closed = datetime.strptime(pull['closed_at'], ISO8601) 77 | if closed <= released and closed > since: 78 | filtered_pulls.append(pull) 79 | final_pulls[package] = filtered_pulls 80 | 81 | issue_details = final_issues 82 | pull_details = final_pulls 83 | github_releases['pysal']['release_date'] = release_date 84 | 85 | 86 | # skip packages not released since last meta release 87 | # handle meta 88 | mrd = github_releases['pysal']['release_date'] 89 | github_releases['pysal']['release_date'] = datetime.combine(mrd, time(0, 0)) 90 | 91 | for package in github_releases: 92 | if github_releases[package]['release_date'] > since: 93 | print("new: ", package) 94 | else: 95 | print('old:', package) 96 | 97 | since_date = '--since="{start}"'.format(start=start_date.strftime("%Y-%m-%d")) 98 | 99 | # commits 100 | cmd = ['git', 'log', '--oneline', since_date] 101 | 102 | activity = {} 103 | total_commits = 0 104 | tag_dates = {} 105 | ncommits_total = 0 106 | for subpackage in packages: 107 | released = github_releases[subpackage]['release_date'] 108 | tag_date = released.strftime("%Y-%m-%d") 109 | tag_dates[subpackage] = tag_date 110 | print(tag_date) 111 | # tag_date = tag_dates[subpackage] 112 | ncommits = 0 113 | if released > since: 114 | os.chdir(CWD) 115 | os.chdir('tmp/{subpackage}'.format(subpackage=subpackage)) 116 | cmd_until = cmd + ['--until="{tag_date}"'.format(tag_date=tag_date)] 117 | ncommits = len(check_output(cmd_until).splitlines()) 118 | ncommits_total = len(check_output(cmd).splitlines()) 119 | print(subpackage, ncommits_total, ncommits, tag_date) 120 | total_commits += ncommits 121 | activity[subpackage] = ncommits 122 | 123 | 124 | cmd = ['git', 'log', '--oneline', since_date] 125 | 126 | activity = {} 127 | total_commits = 0 128 | for subpackage in packages: 129 | ncommits = 0 130 | tag_date = tag_dates[subpackage] 131 | released = github_releases[subpackage]['release_date'] 132 | if released > since: 133 | os.chdir(CWD) 134 | os.chdir('tmp/{subpackage}'.format(subpackage=subpackage)) 135 | cmd_until = cmd + ['--until="{tag_date}"'.format(tag_date=tag_date)] 136 | ncommits = len(check_output(cmd_until).splitlines()) 137 | print(ncommits) 138 | ncommits_total = len(check_output(cmd).splitlines()) 139 | print(subpackage, ncommits_total, ncommits, tag_date) 140 | total_commits += ncommits 141 | activity[subpackage] = ncommits 142 | 143 | 144 | identities = {'Levi John Wolf': ('ljwolf', 'Levi John 
Wolf'), 145 | 'Serge Rey': ('Serge Rey', 'Sergio Rey', 'sjsrey', 'serge'), 146 | 'Wei Kang': ('Wei Kang', 'weikang9009'), 147 | 'Dani Arribas-Bel': ('Dani Arribas-Bel', 'darribas'), 148 | 'Antti Härkönen': ('antth', 'Antti Härkönen', 'Antti Härkönen', 'Antth'), 149 | 'Juan C Duque': ('Juan C Duque', "Juan Duque"), 150 | 'Renan Xavier Cortes': ('Renan Xavier Cortes', 'renanxcortes', 'Renan Xavier Cortes'), 151 | 'Taylor Oshan': ('Tayloroshan', 'Taylor Oshan', 'TaylorOshan'), 152 | 'Tom Gertin': ('@Tomgertin', 'Tom Gertin', '@tomgertin') 153 | } 154 | 155 | 156 | def regularize_identity(string): 157 | string = string.decode() 158 | for name, aliases in identities.items(): 159 | for alias in aliases: 160 | if alias in string: 161 | string = string.replace(alias, name) 162 | if len(string.split(' ')) > 1: 163 | string = string.title() 164 | return string.lstrip('* ') 165 | 166 | 167 | author_cmd = ['git', 'log', '--format=* %aN', since_date] 168 | 169 | 170 | author_cmd.append('blank') 171 | 172 | 173 | authors_global = set() 174 | authors = {} 175 | global_counter = Counter() 176 | counters = dict() 177 | cmd = ['git', 'log', '--oneline', since_date] 178 | total_commits = 0 179 | activity = {} 180 | for subpackage in packages: 181 | ncommits = 0 182 | released = github_releases[subpackage]['release_date'] 183 | if released > since: 184 | os.chdir(CWD) 185 | os.chdir('tmp/{subpackage}'.format(subpackage=subpackage)) 186 | ncommits = len(check_output(cmd).splitlines()) 187 | tag_date = tag_dates[subpackage] 188 | tag_date = (datetime.strptime(tag_date, '%Y-%m-%d') + 189 | timedelta(days=1)).strftime('%Y-%m-%d') 190 | author_cmd[-1] = '--until="{tag_date}"'.format(tag_date=tag_date) 191 | # cmd_until = cmd + ['--until="{tag_date}"'.format(tag_date=tag_date)] 192 | print(subpackage, author_cmd) 193 | 194 | all_authors = check_output(author_cmd).splitlines() 195 | counter = Counter([regularize_identity(author) 196 | for author in all_authors]) 197 | global_counter += counter 198 | counters.update({subpackage: counter}) 199 | unique_authors = sorted(set(all_authors)) 200 | authors[subpackage] = unique_authors 201 | authors_global.update(unique_authors) 202 | total_commits += ncommits 203 | activity[subpackage] = ncommits 204 | 205 | 206 | def get_tag(title, level="##", as_string=True): 207 | words = title.split() 208 | tag = "-".join([word.lower() for word in words]) 209 | heading = level+" "+title 210 | line = "\n\n".format(tag) 211 | lines = [line] 212 | lines.append(heading) 213 | if as_string: 214 | return "\n".join(lines) 215 | else: 216 | return lines 217 | 218 | 219 | subs = issue_details.keys() 220 | table = [] 221 | txt = [] 222 | lines = get_tag("Changes by Package", as_string=False) 223 | 224 | for sub in github_releases: 225 | total = issue_details[sub] 226 | pr = pull_details[sub] 227 | 228 | row = [sub, activity[sub], len(total), len(pr)] 229 | table.append(row) 230 | # line = "\n".format(sub=sub) 231 | # lines.append(line) 232 | # line = "### {sub}".format(sub=sub) 233 | # lines.append(line) 234 | sub_lower = sub.lower() 235 | sub_version = github_releases[sub_lower]['version'] 236 | print(f'{sub_lower}, {sub_version}') 237 | title = f'{sub_lower} {sub_version}' 238 | lines.extend(get_tag(title, "###", as_string=False)) 239 | for issue in total: 240 | url = issue['html_url'] 241 | title = issue['title'] 242 | number = issue['number'] 243 | line = "* [#{number}:]({url}) {title} ".format(title=title, 244 | number=number, 245 | url=url) 246 | lines.append(line) 247 | 248 | 249 | 
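# The remaining steps write the per-package counts to HTML fragments
# (commit_table.html, contributor_table.html, commits_by_person.html), assemble
# the issue lists and contributor credits into changelog.md, and refresh the
# minimum package pins in ../pyproject.toml from frozen.txt.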
os.chdir(CWD) 250 | 251 | df = pandas.DataFrame( 252 | table, columns=['package', 'commits', 'total issues', 'pulls']) 253 | 254 | df.sort_values(['commits', 'pulls'], ascending=False)\ 255 | .to_html('./commit_table.html', index=None) 256 | 257 | contributor_table = pandas.DataFrame.from_dict( 258 | counters).fillna(0).astype(int).T 259 | 260 | contributor_table.to_html('./contributor_table.html') 261 | 262 | totals = contributor_table.sum(axis=0).T 263 | totals.sort_index().to_frame('commits') 264 | 265 | totals = contributor_table.sum(axis=0).T 266 | totals.sort_index().to_frame('commits').to_html('./commits_by_person.html') 267 | 268 | n_commits = df.commits.sum() 269 | n_issues = df['total issues'].sum() 270 | n_pulls = df.pulls.sum() 271 | 272 | line = ('Overall, there were {n_commits} commits that closed {n_issues} issues' 273 | ' since our last release' 274 | ' on {since_date}.\n'.format(n_commits=n_commits, n_issues=n_issues, 275 | since_date=start_date)) 276 | 277 | 278 | with open('changelog.md', 'w') as fout: 279 | fout.write(line) 280 | fout.write("\n".join(lines)) 281 | fout.write(get_tag("Contributors")) 282 | fout.write( 283 | "\n\nMany thanks to all of the following individuals who contributed to this release:\n\n") 284 | 285 | totals = contributor_table.sum(axis=0).T 286 | contributors = totals.index.values 287 | contributors.sort() 288 | contributors = contributors.tolist() 289 | contributors = [f'\n - {contributor}' for contributor in contributors] 290 | fout.write("".join(contributors)) 291 | 292 | 293 | df.head() 294 | 295 | 296 | # Update ../pyproject.toml for minimum pysal package pinning 297 | # get version numbers from frozen.txt 298 | with open('frozen.txt', 'r') as frozen: 299 | packages = [line.rstrip() for line in frozen.readlines()] 300 | 301 | # search pyproject.toml for lines containing package 302 | with open('../pyproject.toml', 'r') as project: 303 | lines = [line.rstrip() for line in project.readlines()] 304 | 305 | # split line ->" package", ">=", "version", 306 | # replace version and rebuild line to update 307 | for package in packages: 308 | name, version = package.split(">=") 309 | i, match = [(i, line) for i, line in enumerate(lines) if name in line][0] 310 | old_name, old_version = match.split(">=") 311 | new_line = ">=".join([old_name, version+'",']) 312 | lines[i] = new_line 313 | 314 | # write out new pyproject.toml file 315 | with open("../pyproject.toml", 'w') as output: 316 | output.write("\n".join(lines)) 317 | -------------------------------------------------------------------------------- /tools/changelog.md: -------------------------------------------------------------------------------- 1 | Overall, there were 302 commits that closed 122 issues since our last release on 2024-07-31. 2 | 3 | 4 | 5 | ## Changes by Package 6 | 7 | 8 | 9 | ### libpysal v4.12.1 10 | * [#765:](https://github.com/pysal/libpysal/pull/765) ENH: ensure lag_spatial is compatible with both W and Graph 11 | * [#761:](https://github.com/pysal/libpysal/pull/761) Add exponential kernel to Graph 12 | * [#763:](https://github.com/pysal/libpysal/pull/763) allow continuous weights for knn graph 13 | * [#760:](https://github.com/pysal/libpysal/pull/760) Fix for Graph.describe() when the graph has a string index (#759) 14 | * [#759:](https://github.com/pysal/libpysal/issues/759) BUG: Graph.describe() does not work with non-integer index 15 | 16 | 17 | 18 | ### giddy v2.3.6 19 | * [#228:](https://github.com/pysal/giddy/pull/228) update CI - minimum versions, naming, etc. 
20 | * [#229:](https://github.com/pysal/giddy/issues/229) some linting adjustments [2024-07-15] 21 | * [#227:](https://github.com/pysal/giddy/issues/227) update chat from `gitter` to `discord` 22 | * [#226:](https://github.com/pysal/giddy/issues/226) drop 3.9 as minimally support Python version? 23 | * [#225:](https://github.com/pysal/giddy/issues/225) CI maint for standard naming, oldest dependencies, & Python 3.12 24 | 25 | 26 | 27 | ### inequality v1.1.1 28 | * [#96:](https://github.com/pysal/inequality/pull/96) move conftest 29 | * [#97:](https://github.com/pysal/inequality/pull/97) remove print statements 30 | * [#95:](https://github.com/pysal/inequality/pull/95) Correct sphinx theme 31 | * [#94:](https://github.com/pysal/inequality/pull/94) Fix theme for doc build 32 | * [#93:](https://github.com/pysal/inequality/pull/93) Fix inconsistency in efficient gini 33 | * [#16:](https://github.com/pysal/inequality/issues/16) Inconsistent results with different input shape for Spatial_Gini 34 | * [#92:](https://github.com/pysal/inequality/pull/92) Documentation and landing page updates 35 | * [#15:](https://github.com/pysal/inequality/issues/15) TheilD within group inequality 36 | * [#91:](https://github.com/pysal/inequality/pull/91) [pre-commit.ci] pre-commit autoupdate 37 | * [#90:](https://github.com/pysal/inequality/pull/90) Theil doc 38 | * [#89:](https://github.com/pysal/inequality/pull/89) wolfson nb narrative 39 | * [#88:](https://github.com/pysal/inequality/pull/88) Polarization indices and new documentation 40 | * [#86:](https://github.com/pysal/inequality/pull/86) added narrative to theil nb 41 | * [#87:](https://github.com/pysal/inequality/pull/87) Gini polarization 42 | * [#85:](https://github.com/pysal/inequality/pull/85) [pre-commit.ci] pre-commit autoupdate 43 | * [#84:](https://github.com/pysal/inequality/pull/84) remove [Nijkamp & Poot (2013)] citation 44 | * [#36:](https://github.com/pysal/inequality/issues/36) Nijkamp & Poot (2013)? 
45 | * [#82:](https://github.com/pysal/inequality/issues/82) add root-level `conftest.py` to skip doctest only in `_indices.py` 46 | * [#81:](https://github.com/pysal/inequality/issues/81) adjust tests for warnings 47 | * [#83:](https://github.com/pysal/inequality/pull/83) resolve all warnings from CI -- [2024-09-02] 48 | * [#80:](https://github.com/pysal/inequality/pull/80) add matplotlib as a requirement 49 | * [#79:](https://github.com/pysal/inequality/issues/79) add `matplotlib` as a dependency 50 | * [#75:](https://github.com/pysal/inequality/pull/75) Schutz inequality measures 51 | * [#78:](https://github.com/pysal/inequality/pull/78) Wolfson bipolarization index 52 | * [#74:](https://github.com/pysal/inequality/pull/74) Atkinson inequality measures 53 | * [#77:](https://github.com/pysal/inequality/pull/77) reup lint+format - rename CI env 54 | * [#76:](https://github.com/pysal/inequality/issues/76) rename CI envs 55 | * [#73:](https://github.com/pysal/inequality/issues/73) drop `black` -> adopt `ruff` for formatting 56 | * [#72:](https://github.com/pysal/inequality/pull/72) Pengram 57 | * [#69:](https://github.com/pysal/inequality/pull/69) deprecation of _indices 58 | * [#67:](https://github.com/pysal/inequality/issues/67) CI adjustment needed for `numpy>=2.1.0.dev0` 59 | 60 | 61 | 62 | ### pointpats v2.5.1 63 | * [#148:](https://github.com/pysal/pointpats/pull/148) TST: relax assertion to avoid floating point issues 64 | * [#147:](https://github.com/pysal/pointpats/pull/147) COMPAT: compatibility with numpy 65 | * [#145:](https://github.com/pysal/pointpats/pull/145) ENH: support geopandas objects in distance statistics 66 | 67 | 68 | 69 | ### segregation v2.5.1 70 | * [#231:](https://github.com/pysal/segregation/pull/231) testing 71 | * [#230:](https://github.com/pysal/segregation/pull/230) old numpy nan 72 | * [#229:](https://github.com/pysal/segregation/pull/229) (bug) numpy 2.0 is not supporting np.NaN 73 | 74 | 75 | 76 | 77 | ### momepy v0.9.1 78 | * [#661:](https://github.com/pysal/momepy/pull/661) ENH: do not fail with 3d nodes - `preprocess.remove_false_nodes()` 79 | * [#660:](https://github.com/pysal/momepy/pull/660) DOC: clear the installation instructions 80 | * [#658:](https://github.com/pysal/momepy/pull/658) ENH: add Streetscape class 81 | * [#654:](https://github.com/pysal/momepy/pull/654) CI: pin fiona 82 | * [#675:](https://github.com/pysal/momepy/pull/675) Get mean of actual values (do not imply missing == 0) in Streetscape 83 | * [#673:](https://github.com/pysal/momepy/issues/673) Streetscape assumes 0 when nan is given 84 | * [#674:](https://github.com/pysal/momepy/pull/674) BUG: fix corner case of empty intersection in streetscape 85 | * [#671:](https://github.com/pysal/momepy/pull/671) BUG: fix extraction of ids if there is only a single hit when retrieving point level data in Streetscape 86 | * [#670:](https://github.com/pysal/momepy/pull/670) DOC: more osmnx compat 87 | * [#669:](https://github.com/pysal/momepy/pull/669) DOC: user guide compat with osmnx 2.0 88 | * [#668:](https://github.com/pysal/momepy/pull/668) Bump codecov/codecov-action from 4 to 5 89 | * [#667:](https://github.com/pysal/momepy/pull/667) ENH: retain index of buildings and plots intersecting sightlines 90 | * [#666:](https://github.com/pysal/momepy/pull/666) handling edge cases in `preprocessing.FaceArtifacts` 91 | * [#665:](https://github.com/pysal/momepy/issues/665) all equivalent `"face_artifact_index"` leads to `LinAlgError` in `preprocessing.FaceArtifacts` 92 | * 
[#664:](https://github.com/pysal/momepy/issues/664) fail gracefully when no initial polygons generated – `preprocessing.FaceArtifacts` 93 | * [#657:](https://github.com/pysal/momepy/issues/657) remove_false_nodes throws an unexpected error 94 | * [#659:](https://github.com/pysal/momepy/issues/659) momepy.COINS: raise ValueError if empty geodataframe is passed 95 | * [#656:](https://github.com/pysal/momepy/pull/656) [pre-commit.ci] pre-commit autoupdate 96 | * [#655:](https://github.com/pysal/momepy/pull/655) Bump mamba-org/setup-micromamba from 1 to 2 97 | * [#653:](https://github.com/pysal/momepy/pull/653) Allow nodes as an input to graph construction method 'gdf_to_nx' 98 | * [#652:](https://github.com/pysal/momepy/pull/652) let COINS run even if there's overlapping geometry 99 | 100 | 101 | 102 | ### spreg v1.8.1 103 | * [#170:](https://github.com/pysal/spreg/pull/170) Fixing GM_KPP in the presence of pandas DF 104 | * [#167:](https://github.com/pysal/spreg/pull/167) Updating DPG api listing 105 | * [#165:](https://github.com/pysal/spreg/pull/165) add spsearch to docs, rm sphinx-bibtex pin 106 | * [#166:](https://github.com/pysal/spreg/pull/166) Update tutorials.rst to include new notebooks 107 | * [#164:](https://github.com/pysal/spreg/pull/164) Update build docs using segregation's version 108 | * [#163:](https://github.com/pysal/spreg/pull/163) Spreg version 1.8 109 | * [#160:](https://github.com/pysal/spreg/pull/160) Adding spsearch.py 110 | * [#162:](https://github.com/pysal/spreg/pull/162) doc: Fix typo in DGP docs 111 | * [#159:](https://github.com/pysal/spreg/pull/159) Bump mamba-org/setup-micromamba from 1 to 2 112 | * [#158:](https://github.com/pysal/spreg/pull/158) Updating spreg to 1.7 113 | * [#156:](https://github.com/pysal/spreg/pull/156) `ruff` format repo 114 | * [#151:](https://github.com/pysal/spreg/issues/151) update `pre-commit` -- add `ruff` ; drop `black` 115 | * [#150:](https://github.com/pysal/spreg/issues/150) swap from `black` to `ruff` for formatting 116 | * [#154:](https://github.com/pysal/spreg/pull/154) update `environment.yml` & remove `.coveragerc` 117 | * [#149:](https://github.com/pysal/spreg/pull/149) build docs with 3.12 environment 118 | * [#153:](https://github.com/pysal/spreg/issues/153) purge `.coveragerc` - no longer needed 119 | * [#152:](https://github.com/pysal/spreg/issues/152) update `environment.yml` 120 | * [#148:](https://github.com/pysal/spreg/issues/148) Build docs failure 121 | * [#146:](https://github.com/pysal/spreg/issues/146) spatial diagnostics in OLS fail with Graph 122 | * [#147:](https://github.com/pysal/spreg/pull/147) Deprecating check_spat_diag 123 | * [#145:](https://github.com/pysal/spreg/pull/145) Updating to version 1.6.0 124 | * [#127:](https://github.com/pysal/spreg/issues/127) ENH: support pandas object as X, y 125 | * [#57:](https://github.com/pysal/spreg/issues/57) Docstring test failures 126 | * [#125:](https://github.com/pysal/spreg/issues/125) `scipy.sparse.csr` DeprecationWarning in `user_output.py` 127 | * [#135:](https://github.com/pysal/spreg/issues/135) 2 tests in CI failing [2024-04-23] 128 | 129 | 130 | 131 | ### tobler v0.12.1 132 | * [#223:](https://github.com/pysal/tobler/pull/223) h3compat 133 | * [#222:](https://github.com/pysal/tobler/pull/222) CI: fetch examples ahead of time 134 | * [#221:](https://github.com/pysal/tobler/pull/221) COMPAT: fix compatibility with scipy 1.15 135 | * [#219:](https://github.com/pysal/tobler/pull/219) DOCS: use nbsphinx-link and include myst beta 136 | * 
[#220:](https://github.com/pysal/tobler/pull/220) codecov4 137 | * [#204:](https://github.com/pysal/tobler/issues/204) Site docs return 404 138 | * [#213:](https://github.com/pysal/tobler/issues/213) docs action is failing 139 | * [#206:](https://github.com/pysal/tobler/pull/206) Docs notebook links 140 | * [#214:](https://github.com/pysal/tobler/pull/214) Docs 141 | * [#205:](https://github.com/pysal/tobler/pull/205) infrastructure 142 | * [#218:](https://github.com/pysal/tobler/pull/218) COMPAT: compatibility with h3 v4 143 | * [#207:](https://github.com/pysal/tobler/issues/207) update `h3fy` for h3 api changes 144 | * [#216:](https://github.com/pysal/tobler/issues/216) cut new release of tobler? 145 | * [#217:](https://github.com/pysal/tobler/issues/217) area_interpolate() fills gap areas where target and source geopandas frames don't intersect with 0s instead of NaNs 146 | 147 | 148 | 149 | ### mapclassify v2.8.1 150 | * [#229:](https://github.com/pysal/mapclassify/pull/229) fix nan handling in color array 151 | * [#230:](https://github.com/pysal/mapclassify/pull/230) pin max fiona in oldest 152 | 153 | 154 | 155 | ### splot v1.1.7 156 | * [#187:](https://github.com/pysal/splot/pull/187) BUG: fix scatter plots and regression lines in Moran plots 157 | * [#186:](https://github.com/pysal/splot/issues/186) BUG: OLS regression in moran is wrong 158 | * [#178:](https://github.com/pysal/splot/issues/178) BUG: plot_local_autocorrelation colors do not match between subplots 159 | * [#131:](https://github.com/pysal/splot/issues/131) Inconsistent colors for `moran_scatterplot` and `lisa_cluster` when p values are small 160 | * [#185:](https://github.com/pysal/splot/pull/185) Consistent hot/cold spot colors for local moran 161 | * [#184:](https://github.com/pysal/splot/pull/184) (bug) support graph in moran's viz 162 | 163 | 164 | 165 | ### pysal v25.01 166 | * [#1337:](https://github.com/pysal/pysal/pull/1337) removing `spvcm` from meta release -- archived - #1330 167 | * [#1330:](https://github.com/pysal/pysal/issues/1330) inclusion of `spvcm` in PySAL meta-release 168 | * [#1362:](https://github.com/pysal/pysal/pull/1362) 25.01rc1 169 | * [#1360:](https://github.com/pysal/pysal/pull/1360) Bump codecov/codecov-action from 4 to 5 170 | * [#1358:](https://github.com/pysal/pysal/pull/1358) Bump mamba-org/setup-micromamba from 1 to 2 171 | * [#1357:](https://github.com/pysal/pysal/pull/1357) pin fiona in oldest 172 | * [#1355:](https://github.com/pysal/pysal/pull/1355) MNT: New release and publish action 173 | 174 | 175 | ## Contributors 176 | 177 | Many thanks to all of the following individuals who contributed to this release: 178 | 179 | 180 | - Eli Knaap 181 | - James Gaboardi 182 | - Josiah Parry 183 | - Knaaptime 184 | - Krasen Samardzhiev 185 | - Martin Fleischmann 186 | - Pedro Amaral 187 | - Serge Rey 188 | - Wei Kang 189 | 190 | 191 | -------------------------------------------------------------------------------- /tools/frozen.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import json 4 | from release_info import get_github_info 5 | 6 | releases = get_github_info() 7 | 8 | requirements = [] 9 | frozen_dict = {} 10 | for package in releases: 11 | version = releases[package]['version'] 12 | version = version.replace("v","") 13 | version = version.replace("V","") 14 | requirements.append(f'{package}>={version}') 15 | frozen_dict[package] = version 16 | 17 | with open('frozen.txt', 'w') as f: 18 | 
f.write("\n".join(requirements)) 19 | 20 | import pickle 21 | pickle.dump(releases, open( "releases.p", "wb" ) ) 22 | 23 | -------------------------------------------------------------------------------- /tools/gitcount.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding: utf-8 3 | 4 | # # PySAL Change Log Statistics 5 | 6 | # urllib3>=1.26 7 | # python-dateutil<=2.8.0 8 | # pytest 9 | # pytest-cov 10 | # coverage## Approach 11 | # - get date of last gh release of each package -> github_released 12 | # - get date of last pypi release of each package -> pypi_released 13 | # - get data of last meta-release -> start_date 14 | # - for each package 15 | # - get issues between start_date and package_released in master/main 16 | # - get pulls between start_date and package_released in master/main 17 | 18 | 19 | import pickle 20 | from release_info import (issues_closed_since, packages, 21 | is_pull_request, 22 | sorted_by_field, 23 | clone_defaults, 24 | release_date, 25 | start_date, 26 | PYSALVER, 27 | USER 28 | ) 29 | import datetime 30 | packages.append('pysal') 31 | clone_defaults(packages) 32 | since = datetime.datetime.combine(start_date, datetime.time(0, 0)) 33 | issues = {} 34 | for package in packages: 35 | issues[package] = issues_closed_since(since, project=f'pysal/{package}') 36 | pulls = {} 37 | for package in packages: 38 | pulls[package] = issues_closed_since(since, project=f'pysal/{package}', 39 | pulls=True) 40 | pickle.dump(issues, open("issues_closed.p", "wb")) 41 | 42 | pickle.dump(pulls, open("pulls_closed.p", "wb")) 43 | -------------------------------------------------------------------------------- /tools/release.yaml: -------------------------------------------------------------------------------- 1 | version: 25.01 2 | release_date: 2025-01-31 3 | start_date: 2024-07-31 4 | user: sjsrey 5 | -------------------------------------------------------------------------------- /tools/release_info.py: -------------------------------------------------------------------------------- 1 | """ 2 | Grab most recent releases tagged on Github for PySAL subpackages 3 | 4 | 5 | TODO 6 | - [ ] update dependencies in pyproj.toml for pinning new releases of 7 | pysal packages 8 | 9 | """ 10 | 11 | import os 12 | import subprocess 13 | import json 14 | import urllib 15 | import re 16 | import yaml 17 | from urllib.request import urlopen 18 | from datetime import datetime, timedelta 19 | import requests 20 | 21 | 22 | with open("release.yaml", "r") as stream: 23 | info = yaml.safe_load(stream) 24 | 25 | release_date = info["release_date"] 26 | PYSALVER = info["version"] 27 | start_date = info["start_date"] 28 | USER = info["user"] 29 | 30 | 31 | ISO8601 = "%Y-%m-%dT%H:%M:%SZ" 32 | PER_PAGE = 100 33 | element_pat = re.compile(r"<(.+?)>") 34 | rel_pat = re.compile(r'rel=[\'"](\w+)[\'"]') 35 | 36 | 37 | # get github token: 38 | with open("token", "r") as token_file: 39 | token = token_file.read().strip() 40 | 41 | gh_session = requests.Session() 42 | gh_session.auth = (USER, token) 43 | 44 | 45 | packages = [ 46 | "libpysal", 47 | "access", 48 | "esda", 49 | "giddy", 50 | "inequality", 51 | "pointpats", 52 | "segregation", 53 | "spaghetti", 54 | "mgwr", 55 | "momepy", 56 | "spglm", 57 | "spint", 58 | "spreg", 59 | "tobler", 60 | "mapclassify", 61 | "splot", 62 | "spopt", 63 | ] 64 | 65 | 66 | def get_github_info(packages=packages): 67 | """ 68 | Get information about subpackage releases that have been tagged on github 69 | 
""" 70 | no_release = [] 71 | release = {} 72 | 73 | for package in packages: 74 | url = f"https://api.github.com/repos/pysal/{package}/releases/latest" 75 | print(url) 76 | d = json.loads(gh_session.get(url).text) 77 | if "message" in d: 78 | if d["message"] == "Not Found": 79 | print(f"{package} has no latest release") 80 | no_release.append(package) 81 | else: 82 | print("Something else happened") 83 | print(d) 84 | else: 85 | tag_name = d["tag_name"] 86 | tarball_url = d["tarball_url"] 87 | release[package] = { 88 | "version": tag_name, 89 | "url": tarball_url, 90 | "release_date": d["published_at"], 91 | } 92 | 93 | with open("tarballs.json", "w") as fp: 94 | json.dump(release, fp) 95 | 96 | for package in release: 97 | dt = release[package]["release_date"] 98 | date_format = "%Y-%m-%dT%H:%M:%SZ" 99 | release_date = datetime.strptime(dt, date_format) 100 | release[package]["release_date"] = release_date 101 | 102 | return release 103 | 104 | 105 | def get_pypi_info(): 106 | """ 107 | Get information about subpackage releases that have been tagged on pypi 108 | """ 109 | releases = {} 110 | for package in packages: 111 | url = f"https://pypi.python.org/pypi/{package}/json" 112 | data = json.load(urllib.request.urlopen(url)) 113 | keys = list(data["releases"].keys()) 114 | last = keys[-1] 115 | release = data["releases"][last][0]["upload_time"] 116 | release_date = datetime.strptime(release, "%Y-%m-%dT%H:%M:%S") 117 | releases[package] = {"version": last, "released": release_date} 118 | 119 | return releases 120 | 121 | 122 | def clone_masters(): 123 | clone_releases(tag="master") 124 | 125 | 126 | def clone_mains(): 127 | clone_releases(tag="main") 128 | 129 | 130 | def clone_defaults(packages=packages, cwd=os.getcwd()): 131 | for package in packages: 132 | directory_path = f"tmp/{package}" 133 | 134 | # if already cloned, we pull, otherwise clone 135 | if os.path.isdir(directory_path): 136 | print(f"{directory_path} exists, git pull required") 137 | os.chdir(directory_path) 138 | result = subprocess.run(["git", "pull"], 139 | capture_output=True, text=True) 140 | print("Output:\n", result.stdout) 141 | print("Errors:\n", result.stderr) 142 | os.chdir(cwd) 143 | else: 144 | url = f"https://api.github.com/repos/pysal/{package}" 145 | data = json.load(urllib.request.urlopen(url)) 146 | branch = data["default_branch"] 147 | pkgstr = ( 148 | f"git clone --branch {branch}" 149 | f" https://github.com/pysal/{package}.git" 150 | f" tmp/{package}" 151 | ) 152 | print(pkgstr) 153 | os.system(pkgstr) 154 | 155 | 156 | def clone_releases(tag=None): 157 | """ 158 | Clone the releases in tmprelease_date 159 | """ 160 | os.system("rm -rf tmp") 161 | os.system("mkdir tmp") 162 | for package in packages: 163 | print(package, packages[package]) 164 | if tag: 165 | branch = tag 166 | else: 167 | branch = packages[package] 168 | pkgstr = ( 169 | f"git clone --branch {branch}" 170 | f" https://github.com/pysal/{package}.git" 171 | f" tmp/{package}" 172 | ) 173 | print(pkgstr) 174 | os.system(pkgstr) 175 | 176 | 177 | def parse_link_header(headers): 178 | link_s = headers.get("link", "") 179 | urls = element_pat.findall(link_s) 180 | rels = rel_pat.findall(link_s) 181 | d = {} 182 | for rel, url in zip(rels, urls): 183 | d[rel] = url 184 | return d 185 | 186 | 187 | def get_paged_request(url): 188 | """get a full list, handling APIv3's paging""" 189 | results = [] 190 | while url: 191 | # print("fetching %s" % url, file=sys.stderr) 192 | f = urlopen(url) 193 | results.extend(json.load(f)) 194 | links = 
parse_link_header(f.headers) 195 | url = links.get("next") 196 | return results 197 | 198 | 199 | def _parse_datetime(s): 200 | """Parse dates in the format returned by the Github API.""" 201 | if s: 202 | return datetime.strptime(s, ISO8601) 203 | else: 204 | return datetime.fromtimestamp(0) 205 | 206 | 207 | def issues2dict(issues): 208 | """Convert a list of issues to a dict, keyed by issue number.""" 209 | idict = {} 210 | for i in issues: 211 | idict[i["number"]] = i 212 | return idict 213 | 214 | 215 | def get_url(url): 216 | d = json.loads(gh_session.get(url).text) 217 | return d 218 | 219 | 220 | def get_issues(project="pysal/pysal", state="closed", pulls=False): 221 | """Get a list of the issues from Github api""" 222 | which = "pulls" if pulls else "issues" 223 | url = f"https://api.github.com/repos/{project}/{which}?state={state}" 224 | return get_url(url) 225 | 226 | 227 | def is_pull_request(issue): 228 | """Return True if the given issue is a pull request.""" 229 | return "pull_request_url" in issue 230 | 231 | 232 | def issues_closed_since( 233 | period=timedelta(days=365), project="pysal/pysal", pulls=False 234 | ): 235 | """Get all issues closed since a particular point in time. period 236 | can either be a datetime object, or a timedelta object. In the 237 | latter case, it is used as a time before the present.""" 238 | 239 | which = "pulls" if pulls else "issues" 240 | 241 | if isinstance(period, timedelta): 242 | period = datetime.now() - period 243 | 244 | url = ( 245 | "https://api.github.com/repos/{}/{}?state=closed&sort=updated&since={}" 246 | "&per_page={}".format( 247 | project, which, period.strftime(ISO8601), PER_PAGE 248 | ) 249 | ) 250 | 251 | allclosed = get_url(url) 252 | filtered = [ 253 | i for i in allclosed if _parse_datetime(i["closed_at"]) > period 254 | ] 255 | 256 | # exclude rejected PRs 257 | if pulls: 258 | filtered = [pr for pr in filtered if pr["merged_at"]] 259 | 260 | return filtered 261 | 262 | 263 | def sorted_by_field(issues, field="closed_at", reverse=False): 264 | """Return a list of issues sorted by closing date date.""" 265 | return sorted(issues, key=lambda i: i[field], reverse=reverse) 266 | 267 | 268 | def report(issues, show_urls=False): 269 | """Summary report about a list of issues, printing number and title.""" 270 | # titles may have unicode in them, so we must encode everything below 271 | if show_urls: 272 | for i in issues: 273 | role = "ghpull" if "merged_at" in i else "ghissue" 274 | title = i["title"].encode("utf-8") 275 | print(f"* :{role}:`{i['number']}`: {title}") 276 | else: 277 | for i in issues: 278 | title = i["title"].encode("utf-8") 279 | print(f"* {i['number']}: {title}") 280 | 281 | 282 | def get_meta_releases(): 283 | url = "https://api.github.com/repos/pysal/pysal/releases" 284 | return get_url(url) 285 | --------------------------------------------------------------------------------
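
As a minimal, illustrative sketch (not a file in this repository), the snippet below shows how the release tooling in `tools/` above might be driven end to end: `release_info.get_github_info()` collects the latest tagged subpackage releases, the derived pins mirror what `frozen.py` writes to `frozen.txt`, and `clone_defaults()` prepares the local clones that `gitcount.py` and the changelog script count commits, closed issues, and merged pull requests from. It assumes the `token` file, `release.yaml`, and network access are in place exactly as `release_info.py` expects at import time.

    #!/usr/bin/env python
    # Sketch only -- not part of the repository. Importing release_info reads
    # tools/release.yaml and the GitHub `token` file, so both must exist.
    from release_info import get_github_info, clone_defaults, packages

    # 1. Latest tagged release (version, tarball URL, release date) per subpackage;
    #    this also writes tarballs.json as a side effect.
    releases = get_github_info()

    # 2. Minimum-version pins, mirroring what frozen.py writes to frozen.txt.
    pins = [f"{pkg}>={info['version'].lstrip('vV')}" for pkg, info in releases.items()]
    print("\n".join(pins))  # e.g. "libpysal>=4.12.1"

    # 3. Clone (or pull) each subpackage under tmp/ so gitcount.py and the
    #    changelog script can count commits, closed issues, and merged PRs.
    clone_defaults(packages)
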