├── geodatasets ├── tests │ ├── __init__.py │ ├── test_data.py │ ├── test_api.py │ └── test_lib.py ├── data.py ├── __init__.py ├── api.py ├── lib.py └── json │ └── database.json ├── setup.cfg ├── doc ├── requirements.txt ├── source │ ├── _static │ │ ├── xyzmaps.jpg │ │ ├── custom.css │ │ └── generate_gallery.js │ ├── index.md │ ├── api.rst │ ├── conf.py │ └── contributing.md ├── Makefile └── make.bat ├── ci ├── latest.yaml ├── min.yaml └── dev.yaml ├── readthedocs.yml ├── .pre-commit-config.yaml ├── pyproject.toml ├── LICENSE ├── .github └── workflows │ ├── tests.yaml │ └── release_to_pypi.yml ├── .gitignore └── README.md /geodatasets/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [flake8] 2 | max_line_length = 88 -------------------------------------------------------------------------------- /doc/requirements.txt: -------------------------------------------------------------------------------- 1 | myst-nb 2 | numpydoc 3 | sphinx 4 | sphinx-copybutton 5 | furo -------------------------------------------------------------------------------- /doc/source/_static/xyzmaps.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geopandas/geodatasets/HEAD/doc/source/_static/xyzmaps.jpg -------------------------------------------------------------------------------- /geodatasets/data.py: -------------------------------------------------------------------------------- 1 | import pkgutil 2 | 3 | from . import json 4 | from .lib import _load_json 5 | 6 | json = pkgutil.get_data("geodatasets", "json/database.json") 7 | 8 | data = _load_json(json) 9 | -------------------------------------------------------------------------------- /ci/latest.yaml: -------------------------------------------------------------------------------- 1 | name: test 2 | channels: 3 | - conda-forge 4 | dependencies: 5 | - python 6 | - pooch 7 | # tests 8 | - pytest 9 | - pytest-cov 10 | - geopandas-base 11 | - pyogrio 12 | -------------------------------------------------------------------------------- /ci/min.yaml: -------------------------------------------------------------------------------- 1 | name: test 2 | channels: 3 | - conda-forge 4 | dependencies: 5 | - python=3.8 6 | - pooch 7 | # tests 8 | - pytest 9 | - pytest-cov 10 | - geopandas-base 11 | - pyogrio 12 | -------------------------------------------------------------------------------- /ci/dev.yaml: -------------------------------------------------------------------------------- 1 | name: test 2 | channels: 3 | - conda-forge 4 | dependencies: 5 | - python 6 | # tests 7 | - pytest 8 | - pytest-cov 9 | - geopandas-base 10 | - pyogrio 11 | - pip 12 | - pip: 13 | - git+https://github.com/fatiando/pooch.git@main 14 | -------------------------------------------------------------------------------- /doc/source/index.md: -------------------------------------------------------------------------------- 1 | ```{include} ../../README.md 2 | ``` 3 | 4 | ```{toctree} 5 | --- 6 | maxdepth: 2 7 | caption: Documentation 8 | hidden: true 9 | --- 10 | introduction 11 | api 12 | contributing 13 | GitHub 14 | ``` 15 | -------------------------------------------------------------------------------- /readthedocs.yml: 
--------------------------------------------------------------------------------
 1 | version: 2
 2 | 
 3 | build:
 4 |   os: ubuntu-22.04
 5 |   tools:
 6 |     python: "3.11"
 7 | 
 8 | sphinx:
 9 |   configuration: doc/source/conf.py
10 | 
11 | python:
12 |   install:
13 |     - requirements: doc/requirements.txt
14 |     - method: pip
15 |       path: .
16 | 
--------------------------------------------------------------------------------
/geodatasets/__init__.py:
--------------------------------------------------------------------------------
 1 | from .api import get_path, get_url, fetch  # noqa
 2 | from .data import data  # noqa
 3 | from .lib import Bunch, Dataset  # noqa
 4 | 
 5 | from importlib.metadata import PackageNotFoundError, version
 6 | 
 7 | try:
 8 |     __version__ = version("geodatasets")
 9 | except PackageNotFoundError:  # noqa
10 |     # package is not installed
11 |     pass
12 | 
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
 1 | files: 'geodatasets\/'
 2 | repos:
 3 |   - repo: https://github.com/psf/black
 4 |     rev: 22.12.0
 5 |     hooks:
 6 |       - id: black
 7 |         language_version: python3
 8 |   - repo: https://github.com/pycqa/flake8
 9 |     rev: 6.0.0
10 |     hooks:
11 |       - id: flake8
12 |         language: python_venv
13 |   - repo: https://github.com/pycqa/isort
14 |     rev: 5.11.4
15 |     hooks:
16 |       - id: isort
17 |         language_version: python3
18 | 
19 | ci:
20 |   autofix_prs: false
21 | 
--------------------------------------------------------------------------------
/doc/Makefile:
--------------------------------------------------------------------------------
 1 | # Minimal makefile for Sphinx documentation
 2 | #
 3 | 
 4 | # You can set these variables from the command line, and also
 5 | # from the environment for the first two.
 6 | SPHINXOPTS ?=
 7 | SPHINXBUILD ?= sphinx-build
 8 | SOURCEDIR = source
 9 | BUILDDIR = build
10 | 
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | 	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 | 
15 | .PHONY: help Makefile
16 | 
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
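# For example, `make html` expands to `sphinx-build -M html "source" "build"`
# and writes the rendered documentation to build/html.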
19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /geodatasets/tests/test_data.py: -------------------------------------------------------------------------------- 1 | import geopandas as gpd 2 | import pandas as pd 3 | import pytest 4 | 5 | import geodatasets 6 | 7 | 8 | @pytest.mark.request 9 | @pytest.mark.parametrize("name", geodatasets.data.flatten()) 10 | def test_data_exists(name): 11 | dataset = geodatasets.data.query_name(name) 12 | gdf = gpd.read_file(geodatasets.get_path(name), engine="pyogrio") 13 | assert isinstance(gdf, pd.DataFrame) 14 | assert gdf.shape == (dataset.nrows, dataset.ncols) 15 | if not ( 16 | ".csv" in dataset.filename 17 | or ( 18 | hasattr(dataset, "members") 19 | and any(".csv" in member for member in dataset.members) 20 | ) 21 | ): 22 | assert gdf.geom_type.str.endswith(dataset.geometry_type).all() 23 | -------------------------------------------------------------------------------- /doc/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=source 11 | set BUILDDIR=build 12 | 13 | if "%1" == "" goto help 14 | 15 | %SPHINXBUILD% >NUL 2>NUL 16 | if errorlevel 9009 ( 17 | echo. 18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 19 | echo.installed, then set the SPHINXBUILD environment variable to point 20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 21 | echo.may add the Sphinx directory to PATH. 22 | echo. 23 | echo.If you don't have Sphinx installed, grab it from 24 | echo.http://sphinx-doc.org/ 25 | exit /b 1 26 | ) 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /doc/source/_static/custom.css: -------------------------------------------------------------------------------- 1 | span#release { 2 | font-size: x-small; 3 | } 4 | 5 | .main-container { 6 | position: relative; 7 | margin-left: auto; 8 | margin-right: auto; 9 | margin-top: 50px; 10 | margin-bottom: 10px; 11 | } 12 | 13 | .table-container { 14 | font-size: 18px; 15 | width: 100%; 16 | margin-top: 30px; 17 | margin-bottom: 30px; 18 | background-color: rgba(128, 128, 128, 0.1); 19 | } 20 | 21 | .map-container { 22 | height: 250px; 23 | margin-left: auto; 24 | margin-right: auto; 25 | margin-top: 20px; 26 | margin-bottom: 20px; 27 | padding: 20px; 28 | } 29 | 30 | .key-container { 31 | font-size: 16px; 32 | } 33 | 34 | .key-cell { 35 | font-size: 16px; 36 | line-height: 22px; 37 | vertical-align: top; 38 | width: 200px; 39 | color: rgba(128, 128, 128, 1); 40 | align-items: top; 41 | } 42 | 43 | .val-cell { 44 | font-size: 16px; 45 | width: 200px; 46 | margin-right: 50px; 47 | line-height: 22px; 48 | } -------------------------------------------------------------------------------- /doc/source/api.rst: -------------------------------------------------------------------------------- 1 | .. _reference: 2 | 3 | 4 | API reference 5 | ============= 6 | 7 | The ``geodatasets`` package has top-level functions that will cover 95% of use cases and 8 | other tooling handling the database. 
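A minimal sketch of the typical workflow (the dataset names here are illustrative;
any name resolvable by :meth:`~geodatasets.Bunch.query_name` works):

.. code-block:: python

    import geodatasets

    # download into the local cache on first use and return the local path
    path = geodatasets.get_path("geoda airbnb")

    # look up only the URL of the remote file, without downloading anything
    url = geodatasets.get_url("geoda airbnb")

    # pre-download one or more datasets for later use
    geodatasets.fetch(["geoda airbnb", "nybb"])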
 9 | 
10 | Top-level API
11 | -------------
12 | 
13 | In most cases, you will be using :func:`~geodatasets.get_path` to download the data and get the path
14 | to the local storage, :func:`~geodatasets.get_url` to get the link to the original dataset in its
15 | online location and :func:`~geodatasets.fetch` to pre-download data to the local storage.
16 | 
17 | .. currentmodule:: geodatasets
18 | 
19 | .. autofunction:: get_path
20 | 
21 | .. autofunction:: get_url
22 | 
23 | .. autofunction:: fetch
24 | 
25 | Database-level API
26 | ------------------
27 | 
28 | The database of dataset metadata is handled via custom dict-based classes.
29 | 
30 | .. autoclass:: Dataset
31 |    :members: path
32 | 
33 |    A dict with attribute-access and that can be called to update keys.
34 | 
35 | .. autoclass:: Bunch
36 |    :exclude-members: clear, copy, fromkeys, get, items, keys, pop, popitem, setdefault, update, values
37 |    :members: filter, flatten, query_name
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
 1 | [build-system]
 2 | requires = ["setuptools>=61.0", "setuptools_scm[toml]>=6.2"]
 3 | build-backend = "setuptools.build_meta"
 4 | 
 5 | [tool.setuptools_scm]
 6 | 
 7 | [project]
 8 | name = "geodatasets"
 9 | dynamic = ["version"]
10 | authors = [
11 |     {name = "Martin Fleischmann", email = "martin@martinfleischmann.net"},
12 | ]
13 | maintainers = [
14 |     {name = "geodatasets contributors"},
15 | ]
16 | license = { text = "BSD 3-Clause" }
17 | description = "Spatial data examples"
18 | readme = "README.md"
19 | classifiers = [
20 |     "Intended Audience :: Science/Research",
21 |     "License :: OSI Approved :: BSD License",
22 |     "Operating System :: OS Independent",
23 |     "Programming Language :: Python :: 3",
24 |     "Topic :: Scientific/Engineering :: GIS",
25 | ]
26 | requires-python = ">=3.8"
27 | dependencies = [
28 |     "pooch"
29 | ]
30 | 
31 | [project.urls]
32 | Home = "https://github.com/geopandas/geodatasets"
33 | Repository = "https://github.com/geopandas/geodatasets"
34 | 
35 | [tool.setuptools.packages.find]
36 | include = [
37 |     "geodatasets",
38 |     "geodatasets.*",
39 | ]
40 | 
41 | [tool.setuptools.package-data]
42 | geodatasets = ["json/database.json"]
43 | 
44 | [tool.coverage.run]
45 | omit = ["geodatasets/tests/*"]
46 | 
47 | [tool.pytest.ini_options]
48 | markers = [
49 |     "request: fetching data from remote server",
50 | ]
--------------------------------------------------------------------------------
/geodatasets/tests/test_api.py:
--------------------------------------------------------------------------------
 1 | import os
 2 | from pathlib import Path
 3 | 
 4 | import pooch
 5 | import pytest
 6 | import geodatasets
 7 | 
 8 | 
 9 | def test_get_url():
10 |     url = geodatasets.get_url("nybb")
11 |     assert (
12 |         url
13 |         == "https://www.nyc.gov/assets/planning/download/zip/data-maps/open-data/nybb_16a.zip"  # noqa
14 |     )
15 | 
16 | 
17 | @pytest.mark.request
18 | def test_get_path():
19 |     in_cache = pooch.os_cache("geodatasets").joinpath("nybb_16a.zip")
20 |     if Path(in_cache).exists():
21 |         os.remove(in_cache)
22 | 
23 |     assert Path(geodatasets.get_path("nybb")).exists()
24 | 
25 |     # cleanup
26 |     os.remove(in_cache)
27 | 
28 | 
29 | @pytest.mark.request
30 | def test_fetch():
31 |     # clear cache
32 |     for data in ["airbnb.zip", "nybb_16a.zip", "nyc_neighborhoods.zip"]:
33 |         in_cache = pooch.os_cache("geodatasets").joinpath(data)
34 |         if Path(in_cache).exists():
35 |             os.remove(in_cache)
36 | 
37 | 
geodatasets.fetch("nybb") 38 | assert pooch.os_cache("geodatasets").joinpath("nybb_16a.zip").exists() 39 | 40 | geodatasets.fetch(["geoda airbnb", "geoda atlanta"]) 41 | 42 | for data in ["airbnb.zip", "atlanta_hom.zip"]: 43 | assert pooch.os_cache("geodatasets").joinpath(data).exists() 44 | 45 | # cleanup 46 | for data in ["airbnb.zip", "nybb_16a.zip", "atlanta_hom.zip"]: 47 | in_cache = pooch.os_cache("geodatasets").joinpath(data) 48 | os.remove(in_cache) 49 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2023, Martin Fleischmann 4 | 5 | Redistribution and use in source and binary forms, with or without 6 | modification, are permitted provided that the following conditions are met: 7 | 8 | 1. Redistributions of source code must retain the above copyright notice, this 9 | list of conditions and the following disclaimer. 10 | 11 | 2. Redistributions in binary form must reproduce the above copyright notice, 12 | this list of conditions and the following disclaimer in the documentation 13 | and/or other materials provided with the distribution. 14 | 15 | 3. Neither the name of the copyright holder nor the names of its 16 | contributors may be used to endorse or promote products derived from 17 | this software without specific prior written permission. 18 | 19 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 20 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 21 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 22 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 23 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 24 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 25 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 26 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 27 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 28 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 29 | -------------------------------------------------------------------------------- /.github/workflows/tests.yaml: -------------------------------------------------------------------------------- 1 | name: Tests 2 | 3 | on: 4 | push: 5 | branches: [main] 6 | pull_request: 7 | branches: 8 | - "*" 9 | 10 | schedule: 11 | - cron: "59 23 * * 3" 12 | 13 | workflow_dispatch: 14 | inputs: 15 | version: 16 | description: Manual test execution 17 | default: test 18 | required: false 19 | 20 | jobs: 21 | Tests: 22 | name: ${{ matrix.os }}, ${{ matrix.environment-file }} 23 | runs-on: ${{ matrix.os }} 24 | strategy: 25 | matrix: 26 | os: [macos-latest, ubuntu-latest, windows-latest] 27 | environment-file: [ci/latest.yaml] 28 | include: 29 | - environment-file: ci/dev.yaml 30 | os: ubuntu-latest 31 | - environment-file: ci/min.yaml 32 | os: ubuntu-latest 33 | defaults: 34 | run: 35 | shell: bash -l {0} 36 | 37 | steps: 38 | - name: checkout repo 39 | uses: actions/checkout@v4 40 | 41 | - name: setup micromamba 42 | uses: mamba-org/setup-micromamba@main 43 | with: 44 | environment-file: ${{ matrix.environment-file }} 45 | micromamba-version: "latest" 46 | 47 | - name: Install geodatasets 48 | run: pip install . 49 | 50 | - name: run tests 51 | run: pytest -v . 
-m "not request" --cov=geodatasets --cov-append --cov-report term-missing --cov-report xml --color=yes 52 | 53 | - name: test data 54 | run: pytest -v . -m "request" --cov=geodatasets --cov-append --cov-report term-missing --cov-report xml --color=yes 55 | if: matrix.os == 'ubuntu-latest' && matrix.environment-file == 'ci/latest.yaml' 56 | 57 | - uses: codecov/codecov-action@v4 58 | -------------------------------------------------------------------------------- /.github/workflows/release_to_pypi.yml: -------------------------------------------------------------------------------- 1 | name: Publish geodatasets to PyPI / GitHub 2 | 3 | on: 4 | push: 5 | tags: 6 | - "2*" 7 | 8 | jobs: 9 | build-n-publish: 10 | name: Build and publish geodatasets to PyPI 11 | runs-on: ubuntu-latest 12 | 13 | steps: 14 | - name: Checkout source 15 | uses: actions/checkout@v3 16 | 17 | - name: Set up Python 18 | uses: actions/setup-python@v2 19 | with: 20 | python-version: "3.x" 21 | 22 | - name: Build a binary wheel and a source tarball 23 | run: | 24 | python -m pip install --upgrade build 25 | python -m build 26 | 27 | - name: Publish distribution to PyPI 28 | uses: pypa/gh-action-pypi-publish@release/v1 29 | with: 30 | user: __token__ 31 | password: ${{ secrets.PYPI_API_TOKEN }} 32 | 33 | - name: Create GitHub Release 34 | id: create_release 35 | uses: actions/create-release@v1 36 | env: 37 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # This token is provided by Actions, you do not need to create your own token 38 | with: 39 | tag_name: ${{ github.ref }} 40 | release_name: ${{ github.ref }} 41 | draft: false 42 | prerelease: false 43 | 44 | - name: Get Asset name 45 | run: | 46 | export PKG=$(ls dist/ | grep tar) 47 | set -- $PKG 48 | echo "name=$1" >> $GITHUB_ENV 49 | 50 | - name: Upload Release Asset (sdist) to GitHub 51 | id: upload-release-asset 52 | uses: actions/upload-release-asset@v1 53 | env: 54 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 55 | with: 56 | upload_url: ${{ steps.create_release.outputs.upload_url }} 57 | asset_path: dist/${{ env.name }} 58 | asset_name: ${{ env.name }} 59 | asset_content_type: application/zip 60 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | .python-version 86 | 87 | # pipenv 88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 91 | # install all needed dependencies. 92 | #Pipfile.lock 93 | 94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 95 | __pypackages__/ 96 | 97 | # Celery stuff 98 | celerybeat-schedule 99 | celerybeat.pid 100 | 101 | # SageMath parsed files 102 | *.sage.py 103 | 104 | # Environments 105 | .env 106 | .venv 107 | env/ 108 | venv/ 109 | ENV/ 110 | env.bak/ 111 | venv.bak/ 112 | 113 | # Spyder project settings 114 | .spyderproject 115 | .spyproject 116 | 117 | # Rope project settings 118 | .ropeproject 119 | 120 | # mkdocs documentation 121 | /site 122 | 123 | # mypy 124 | .mypy_cache/ 125 | .dmypy.json 126 | dmypy.json 127 | 128 | # Pyre type checker 129 | .pyre/ 130 | 131 | .vscode/settings.json 132 | .DS_Store 133 | 134 | cache/ -------------------------------------------------------------------------------- /doc/source/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 2 | # 3 | # This file only contains a selection of the most common options. For a full 4 | # list see the documentation: 5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 6 | 7 | # -- Path setup -------------------------------------------------------------- 8 | 9 | # If extensions (or modules to document with autodoc) are in another directory, 10 | # add these directories to sys.path here. If the directory is relative to the 11 | # documentation root, use os.path.abspath to make it absolute, like shown here. 12 | # 13 | import os 14 | import sys 15 | 16 | sys.path.insert(0, os.path.abspath("../..")) 17 | import geodatasets # noqa 18 | 19 | # -- Project information ----------------------------------------------------- 20 | 21 | project = "geodatasets" 22 | copyright = "2023, Martin Fleischmann" 23 | author = "Martin Fleischmann" 24 | 25 | version = geodatasets.__version__ 26 | # The full version, including alpha/beta/rc tags 27 | release = version 28 | 29 | html_title = f'geodatasets {release}' 30 | 31 | 32 | # -- General configuration --------------------------------------------------- 33 | 34 | # Add any Sphinx extension module names here, as strings. They can be 35 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 36 | # ones. 
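# The extensions below mirror doc/requirements.txt: numpydoc parses the
# NumPy-style docstrings, myst_nb renders the Markdown pages and
# sphinx-copybutton adds a copy button to code examples.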
37 | extensions = [
38 |     "sphinx.ext.autodoc",
39 |     "numpydoc",
40 |     "sphinx.ext.autosummary",
41 |     "myst_nb",
42 |     "sphinx_copybutton",
43 | ]
44 | 
45 | autosummary_generate = True
46 | 
47 | # Add any paths that contain templates here, relative to this directory.
48 | templates_path = ["_templates"]
49 | 
50 | # List of patterns, relative to source directory, that match files and
51 | # directories to ignore when looking for source files.
52 | # This pattern also affects html_static_path and html_extra_path.
53 | exclude_patterns = []
54 | 
55 | 
56 | # -- Options for HTML output -------------------------------------------------
57 | 
58 | # The theme to use for HTML and HTML Help pages. See the documentation for
59 | # a list of builtin themes.
60 | #
61 | html_theme = "furo"
62 | 
63 | # Add any paths that contain custom static files (such as style sheets) here,
64 | # relative to this directory. They are copied after the builtin static files,
65 | # so a file named "default.css" will overwrite the builtin "default.css".
66 | html_static_path = ["_static"]
67 | 
68 | html_css_files = [
69 |     "custom.css",
70 | ]
71 | # html_sidebars = {
72 | #     "**": ["docs-sidebar.html"],
73 | # }
74 | # html_logo = "_static/logo.svg"
75 | 
--------------------------------------------------------------------------------
/doc/source/contributing.md:
--------------------------------------------------------------------------------
 1 | # Contributing to `geodatasets`
 2 | 
 3 | Contributions to `geodatasets` are very welcome. They are likely to be accepted more
 4 | quickly if they follow these guidelines.
 5 | 
 6 | There are two main groups of contributions - adding new data sources and
 7 | contributions to the codebase and documentation.
 8 | 
 9 | ## Data sources
10 | 
11 | If you want to add a new dataset, simply add its details to
12 | `geodatasets/json/database.json`.
13 | 
14 | You can add a single `Dataset` or a `Bunch` of `Dataset`s. Use the following
15 | schema to add a single dataset:
16 | 
17 | ```json
18 | {
19 |     "dataset_name": {
20 |         "url": "https://your-site.com/direct-link-to/my_file.zip",
21 |         "license": "CC-0",
22 |         "attribution": "University of Github",
23 |         "name": "dataset_name",
24 |         "description": "Contents of my file",
25 |         "geometry_type": "Polygon",
26 |         "nrows": 77,
27 |         "ncols": 20,
28 |         "details": "https://your-site.com/link-to-explanation/",
29 |         "hash": "a2ab1e3f938226d287dd76cde18c00e2d3a260640dd826da7131827d9e76c824",
30 |         "filename": "my_file.zip"
31 |     },
32 | }
33 | ```
34 | 
35 | If you want to add a bunch of related datasets (e.g.
different files from a single source),
36 | you can group them within a `Bunch` using the following schema:
37 | 
38 | ```json
39 | {
40 |     "provider_bunch_name": {
41 |         "first_dataset_name": {
42 |             "url": "https://your-site.com/direct-link-to/my_file.zip",
43 |             "license": "CC-0",
44 |             "attribution": "University of Github",
45 |             "name": "dataset_name",
46 |             "description": "Contents of my file",
47 |             "geometry_type": "Polygon",
48 |             "nrows": 77,
49 |             "ncols": 20,
50 |             "details": "https://your-site.com/link-to-explanation/",
51 |             "hash": "a2ab1e3f938226d287dd76cde18c00e2d3a260640dd826da7131827d9e76c824",
52 |             "filename": "my_file.zip"
53 |         },
54 |         "second_dataset_name": {
55 |             "url": "https://your-site.com/direct-link-to/my_file.zip",
56 |             "license": "CC-0",
57 |             "attribution": "University of Github",
58 |             "name": "dataset_name",
59 |             "description": "Contents of my file",
60 |             "geometry_type": "Point",
61 |             "nrows": 77,
62 |             "ncols": 20,
63 |             "details": "https://your-site.com/link-to-explanation/",
64 |             "hash": "a2ab1e3f938226d287dd76cde18c00e2d3a260640dd826da7131827d9e76c824",
65 |             "filename": "my_file.zip",
66 |             "members": ["use_only_this.geojson"]
67 |         }
68 |     },
69 | }
70 | ```
71 | 
72 | It is mandatory to always specify at least `name`, `url`, `hash` and `filename`. `hash`
73 | is the sha256 hash of the file, used to check that a user gets the expected file, and
74 | `filename` specifies the name under which the downloaded file will be stored. Ensure that
75 | it has the correct suffix. Don't forget to add any other custom attributes you'd like. The
76 | attribute `members` has a specific meaning: it lists the file (or files, in the case of an
77 | ESRI Shapefile) that shall be extracted from the archive and used.
78 | 
79 | ## Code and documentation
80 | 
81 | At this stage of `geodatasets` development, the priorities are to define a simple,
82 | usable, and stable API and to have clean, maintainable, readable code.
83 | 
84 | In general, `geodatasets` follows the conventions of the GeoPandas project where
85 | applicable.
86 | 
87 | In particular, when submitting a pull request:
88 | 
89 | - All existing tests should pass. Please make sure that the test suite passes, both
90 |   locally and on GitHub Actions. Status on GHA will be visible on a pull request. GHA
91 |   are automatically enabled on your own fork as well. To trigger a check, make a PR to
92 |   your own fork.
93 | - Ensure that documentation has built correctly. It will be automatically built for each
94 |   PR.
95 | - New functionality should include tests. Please write reasonable tests for your code
96 |   and make sure that they pass on your pull request.
97 | - Classes, methods, functions, etc. should have docstrings and type hints. The first
98 |   line of a docstring should be a standalone summary. Parameters and return values
99 |   should be documented explicitly.
100 | - Follow PEP 8 when possible. We use Black and Flake8 to ensure a consistent code format
101 |   throughout the project. For more details see the [GeoPandas contributing
102 |   guide](https://geopandas.readthedocs.io/en/latest/community/contributing.html).
103 | - Imports should be grouped with standard library imports first, 3rd-party libraries
104 |   next, and `geodatasets` imports third. Within each grouping, imports should be
105 |   alphabetized. Always use absolute imports when possible, and explicit relative imports
106 |   for local imports when necessary in tests.
107 | - `geodatasets` supports Python 3.8+ only. When possible, do not introduce additional
108 |   dependencies.
If that is necessary, make sure they can be treated as optional.
109 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
 1 | # geodatasets
 2 | 
 3 | Fetch links or download and cache spatial data example files.
 4 | 
 5 | The `geodatasets` package contains an API on top of a JSON database with metadata of externally hosted
 6 | datasets containing geospatial information useful for illustrative and educational
 7 | purposes.
 8 | 
 9 | See the documentation at [geodatasets.readthedocs.io/](https://geodatasets.readthedocs.io/).
10 | 
11 | ## Install
12 | 
13 | From PyPI:
14 | 
15 | ```sh
16 | pip install geodatasets
17 | ```
18 | 
19 | or using `conda` or `mamba` from conda-forge:
20 | 
21 | ```sh
22 | conda install geodatasets -c conda-forge
23 | ```
24 | 
25 | The development version can be installed using `pip` from GitHub.
26 | 
27 | ```sh
28 | pip install git+https://github.com/geopandas/geodatasets.git
29 | ```
30 | 
31 | ## How to use
32 | 
33 | The package comes with a database of datasets. To see all:
34 | 
35 | ```py
36 | In [1]: import geodatasets
37 | 
38 | In [2]: geodatasets.data
39 | Out[2]:
40 | {'geoda': {'airbnb': {'url': 'https://geodacenter.github.io/data-and-lab//data/airbnb.zip',
41 |    'license': 'NA',
42 |    'attribution': 'Center for Spatial Data Science, University of Chicago',
43 |    'name': 'geoda.airbnb',
44 |    'description': 'Airbnb rentals, socioeconomics, and crime in Chicago',
45 |    'geometry_type': 'Polygon',
46 |    'nrows': 77,
47 |    'ncols': 21,
48 |    'details': 'https://geodacenter.github.io/data-and-lab//airbnb/',
49 |    'hash': 'a2ab1e3f938226d287dd76cde18c00e2d3a260640dd826da7131827d9e76c824',
50 |    'filename': 'airbnb.zip'},
51 |   'atlanta': {'url': 'https://geodacenter.github.io/data-and-lab//data/atlanta_hom.zip',
52 |    'license': 'NA',
53 |    'attribution': 'Center for Spatial Data Science, University of Chicago',
54 |    'name': 'geoda.atlanta',
55 |    'description': 'Atlanta, GA region homicide counts and rates',
56 |    'geometry_type': 'Polygon',
57 |    'nrows': 90,
58 |    'ncols': 24,
59 |    'details': 'https://geodacenter.github.io/data-and-lab//atlanta_old/',
60 |    'hash': 'a33a76e12168fe84361e60c88a9df4856730487305846c559715c89b1a2b5e09',
61 |    'filename': 'atlanta_hom.zip',
62 |    'members': ['atlanta_hom/atl_hom.geojson']},
63 | ...
64 | ```
65 | 
66 | There is also a convenient top-level API. One to get only the URL:
67 | 
68 | ```py
69 | In [3]: geodatasets.get_url("geoda airbnb")
70 | Out[3]: 'https://geodacenter.github.io/data-and-lab//data/airbnb.zip'
71 | ```
72 | 
73 | And one to get the local path. If the file is not available in the cache, it will be
74 | downloaded first.
75 | 
76 | ```py
77 | In [4]: geodatasets.get_path('geoda airbnb')
78 | Out[4]: '/Users/martin/Library/Caches/geodatasets/airbnb.zip'
79 | ```
80 | 
81 | You can also get all the details:
82 | 
83 | ```py
84 | In [5]: geodatasets.data.geoda.airbnb
85 | Out[5]:
86 | {'url': 'https://geodacenter.github.io/data-and-lab//data/airbnb.zip',
87 |  'license': 'NA',
88 |  'attribution': 'Center for Spatial Data Science, University of Chicago',
89 |  'name': 'geoda.airbnb',
90 |  'description': 'Airbnb rentals, socioeconomics, and crime in Chicago',
91 |  'geometry_type': 'Polygon',
92 |  'nrows': 77,
93 |  'ncols': 21,
94 |  'details': 'https://geodacenter.github.io/data-and-lab//airbnb/',
95 |  'hash': 'a2ab1e3f938226d287dd76cde18c00e2d3a260640dd826da7131827d9e76c824',
96 |  'filename': 'airbnb.zip'}
97 | ```
98 | 
99 | Or using the name query:
100 | 
101 | ```py
102 | In [6]: geodatasets.data.query_name('geoda airbnb')
103 | Out[6]:
104 | {'url': 'https://geodacenter.github.io/data-and-lab//data/airbnb.zip',
105 |  'license': 'NA',
106 |  'attribution': 'Center for Spatial Data Science, University of Chicago',
107 |  'name': 'geoda.airbnb',
108 |  'description': 'Airbnb rentals, socioeconomics, and crime in Chicago',
109 |  'geometry_type': 'Polygon',
110 |  'nrows': 77,
111 |  'ncols': 21,
112 |  'details': 'https://geodacenter.github.io/data-and-lab//airbnb/',
113 |  'hash': 'a2ab1e3f938226d287dd76cde18c00e2d3a260640dd826da7131827d9e76c824',
114 |  'filename': 'airbnb.zip'}
115 | ```
116 | 
117 | The whole structure is built on the dict-based `Bunch` class and can be flattened. If you want
118 | to see all available datasets, you can use:
119 | 
120 | ```py
121 | In [7]: geodatasets.data.flatten().keys()
122 | Out[7]: dict_keys(['geoda.airbnb', 'geoda.atlanta', 'geoda.cars', 'geoda.charleston1', 'geoda.charleston2', 'geoda.chicago_health', 'geoda.chicago_commpop', 'geoda.chile_labor', 'geoda.cincinnati', 'geoda.cleveland', 'geoda.columbus', 'geoda.grid100', 'geoda.groceries', 'geoda.guerry', 'geoda.health', 'geoda.health_indicators', 'geoda.hickory1', 'geoda.hickory2', 'geoda.home_sales', 'geoda.houston', 'geoda.juvenile', 'geoda.lansing1', 'geoda.lansing2', 'geoda.lasrosas', 'geoda.liquor_stores', 'geoda.malaria', 'geoda.milwaukee1', 'geoda.milwaukee2', 'geoda.ncovr', 'geoda.natregimes', 'geoda.ndvi', 'geoda.nepal', 'geoda.nyc', 'geoda.nyc_earnings', 'geoda.nyc_education', 'geoda.nyc_neighborhoods', 'geoda.orlando1', 'geoda.orlando2', 'geoda.oz9799', 'geoda.phoenix_acs', 'geoda.police', 'geoda.sacramento1', 'geoda.sacramento2', 'geoda.savannah1', 'geoda.savannah2', 'geoda.seattle1', 'geoda.seattle2', 'geoda.sids', 'geoda.sids2', 'geoda.south', 'geoda.spirals', 'geoda.stlouis', 'geoda.tampa1', 'geoda.us_sdoh', 'ny.bb', 'eea.large_rivers', 'naturalearth.land'])
123 | ```
--------------------------------------------------------------------------------
/geodatasets/tests/test_lib.py:
--------------------------------------------------------------------------------
 1 | import os
 2 | from pathlib import Path
 3 | 
 4 | import pooch
 5 | import pytest
 6 | 
 7 | from geodatasets import Bunch, Dataset, data
 8 | from geodatasets.lib import GEOMETRY_TYPES
 9 | 
10 | 
11 | @pytest.fixture
12 | def data1():
13 |     return Dataset(
14 |         url="https://myserver.com/data.zip",
15 |         attribution="(C) geodatasets",
16 |         name="my_public_data",
17 |         filename="data.zip",
18 |         geometry_type="Polygon",
19 |         hash="qwertyuiopasdfghjklzxcvbnm1234567890",
20 |     )
21 | 
22 | 
23 | @pytest.fixture
24 | def data2():
25 |     return Dataset(
url="https://myserver.com/?dghrtnkmjnkju", 27 | attribution="(C) geodatasets", 28 | name="my_public_data2", 29 | filename="data2.json", 30 | geometry_type="Point", 31 | hash="qwertyuiopasdfghjklzxcvbnm1234567890", 32 | ) 33 | 34 | 35 | @pytest.fixture 36 | def test_bunch( 37 | data1, 38 | data2, 39 | ): 40 | return Bunch( 41 | data1=data1, 42 | data2=data2, 43 | ) 44 | 45 | 46 | def test_dir(data1): 47 | assert dir(data1) == sorted( 48 | ["url", "attribution", "name", "filename", "geometry_type", "hash"] 49 | ) 50 | 51 | 52 | def test_expect_name_url_attribution(): 53 | with pytest.raises(AttributeError, match="`name`, `url`, `hash`, `filename`"): 54 | Dataset({}) 55 | with pytest.raises(AttributeError, match="`url`, `hash`, `filename`"): 56 | Dataset({"name": "myname"}) 57 | with pytest.raises(AttributeError, match="`hash`, `filename`"): 58 | Dataset({"url": "my_url", "name": "my_name"}) 59 | 60 | 61 | def test_html_repr(data1, data2): 62 | item_strings = [ 63 | '
', 64 | '
', 65 | '
geodatasets.Dataset
', 66 | '
my_public_data
', 67 | '
', 68 | '
', 69 | "
url
https://myserver.com/data.zip
", 70 | "
attribution
(C) geodatasets
", 71 | ] 72 | 73 | for html_string in item_strings: 74 | assert html_string in data1._repr_html_() 75 | 76 | bunch = Bunch( 77 | { 78 | "first": data1, 79 | "second": data2, 80 | } 81 | ) 82 | 83 | bunch_strings = [ 84 | '
geodatasets.Bunch
', 85 | '
2 items
', 86 | '
    ', 87 | '
  • ', 88 | "geodatasets.Dataset", 89 | '
    ', 90 | ] 91 | 92 | bunch_repr = bunch._repr_html_() 93 | for html_string in item_strings + bunch_strings: 94 | assert html_string in bunch_repr 95 | assert bunch_repr.count('
  • ') == 2 96 | assert bunch_repr.count('
    ') == 3 97 | assert bunch_repr.count('
    ') == 3 98 | 99 | 100 | def test_copy(data1): 101 | copied = data1.copy() 102 | assert isinstance(copied, Dataset) 103 | 104 | 105 | def test_callable(): 106 | # only testing the callable functionality to override a keyword, as we 107 | # cannot test the actual items that need an API key 108 | updated_item = data.ny.bb(hash="myhash") 109 | assert isinstance(updated_item, Dataset) 110 | assert "url" in updated_item 111 | assert updated_item["hash"] == "myhash" 112 | # check that original item dict is not modified 113 | assert ( 114 | data.ny.bb["hash"] 115 | == "a303be17630990455eb079777a6b31980549e9096d66d41ce0110761a7e2f92a" 116 | ) 117 | 118 | 119 | def test_flatten(data1, data2): 120 | nested_bunch = Bunch( 121 | first_bunch=Bunch(first=data1, second=data2), 122 | second_bunch=Bunch(first=data1(name="data3"), second=data2(name="data4")), 123 | ) 124 | 125 | assert len(nested_bunch) == 2 126 | assert len(nested_bunch.flatten()) == 4 127 | 128 | 129 | def test_query_name(): 130 | options = [ 131 | "ny.bb", 132 | "ny bb", 133 | "NY BB", 134 | "ny-bb", 135 | "NY_BB", 136 | "NY/BB", 137 | ] 138 | 139 | for option in options: 140 | queried = data.query_name(option) 141 | assert isinstance(queried, Dataset) 142 | assert queried.name == "ny.bb" 143 | 144 | with pytest.raises(ValueError, match="No matching item found"): 145 | data.query_name("i don't exist") 146 | 147 | 148 | def test_filter(test_bunch): 149 | assert len(test_bunch.filter(keyword="json").flatten()) == 1 150 | assert len(test_bunch.filter(name="data2").flatten()) == 1 151 | assert len(test_bunch.filter(geometry_type="Point").flatten()) == 1 152 | assert ( 153 | len(test_bunch.filter(keyword="json", geometry_type="Polygon").flatten()) == 0 154 | ) 155 | assert len(test_bunch.filter(name="nonsense").flatten()) == 0 156 | 157 | def custom(provider): 158 | if hasattr(provider, "filename") and provider.filename == "data.zip": 159 | return True 160 | return False 161 | 162 | assert len(test_bunch.filter(function=custom).flatten()) == 1 163 | 164 | 165 | @pytest.mark.request 166 | def test_get_path(): 167 | in_cache = pooch.os_cache("geodatasets").joinpath("nybb_16a.zip") 168 | if Path(in_cache).exists(): 169 | os.remove(in_cache) 170 | 171 | assert Path(data.ny.bb.path).exists() 172 | 173 | # cleanup 174 | os.remove(in_cache) 175 | -------------------------------------------------------------------------------- /geodatasets/api.py: -------------------------------------------------------------------------------- 1 | import pooch 2 | 3 | from .data import data 4 | 5 | flat = data.flatten() 6 | 7 | registry = {value["filename"]: value["hash"] for value in flat.values()} 8 | urls = {value["filename"]: value["url"] for value in flat.values()} 9 | 10 | CACHE = pooch.create( 11 | path=pooch.os_cache("geodatasets"), base_url="", registry=registry, urls=urls 12 | ) 13 | 14 | 15 | def get_url(name): 16 | """Get the URL from which the dataset can be fetched. 17 | 18 | ``name`` is queried using :meth:`~geodatasets.Bunch.query_name`, so it only needs to 19 | contain the same letters in the same order as the item's name irrespective 20 | of the letter case, spaces, dashes and other characters. 21 | 22 | No data is downloaded. 23 | 24 | Parameters 25 | ---------- 26 | name : str 27 | Name of the data item. Formatting does not matter. 
28 | 29 | Returns 30 | ------- 31 | str 32 | link to the online dataset 33 | 34 | See also 35 | -------- 36 | get_path 37 | 38 | Examples 39 | -------- 40 | >>> geodatasets.get_url('GeoDa AirBnB') 41 | 'https://geodacenter.github.io/data-and-lab//data/airbnb.zip' 42 | 43 | >>> geodatasets.get_url('geoda_airbnb') 44 | 'https://geodacenter.github.io/data-and-lab//data/airbnb.zip' 45 | """ 46 | return data.query_name(name).url 47 | 48 | 49 | def get_path(name): 50 | """Get the absolute path to a file in the local storage. 51 | 52 | If it’s not in the local storage, it will be downloaded. 53 | 54 | ``name`` is queried using :meth:`~geodatasets.Bunch.query_name`, so it only needs to 55 | contain the same letters in the same order as the item's name irrespective 56 | of the letter case, spaces, dashes and other characters. 57 | 58 | For Datasets containing multiple files, the archive is automatically extracted. 59 | 60 | Parameters 61 | ---------- 62 | name : str 63 | Name of the data item. Formatting does not matter. 64 | 65 | See also 66 | -------- 67 | get_url 68 | fetch 69 | 70 | Examples 71 | -------- 72 | When it does not exist in the cache yet, it gets downloaded first: 73 | 74 | >>> path = geodatasets.get_path('GeoDa AirBnB') 75 | Downloading file 'airbnb.zip' from 'https://geodacenter.github.io/data-and-lab/\ 76 | /data/airbnb.zip' to '/Users/martin/Library/Caches/geodatasets'. 77 | >>> path 78 | '/Users/martin/Library/Caches/geodatasets/airbnb.zip' 79 | 80 | Every other call returns the path directly: 81 | 82 | >>> path2 = geodatasets.get_path("geoda_airbnb") 83 | >>> path2 84 | '/Users/martin/Library/Caches/geodatasets/airbnb.zip' 85 | """ 86 | dataset = data.query_name(name) 87 | return dataset.path 88 | 89 | 90 | def fetch(name): 91 | """Download the data to the local storage. 92 | 93 | This is useful when it is expected that some data will be needed later but you 94 | want to avoid download at that time. 95 | 96 | ``name`` is queried using :meth:`~geodatasets.Bunch.query_name`, so it only needs to 97 | contain the same letters in the same order as the item's name irrespective 98 | of the letter case, spaces, dashes and other characters. 99 | 100 | For Datasets containing multiple files, the archive is automatically extracted. 101 | 102 | Parameters 103 | ---------- 104 | name : str, list 105 | Name of the data item(s). Formatting does not matter. 106 | 107 | See also 108 | -------- 109 | get_path 110 | 111 | Examples 112 | -------- 113 | >>> geodatasets.fetch('nybb') 114 | Downloading file 'nybb_22c.zip' from 'https://data.cityofnewyork.us/api/geospatial\ 115 | /tqmj-j8zm?method=export&format=Original' to '/Users/martin/Library/Caches/geodatasets'. 
116 | Extracting 'nybb_22c/nybb.shp' from '/Users/martin/Library/Caches/geodatasets/nybb_\ 117 | 22c.zip' to '/Users/martin/Library/Caches/geodatasets/nybb_22c.zip.unzip' 118 | Extracting 'nybb_22c/nybb.shx' from '/Users/martin/Library/Caches/geodatasets/nybb_\ 119 | 22c.zip' to '/Users/martin/Library/Caches/geodatasets/nybb_22c.zip.unzip' 120 | Extracting 'nybb_22c/nybb.dbf' from '/Users/martin/Library/Caches/geodatasets/nybb_\ 121 | 22c.zip' to '/Users/martin/Library/Caches/geodatasets/nybb_22c.zip.unzip' 122 | Extracting 'nybb_22c/nybb.prj' from '/Users/martin/Library/Caches/geodatasets/nybb_\ 123 | 22c.zip' to '/Users/martin/Library/Caches/geodatasets/nybb_22c.zip.unzip' 124 | 125 | >>> geodatasets.fetch(['geoda airbnb', 'geoda guerry']) 126 | Downloading file 'airbnb.zip' from 'https://geodacenter.github.io/data-and-lab//dat\ 127 | a/airbnb.zip' to '/Users/martin/Library/Caches/geodatasets'. 128 | Downloading file 'guerry.zip' from 'https://geodacenter.github.io/data-and-lab//dat\ 129 | a/guerry.zip' to '/Users/martin/Library/Caches/geodatasets'. 130 | Extracting 'guerry/guerry.shp' from '/Users/martin/Library/Caches/geodatasets/guerr\ 131 | y.zip' to '/Users/martin/Library/Caches/geodatasets/guerry.zip.unzip' 132 | Extracting 'guerry/guerry.dbf' from '/Users/martin/Library/Caches/geodatasets/guerr\ 133 | y.zip' to '/Users/martin/Library/Caches/geodatasets/guerry.zip.unzip' 134 | Extracting 'guerry/guerry.shx' from '/Users/martin/Library/Caches/geodatasets/guerr\ 135 | y.zip' to '/Users/martin/Library/Caches/geodatasets/guerry.zip.unzip' 136 | Extracting 'guerry/guerry.prj' from '/Users/martin/Library/Caches/geodatasets/guerr\ 137 | y.zip' to '/Users/martin/Library/Caches/geodatasets/guerry.zip.unzip' 138 | 139 | """ 140 | if isinstance(name, str): 141 | name = [name] 142 | 143 | for n in name: 144 | dataset = data.query_name(n) 145 | if "members" in dataset.keys(): 146 | _ = CACHE.fetch( 147 | data.query_name(n).filename, 148 | processor=pooch.Unzip(members=dataset.members), 149 | ) 150 | else: 151 | _ = CACHE.fetch(data.query_name(n).filename) 152 | -------------------------------------------------------------------------------- /doc/source/_static/generate_gallery.js: -------------------------------------------------------------------------------- 1 | function getBaseMapName(data) { 2 | var name = data["name"]; 3 | if (name.includes(".")) { 4 | var basemap = name.split(".")[0]; 5 | } else { 6 | var basemap = name; 7 | } 8 | return basemap; 9 | } 10 | 11 | var accessData = { 12 | // contains the keys for basemaps that need some identification data (apikey, access token or ID code) 13 | Thunderforest: { 14 | keyString: "apikey", 15 | idString: "", 16 | name: "Thunderforest", 17 | }, 18 | OpenWeatherMap: { 19 | keyString: "apiKey", 20 | idString: "", 21 | name: "OpenWeatherMap", 22 | }, 23 | MapTiler: { 24 | keyString: "key", 25 | idString: "", 26 | name: "MapTiler", 27 | }, 28 | MapBox: { 29 | keyString: "accessToken", 30 | idString: "", 31 | name: "MapBox", 32 | }, 33 | Jawg: { 34 | keyString: "accessToken", 35 | idString: "", 36 | name: "Jawg", 37 | }, 38 | TomTom: { 39 | keyString: "apikey", 40 | idString: "", 41 | name: "TomTom", 42 | }, 43 | HERE: { 44 | keyString: "app_code", 45 | idString: "app_id", 46 | name: "HERE", 47 | }, 48 | HEREv3: { 49 | keyString: "apiKey", 50 | idString: "", 51 | name: "HEREv3", 52 | }, 53 | AzureMaps: { 54 | keyString: "subscriptionKey", 55 | idString: "", 56 | name: "AzureMaps", 57 | }, 58 | }; 59 | 60 | function initMap(el, data, accessData) { 61 
| basemap = getBaseMapName(data); 62 | 63 | const mainContainer = document.createElement("div"); 64 | mainContainer.className = "main-container"; 65 | el.append(mainContainer); 66 | 67 | var titleDiv = document.createElement("h2"); 68 | titleDiv.className = "title-container"; 69 | titleDiv.textContent = data["name"]; 70 | mainContainer.append(titleDiv); 71 | 72 | var mapContainer = document.createElement("div"); 73 | mapContainer.className = "map-container"; 74 | mainContainer.append(mapContainer); 75 | 76 | key = Object.keys(data); 77 | val = Object.values(data); 78 | nbOfRows = Object.keys(data).length; 79 | var latitude = 0; 80 | var longitude = 0; 81 | var zoom = 1; 82 | 83 | try { 84 | //---------------------Basemaps with specific locations ------------------------------- 85 | //----and zooms to optimize the map views restricted to given geographic areas--------- 86 | if ( 87 | data["name"] === "Esri.ArcticOceanReference" || 88 | data["name"] === "Esri.ArcticOceanBase" 89 | ) { 90 | latitude = 65.421505; // Artic ocean 91 | longitude = -70.965421; 92 | zoom = 1; 93 | } else if (data["name"] === "Esri.AntarcticBasemap") { 94 | latitude = 82.8628; // Antarctic ocean 95 | longitude = 135.0; 96 | zoom = 6; 97 | } else if (basemap == 'GeoportailFrance'){ 98 | latitude = 46.749998; 99 | longitude = 1.85; 100 | zoom = 6 101 | } else if (basemap === "OpenFireMap" || basemap === "OpenSeaMap") { 102 | latitude = 50.1109; // Frankfurt 103 | longitude = 8.6821; 104 | zoom = 14; 105 | } else if (basemap === "OpenAIP") { 106 | latitude = 50.1109; // Frankfurt 107 | longitude = 8.6821; 108 | zoom = 9; 109 | } else if (basemap === "Stamen") { 110 | latitude = 32.7766642; // Dallas 111 | longitude = -96.7969879; 112 | zoom = 6; 113 | } else if (basemap === "FreeMapSK") { 114 | latitude = 48.736277; // Banská Bystrica Slovaky 115 | longitude = 19.146192; 116 | zoom = 14; 117 | } else if (basemap === "JusticeMap") { 118 | latitude = 39.7392358; // Denver 119 | longitude = -104.990251; 120 | zoom = 3; 121 | } else if ( 122 | basemap === "OpenWeatherMap" || 123 | basemap === "Esri" || 124 | basemap === "USGS" || 125 | basemap === "WaymarkedTrails" 126 | ) { 127 | latitude = 32.7766642; // Dallas 128 | longitude = -96.7969879; 129 | zoom = 4; 130 | } else if (basemap === "BasemapAT") { 131 | latitude = 47.5652; // Liezen 132 | longitude = 14.2424; 133 | zoom = 14; 134 | } else if (basemap === "nlmaps") { 135 | latitude = 52.370216; // Amsterdam 136 | longitude = 4.895168; 137 | zoom = 14; 138 | } else if (basemap === "NLS") { 139 | latitude = 53.381129; // Sheffield 140 | longitude = -1.470085; 141 | zoom = 12; 142 | } else if (basemap === "OneMapSG") { 143 | latitude = 1.352083; // Singapore 144 | longitude = 103.819836; 145 | zoom = 14; 146 | } else if (basemap === "SwissFederalGeoportal") { 147 | latitude = 46.5196535; // Lausanne 148 | longitude = 6.6322734; 149 | zoom = 10; 150 | } else if (basemap === "OpenSnowMap") { 151 | latitude = 45.923697; // Chamonix 152 | longitude = 6.869433; 153 | zoom = 14; 154 | } else if (basemap === "Gaode") { 155 | latitude = 39.904211; // Pekin 156 | longitude = 116.407395; 157 | zoom = 14; 158 | } else if (basemap === "NASAGIBS" || basemap === "Strava") { 159 | latitude = 48.856614; // Paris 160 | longitude = 2.3522219; 161 | zoom = 4; 162 | } else { 163 | latitude = 48.856614; // Paris 164 | longitude = 2.3522219; 165 | zoom = 14; 166 | } 167 | 168 | var sampleMap = L.map(mapContainer, { attributionControl: true }).setView( 169 | [latitude, longitude], 170 | zoom 
171 | ); 172 | 173 | // Case with no apikey 174 | if (accessData[basemap] === undefined) { 175 | L.tileLayer(data["url"], data).addTo(sampleMap); 176 | tbl1 = document.createElement("table"); 177 | tbl1.className = "table-container"; 178 | 179 | for (let i = 0; i < nbOfRows; i++) { 180 | const tr1 = tbl1.insertRow(); 181 | tr1.className = "line-container"; 182 | for (let j = 0; j < 2; j++) { 183 | if (i === nbOfRows - 2 && j === 2) { 184 | break; 185 | } else { 186 | const td1 = tr1.insertCell(); 187 | if (j == 0) { 188 | // First column of the table : the one with the keys 189 | td1.className = "key-cell"; 190 | td1.textContent = key[i]; 191 | } else { 192 | // Second column of the table : the one with the values of the metadata 193 | td1.className = "val-cell"; 194 | td1.textContent = val[i]; 195 | } 196 | } 197 | } 198 | } 199 | mainContainer.appendChild(tbl1); 200 | } else { 201 | // Case with apikey 202 | var dict = accessData[basemap]; 203 | var keyString = dict["keyString"]; 204 | 205 | tbl2 = document.createElement("table"); 206 | tbl2.className = "table-container"; 207 | for (let i = 0; i < nbOfRows; i++) { 208 | const tr2 = tbl2.insertRow(); 209 | tr2.className = "line-container"; 210 | 211 | for (let j = 0; j < 2; j++) { 212 | if (i === nbOfRows - 2 && j === 2) { 213 | break; 214 | } else { 215 | const td2 = tr2.insertCell(); 216 | 217 | if (j == 0) { 218 | // First column of the table containing the keys of the metadata 219 | td2.className = "key-cell"; 220 | td2.textContent = key[i]; 221 | } else { 222 | // Second column of the table containing the values of the metadata 223 | td2.className = "val-cell"; 224 | 225 | // create a single input and a button with onclick function for apikey 226 | if (key[i] === keyString) { 227 | var keyInput = document.createElement("input"); 228 | keyInput.type = "password"; 229 | keyInput.placeholder = "Enter your API key please"; 230 | keyInput.className = "key-container"; 231 | td2.append(keyInput); 232 | 233 | var validationButton = document.createElement("button"); 234 | validationButton.className = "button-container"; 235 | td2.append(validationButton); 236 | validationButton.innerHTML = "validate"; 237 | validationButton.onclick = get_keyCode; 238 | 239 | function get_keyCode() { 240 | val[i] = keyInput.value; 241 | data[keyString] = keyInput.value; 242 | L.tileLayer(data["url"], data).addTo(sampleMap); 243 | } 244 | } else { 245 | td2.textContent = val[i]; 246 | } 247 | } 248 | } 249 | } 250 | } 251 | mainContainer.appendChild(tbl2); 252 | } 253 | } catch {} 254 | } 255 | 256 | function initLeafletGallery(el) { 257 | fetch('_static/providers.json') 258 | .then(response => response.json()) 259 | .then(data => { 260 | var dataList = []; 261 | for ([key, val] of Object.entries(data)) { 262 | if (val["url"] === undefined) { 263 | // check if url is a key of the JSON object, if not go one level deeper and define the val as the new object 264 | newData = val; 265 | 266 | for ([newKey, newVal] of Object.entries(newData)) { 267 | /*if (newVal["bounds"] !== undefined) { 268 | newVal["bounds"] = undefined; 269 | }*/ 270 | dataList.push(newVal); 271 | } 272 | } else { 273 | dataList.push(val); 274 | } 275 | } 276 | dataList.forEach((baseMapData) => { 277 | initMap(el, baseMapData, accessData); 278 | }); 279 | }); 280 | } -------------------------------------------------------------------------------- /geodatasets/lib.py: -------------------------------------------------------------------------------- 1 | """ 2 | Utilities to support geodatasets 3 | 
4 | Heavily based on top of xyzservices (Copyright (c) 2021, GeoPandas) licensed
5 | under BSD 3-clause
6 | """
7 | from __future__ import annotations
8 | 
9 | import json
10 | import uuid
11 | from typing import Callable
12 | 
13 | import pooch
14 | 
15 | GEOMETRY_TYPES = ["POINT", "LINESTRING", "POLYGON", "MIXED"]
16 | QUERY_NAME_TRANSLATION = str.maketrans({x: "" for x in "., -_/"})
17 | 
18 | 
19 | class Bunch(dict):
20 |     """A dict with attribute-access
21 | 
22 |     :class:`Bunch` is used to store :class:`Dataset` objects.
23 |     """
24 | 
25 |     def __getattr__(self, key):
26 |         try:
27 |             return self.__getitem__(key)
28 |         except KeyError:
29 |             raise AttributeError(key)
30 | 
31 |     def __dir__(self):
32 |         return self.keys()
33 | 
34 |     def _repr_html_(self, inside=False):
35 |         children = ""
36 |         for key in self.keys():
37 |             if isinstance(self[key], Dataset):
38 |                 obj = "geodatasets.Dataset"
39 |             else:
40 |                 obj = "geodatasets.Bunch"
41 |             uid = str(uuid.uuid4())
42 |             children += f"""
43 | 
  • 44 | 45 | 46 |
    47 | {self[key]._repr_html_(inside=True)} 48 |
    49 |
  • 50 | """ 51 | 52 | style = "" if inside else f"" 53 | html = f""" 54 |
    55 | {style} 56 |
    57 |
    58 |
    geodatasets.Bunch
    59 |
    {len(self)} items
    60 |
    61 |
    62 |
      63 | {children} 64 |
    65 |
    66 |
    67 |
    68 | """ 69 | 70 | return html 71 | 72 | def flatten(self) -> dict: 73 | """Return the nested :class:`Bunch` collapsed into the one level dictionary. 74 | 75 | Dictionary keys are :class:`Dataset` names (e.g. ``geoda.airbnb``) 76 | and its values are :class:`Dataset` objects. 77 | 78 | Returns 79 | ------- 80 | flattened : dict 81 | dictionary of :class:`Dataset` objects 82 | """ 83 | 84 | flat = {} 85 | 86 | def _get_items(item): 87 | if isinstance(item, Dataset): 88 | flat[item.name] = item 89 | else: 90 | for prov in item.values(): 91 | _get_items(prov) 92 | 93 | _get_items(self) 94 | 95 | return flat 96 | 97 | def query_name(self, name: str) -> Dataset: 98 | """Return :class:`Dataset` based on the name query 99 | 100 | Returns a matching :class:`Dataset` from the :class:`Bunch` if the ``name`` 101 | contains the same letters in the same order as the item's name irrespective 102 | of the letter case, spaces, dashes and other characters. 103 | See examples for details. 104 | 105 | Parameters 106 | ---------- 107 | name : str 108 | Name of the data item. Formatting does not matter. 109 | 110 | Returns 111 | ------- 112 | match: Dataset 113 | """ 114 | xyz_flat_lower = { 115 | k.translate(QUERY_NAME_TRANSLATION).lower(): v 116 | for k, v in self.flatten().items() 117 | } 118 | name_clean = name.translate(QUERY_NAME_TRANSLATION).lower() 119 | if name_clean in xyz_flat_lower: 120 | return xyz_flat_lower[name_clean] 121 | 122 | raise ValueError(f"No matching item found for the query '{name}'.") 123 | 124 | def filter( 125 | self, 126 | keyword: str | None = None, 127 | name: str | None = None, 128 | geometry_type: str | None = None, 129 | function: Callable[[Dataset], bool] = None, 130 | ) -> Bunch: 131 | """Return a subset of the :class:`Bunch` matching the filter conditions 132 | 133 | Each :class:`Dataset` within a :class:`Bunch` is checked against one or 134 | more specified conditions and kept if they are satisfied or removed if at least 135 | one condition is not met. 136 | 137 | Parameters 138 | ---------- 139 | keyword : str (optional) 140 | Condition returns ``True`` if ``keyword`` string is present in any string 141 | value in a :class:`Dataset` object. 142 | The comparison is not case sensitive. 143 | name : str (optional) 144 | Condition returns ``True`` if ``name`` string is present in 145 | the name attribute of :class:`Dataset` object. 146 | The comparison is not case sensitive. 147 | geometry_type : str (optional) 148 | Condition returns ``True`` if :meth:`Dataset.geometry_type` is 149 | matches the ``geometry_type``. 150 | Possible options are ``["Point", "LineString", "Polygon", "Mixed"]``. 151 | The comparison is not case sensitive. 152 | function : callable (optional) 153 | Custom function taking :class:`Dataset` as an argument and returns 154 | bool. If ``function`` is given, other parameters are ignored. 
124 | def filter(
125 | self,
126 | keyword: str | None = None,
127 | name: str | None = None,
128 | geometry_type: str | None = None,
129 | function: Callable[[Dataset], bool] | None = None,
130 | ) -> Bunch:
131 | """Return a subset of the :class:`Bunch` matching the filter conditions
132 |
133 | Each :class:`Dataset` within a :class:`Bunch` is checked against one or
134 | more specified conditions and kept if all of them are satisfied, or removed
135 | if at least one condition is not met.
136 |
137 | Parameters
138 | ----------
139 | keyword : str (optional)
140 | Condition returns ``True`` if the ``keyword`` string is present in any string
141 | value of a :class:`Dataset` object.
142 | The comparison is not case sensitive.
143 | name : str (optional)
144 | Condition returns ``True`` if the ``name`` string is present in
145 | the name attribute of the :class:`Dataset` object.
146 | The comparison is not case sensitive.
147 | geometry_type : str (optional)
148 | Condition returns ``True`` if :attr:`Dataset.geometry_type`
149 | matches the ``geometry_type``.
150 | Possible options are ``["Point", "LineString", "Polygon", "Mixed"]``.
151 | The comparison is not case sensitive.
152 | function : callable (optional)
153 | Custom function taking a :class:`Dataset` as an argument and returning
154 | bool. If ``function`` is given, other parameters are ignored.
155 |
156 | Returns
157 | -------
158 | filtered : Bunch
159 |
160 | Examples
161 | --------
162 | >>> from geodatasets import data
163 |
164 | You can filter all Point datasets:
165 |
166 | >>> points = data.filter(geometry_type="Point")
167 |
168 | Or all datasets with ``chicago`` in the name:
169 |
170 | >>> chicago_datasets = data.filter(name="chicago")
171 |
172 | You can use keyword search to find all datasets in CSV format:
173 |
174 | >>> csv_datasets = data.filter(keyword="csv")
175 |
176 | You can combine multiple conditions to find datasets with ``chicago`` in the
177 | name and a Polygon geometry type:
178 |
179 | >>> chicago_polygons = data.filter(name="chicago", geometry_type="Polygon")
180 |
181 | You can also pass a custom function that takes a :class:`Dataset` and returns
182 | a boolean value. You can then find all datasets with ``nrows`` smaller than
183 | 100:
184 |
185 | >>> def small_data(dataset):
186 | ... if hasattr(dataset, "nrows") and dataset.nrows < 100:
187 | ... return True
188 | ... return False
189 | >>> small = data.filter(function=small_data)
190 | """
191 |
192 | def _validate(dataset, keyword, name, geometry_type):
193 | cond = []
194 |
195 | if keyword is not None:
196 | keyword_match = False
197 | for v in dataset.values():
198 | if isinstance(v, str) and keyword.lower() in v.lower():
199 | keyword_match = True
200 | break
201 | cond.append(keyword_match)
202 |
203 | if name is not None:
204 | name_match = False
205 | if name.lower() in dataset.name.lower():
206 | name_match = True
207 | cond.append(name_match)
208 |
209 | if geometry_type is not None:
210 | geom_type_match = False
211 | if (
212 | dataset.geometry_type.upper()
213 | == geometry_type.translate(QUERY_NAME_TRANSLATION).upper()
214 | ):
215 | geom_type_match = True
216 | cond.append(geom_type_match)
217 |
218 | return all(cond)
219 |
220 | def _filter_bunch(bunch, keyword, name, geometry_type, function):
221 | new = Bunch()
222 | for key, value in bunch.items():
223 | if isinstance(value, Dataset):
224 | if function is None:
225 | if _validate(
226 | value,
227 | keyword=keyword,
228 | name=name,
229 | geometry_type=geometry_type,
230 | ):
231 | new[key] = value
232 | else:
233 | if function(value):
234 | new[key] = value
235 |
236 | else:
237 | filtered = _filter_bunch(
238 | value,
239 | keyword=keyword,
240 | name=name,
241 | geometry_type=geometry_type,
242 | function=function,
243 | )
244 | if filtered:
245 | new[key] = filtered
246 |
247 | return new
248 |
249 | return _filter_bunch(
250 | self,
251 | keyword=keyword,
252 | name=name,
253 | geometry_type=geometry_type,
254 | function=function,
255 | )
256 |
257 |
258 | class Dataset(Bunch):
259 | """
260 | A dict with attribute-access that
261 | can be called to update keys
262 | """
263 |
264 | def __init__(self, *args, **kwargs):
265 | super().__init__(*args, **kwargs)
266 | required = ["name", "url", "hash", "filename"]
267 | missing = []
268 | for el in required:
269 | if el not in self.keys():
270 | missing.append(el)
271 | if len(missing) > 0:
272 | msg = (
273 | f"The attributes {required} "
274 | f"are required to initialise "
275 | f"a `Dataset`. Please provide values for: "
276 | f'`{"`, `".join(missing)}`'
277 | )
278 | raise AttributeError(msg)
279 |
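
A minimal sketch of the required-keys contract enforced above; ``example.demo`` and all of its values are hypothetical placeholders, not a real dataset:

    from geodatasets import Dataset

    ds = Dataset(
        name="example.demo",  # hypothetical placeholder values
        url="https://example.com/demo.zip",
        hash="0" * 64,
        filename="demo.zip",
    )

    # Omitting any of name/url/hash/filename raises AttributeError:
    # Dataset(name="incomplete")

The ``__call__`` defined next returns an updated copy, e.g. ``ds(filename="demo.csv")``.
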
280 | def __call__(self, **kwargs) -> Dataset:
281 | new = Dataset(self) # takes a copy preserving the class
282 | new.update(kwargs)
283 | return new
284 |
285 | def copy(self, **kwargs) -> Dataset:
286 | new = Dataset(self) # takes a copy preserving the class
287 | return new
288 |
289 | @property
290 | def path(self) -> str:
291 | """Get the absolute path to a file in the local storage.
292 |
293 | If it’s not in the local storage, it will be downloaded.
294 |
295 | For Datasets containing multiple files, the archive is automatically extracted.
296 |
297 | Returns
298 | -------
299 | str
300 | local path
301 | """
302 | from .api import CACHE
303 |
304 | if "members" in self.keys():
305 | unzipped_files = CACHE.fetch(
306 | self.filename, processor=pooch.Unzip(members=self.members)
307 | )
308 | if len(unzipped_files) == 1:
309 | return unzipped_files[0]
310 | elif len(unzipped_files) > 1: # shapefile
311 | return [f for f in unzipped_files if f.endswith(".shp")][0]
312 | else:
313 | raise ValueError(f"No files were extracted for '{self.name}'.")
314 |
315 | return CACHE.fetch(self.filename)
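
A short sketch of ``path`` in practice, assuming geopandas is installed; ``ny.bb`` (defined in ``json/database.json`` below) ships a zipped shapefile with a ``members`` list, so the extracted ``.shp`` path is returned:

    import geopandas

    from geodatasets import data

    nybb = data.ny.bb  # Dataset with a "members" list
    local = nybb.path  # first access downloads, verifies the hash and unzips
    gdf = geopandas.read_file(local)  # later accesses reuse the pooch cache
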
316 |
317 | def _repr_html_(self, inside=False):
318 | item_info = ""
319 | for key, val in self.items():
320 | if key != "name":
321 | item_info += f"<dt><span>{key}</span></dt><dd>{val}</dd>"
322 |
323 | style = "" if inside else f"<style>{CSS_STYLE}</style>"
324 | html = f"""
325 | <div>
326 | {style}
327 | <div class="xyz-wrap">
328 | <div class="xyz-header">
329 | <div class="xyz-obj">geodatasets.Dataset</div>
330 | <div class="xyz-name">{self.name}</div>
331 | </div>
332 | <div class="xyz-details">
333 | <dl class="xyz-attrs">
334 | {item_info}
335 | </dl>
336 | </div>
337 | </div>
338 | </div>
    339 | """ 340 | 341 | return html 342 | 343 | 344 | def _load_json(f): 345 | data = json.loads(f) 346 | 347 | items = Bunch() 348 | 349 | for item_name in data.keys(): 350 | item = data[item_name] 351 | 352 | if "url" in item.keys(): 353 | items[item_name] = Dataset(item) 354 | else: 355 | items[item_name] = Bunch({i: Dataset(item[i]) for i in item}) 356 | 357 | return items 358 | 359 | 360 | CSS_STYLE = """ 361 | /* CSS stylesheet for displaying geodatasets objects in Jupyter.*/ 362 | .xyz-wrap { 363 | --xyz-border-color: var(--jp-border-color2, #ddd); 364 | --xyz-font-color2: var(--jp-content-font-color2, rgba(128, 128, 128, 1)); 365 | --xyz-background-color-white: var(--jp-layout-color1, white); 366 | --xyz-background-color: var(--jp-layout-color2, rgba(128, 128, 128, 0.1)); 367 | } 368 | 369 | html[theme=dark] .xyz-wrap, 370 | body.vscode-dark .xyz-wrap, 371 | body.vscode-high-contrast .xyz-wrap { 372 | --xyz-border-color: #222; 373 | --xyz-font-color2: rgba(255, 255, 255, 0.54); 374 | --xyz-background-color-white: rgba(255, 255, 255, 1); 375 | --xyz-background-color: rgba(255, 255, 255, 0.05); 376 | 377 | } 378 | 379 | .xyz-header { 380 | padding-top: 6px; 381 | padding-bottom: 6px; 382 | margin-bottom: 4px; 383 | border-bottom: solid 1px var(--xyz-border-color); 384 | } 385 | 386 | .xyz-header>div { 387 | display: inline; 388 | margin-top: 0; 389 | margin-bottom: 0; 390 | } 391 | 392 | .xyz-obj, 393 | .xyz-name { 394 | margin-left: 2px; 395 | margin-right: 10px; 396 | } 397 | 398 | .xyz-obj { 399 | color: var(--xyz-font-color2); 400 | } 401 | 402 | .xyz-attrs { 403 | grid-column: 1 / -1; 404 | } 405 | 406 | dl.xyz-attrs { 407 | padding: 0 5px 0 5px; 408 | margin: 0; 409 | display: grid; 410 | grid-template-columns: 135px auto; 411 | background-color: var(--xyz-background-color); 412 | } 413 | 414 | .xyz-attrs dt, 415 | dd { 416 | padding: 0; 417 | margin: 0; 418 | float: left; 419 | padding-right: 10px; 420 | width: auto; 421 | } 422 | 423 | .xyz-attrs dt { 424 | font-weight: normal; 425 | grid-column: 1; 426 | } 427 | 428 | .xyz-attrs dd { 429 | grid-column: 2; 430 | white-space: pre-wrap; 431 | word-break: break-all; 432 | } 433 | 434 | .xyz-details ul>li>label>span { 435 | color: var(--xyz-font-color2); 436 | padding-left: 10px; 437 | } 438 | 439 | .xyz-inside { 440 | display: none; 441 | } 442 | 443 | .xyz-checkbox:checked~.xyz-inside { 444 | display: contents; 445 | } 446 | 447 | .xyz-collapsible li>input { 448 | display: none; 449 | } 450 | 451 | .xyz-collapsible>li>label { 452 | cursor: pointer; 453 | } 454 | 455 | .xyz-collapsible>li>label:hover { 456 | color: var(--xyz-font-color2); 457 | } 458 | 459 | ul.xyz-collapsible { 460 | list-style: none!important; 461 | padding-left: 20px!important; 462 | } 463 | 464 | .xyz-checkbox+label:before { 465 | content: '►'; 466 | font-size: 11px; 467 | } 468 | 469 | .xyz-checkbox:checked+label:before { 470 | content: '▼'; 471 | } 472 | 473 | .xyz-wrap { 474 | margin-bottom: 10px; 475 | } 476 | """ 477 | -------------------------------------------------------------------------------- /geodatasets/json/database.json: -------------------------------------------------------------------------------- 1 | { 2 | "geoda": { 3 | "airbnb": { 4 | "url": "https://geodacenter.github.io/data-and-lab//data/airbnb.zip", 5 | "license": "NA", 6 | "attribution": "Center for Spatial Data Science, University of Chicago", 7 | "name": "geoda.airbnb", 8 | "description": "Airbnb rentals, socioeconomics, and crime in Chicago", 9 | "geometry_type": "Polygon", 10 
| "nrows": 77, 11 | "ncols": 21, 12 | "details": "https://geodacenter.github.io/data-and-lab//airbnb/", 13 | "hash": "a2ab1e3f938226d287dd76cde18c00e2d3a260640dd826da7131827d9e76c824", 14 | "filename": "airbnb.zip" 15 | }, 16 | "atlanta": { 17 | "url": "https://geodacenter.github.io/data-and-lab//data/atlanta_hom.zip", 18 | "license": "NA", 19 | "attribution": "Center for Spatial Data Science, University of Chicago", 20 | "name": "geoda.atlanta", 21 | "description": "Atlanta, GA region homicide counts and rates", 22 | "geometry_type": "Polygon", 23 | "nrows": 90, 24 | "ncols": 24, 25 | "details": "https://geodacenter.github.io/data-and-lab//atlanta_old/", 26 | "hash": "a33a76e12168fe84361e60c88a9df4856730487305846c559715c89b1a2b5e09", 27 | "filename": "atlanta_hom.zip", 28 | "members": [ 29 | "atlanta_hom/atl_hom.geojson" 30 | ] 31 | }, 32 | "cars": { 33 | "url": "https://geodacenter.github.io/data-and-lab//data/Abandoned_Vehicles_Map.csv", 34 | "license": "NA", 35 | "attribution": "Center for Spatial Data Science, University of Chicago", 36 | "name": "geoda.cars", 37 | "description": "2011 abandoned vehicles in Chicago (311 complaints).", 38 | "geometry_type": "Point", 39 | "nrows": 137867, 40 | "ncols": 21, 41 | "details": "https://geodacenter.github.io/data-and-lab//1-source-and-description/", 42 | "hash": "6a0b23bc7eda2dcf1af02d43ccf506b24ca8d8c6dc2fe86a2a1cc051b03aae9e", 43 | "filename": "Abandoned_Vehicles_Map.csv" 44 | }, 45 | "charleston1": { 46 | "url": "https://geodacenter.github.io/data-and-lab//data/CharlestonMSA.zip", 47 | "license": "NA", 48 | "attribution": "Center for Spatial Data Science, University of Chicago", 49 | "name": "geoda.charleston1", 50 | "description": "2000 Census Tract Data for Charleston, SC MSA and counties", 51 | "geometry_type": "Polygon", 52 | "nrows": 117, 53 | "ncols": 31, 54 | "details": "https://geodacenter.github.io/data-and-lab//charleston-1_old/", 55 | "hash": "4a4fa9c8dd4231ae0b2f12f24895b8336bcab0c28c48653a967cffe011f63a7c", 56 | "filename": "CharlestonMSA.zip", 57 | "members": [ 58 | "CharlestonMSA/sc_final_census2.gpkg" 59 | ] 60 | }, 61 | "charleston2": { 62 | "url": "https://geodacenter.github.io/data-and-lab//data/CharlestonMSA2.zip", 63 | "license": "NA", 64 | "attribution": "Center for Spatial Data Science, University of Chicago", 65 | "name": "geoda.charleston2", 66 | "description": "1998 and 2001 Zip Code Business Patterns (Census Bureau) for Charleston, SC MSA", 67 | "geometry_type": "Polygon", 68 | "nrows": 42, 69 | "ncols": 60, 70 | "details": "https://geodacenter.github.io/data-and-lab//charleston2/", 71 | "hash": "056d5d6e236b5bd95f5aee26c77bbe7d61bd07db5aaf72866c2f545205c1d8d7", 72 | "filename": "CharlestonMSA2.zip", 73 | "members": [ 74 | "CharlestonMSA2/CharlestonMSA2.gpkg" 75 | ] 76 | }, 77 | "chicago_health": { 78 | "url": "https://geodacenter.github.io/data-and-lab//data/comarea.zip", 79 | "license": "NA", 80 | "attribution": "Center for Spatial Data Science, University of Chicago", 81 | "name": "geoda.chicago_health", 82 | "description": "Chicago Health + Socio-Economics", 83 | "geometry_type": "Polygon", 84 | "nrows": 77, 85 | "ncols": 87, 86 | "details": "https://geodacenter.github.io/data-and-lab//comarea_vars/", 87 | "hash": "4e872adb552786eae2fcd745524696e5e4cd33cc9a6c032471c0e75328871401", 88 | "filename": "comarea.zip" 89 | }, 90 | "chicago_commpop": { 91 | "url": "https://geodacenter.github.io/data-and-lab//data/chicago_commpop.zip", 92 | "license": "NA", 93 | "attribution": "Center for Spatial Data Science, 
University of Chicago", 94 | "name": "geoda.chicago_commpop", 95 | "description": "Chicago Community Area Population Percent Change for 2000 and 2010", 96 | "geometry_type": "Polygon", 97 | "nrows": 77, 98 | "ncols": 9, 99 | "details": "https://geodacenter.github.io/data-and-lab//commpop/", 100 | "hash": "1dbebb50c8ea47e2279ea819ef64ba793bdee2b88e4716bd6c6ec0e0d8e0e05b", 101 | "filename": "chicago_commpop.zip", 102 | "members": [ 103 | "chicago_commpop/chicago_commpop.geojson" 104 | ] 105 | }, 106 | "chile_labor": { 107 | "url": "https://geodacenter.github.io/data-and-lab//data/flma.zip", 108 | "license": "NA", 109 | "attribution": "Center for Spatial Data Science, University of Chicago", 110 | "name": "geoda.chile_labor", 111 | "description": "Labor Markets in Chile (1982-2002)", 112 | "geometry_type": "Polygon", 113 | "nrows": 64, 114 | "ncols": 140, 115 | "details": "https://geodacenter.github.io/data-and-lab//FLMA/", 116 | "hash": "4777072268d0127b3d0be774f51d0f66c15885e9d3c92bc72c641a72f220796c", 117 | "filename": "flma.zip", 118 | "members": [ 119 | "flma/FLMA.geojson" 120 | ] 121 | }, 122 | "cincinnati": { 123 | "url": "https://geodacenter.github.io/data-and-lab//data/walnuthills_updated.zip", 124 | "license": "NA", 125 | "attribution": "Center for Spatial Data Science, University of Chicago", 126 | "name": "geoda.cincinnati", 127 | "description": "2008 Cincinnati Crime + Socio-Demographics", 128 | "geometry_type": "Polygon", 129 | "nrows": 457, 130 | "ncols": 73, 131 | "details": "https://geodacenter.github.io/data-and-lab//walnut_hills/", 132 | "hash": "d6871dd688bd14cf4710a218d721d34f6574456f2a14d5c5cfe5a92054ee9763", 133 | "filename": "walnuthills_updated.zip", 134 | "members": [ 135 | "walnuthills_updated" 136 | ] 137 | }, 138 | "cleveland": { 139 | "url": "https://geodacenter.github.io/data-and-lab//data/cleveland.zip", 140 | "license": "NA", 141 | "attribution": "Center for Spatial Data Science, University of Chicago", 142 | "name": "geoda.cleveland", 143 | "description": "2015 sales prices of homes in Cleveland, OH.", 144 | "geometry_type": "Point", 145 | "nrows": 205, 146 | "ncols": 10, 147 | "details": "https://geodacenter.github.io/data-and-lab//clev_sls_154_core/", 148 | "hash": "49aeba03eb06bf9b0d9cddd6507eb4a226b7c7a7561145562885c5cddfaeaadf", 149 | "filename": "cleveland.zip" 150 | }, 151 | "grid100": { 152 | "url": "https://geodacenter.github.io/data-and-lab//data/grid100.zip", 153 | "license": "NA", 154 | "attribution": "Center for Spatial Data Science, University of Chicago", 155 | "name": "geoda.grid100", 156 | "description": "Grid with simulated variables", 157 | "geometry_type": "Polygon", 158 | "nrows": 100, 159 | "ncols": 37, 160 | "details": "https://geodacenter.github.io/data-and-lab//grid100/", 161 | "hash": "5702ba39606044f71d53ae6a83758b81332bd3aa216b7b7b6e1c60dd0e72f476", 162 | "filename": "grid100.zip", 163 | "members": [ 164 | "grid100/grid100s.gpkg" 165 | ] 166 | }, 167 | "groceries": { 168 | "url": "https://geodacenter.github.io/data-and-lab//data/grocery.zip", 169 | "license": "NA", 170 | "attribution": "Center for Spatial Data Science, University of Chicago", 171 | "name": "geoda.groceries", 172 | "description": "2015 Chicago supermarkets", 173 | "geometry_type": "Point", 174 | "nrows": 148, 175 | "ncols": 8, 176 | "details": "https://geodacenter.github.io/data-and-lab//chicago_sup_vars/", 177 | "hash": "ead10e53b21efcaa29b798428b93ba2a1c0ba1b28f046265c1737712fa83f88a", 178 | "filename": "grocery.zip", 179 | "members": [ 180 | 
"grocery/chicago_sup.shp", 181 | "grocery/chicago_sup.dbf", 182 | "grocery/chicago_sup.shx", 183 | "grocery/chicago_sup.prj" 184 | ] 185 | }, 186 | "guerry": { 187 | "url": "https://geodacenter.github.io/data-and-lab//data/guerry.zip", 188 | "license": "NA", 189 | "attribution": "Center for Spatial Data Science, University of Chicago", 190 | "name": "geoda.guerry", 191 | "description": "Mortal statistics of France (Guerry, 1833)", 192 | "geometry_type": "Polygon", 193 | "nrows": 85, 194 | "ncols": 24, 195 | "details": "https://geodacenter.github.io/data-and-lab//Guerry/", 196 | "hash": "80d2b355ad3340fcffa0a28e5cec0698af01067f8059b1a60388d200a653b3e8", 197 | "filename": "guerry.zip", 198 | "members": [ 199 | "guerry/guerry.shp", 200 | "guerry/guerry.dbf", 201 | "guerry/guerry.shx", 202 | "guerry/guerry.prj" 203 | ] 204 | }, 205 | "health": { 206 | "url": "https://geodacenter.github.io/data-and-lab//data/income_diversity.zip", 207 | "license": "NA", 208 | "attribution": "Center for Spatial Data Science, University of Chicago", 209 | "name": "geoda.health", 210 | "description": "2000 Health, Income + Diversity", 211 | "geometry_type": "Polygon", 212 | "nrows": 3984, 213 | "ncols": 65, 214 | "details": "https://geodacenter.github.io/data-and-lab//co_income_diversity_variables/", 215 | "hash": "eafee1063040258bc080e7b501bdf1438d6e45ba208954d8c2e1a7562142d0a7", 216 | "filename": "income_diversity.zip", 217 | "members": [ 218 | "income_diversity/income_diversity.shp", 219 | "income_diversity/income_diversity.dbf", 220 | "income_diversity/income_diversity.shx", 221 | "income_diversity/income_diversity.prj" 222 | ] 223 | }, 224 | "health_indicators": { 225 | "url": "https://geodacenter.github.io/data-and-lab//data/healthIndicators.zip", 226 | "license": "NA", 227 | "attribution": "Center for Spatial Data Science, University of Chicago", 228 | "name": "geoda.health_indicators", 229 | "description": "Chicago Health Indicators (2005-11)", 230 | "geometry_type": "Polygon", 231 | "nrows": 77, 232 | "ncols": 32, 233 | "details": "https://geodacenter.github.io/data-and-lab//healthindicators-variables/", 234 | "hash": "b43683245f8fc3b4ab69ffa75d2064920a1a91dc76b9dcc08e288765ba0c94f3", 235 | "filename": "healthIndicators.zip" 236 | }, 237 | "hickory1": { 238 | "url": "https://geodacenter.github.io/data-and-lab//data/HickoryMSA.zip", 239 | "license": "NA", 240 | "attribution": "Center for Spatial Data Science, University of Chicago", 241 | "name": "geoda.hickory1", 242 | "description": "2000 Census Tract Data for Hickory, NC MSA and counties", 243 | "geometry_type": "Polygon", 244 | "nrows": 68, 245 | "ncols": 31, 246 | "details": "https://geodacenter.github.io/data-and-lab//hickory1/", 247 | "hash": "4c0804608d303e6e44d51966bb8927b1f5f9e060a9b91055a66478b9039d2b44", 248 | "filename": "HickoryMSA.zip", 249 | "members": [ 250 | "HickoryMSA/nc_final_census2.geojson" 251 | ] 252 | }, 253 | "hickory2": { 254 | "url": "https://geodacenter.github.io/data-and-lab//data/HickoryMSA2.zip", 255 | "license": "NA", 256 | "attribution": "Center for Spatial Data Science, University of Chicago", 257 | "name": "geoda.hickory2", 258 | "description": "1998 and 2001 Zip Code Business Patterns (Census Bureau) for Hickory, NC MSA", 259 | "geometry_type": "Polygon", 260 | "nrows": 29, 261 | "ncols": 56, 262 | "details": "https://geodacenter.github.io/data-and-lab//hickory2/", 263 | "hash": "5e9498e1ff036297c3eea3cc42ac31501680a43b50c71b486799ef9021679d07", 264 | "filename": "HickoryMSA2.zip", 265 | "members": [ 266 | 
"HickoryMSA2/HickoryMSA2.geojson" 267 | ] 268 | }, 269 | "home_sales": { 270 | "url": "https://geodacenter.github.io/data-and-lab//data/kingcounty.zip", 271 | "license": "NA", 272 | "attribution": "Center for Spatial Data Science, University of Chicago", 273 | "name": "geoda.home_sales", 274 | "description": "2014-15 Home Sales in King County, WA", 275 | "geometry_type": "Point", 276 | "nrows": 21613, 277 | "ncols": 22, 278 | "details": "https://geodacenter.github.io/data-and-lab//KingCounty-HouseSales2015/", 279 | "hash": "b979f0eb2cef6ebd2c761d552821353f795635eb8db53a95f2815fc46e1f644c", 280 | "filename": "kingcounty.zip", 281 | "members": [ 282 | "kingcounty/kc_house.shp", 283 | "kingcounty/kc_house.dbf", 284 | "kingcounty/kc_house.shx", 285 | "kingcounty/kc_house.prj" 286 | ] 287 | }, 288 | "houston": { 289 | "url": "https://geodacenter.github.io/data-and-lab//data/houston_hom.zip", 290 | "license": "NA", 291 | "attribution": "Center for Spatial Data Science, University of Chicago", 292 | "name": "geoda.houston", 293 | "description": "Houston, TX region homicide counts and rates", 294 | "geometry_type": "Polygon", 295 | "nrows": 52, 296 | "ncols": 24, 297 | "details": "https://geodacenter.github.io/data-and-lab//houston/", 298 | "hash": "d3167fd150a1369d9a32b892d3b2a8747043d3d382c3dd81e51f696b191d0d15", 299 | "filename": "houston_hom.zip", 300 | "members": [ 301 | "houston_hom/hou_hom.geojson" 302 | ] 303 | }, 304 | "juvenile": { 305 | "url": "https://geodacenter.github.io/data-and-lab//data/juvenile.zip", 306 | "license": "NA", 307 | "attribution": "Center for Spatial Data Science, University of Chicago", 308 | "name": "geoda.juvenile", 309 | "description": "Cardiff juvenile delinquent residences", 310 | "geometry_type": "Point", 311 | "nrows": 168, 312 | "ncols": 4, 313 | "details": "https://geodacenter.github.io/data-and-lab//juvenile/", 314 | "hash": "811cfcfa613578214d907bfbdd396c6e02261e5cda6d56b25a6f961148de961c", 315 | "filename": "juvenile.zip", 316 | "members": [ 317 | "juvenile/juvenile.shp", 318 | "juvenile/juvenile.shx", 319 | "juvenile/juvenile.dbf" 320 | ] 321 | }, 322 | "lansing1": { 323 | "url": "https://geodacenter.github.io/data-and-lab//data/LansingMSA.zip", 324 | "license": "NA", 325 | "attribution": "Center for Spatial Data Science, University of Chicago", 326 | "name": "geoda.lansing1", 327 | "description": "2000 Census Tract Data for Lansing, MI MSA and counties", 328 | "geometry_type": "Polygon", 329 | "nrows": 117, 330 | "ncols": 31, 331 | "details": "https://geodacenter.github.io/data-and-lab//lansing1/", 332 | "hash": "724ce3d889fa50e7632d16200cf588d40168d49adaf5bca45049dc1b3758bde1", 333 | "filename": "LansingMSA.zip", 334 | "members": [ 335 | "LansingMSA/mi_final_census2.geojson" 336 | ] 337 | }, 338 | "lansing2": { 339 | "url": "https://geodacenter.github.io/data-and-lab//data/LansingMSA2.zip", 340 | "license": "NA", 341 | "attribution": "Center for Spatial Data Science, University of Chicago", 342 | "name": "geoda.lansing2", 343 | "description": "1998 and 2001 Zip Code Business Patterns (Census Bureau) for Lansing, MI MSA", 344 | "geometry_type": "Polygon", 345 | "nrows": 46, 346 | "ncols": 56, 347 | "details": "https://geodacenter.github.io/data-and-lab//lansing2/", 348 | "hash": "7657c05d3bd6090c4d5914cfe5aaf01f694601c1e0c29bc3ecbe9bc523662303", 349 | "filename": "LansingMSA2.zip", 350 | "members": [ 351 | "LansingMSA2/LansingMSA2.geojson" 352 | ] 353 | }, 354 | "lasrosas": { 355 | "url": "https://geodacenter.github.io/data-and-lab//data/lasrosas.zip", 
356 | "license": "NA", 357 | "attribution": "Center for Spatial Data Science, University of Chicago", 358 | "name": "geoda.lasrosas", 359 | "description": "Corn yield, fertilizer and field data for precision agriculture, Argentina, 1999", 360 | "geometry_type": "Polygon", 361 | "nrows": 1738, 362 | "ncols": 35, 363 | "details": "https://geodacenter.github.io/data-and-lab//lasrosas/", 364 | "hash": "038d0e82203f2875b50499dbd8498ca9c762ebd8003b2f2203ebc6acada8f8fd", 365 | "filename": "lasrosas.zip", 366 | "members": [ 367 | "lasrosas/rosas1999.gpkg" 368 | ] 369 | }, 370 | "liquor_stores": { 371 | "url": "https://geodacenter.github.io/data-and-lab//data/liquor.zip", 372 | "license": "NA", 373 | "attribution": "Center for Spatial Data Science, University of Chicago", 374 | "name": "geoda.liquor_stores", 375 | "description": "2015 Chicago Liquor Stores", 376 | "geometry_type": "Point", 377 | "nrows": 571, 378 | "ncols": 3, 379 | "details": "https://geodacenter.github.io/data-and-lab//liq_chicago/", 380 | "hash": "6a483a6a7066a000bc97bfe71596cf28834d3088fbc958455b903a0938b3b530", 381 | "filename": "liquor.zip", 382 | "members": [ 383 | "liq_Chicago.shp", 384 | "liq_Chicago.dbf", 385 | "liq_Chicago.shx", 386 | "liq_Chicago.prj" 387 | ] 388 | }, 389 | "malaria": { 390 | "url": "https://geodacenter.github.io/data-and-lab//data/malariacolomb.zip", 391 | "license": "NA", 392 | "attribution": "Center for Spatial Data Science, University of Chicago", 393 | "name": "geoda.malaria", 394 | "description": "Malaria incidence and population (1973, 95, 93 censuses and projections until 2005)", 395 | "geometry_type": "Polygon", 396 | "nrows": 1068, 397 | "ncols": 51, 398 | "details": "https://geodacenter.github.io/data-and-lab//colomb_malaria/", 399 | "hash": "ca77477656829833a4e3e384b02439632fa28bb577610fe5aef9e0b094c41a95", 400 | "filename": "malariacolomb.zip", 401 | "members": [ 402 | "malariacolomb/colmunic.gpkg" 403 | ] 404 | }, 405 | "milwaukee1": { 406 | "url": "https://geodacenter.github.io/data-and-lab//data/MilwaukeeMSA.zip", 407 | "license": "NA", 408 | "attribution": "Center for Spatial Data Science, University of Chicago", 409 | "name": "geoda.milwaukee1", 410 | "description": "2000 Census Tract Data for Milwaukee, WI MSA", 411 | "geometry_type": "Polygon", 412 | "nrows": 417, 413 | "ncols": 35, 414 | "details": "https://geodacenter.github.io/data-and-lab//milwaukee1/", 415 | "hash": "bf3c9617c872db26ea56f20e82a449f18bb04d8fb76a653a2d3842d465bc122c", 416 | "filename": "MilwaukeeMSA.zip", 417 | "members": [ 418 | "MilwaukeeMSA/wi_final_census2_random4.gpkg" 419 | ] 420 | }, 421 | "milwaukee2": { 422 | "url": "https://geodacenter.github.io/data-and-lab//data/MilwaukeeMSA2.zip", 423 | "license": "NA", 424 | "attribution": "Center for Spatial Data Science, University of Chicago", 425 | "name": "geoda.milwaukee2", 426 | "description": "1998 and 2001 Zip Code Business Patterns (Census Bureau) for Milwaukee, WI MSA", 427 | "geometry_type": "Polygon", 428 | "nrows": 83, 429 | "ncols": 60, 430 | "details": "https://geodacenter.github.io/data-and-lab//milwaukee2/", 431 | "hash": "7f74212d63addb9ab84fac9447ee898498c8fafc284edcffe1f1ac79c2175d60", 432 | "filename": "MilwaukeeMSA2.zip", 433 | "members": [ 434 | "MilwaukeeMSA2/MilwaukeeMSA2.gpkg" 435 | ] 436 | }, 437 | "ncovr": { 438 | "url": "https://geodacenter.github.io/data-and-lab//data/ncovr.zip", 439 | "license": "NA", 440 | "attribution": "Center for Spatial Data Science, University of Chicago", 441 | "name": "geoda.ncovr", 442 | "description": "US 
county homicides 1960-1990", 443 | "geometry_type": "Polygon", 444 | "nrows": 3085, 445 | "ncols": 70, 446 | "details": "https://geodacenter.github.io/data-and-lab//ncovr/", 447 | "hash": "e8cb04e6da634c6cd21808bd8cfe4dad6e295b22e8d40cc628e666887719cfe9", 448 | "filename": "ncovr.zip", 449 | "members": [ 450 | "ncovr/NAT.gpkg" 451 | ] 452 | }, 453 | "natregimes": { 454 | "url": "https://geodacenter.github.io/data-and-lab//data/natregimes.zip", 455 | "license": "NA", 456 | "attribution": "Center for Spatial Data Science, University of Chicago", 457 | "name": "geoda.natregimes", 458 | "description": "NCOVR with regimes (book/PySAL)", 459 | "geometry_type": "Polygon", 460 | "nrows": 3085, 461 | "ncols": 74, 462 | "details": "https://geodacenter.github.io/data-and-lab//natregimes/", 463 | "hash": "431d0d95ffa000692da9319e6bd28701b1156f7b8e716d4bfcd1e09b6e357918", 464 | "filename": "natregimes.zip" 465 | }, 466 | "ndvi": { 467 | "url": "https://geodacenter.github.io/data-and-lab//data/ndvi.zip", 468 | "license": "NA", 469 | "attribution": "Center for Spatial Data Science, University of Chicago", 470 | "name": "geoda.ndvi", 471 | "description": "Normalized Difference Vegetation Index grid", 472 | "geometry_type": "Polygon", 473 | "nrows": 49, 474 | "ncols": 8, 475 | "details": "https://geodacenter.github.io/data-and-lab//ndvi/", 476 | "hash": "a89459e50a4495c24ead1d284930467ed10eb94829de16a693a9fa89dea2fe22", 477 | "filename": "ndvi.zip", 478 | "members": [ 479 | "ndvi/ndvigrid.gpkg" 480 | ] 481 | }, 482 | "nepal": { 483 | "url": "https://geodacenter.github.io/data-and-lab//data/nepal.zip", 484 | "license": "NA", 485 | "attribution": "Center for Spatial Data Science, University of Chicago", 486 | "name": "geoda.nepal", 487 | "description": "Health, poverty and education indicators for Nepal districts", 488 | "geometry_type": "Polygon", 489 | "nrows": 75, 490 | "ncols": 62, 491 | "details": "https://geodacenter.github.io/data-and-lab//nepal/", 492 | "hash": "d7916568fe49ff258d0f03ac115e68f64cdac572a9fd2b29de2d70554ac2b20d", 493 | "filename": "nepal.zip" 494 | }, 495 | "nyc": { 496 | "url": "https://geodacenter.github.io/data-and-lab///data/nyc.zip", 497 | "license": "NA", 498 | "attribution": "Center for Spatial Data Science, University of Chicago", 499 | "name": "geoda.nyc", 500 | "description": "Demographic and housing data for New York City subboroughs, 2002-09", 501 | "geometry_type": "Polygon", 502 | "nrows": 55, 503 | "ncols": 35, 504 | "details": "https://geodacenter.github.io/data-and-lab//nyc/", 505 | "hash": "a67dff2f9e6da9e11737e6be5a16e1bc33954e2c954332d68bcbf6ff7203702b", 506 | "filename": "nyc.zip" 507 | }, 508 | "nyc_earnings": { 509 | "url": "https://geodacenter.github.io/data-and-lab//data/lehd.zip", 510 | "license": "NA", 511 | "attribution": "Center for Spatial Data Science, University of Chicago", 512 | "name": "geoda.nyc_earnings", 513 | "description": "Block-level Earnings in NYC (2002-14)", 514 | "geometry_type": "Polygon", 515 | "nrows": 108487, 516 | "ncols": 71, 517 | "details": "https://geodacenter.github.io/data-and-lab//LEHD_Data/", 518 | "hash": "771fe11e59a16d4c15c6471d9a81df5e9c9bda5ef0a207e77d8ff21b2c16891b", 519 | "filename": "lehd.zip" 520 | }, 521 | "nyc_education": { 522 | "url": "https://geodacenter.github.io/data-and-lab//data/nyc_2000Census.zip", 523 | "license": "NA", 524 | "attribution": "Center for Spatial Data Science, University of Chicago", 525 | "name": "geoda.nyc_education", 526 | "description": "NYC Education (2000)", 527 | "geometry_type": 
"Polygon", 528 | "nrows": 2216, 529 | "ncols": 57, 530 | "details": "https://geodacenter.github.io/data-and-lab//NYC-Census-2000/", 531 | "hash": "ecdf342654415107911291a8076c1685bd2c8a08d8eaed3ce9c3e9401ef714f2", 532 | "filename": "nyc_2000Census.zip" 533 | }, 534 | "nyc_neighborhoods": { 535 | "url": "https://geodacenter.github.io/data-and-lab//data/nycnhood_acs.zip", 536 | "license": "NA", 537 | "attribution": "Center for Spatial Data Science, University of Chicago", 538 | "name": "geoda.nyc_neighborhoods", 539 | "description": "Demographics for New York City neighborhoods", 540 | "geometry_type": "Polygon", 541 | "nrows": 195, 542 | "ncols": 99, 543 | "details": "https://geodacenter.github.io/data-and-lab//NYC-Nhood-ACS-2008-12/", 544 | "hash": "aeb75fc5c95fae1088093827fca69928cee3ad27039441bb35c03013d2ee403f", 545 | "filename": "nycnhood_acs.zip" 546 | }, 547 | "orlando1": { 548 | "url": "https://geodacenter.github.io/data-and-lab//data/OrlandoMSA.zip", 549 | "license": "NA", 550 | "attribution": "Center for Spatial Data Science, University of Chicago", 551 | "name": "geoda.orlando1", 552 | "description": "2000 Census Tract Data for Orlando, FL MSA and counties", 553 | "geometry_type": "Polygon", 554 | "nrows": 328, 555 | "ncols": 31, 556 | "details": "https://geodacenter.github.io/data-and-lab//orlando1/", 557 | "hash": "e98ea5b9ffaf3e421ed437f665c739d1e92d9908e2b121c75ac02ecf7de2e254", 558 | "filename": "OrlandoMSA.zip", 559 | "members": [ 560 | "OrlandoMSA/orlando_final_census2.gpkg" 561 | ] 562 | }, 563 | "orlando2": { 564 | "url": "https://geodacenter.github.io/data-and-lab//data/OrlandoMSA2.zip", 565 | "license": "NA", 566 | "attribution": "Center for Spatial Data Science, University of Chicago", 567 | "name": "geoda.orlando2", 568 | "description": "1998 and 2001 Zip Code Business Patterns (Census Bureau) for Orlando, FL MSA", 569 | "geometry_type": "Polygon", 570 | "nrows": 94, 571 | "ncols": 60, 572 | "details": "https://geodacenter.github.io/data-and-lab//orlando2/", 573 | "hash": "4cd8c3469cb7edea5f0fb615026192e12b1d4b50c22b28345adf476bc85d0f03", 574 | "filename": "OrlandoMSA2.zip", 575 | "members": [ 576 | "OrlandoMSA2/OrlandoMSA2.gpkg" 577 | ] 578 | }, 579 | "oz9799": { 580 | "url": "https://geodacenter.github.io/data-and-lab//data/oz9799.zip", 581 | "license": "NA", 582 | "attribution": "Center for Spatial Data Science, University of Chicago", 583 | "name": "geoda.oz9799", 584 | "description": "Monthly ozone data, 1997-99", 585 | "geometry_type": "Point", 586 | "nrows": 30, 587 | "ncols": 78, 588 | "details": "https://geodacenter.github.io/data-and-lab//oz96/", 589 | "hash": "1ecc7c46f5f42af6057dedc1b73f56b576cb9716d2c08d23cba98f639dfddb82", 590 | "filename": "oz9799.zip", 591 | "members": [ 592 | "oz9799/oz9799.csv" 593 | ] 594 | }, 595 | "phoenix_acs": { 596 | "url": "https://geodacenter.github.io/data-and-lab//data/phx2.zip", 597 | "license": "NA", 598 | "attribution": "Center for Spatial Data Science, University of Chicago", 599 | "name": "geoda.phoenix_acs", 600 | "description": "Phoenix American Community Survey Data (2010, 5-year averages)", 601 | "geometry_type": "Polygon", 602 | "nrows": 985, 603 | "ncols": 18, 604 | "details": "https://geodacenter.github.io/data-and-lab//phx/", 605 | "hash": "b2f6e196bacb6f3fe1fc909af482e7e75b83d1f8363fc73038286364c13334ee", 606 | "filename": "phx2.zip", 607 | "members": [ 608 | "phx/phx.gpkg" 609 | ] 610 | }, 611 | "police": { 612 | "url": "https://geodacenter.github.io/data-and-lab//data/police.zip", 613 | "license": "NA", 614 
| "attribution": "Center for Spatial Data Science, University of Chicago", 615 | "name": "geoda.police", 616 | "description": "Police expenditures Mississippi counties", 617 | "geometry_type": "Polygon", 618 | "nrows": 82, 619 | "ncols": 22, 620 | "details": "https://geodacenter.github.io/data-and-lab//police/", 621 | "hash": "596270d62dea8207001da84883ac265591e5de053f981c7491e7b5c738e9e9ff", 622 | "filename": "police.zip", 623 | "members": [ 624 | "police/police.gpkg" 625 | ] 626 | }, 627 | "sacramento1": { 628 | "url": "https://geodacenter.github.io/data-and-lab//data/sacramento.zip", 629 | "license": "NA", 630 | "attribution": "Center for Spatial Data Science, University of Chicago", 631 | "name": "geoda.sacramento1", 632 | "description": "2000 Census Tract Data for Sacramento MSA", 633 | "geometry_type": "Polygon", 634 | "nrows": 403, 635 | "ncols": 32, 636 | "details": "https://geodacenter.github.io/data-and-lab//sacramento1/", 637 | "hash": "72ddeb533cf2917dc1f458add7c6042b93c79b31316ae2d22f1c855a9da275f9", 638 | "filename": "sacramento.zip", 639 | "members": [ 640 | "sacramento/sacramentot2.gpkg" 641 | ] 642 | }, 643 | "sacramento2": { 644 | "url": "https://geodacenter.github.io/data-and-lab//data/SacramentoMSA2.zip", 645 | "license": "NA", 646 | "attribution": "Center for Spatial Data Science, University of Chicago", 647 | "name": "geoda.sacramento2", 648 | "description": "1998 and 2001 Zip Code Business Patterns (Census Bureau) for Sacramento MSA", 649 | "geometry_type": "Polygon", 650 | "nrows": 125, 651 | "ncols": 59, 652 | "details": "https://geodacenter.github.io/data-and-lab//sacramento2/", 653 | "hash": "3f6899efd371804ea8bfaf3cdfd3ed4753ea4d009fed38a57c5bbf442ab9468b", 654 | "filename": "SacramentoMSA2.zip", 655 | "members": [ 656 | "SacramentoMSA2/SacramentoMSA2.gpkg" 657 | ] 658 | }, 659 | "savannah1": { 660 | "url": "https://geodacenter.github.io/data-and-lab//data/SavannahMSA.zip", 661 | "license": "NA", 662 | "attribution": "Center for Spatial Data Science, University of Chicago", 663 | "name": "geoda.savannah1", 664 | "description": "2000 Census Tract Data for Savannah, GA MSA and counties", 665 | "geometry_type": "Polygon", 666 | "nrows": 77, 667 | "ncols": 31, 668 | "details": "https://geodacenter.github.io/data-and-lab//savannah1/", 669 | "hash": "df48c228776d2122c38935b2ebbf4cbb90c0bacc68df01161e653aab960e4208", 670 | "filename": "SavannahMSA.zip", 671 | "members": [ 672 | "SavannahMSA/ga_final_census2.gpkg" 673 | ] 674 | }, 675 | "savannah2": { 676 | "url": "https://geodacenter.github.io/data-and-lab//data/SavannahMSA2.zip", 677 | "license": "NA", 678 | "attribution": "Center for Spatial Data Science, University of Chicago", 679 | "name": "geoda.savannah2", 680 | "description": "1998 and 2001 Zip Code Business Patterns (Census Bureau) for Savannah, GA MSA", 681 | "geometry_type": "Polygon", 682 | "nrows": 24, 683 | "ncols": 60, 684 | "details": "https://geodacenter.github.io/data-and-lab//savannah2/", 685 | "hash": "5b22b84a8665434cb91e800a039337f028b888082b8ef7a26d77eb6cc9aea8c1", 686 | "filename": "SavannahMSA2.zip", 687 | "members": [ 688 | "SavannahMSA2/SavannahMSA2.gpkg" 689 | ] 690 | }, 691 | "seattle1": { 692 | "url": "https://geodacenter.github.io/data-and-lab//data/SeattleMSA.zip", 693 | "license": "NA", 694 | "attribution": "Center for Spatial Data Science, University of Chicago", 695 | "name": "geoda.seattle1", 696 | "description": "2000 Census Tract Data for Seattle, WA MSA and counties", 697 | "geometry_type": "Polygon", 698 | "nrows": 664, 699 | 
"ncols": 31, 700 | "details": "https://geodacenter.github.io/data-and-lab//seattle1/", 701 | "hash": "46fb75a30f0e7963e6108bdb19af4d7db4c72c3d5a020025cafa528c96e09daa", 702 | "filename": "SeattleMSA.zip", 703 | "members": [ 704 | "SeattleMSA/wa_final_census2.gpkg" 705 | ] 706 | }, 707 | "seattle2": { 708 | "url": "https://geodacenter.github.io/data-and-lab//data/SeattleMSA2.zip", 709 | "license": "NA", 710 | "attribution": "Center for Spatial Data Science, University of Chicago", 711 | "name": "geoda.seattle2", 712 | "description": "1998 and 2001 Zip Code Business Patterns (Census Bureau) for Seattle, WA MSA", 713 | "geometry_type": "Polygon", 714 | "nrows": 145, 715 | "ncols": 60, 716 | "details": "https://geodacenter.github.io/data-and-lab//seattle2/", 717 | "hash": "3dac2fa5b8c8dfa9dd5273a85de7281e06e18ab4f197925607f815f4e44e4d0c", 718 | "filename": "SeattleMSA2.zip", 719 | "members": [ 720 | "SeattleMSA2/SeattleMSA2.gpkg" 721 | ] 722 | }, 723 | "sids": { 724 | "url": "https://geodacenter.github.io/data-and-lab//data/sids.zip", 725 | "license": "NA", 726 | "attribution": "Center for Spatial Data Science, University of Chicago", 727 | "name": "geoda.sids", 728 | "description": "North Carolina county SIDS death counts", 729 | "geometry_type": "Polygon", 730 | "nrows": 100, 731 | "ncols": 15, 732 | "details": "https://geodacenter.github.io/data-and-lab//sids/", 733 | "hash": "e2f7b210b9a57839423fd170e47c02cf7a2602a480a1036bb0324e1112a4eaab", 734 | "filename": "sids.zip", 735 | "members": [ 736 | "sids/sids.gpkg" 737 | ] 738 | }, 739 | "sids2": { 740 | "url": "https://geodacenter.github.io/data-and-lab//data/sids2.zip", 741 | "license": "NA", 742 | "attribution": "Center for Spatial Data Science, University of Chicago", 743 | "name": "geoda.sids2", 744 | "description": "North Carolina county SIDS death counts and rates", 745 | "geometry_type": "Polygon", 746 | "nrows": 100, 747 | "ncols": 19, 748 | "details": "https://geodacenter.github.io/data-and-lab//sids2/", 749 | "hash": "b5875ffbdb261e6fa75dc4580d67111ef1434203f2d6a5d63ffac16db3a14bd0", 750 | "filename": "sids2.zip", 751 | "members": [ 752 | "sids2/sids2.gpkg" 753 | ] 754 | }, 755 | "south": { 756 | "url": "https://geodacenter.github.io/data-and-lab//data/south.zip", 757 | "license": "NA", 758 | "attribution": "Center for Spatial Data Science, University of Chicago", 759 | "name": "geoda.south", 760 | "description": "US Southern county homicides 1960-1990", 761 | "geometry_type": "Polygon", 762 | "nrows": 1412, 763 | "ncols": 70, 764 | "details": "https://geodacenter.github.io/data-and-lab//south/", 765 | "hash": "8f151d99c643b187aad37cfb5c3212353e1bc82804a4399a63de369490e56a7a", 766 | "filename": "south.zip", 767 | "members": [ 768 | "south/south.gpkg" 769 | ] 770 | }, 771 | "spirals": { 772 | "url": "https://geodacenter.github.io/data-and-lab//data/spirals.csv", 773 | "license": "NA", 774 | "attribution": "Center for Spatial Data Science, University of Chicago", 775 | "name": "geoda.spirals", 776 | "description": "Synthetic spiral points", 777 | "geometry_type": "Point", 778 | "nrows": 300, 779 | "ncols": 2, 780 | "details": "https://geodacenter.github.io/data-and-lab//spirals/", 781 | "hash": "3203b0a6db37c1207b0f1727c980814f541ce0a222597475f9c91540b1d372f1", 782 | "filename": "spirals.csv" 783 | }, 784 | "stlouis": { 785 | "url": "https://geodacenter.github.io/data-and-lab//data/stlouis.zip", 786 | "license": "NA", 787 | "attribution": "Center for Spatial Data Science, University of Chicago", 788 | "name": "geoda.stlouis", 789 | 
"description": "St Louis region county homicide counts and rates", 790 | "geometry_type": "Polygon", 791 | "nrows": 78, 792 | "ncols": 24, 793 | "details": "https://geodacenter.github.io/data-and-lab//stlouis/", 794 | "hash": "181a17a12e9a2b2bfc9013f399e149da935e0d5cb95c3595128f67898c4365f3", 795 | "filename": "stlouis.zip" 796 | }, 797 | "tampa1": { 798 | "url": "https://geodacenter.github.io/data-and-lab//data/TampaMSA.zip", 799 | "license": "NA", 800 | "attribution": "Center for Spatial Data Science, University of Chicago", 801 | "name": "geoda.tampa1", 802 | "description": "2000 Census Tract Data for Tampa, FL MSA and counties", 803 | "geometry_type": "Polygon", 804 | "nrows": 547, 805 | "ncols": 31, 806 | "details": "https://geodacenter.github.io/data-and-lab//tampa1/", 807 | "hash": "9a7ea0746138f62aa589e8377edafea48a7b1be0cdca2b38798ba21665bfb463", 808 | "filename": "TampaMSA.zip", 809 | "members": [ 810 | "TampaMSA/tampa_final_census2.gpkg" 811 | ] 812 | }, 813 | "us_sdoh": { 814 | "url": "https://geodacenter.github.io/data-and-lab//data/us-sdoh-2014.zip", 815 | "license": "NA", 816 | "attribution": "Center for Spatial Data Science, University of Chicago", 817 | "name": "geoda.us_sdoh", 818 | "description": "2014 US Social Determinants of Health Data", 819 | "geometry_type": "Polygon", 820 | "nrows": 71901, 821 | "ncols": 26, 822 | "details": "https://geodacenter.github.io/data-and-lab//us-sdoh/", 823 | "hash": "076701725c4b67248f79c8b8a40e74f9ad9e194d3237e1858b3d20176a6562a5", 824 | "filename": "us-sdoh-2014.zip", 825 | "members": [ 826 | "us-sdoh-2014/us-sdoh-2014.shp", 827 | "us-sdoh-2014/us-sdoh-2014.dbf", 828 | "us-sdoh-2014/us-sdoh-2014.shx", 829 | "us-sdoh-2014/us-sdoh-2014.prj" 830 | ] 831 | } 832 | }, 833 | "ny": { 834 | "bb": { 835 | "url": "https://www.nyc.gov/assets/planning/download/zip/data-maps/open-data/nybb_16a.zip", 836 | "license": "NA", 837 | "attribution": "Department of City Planning (DCP)", 838 | "name": "ny.bb", 839 | "description": "The borough boundaries of New York City clipped to the shoreline at mean high tide for 2016.", 840 | "geometry_type": "Polygon", 841 | "details": "https://data.cityofnewyork.us/City-Government/Borough-Boundaries/tqmj-j8zm", 842 | "nrows": 5, 843 | "ncols": 5, 844 | "hash": "a303be17630990455eb079777a6b31980549e9096d66d41ce0110761a7e2f92a", 845 | "filename": "nybb_16a.zip", 846 | "members": [ 847 | "nybb_16a/nybb.shp", 848 | "nybb_16a/nybb.shx", 849 | "nybb_16a/nybb.dbf", 850 | "nybb_16a/nybb.prj" 851 | ] 852 | } 853 | }, 854 | "eea": { 855 | "large_rivers": { 856 | "url": "https://zenodo.org/records/17857144/files/wise_large_rivers.zip?download=1", 857 | "license": "ODC-by", 858 | "attribution": "European Environmental Agency", 859 | "name": "eea.large_rivers", 860 | "description": "Large rivers in Europe that have a catchment area large than 50,000 km2.", 861 | "geometry_type": "LineString", 862 | "details": "https://doi.org/10.5281/zenodo.17857143", 863 | "nrows": 20, 864 | "ncols": 3, 865 | "hash": "97b37b781cba30c2292122ba2bdfe2e156a791cefbdfedf611c8473facc6be50", 866 | "filename": "wise_large_rivers.zip" 867 | } 868 | }, 869 | "abs": { 870 | "australia": { 871 | "url": "https://www.abs.gov.au/statistics/standards/australian-statistical-geography-standard-asgs-edition-3/jul2021-jun2026/access-and-downloads/digital-boundary-files/AUS_2021_AUST_SHP_GDA2020.zip", 872 | "license": "CC BY 4.0", 873 | "attribution": "Australian Bureau of Statistics (ABS)", 874 | "name": "abs.australia", 875 | "description": "Australia land polygon 
including Norfolk Island, the Territory of Christmas Island, and the Territory of Cocos (Keeling) Islands.", 876 | "geometry_type": "Polygon", 877 | "nrows": 2, 878 | "ncols": 7, 879 | "details": "https://www.abs.gov.au/statistics/standards/australian-statistical-geography-standard-asgs-edition-3/jul2021-jun2026/access-and-downloads/digital-boundary-files", 880 | "hash": "086752a6b0b3978247be201f02e02cd4e3c4f36d4f4ca74802e6831083b67129", 881 | "filename": "AUS_2021_AUST_SHP_GDA2020.zip" 882 | }, 883 | "australia_states_territories": { 884 | "url": "https://www.abs.gov.au/statistics/standards/australian-statistical-geography-standard-asgs-edition-3/jul2021-jun2026/access-and-downloads/digital-boundary-files/STE_2021_AUST_SHP_GDA2020.zip", 885 | "license": "CC BY 4.0", 886 | "attribution": "Australian Bureau of Statistics (ABS)", 887 | "name": "abs.australia_states_territories", 888 | "description": "Australian state and territory boundaries.", 889 | "geometry_type": "Polygon", 890 | "nrows": 10, 891 | "ncols": 9, 892 | "details": "https://www.abs.gov.au/statistics/standards/australian-statistical-geography-standard-asgs-edition-3/jul2021-jun2026/access-and-downloads/digital-boundary-files", 893 | "hash": "d9b7f735de6085b37414faf011f796dda3c7f768c55e9dce01f96e790f399a21", 894 | "filename": "STE_2021_AUST_SHP_GDA2020.zip" 895 | } 896 | }, 897 | "naturalearth": { 898 | "cities": { 899 | "url": "https://naciscdn.org/naturalearth/110m/cultural/ne_110m_populated_places_simple.zip", 900 | "license": "CC0", 901 | "attribution": "Natural Earth", 902 | "name": "naturalearth.cities", 903 | "description": "Point symbols with name attributes of all admin-0 capitals and some other major cities.", 904 | "geometry_type": "Point", 905 | "details": "https://www.naturalearthdata.com/downloads/110m-cultural-vectors/110m-populated-places/", 906 | "nrows": 243, 907 | "ncols": 32, 908 | "hash": "3f3d99a9a5d84605bb3be07b94c9122b4d69d7545de478b314d75f5b0742afdf", 909 | "filename": "ne_110m_populated_places_simple.zip" 910 | }, 911 | "lakes": { 912 | "url": "https://naciscdn.org/naturalearth/110m/physical/ne_110m_lakes.zip", 913 | "license": "CC0", 914 | "attribution": "Natural Earth", 915 | "name": "naturalearth.lakes", 916 | "description": "Major natural and artificial lakes polygons.", 917 | "geometry_type": "Polygon", 918 | "details": "https://www.naturalearthdata.com/downloads/10m-physical-vectors/10m-lakes/", 919 | "nrows": 24, 920 | "ncols": 38, 921 | "hash": "f2eed3c738a93010770acb0ba44273ea6a83b053641588bc902d9d6fd1cdafcb", 922 | "filename": "ne_110m_lakes.zip" 923 | }, 924 | "land": { 925 | "url": "https://naciscdn.org/naturalearth/110m/physical/ne_110m_land.zip", 926 | "license": "CC0", 927 | "attribution": "Natural Earth", 928 | "name": "naturalearth.land", 929 | "description": "Land polygons including major islands in a 1:110m resolution.", 930 | "geometry_type": "Polygon", 931 | "details": "https://www.naturalearthdata.com/downloads/110m-physical-vectors/110m-land/", 932 | "nrows": 127, 933 | "ncols": 4, 934 | "hash": "1926c621afd6ac67c3f36639bb1236134a48d82226dc675d3e3df53d02d2a3de", 935 | "filename": "ne_110m_land.zip" 936 | } 937 | }, 938 | "spdata": { 939 | "nydata": { 940 | "url": "https://github.com/Nowosad/spData/raw/refs/heads/master/inst/shapes/NY8_bna_utm18.gpkg", 941 | "license": "CC0", 942 | "attribution": "spData", 943 | "name": "spdata.nydata", 944 | "description": "New York leukemia data taken from the data sets supporting Waller and Gotway 2004 to demonstrate spatial data import 
techniques.", 945 | "geometry_type": "Polygon", 946 | "details": "https://jakubnowosad.com/spData/reference/nydata.html", 947 | "nrows": 281, 948 | "ncols": 13, 949 | "hash": "80f7814b3064824e7656c55b3cadfc82926d8e7c4337ddc274e2c54ef939bb43", 950 | "filename": "NY8_bna_utm18.gpkg" 951 | }, 952 | "auckland": { 953 | "url": "https://github.com/Nowosad/spData/raw/refs/heads/master/inst/shapes/auckland.gpkg", 954 | "license": "CC0", 955 | "attribution": "spData", 956 | "name": "spdata.auckland", 957 | "description": "Marshall's infant mortality in Auckland dataset", 958 | "geometry_type": "Polygon", 959 | "details": "https://jakubnowosad.com/spData/reference/auckland.html", 960 | "nrows": 167, 961 | "ncols": 5, 962 | "hash": "bdef254513a90f1f3a59db6f3a1bfd70c4d905b81e4103fa0959e7d7b1b8476e", 963 | "filename": "auckland.gpkg" 964 | }, 965 | "boston": { 966 | "url": "https://github.com/Nowosad/spData/raw/refs/heads/master/inst/shapes/boston_tracts.gpkg", 967 | "license": "CC0", 968 | "attribution": "spData", 969 | "name": "spdata.boston", 970 | "description": "Boston Housing Data", 971 | "geometry_type": "Polygon", 972 | "details": "https://jakubnowosad.com/spData/reference/boston.html", 973 | "nrows": 506, 974 | "ncols": 37, 975 | "hash": "ab883a3047d342e035c826896efaa048b4b1442b3be1f70f63c280fe154e8d3a", 976 | "filename": "boston_tracts.gpkg" 977 | }, 978 | "columbus": { 979 | "url": "https://github.com/Nowosad/spData/raw/refs/heads/master/inst/shapes/columbus.gpkg", 980 | "license": "CC0", 981 | "attribution": "spData", 982 | "name": "spdata.columbus", 983 | "description": "Columbus OH spatial analysis dataset", 984 | "geometry_type": "Polygon", 985 | "details": "https://jakubnowosad.com/spData/reference/columbus.html", 986 | "nrows": 49, 987 | "ncols": 21, 988 | "hash": "8c7cf24780306d731b48095d10567f14c91ddb2278a976d42425c2cbe46624be", 989 | "filename": "columbus.gpkg" 990 | }, 991 | "cycle_hire": { 992 | "url": "https://github.com/Nowosad/spData/raw/refs/heads/master/inst/shapes/cycle_hire.geojson", 993 | "license": "CC0", 994 | "attribution": "spData", 995 | "name": "spdata.cycle_hire", 996 | "description": "Cycle hire points in London", 997 | "geometry_type": "Point", 998 | "details": "https://jakubnowosad.com/spData/reference/cycle_hire.html", 999 | "nrows": 742, 1000 | "ncols": 6, 1001 | "hash": "07177c46a2aba4f82f20b7584e9fac9ce9e9d157d17bf4dea04cb25d4e720edd", 1002 | "filename": "cycle_hire.geojson" 1003 | }, 1004 | "cycle_hire_osm": { 1005 | "url": "https://github.com/Nowosad/spData/raw/refs/heads/master/inst/shapes/cycle_hire_osm.geojson", 1006 | "license": "CC0", 1007 | "attribution": "spData", 1008 | "name": "spdata.cycle_hire_osm", 1009 | "description": "Cycle hire points in London from OSM", 1010 | "geometry_type": "Point", 1011 | "details": "https://jakubnowosad.com/spData/reference/cycle_hire_osm.html", 1012 | "nrows": 532, 1013 | "ncols": 6, 1014 | "hash": "804e7298bc5e84d6a73f212bdd19b7f3926ee0f50a3dbd957c46c07950dcc267", 1015 | "filename": "cycle_hire_osm.geojson" 1016 | }, 1017 | "eire": { 1018 | "url": "https://github.com/Nowosad/spData/raw/refs/heads/master/inst/shapes/eire.gpkg", 1019 | "license": "CC0", 1020 | "attribution": "spData", 1021 | "name": "spdata.eire", 1022 | "description": "Eire datasets", 1023 | "geometry_type": "Polygon", 1024 | "details": "https://jakubnowosad.com/spData/reference/eire.html", 1025 | "nrows": 26, 1026 | "ncols": 11, 1027 | "hash": "e8c4969a819baa6c29cc0126dbbcef33cbec0459e349f0bce4fe3f37e211d175", 1028 | "filename": "eire.gpkg" 1029 | 
}, 1030 | "ncsids": { 1031 | "url": "https://github.com/Nowosad/spData/raw/refs/heads/master/inst/shapes/sids.gpkg", 1032 | "license": "CC0", 1033 | "attribution": "spData", 1034 | "name": "spdata.ncsids", 1035 | "description": "North Carolina SIDS data", 1036 | "geometry_type": "Polygon", 1037 | "details": "https://jakubnowosad.com/spData/reference/nc.sids.html", 1038 | "nrows": 100, 1039 | "ncols": 23, 1040 | "hash": "67ab56c847e1524385adf3b4e67648054035d137e5fe956f96e16941bbc5a00f", 1041 | "filename": "sids.gpkg" 1042 | }, 1043 | "wheat": { 1044 | "url": "https://github.com/Nowosad/spData/raw/refs/heads/master/inst/shapes/wheat.gpkg", 1045 | "license": "CC0", 1046 | "attribution": "spData", 1047 | "name": "spdata.wheat", 1048 | "description": "Mercer and Hall wheat yield data", 1049 | "geometry_type": "Polygon", 1050 | "details": "https://jakubnowosad.com/spData/reference/wheat.html", 1051 | "nrows": 500, 1052 | "ncols": 9, 1053 | "hash": "4e4c2bfd72e3abfe240155ee6952e8235ed75aa21c39868c360e95e70c39f438", 1054 | "filename": "wheat.gpkg" 1055 | }, 1056 | "zion": { 1057 | "url": "https://github.com/Nowosad/spDataLarge/raw/refs/heads/master/inst/vector/zion.gpkg", 1058 | "license": "CC0", 1059 | "attribution": "spDataLarge", 1060 | "name": "spdata.zion", 1061 | "description": "The borders of Zion National Park", 1062 | "geometry_type": "Polygon", 1063 | "details": "https://jakubnowosad.com/spDataLarge/reference/zion.gpkg.html", 1064 | "nrows": 1, 1065 | "ncols": 12, 1066 | "hash": "03b8584ae4f4e329c0626749596733e07158cf503b5ae8590af88fa70e9d6290", 1067 | "filename": "zion.gpkg" 1068 | }, 1069 | "zion_points": { 1070 | "url": "https://github.com/Nowosad/spDataLarge/raw/refs/heads/master/inst/vector/zion_points.gpkg", 1071 | "license": "CC0", 1072 | "attribution": "spDataLarge", 1073 | "name": "spdata.zion_points", 1074 | "description": "Dataset containing 30 randomly located points in the Zion National Park", 1075 | "geometry_type": "Point", 1076 | "details": "https://jakubnowosad.com/spDataLarge/reference/zion_points.gpkg.html", 1077 | "nrows": 30, 1078 | "ncols": 1, 1079 | "hash": "c69683c8a1a9775b0b77689ecadbbec0251bdd603633822d0c85282b41fc9bc4", 1080 | "filename": "zion_points.gpkg" 1081 | } 1082 | } 1083 | } --------------------------------------------------------------------------------
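
Each top-level object in database.json becomes a ``Bunch`` and every record carrying a ``url`` becomes a ``Dataset`` (see ``_load_json`` in ``geodatasets/lib.py`` above), so the JSON keys map one-to-one to ``Dataset`` attributes. A small illustrative sketch:

    from geodatasets import data, get_path

    airbnb = data.geoda.airbnb  # built from the "geoda" -> "airbnb" record above
    print(airbnb.url, airbnb.nrows, airbnb.geometry_type)
    path = get_path("geoda.airbnb")  # download if needed, return the local path
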