├── .github ├── FUNDING.yml └── workflows │ ├── build.yaml │ ├── docs.yaml │ ├── downstream_tests.yaml │ ├── nightly_lock.yaml │ └── test.yaml ├── .gitignore ├── .pre-commit-config.yaml ├── CHANGELOG.rst ├── CODE_OF_CONDUCT.md ├── LICENSE.txt ├── README.md ├── ROADMAP.md ├── benchmarks ├── README.md ├── asv.conf.json └── benchmarks │ ├── __init__.py │ ├── common.py │ ├── line.py │ └── shade.py ├── datashader ├── __init__.py ├── __main__.py ├── __version.py ├── antialias.py ├── bundling.py ├── colors.py ├── compiler.py ├── composite.py ├── core.py ├── data_libraries │ ├── __init__.py │ ├── cudf.py │ ├── dask.py │ ├── dask_cudf.py │ ├── dask_xarray.py │ ├── pandas.py │ └── xarray.py ├── datashape │ ├── __init__.py │ ├── coretypes.py │ ├── discovery.py │ ├── dispatch.py │ ├── error.py │ ├── internal_utils.py │ ├── lexer.py │ ├── parser.py │ ├── predicates.py │ ├── promote.py │ ├── tests │ │ ├── __init__.py │ │ ├── test_coretypes.py │ │ ├── test_creation.py │ │ ├── test_discovery.py │ │ ├── test_lexer.py │ │ ├── test_operations.py │ │ ├── test_parser.py │ │ ├── test_predicates.py │ │ ├── test_promote.py │ │ ├── test_str.py │ │ ├── test_typeset.py │ │ ├── test_user.py │ │ └── test_util.py │ ├── type_symbol_table.py │ ├── typesets.py │ ├── user.py │ ├── util │ │ ├── __init__.py │ │ ├── testing.py │ │ └── tests │ │ │ └── test_testing.py │ └── validation.py ├── datatypes.py ├── glyphs │ ├── __init__.py │ ├── area.py │ ├── glyph.py │ ├── line.py │ ├── points.py │ ├── polygon.py │ ├── quadmesh.py │ └── trimesh.py ├── layout.py ├── macros.py ├── mpl_ext.py ├── pipeline.py ├── reductions.py ├── resampling.py ├── tests │ ├── __init__.py │ ├── benchmarks │ │ ├── README.md │ │ ├── __init__.py │ │ ├── test_bundling.py │ │ ├── test_canvas.py │ │ ├── test_draw_line.py │ │ ├── test_extend_line.py │ │ └── test_layout.py │ ├── conftest.py │ ├── data │ │ ├── test_001_antialias_clipped.nc │ │ ├── test_001_antialias_clipped.png │ │ ├── test_001_antialias_normal.nc │ │ ├── test_001_antialias_normal.png │ │ ├── test_001_noaa_clipped.nc │ │ ├── test_001_noaa_clipped.png │ │ ├── test_001_noaa_normal.nc │ │ ├── test_001_noaa_normal.png │ │ ├── test_002_antialias_clipped.nc │ │ ├── test_002_antialias_clipped.png │ │ ├── test_002_antialias_normal.nc │ │ ├── test_002_antialias_normal.png │ │ ├── test_002_noaa_clipped.nc │ │ ├── test_002_noaa_clipped.png │ │ ├── test_002_noaa_normal.nc │ │ ├── test_002_noaa_normal.png │ │ ├── test_003_antialias_clipped.nc │ │ ├── test_003_antialias_clipped.png │ │ ├── test_003_antialias_normal.nc │ │ ├── test_003_antialias_normal.png │ │ ├── test_003_noaa_clipped.nc │ │ ├── test_003_noaa_clipped.png │ │ ├── test_003_noaa_normal.nc │ │ ├── test_003_noaa_normal.png │ │ ├── test_004_antialias_clipped.nc │ │ ├── test_004_antialias_clipped.png │ │ ├── test_004_antialias_normal.nc │ │ ├── test_004_antialias_normal.png │ │ ├── test_004_noaa_clipped.nc │ │ ├── test_004_noaa_clipped.png │ │ ├── test_004_noaa_normal.nc │ │ ├── test_004_noaa_normal.png │ │ ├── test_005_antialias_clipped.nc │ │ ├── test_005_antialias_clipped.png │ │ ├── test_005_antialias_normal.nc │ │ ├── test_005_antialias_normal.png │ │ ├── test_005_noaa_clipped.nc │ │ ├── test_005_noaa_clipped.png │ │ ├── test_005_noaa_normal.nc │ │ ├── test_005_noaa_normal.png │ │ ├── test_006_antialias_clipped.nc │ │ ├── test_006_antialias_clipped.png │ │ ├── test_006_antialias_normal.nc │ │ ├── test_006_antialias_normal.png │ │ ├── test_006_noaa_clipped.nc │ │ ├── test_006_noaa_clipped.png │ │ ├── test_006_noaa_normal.nc │ │ ├── 
test_006_noaa_normal.png │ │ ├── test_007_antialias_clipped.nc │ │ ├── test_007_antialias_clipped.png │ │ ├── test_007_antialias_normal.nc │ │ ├── test_007_antialias_normal.png │ │ ├── test_007_noaa_clipped.nc │ │ ├── test_007_noaa_clipped.png │ │ ├── test_007_noaa_normal.nc │ │ ├── test_007_noaa_normal.png │ │ └── world.rgb.tif │ ├── test_antialias.py │ ├── test_bundling.py │ ├── test_colors.py │ ├── test_composite.py │ ├── test_dask.py │ ├── test_datatypes.py │ ├── test_geopandas.py │ ├── test_glyphs.py │ ├── test_layout.py │ ├── test_macros.py │ ├── test_mpl_ext.py │ ├── test_pandas.py │ ├── test_pipeline.py │ ├── test_polygons.py │ ├── test_quadmesh.py │ ├── test_raster.py │ ├── test_reductions.py │ ├── test_tiles.py │ ├── test_transfer_functions.py │ ├── test_utils.py │ ├── test_xarray.py │ └── utils.py ├── tiles.py ├── transfer_functions │ ├── __init__.py │ ├── _cpu_utils.py │ └── _cuda_utils.py └── utils.py ├── doc ├── _static │ ├── css │ │ └── custom.css │ ├── datashader-logo.png │ ├── favicon.ico │ ├── holoviz-icon-white.svg │ ├── logo_horizontal.png │ ├── logo_horizontal.svg │ ├── logo_horizontal_s.png │ ├── logo_stacked.png │ ├── logo_stacked.svg │ └── logo_stacked_s.png ├── about.rst ├── api.rst ├── conf.py ├── getting_started │ └── index.rst ├── governance │ └── project-doc │ │ ├── CONTRIBUTING.md │ │ ├── GOVERNANCE.md │ │ ├── LICENSE.md │ │ └── MEMBERS.md ├── index.rst ├── reduction.csv ├── releases.rst └── user_guide │ └── index.rst ├── examples ├── FAQ.ipynb ├── README.md ├── assets │ └── images │ │ ├── airport_connections.png │ │ ├── chesapeake_farout.png │ │ ├── chesbay_detail.png │ │ ├── dashboard.png │ │ ├── ds_hv_bokeh.png │ │ ├── ds_hv_bokeh2.png │ │ ├── featured-badge-gh.svg │ │ ├── fire.png │ │ ├── hot.png │ │ ├── houston_district29.png │ │ ├── jet.png │ │ ├── landsat.png │ │ ├── nyc_buildings.png │ │ ├── nyc_pickups_vs_dropoffs.jpg │ │ ├── nyc_races.jpg │ │ ├── nyc_taxi-paramnb.png │ │ ├── nyc_taxi_100k.png │ │ ├── parambokeh.png │ │ ├── pcap.png │ │ ├── pipeline.png │ │ ├── pipeline2.png │ │ ├── rainbow.png │ │ ├── rainbow4.png │ │ ├── sym_attractors.jpg │ │ ├── uk_researchers.png │ │ └── usa_census.jpg ├── conftest.py ├── data │ └── .data_stubs │ │ └── nyc_taxi.csv ├── datasets.yml ├── environment.yml ├── getting_started │ ├── 1_Introduction.ipynb │ ├── 2_Pipeline.ipynb │ ├── 3_Interactivity.ipynb │ └── index.ipynb ├── index.ipynb ├── pcap_to_parquet.py ├── raster.py ├── taxi_preprocessing_example.py ├── tiling.ipynb └── user_guide │ ├── 10_Performance.ipynb │ ├── 11_Geography.ipynb │ ├── 12_Inspection_Reductions.ipynb │ ├── 13_Geopandas.ipynb │ ├── 1_Plotting_Pitfalls.ipynb │ ├── 2_Points.ipynb │ ├── 3_Timeseries.ipynb │ ├── 4_Trajectories.ipynb │ ├── 5_Grids.ipynb │ ├── 6_Trimesh.ipynb │ ├── 7_Networks.ipynb │ ├── 8_Polygons.ipynb │ └── 9_Extending.ipynb ├── pixi.toml ├── pyproject.toml └── scripts ├── conda ├── build.sh └── recipe │ └── meta.yaml ├── download_data.py └── filetimes ├── filetimes.py ├── filetimes.sh └── filetimes.yml /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | # These are supported funding model platforms 2 | 3 | open_collective: holoviz 4 | -------------------------------------------------------------------------------- /.github/workflows/build.yaml: -------------------------------------------------------------------------------- 1 | name: packages 2 | on: 3 | push: 4 | tags: 5 | - "v[0-9]+.[0-9]+.[0-9]+" 6 | - "v[0-9]+.[0-9]+.[0-9]+a[0-9]+" 7 | - "v[0-9]+.[0-9]+.[0-9]+b[0-9]+" 8 | 
- "v[0-9]+.[0-9]+.[0-9]+rc[0-9]+" 9 | # Dry-run only 10 | workflow_dispatch: 11 | schedule: 12 | - cron: "0 16 * * SUN" 13 | 14 | defaults: 15 | run: 16 | shell: bash -e {0} 17 | 18 | env: 19 | PYTHON_VERSION: "3.11" 20 | PACKAGE: "datashader" 21 | 22 | jobs: 23 | waiting_room: 24 | name: Waiting Room 25 | runs-on: ubuntu-latest 26 | needs: [conda_build, pip_install] 27 | if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') 28 | environment: 29 | name: publish 30 | steps: 31 | - run: echo "All builds have finished, have been approved, and ready to publish" 32 | 33 | pixi_lock: 34 | name: Pixi lock 35 | runs-on: ubuntu-latest 36 | steps: 37 | - uses: holoviz-dev/holoviz_tasks/pixi_lock@v0 38 | 39 | conda_build: 40 | name: Build Conda 41 | needs: [pixi_lock] 42 | runs-on: "ubuntu-latest" 43 | steps: 44 | - uses: holoviz-dev/holoviz_tasks/pixi_install@v0 45 | with: 46 | environments: "build" 47 | download-data: false 48 | install: false 49 | - name: conda build 50 | run: pixi run -e build build-conda 51 | - uses: actions/upload-artifact@v4 52 | if: always() 53 | with: 54 | name: conda 55 | path: dist/*.tar.bz2 56 | if-no-files-found: error 57 | 58 | conda_publish: 59 | name: Publish Conda 60 | runs-on: ubuntu-latest 61 | needs: [conda_build, waiting_room] 62 | if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') 63 | defaults: 64 | run: 65 | shell: bash -el {0} 66 | steps: 67 | - uses: actions/download-artifact@v4 68 | with: 69 | name: conda 70 | path: dist/ 71 | - name: Set environment variables 72 | run: | 73 | echo "TAG=${GITHUB_REF#refs/*/}" >> $GITHUB_ENV 74 | echo "CONDA_FILE=$(ls dist/*.tar.bz2)" >> $GITHUB_ENV 75 | - uses: conda-incubator/setup-miniconda@v3 76 | with: 77 | miniconda-version: "latest" 78 | channels: "conda-forge" 79 | - name: conda setup 80 | run: | 81 | conda install -y anaconda-client 82 | - name: conda dev upload 83 | if: contains(env.TAG, 'a') || contains(env.TAG, 'b') || contains(env.TAG, 'rc') 84 | run: | 85 | anaconda --token ${{ secrets.CONDA_UPLOAD_TOKEN }} upload --user pyviz --label=dev $CONDA_FILE 86 | - name: conda main upload 87 | if: (!(contains(env.TAG, 'a') || contains(env.TAG, 'b') || contains(env.TAG, 'rc'))) 88 | run: | 89 | anaconda --token ${{ secrets.CONDA_UPLOAD_TOKEN }} upload --user pyviz --label=dev --label=main $CONDA_FILE 90 | 91 | pip_build: 92 | name: Build PyPI 93 | needs: [pixi_lock] 94 | runs-on: "ubuntu-latest" 95 | steps: 96 | - uses: holoviz-dev/holoviz_tasks/pixi_install@v0 97 | with: 98 | environments: "build" 99 | download-data: false 100 | install: false 101 | - name: Build package 102 | run: pixi run -e build build-pip 103 | - uses: actions/upload-artifact@v4 104 | if: always() 105 | with: 106 | name: pip 107 | path: dist/ 108 | if-no-files-found: error 109 | 110 | pip_install: 111 | name: Install PyPI 112 | runs-on: "ubuntu-latest" 113 | needs: [pip_build] 114 | steps: 115 | - uses: actions/setup-python@v5 116 | with: 117 | python-version: ${{ env.PYTHON_VERSION }} 118 | - uses: actions/download-artifact@v4 119 | with: 120 | name: pip 121 | path: dist/ 122 | - name: Install package 123 | run: python -m pip install dist/*.whl 124 | - name: Import package 125 | run: python -c "import $PACKAGE; print($PACKAGE._version.__version__)" 126 | 127 | pip_publish: 128 | name: Publish PyPI 129 | runs-on: ubuntu-latest 130 | needs: [pip_build, waiting_room] 131 | if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') 132 | steps: 133 | - uses: actions/download-artifact@v4 134 | with: 
135 | name: pip 136 | path: dist/ 137 | - name: Publish to PyPI 138 | uses: pypa/gh-action-pypi-publish@release/v1 139 | with: 140 | user: ${{ secrets.PPU }} 141 | password: ${{ secrets.PPP }} 142 | repository-url: "https://upload.pypi.org/legacy/" 143 | -------------------------------------------------------------------------------- /.github/workflows/docs.yaml: -------------------------------------------------------------------------------- 1 | name: docs 2 | on: 3 | push: 4 | tags: 5 | - "v[0-9]+.[0-9]+.[0-9]+" 6 | - "v[0-9]+.[0-9]+.[0-9]+a[0-9]+" 7 | - "v[0-9]+.[0-9]+.[0-9]+b[0-9]+" 8 | - "v[0-9]+.[0-9]+.[0-9]+rc[0-9]+" 9 | workflow_dispatch: 10 | inputs: 11 | target: 12 | description: "Site to build and deploy" 13 | type: choice 14 | options: 15 | - dev 16 | - main 17 | - dryrun 18 | required: true 19 | default: dryrun 20 | schedule: 21 | - cron: "0 16 * * SUN" 22 | 23 | defaults: 24 | run: 25 | shell: bash -e {0} 26 | 27 | jobs: 28 | pixi_lock: 29 | name: Pixi lock 30 | runs-on: ubuntu-latest 31 | steps: 32 | - uses: holoviz-dev/holoviz_tasks/pixi_lock@v0 33 | 34 | docs_build: 35 | name: Build Documentation 36 | needs: [pixi_lock] 37 | runs-on: "macos-latest" 38 | timeout-minutes: 180 39 | outputs: 40 | tag: ${{ steps.vars.outputs.tag }} 41 | env: 42 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 43 | steps: 44 | - uses: holoviz-dev/holoviz_tasks/pixi_install@v0 45 | with: 46 | environments: docs 47 | - name: Build documentation 48 | run: pixi run -e docs docs-build 49 | - uses: actions/upload-artifact@v4 50 | if: always() 51 | with: 52 | name: docs 53 | if-no-files-found: error 54 | path: builtdocs 55 | - name: Set output 56 | id: vars 57 | run: echo "tag=${GITHUB_REF#refs/*/}" >> $GITHUB_OUTPUT 58 | 59 | docs_publish: 60 | name: Publish Documentation 61 | runs-on: "ubuntu-latest" 62 | needs: [docs_build] 63 | env: 64 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 65 | AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} 66 | AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} 67 | AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }} 68 | steps: 69 | - uses: actions/download-artifact@v4 70 | with: 71 | name: docs 72 | path: builtdocs/ 73 | - name: Set output 74 | id: vars 75 | run: echo "tag=${{ needs.docs_build.outputs.tag }}" >> $GITHUB_OUTPUT 76 | - name: upload dev 77 | uses: peaceiris/actions-gh-pages@v3 78 | if: | 79 | (github.event_name == 'workflow_dispatch' && github.event.inputs.target == 'dev') || 80 | (github.event_name == 'push' && (contains(steps.vars.outputs.tag, 'a') || contains(steps.vars.outputs.tag, 'b') || contains(steps.vars.outputs.tag, 'rc'))) 81 | with: 82 | personal_token: ${{ secrets.ACCESS_TOKEN }} 83 | external_repository: holoviz-dev/datashader 84 | publish_dir: ./builtdocs 85 | force_orphan: true 86 | - name: upload main 87 | if: | 88 | (github.event_name == 'workflow_dispatch' && github.event.inputs.target == 'main') || 89 | (github.event_name == 'push' && !(contains(steps.vars.outputs.tag, 'a') || contains(steps.vars.outputs.tag, 'b') || contains(steps.vars.outputs.tag, 'rc'))) 90 | uses: peaceiris/actions-gh-pages@v3 91 | with: 92 | github_token: ${{ secrets.GITHUB_TOKEN }} 93 | publish_dir: ./builtdocs 94 | cname: datashader.org 95 | force_orphan: true 96 | -------------------------------------------------------------------------------- /.github/workflows/downstream_tests.yaml: -------------------------------------------------------------------------------- 1 | name: downstream_tests 2 | 3 | on: 4 | # Run this workflow after the build workflow has 
completed. 5 | workflow_run: 6 | workflows: [packages] 7 | types: [completed] 8 | # Or by triggering it manually via Github's UI 9 | workflow_dispatch: 10 | inputs: 11 | manual: 12 | description: don't change me! 13 | type: boolean 14 | required: true 15 | default: true 16 | 17 | jobs: 18 | downstream_tests: 19 | uses: holoviz-dev/holoviz_tasks/.github/workflows/run_downstream_tests.yaml@main 20 | with: 21 | downstream_repos_as_json: "{\"downstream_repo\":[\"holoviews\", \"hvplot\"]}" 22 | secrets: 23 | ACCESS_TOKEN: ${{ secrets.ACCESS_TOKEN }} 24 | -------------------------------------------------------------------------------- /.github/workflows/nightly_lock.yaml: -------------------------------------------------------------------------------- 1 | name: nightly_lock 2 | on: 3 | workflow_dispatch: 4 | schedule: 5 | - cron: "0 0 * * *" 6 | 7 | env: 8 | PACKAGE: "datashader" 9 | 10 | jobs: 11 | pixi_lock: 12 | if: ${{ !github.event.repository.fork }} 13 | name: Pixi lock 14 | runs-on: ubuntu-latest 15 | timeout-minutes: 5 16 | steps: 17 | - uses: holoviz-dev/holoviz_tasks/pixi_lock@v0 18 | - name: Upload lock-file to S3 19 | env: 20 | AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} 21 | AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} 22 | AWS_DEFAULT_REGION: "eu-west-1" 23 | run: | 24 | zip $(date +%Y-%m-%d).zip pixi.lock pixi.toml 25 | aws s3 cp ./$(date +%Y-%m-%d).zip s3://assets.holoviz.org/lock/$PACKAGE/ 26 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | pip-wheel-metadata/ 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *,cover 47 | .hypothesis/ 48 | test_tiles_output/ 49 | 50 | # Translations 51 | *.mo 52 | *.pot 53 | 54 | # Django stuff: 55 | *.log 56 | 57 | # (Mostly) auto-generated during Sphinx doc builds 58 | doc/_build/ 59 | doc/_static/nyc_races.jpg 60 | doc/_static/usa_census.jpg 61 | doc/*.ipynb 62 | doc/*/*.ipynb 63 | doc/*.rst 64 | doc/*/*.rst 65 | builtdocs/ 66 | jupyter_execute/ 67 | 68 | # PyBuilder 69 | target/ 70 | 71 | #Ipython Notebook 72 | .ipynb_checkpoints 73 | 74 | #Editor files 75 | *~ 76 | \.idea/ 77 | 78 | # Example output, notes 79 | examples/times 80 | examples/data 81 | examples/export 82 | examples/tiles_output_directory 83 | examples/user_guide/export 84 | examples/user_guide/df_world.parq 85 | examples/user_guide/sgeodf.parq 86 | 87 | **.org 88 | .doit* 89 | datashader/.version 90 | /.pytest_cache 91 | 92 | .asv/ 93 | 94 | # pixi + hatchling 95 | .pixi 96 | pixi.lock 97 | _version.py 98 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | exclude: (\.(js|svg)$) 2 | 3 | repos: 4 | - repo: https://github.com/pre-commit/pre-commit-hooks 5 | rev: v5.0.0 6 | hooks: 7 | - id: check-builtin-literals 8 | - id: check-case-conflict 9 | - id: check-docstring-first 10 | - id: check-executables-have-shebangs 11 | - id: check-toml 12 | - id: detect-private-key 13 | - id: end-of-file-fixer 14 | - id: trailing-whitespace 15 | - repo: https://github.com/astral-sh/ruff-pre-commit 16 | rev: v0.10.0 17 | hooks: 18 | - id: ruff 19 | files: datashader/ 20 | - repo: https://github.com/codespell-project/codespell 21 | rev: v2.4.1 22 | hooks: 23 | - id: codespell 24 | additional_dependencies: 25 | - tomli 26 | - repo: https://github.com/hoxbro/clean_notebook 27 | rev: v0.1.15 28 | hooks: 29 | - id: clean-notebook 30 | 31 | ci: 32 | autofix_prs: false 33 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Code of Conduct 2 | 3 | For the code of conduct, see [HoloViz/HoloViz - CODE_OF_CONDUCT.md](). 4 | 5 | The Datashader Project’s equivalently named documents take precedence over any external materials referenced within this linked document above. 6 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | Copyright (c) 2015, Continuum Analytics, Inc. and contributors 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without modification, 5 | are permitted provided that the following conditions are met: 6 | 7 | Redistributions of source code must retain the above copyright notice, 8 | this list of conditions and the following disclaimer. 9 | 10 | Redistributions in binary form must reproduce the above copyright notice, 11 | this list of conditions and the following disclaimer in the documentation 12 | and/or other materials provided with the distribution. 
13 | 14 | Neither the name of Continuum Analytics nor the names of any contributors 15 | may be used to endorse or promote products derived from this software 16 | without specific prior written permission. 17 | 18 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 19 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 20 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 21 | ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE 22 | LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 23 | CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF 24 | SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS 25 | INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN 26 | CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) 27 | ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF 28 | THE POSSIBILITY OF SUCH DAMAGE. 29 | -------------------------------------------------------------------------------- /ROADMAP.md: -------------------------------------------------------------------------------- 1 | # Datashader Roadmap, as of 4/2018 2 | 3 | Datashader is an open-source project, with contributions from a variety of developers with different priorities, so it is not possible to lay out a fully detailed timeline of upcoming features. That said, there are clear priorities that the current developers have agreed on, which will be described here and updated occasionally. 4 | 5 | If you need any of the functionality listed below and want to help make it a priority, please respond to the relevant issue listed (preferably with offers of coding, financial, or other assistance!). 6 | 7 | 1. **Ongoing maintenance, improved documentation and examples** 8 | - As always, there are various bugs and usability issues reported on the issue tracker, and we will address these as time permits. 9 | - Some of the notebooks and the included dashboard need rework to use HoloViews, to make them simpler and to use a recommended workflow. 10 | 11 | 2. **Optimizing data access (via quadtree/kdtree dataset partitioning) and deployment** (including as slippy-map tiles [#246](../../issues/246)) [Scheduled for 2018] 12 | 13 | 3. **Better integration with external plotting libraries (Bokeh, HoloViews, matplotlib)** 14 | - Datashader needs to provide functions for supporting hover information, legends, colorbars, and interactivity, which each plotting library can then use ([#126](../../issues/126), [#127](../../issues/127), [#136](../../issues/136), [#251](../../issues/251)) 15 | - There is a draft of Matplotlib support ([#200](../../issues/200)), but it will need significant work before it is usable for most purposes. 16 | - HoloViews should be able to allow users to set criteria for when datashader will be substituted for a Points or Path plot, based on size 17 | 18 | 4. **More consistent and powerful shading and aggregation** 19 | - Should be able to aggregate any field categorically, not just counts ([#140](../../issues/140)) 20 | 21 | 5. **Visualizing uncertainty, anomalies, stability** 22 | - Example of plotting points with associated probabilities ([#102](../../issues/102)) 23 | - Tools for stability analysis ([#115](../../issues/115)) 24 | 25 | 6. 
**Misc:** 26 | - [#132](../../issues/132) GPU support 27 | - [#110](../../issues/110) 1D aggregation example 28 | - [#105](../../issues/105) Cyclical data example 29 | - [#103](../../issues/103) Symbolic rendering of aggregate array 30 | - [#92](../../issues/92) Box select support 31 | - [#61](../../issues/61) Add information on requirements for osm example 32 | - [#242](../../issues/242) Spatiotemporal data animation 33 | -------------------------------------------------------------------------------- /benchmarks/README.md: -------------------------------------------------------------------------------- 1 | Benchmarking 2 | ============ 3 | 4 | `Datashader` uses ASV (https://asv.readthedocs.io) for benchmarking. 5 | 6 | Preparation 7 | ----------- 8 | 9 | ASV runs benchmarks in isolated virtual environments that it creates. It identifies whether you are running in a `conda` or `virtualenv` environment so that it knows what type of environment to use. Before you run any benchmarks you need to install `asv` itself; if you are using `conda`: 10 | ``` 11 | conda install -c conda-forge asv==0.4.2 12 | ``` 13 | 14 | and if you are using `virtualenv`: 15 | ``` 16 | pip install asv==0.4.2 virtualenv 17 | ``` 18 | 19 | Running benchmarks 20 | ------------------ 21 | 22 | To run all benchmarks against the default `main` branch: 23 | ``` 24 | cd benchmarks 25 | asv run 26 | ``` 27 | 28 | The first time this is run, it will create a machine file to store information about your machine. Then a virtual environment will be created and each benchmark will be run multiple times to obtain a statistically valid benchmark time. 29 | 30 | To list the benchmark timings stored for the `main` branch use: 31 | ``` 32 | asv show main 33 | ``` 34 | 35 | ASV ships with its own simple web server to interactively display the results in a web browser. To use this: 36 | ``` 37 | asv publish 38 | asv preview 39 | ``` 40 | and then open a web browser at the URL specified. 41 | 42 | If you want to quickly run all benchmarks once only to check for errors, etc., use: 43 | ``` 44 | asv dev 45 | ``` 46 | instead of `asv run`. 47 | 48 | 49 | Running cuDF and Dask-cuDF benchmarks 50 | ------------------------------------- 51 | 52 | Benchmarks that use `pandas` and `dask` DataFrames are always run, whereas those that use `cudf` and `dask-cudf` are only run if the required libraries are installed and appropriate GPU hardware is available. Because installing the required libraries is non-trivial, it is recommended to run the benchmarks in your default `cudf`-enabled development environment rather than allow `asv` to create new environments specifically for the benchmarking. 53 | 54 | Before running `cudf` and `dask-cudf` benchmarks you should first check that you can run the Datashader `pytest` test suite, as debugging your environment is much easier using `pytest` than `asv`. 55 | 56 | The `asv` command to run all benchmarks using your default development environment is: 57 | ``` 58 | asv run --python=same --launch-method spawn 59 | ``` 60 | 61 | The `--launch-method spawn` option is recommended to avoid problems in accessing the GPU from subprocesses, which is how `asv` runs individual isolated benchmarks. 62 | 63 | 64 | Adding new benchmarks 65 | --------------------- 66 | 67 | Add new benchmarks to existing or new classes in the `benchmarks/benchmarks` directory. Any class member function with a name that starts with `time` will be identified as a timing benchmark when `asv` is run; a minimal sketch is shown below.
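To make this concrete, here is a minimal sketch of a new benchmark class, following the pattern of the existing `line.py` and `shade.py` suites. The class and benchmark names are hypothetical; it assumes the benchmark environment can import `numpy`, `pandas` and `datashader`, as the existing benchmark modules do:
```
import datashader as ds
import numpy as np
import pandas as pd


class PointsSuite:
    # Each benchmark is repeated for every combination of these parameters.
    param_names = ("point_count",)
    params = ([1_000, 100_000],)

    def setup(self, point_count):
        # Runs before every benchmark invocation; its cost is not timed.
        rng = np.random.default_rng(42)
        self.df = pd.DataFrame({"x": rng.random(point_count),
                                "y": rng.random(point_count)})
        self.canvas = ds.Canvas(plot_width=300, plot_height=300)

    def time_points(self, point_count):
        # Picked up as a timing benchmark because the name starts with "time".
        self.canvas.points(self.df, "x", "y", agg=ds.count())
```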
68 | 69 | Data that is required to run benchmarks is usually created in the `setup()` member function. This ensures that the time taken to set up the data is not included in the benchmark time. The `setup()` function is called once for each invocation of each benchmark; the data are not cached. 70 | 71 | At the top of each benchmark class there are lists of parameter names and values. Each benchmark is repeated for each unique combination of these parameters. 72 | 73 | If you only want to run a subset of benchmarks, use syntax like: 74 | ``` 75 | asv run -b ShadeCategorical 76 | ``` 77 | where the text after the `-b` flag is used as a regex to match benchmark file, class, and function names. 78 | 79 | 80 | Benchmarking code changes 81 | ------------------------- 82 | 83 | You can compare the performance of code on different branches and in different commits. Usually, if you want to determine how much faster a new algorithm is, the old code will be in the `main` branch and the new code will be in a new feature branch. Because ASV uses virtual environments and checks out the `datashader` source code into these virtual environments, your new code must be committed into the new feature branch locally. 84 | 85 | To benchmark the latest commits on `main` and your new feature branch, edit `asv.conf.json` to change the line 86 | ``` 87 | "branches": ["main"], 88 | ``` 89 | into 90 | ``` 91 | "branches": ["main", "new_feature_branch"], 92 | ``` 93 | or similar. 94 | 95 | Now when you `asv run`, the benchmarks will be run against both branches in turn. 96 | 97 | Then use 98 | ``` 99 | asv show 100 | ``` 101 | to list the commits that have been benchmarked, and 102 | ``` 103 | asv compare commit1 commit2 104 | ``` 105 | to give you a side-by-side comparison of the two commits.
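For example, assuming the feature branch configured above is named `new_feature_branch`, a typical comparison session might look like this (the revisions passed to `asv compare` can be branch names as well as commit hashes, which `asv` resolves to commits):
```
asv run
asv show
asv compare main new_feature_branch
```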
106 | -------------------------------------------------------------------------------- /benchmarks/benchmarks/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/benchmarks/benchmarks/__init__.py -------------------------------------------------------------------------------- /benchmarks/benchmarks/common.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | 3 | 4 | class DataLibrary(Enum): 5 | PandasDF = 1 6 | DaskDF = 2 7 | CuDF = 3 8 | DaskCuDF = 4 9 | -------------------------------------------------------------------------------- /benchmarks/benchmarks/line.py: -------------------------------------------------------------------------------- 1 | import dask.dataframe as dd 2 | import datashader as ds 3 | import numpy as np 4 | import pandas as pd 5 | from .common import DataLibrary 6 | 7 | try: 8 | import cudf 9 | except: 10 | cudf = None 11 | 12 | try: 13 | import dask_cudf 14 | except: 15 | dask_cudf = None 16 | 17 | 18 | class Line: 19 | param_names = ("data_library", "line_count", "line_width", "self_intersect") 20 | params = ( 21 | [DataLibrary.PandasDF, DataLibrary.DaskDF, DataLibrary.CuDF, DataLibrary.DaskCuDF], 22 | [1000, 10000], [0, 1, 2], [False, True], 23 | ) 24 | 25 | def setup(self, data_library, line_count, line_width, self_intersect): 26 | canvas_size = 1000 27 | points_per_line = 10 28 | 29 | self.canvas = ds.Canvas(canvas_size, canvas_size) 30 | 31 | self.x = np.linspace(0, 1, points_per_line) 32 | rng = np.random.default_rng(428921) 33 | y = np.cumsum( 34 | np.c_[np.zeros((line_count, 1)), rng.standard_normal((line_count, points_per_line))], 35 | axis=1, 36 | ) 37 | self.df = pd.DataFrame(y) 38 | 39 | if data_library == DataLibrary.PandasDF: 40 | pass 41 | elif data_library == DataLibrary.DaskDF: 42 | self.df = dd.from_pandas(self.df, npartitions=4) 43 | elif data_library == DataLibrary.CuDF: 44 | if cudf: 45 | self.df = cudf.DataFrame.from_pandas(self.df) 46 | else: 47 | raise NotImplementedError("CuDF not available") 48 | elif data_library == DataLibrary.DaskCuDF: 49 | if dask_cudf: 50 | cdf = cudf.DataFrame.from_pandas(self.df) 51 | self.df = dask_cudf.from_cudf(cdf, npartitions=4) 52 | else: 53 | raise NotImplementedError("Dask-cuDF not available") 54 | else: 55 | raise NotImplementedError(f"data_library {data_library} not supported in this test") 56 | 57 | def time_LinesAxis1XConstant(self, data_library, line_count, line_width, self_intersect): 58 | if line_width == 0 and not self_intersect: 59 | raise NotImplementedError # Same as line_width=0, self_intersect=False 60 | elif line_width > 0 and data_library not in [DataLibrary.PandasDF, DataLibrary.DaskDF]: 61 | raise NotImplementedError # Antialiased lines only work on CPU not GPU 62 | 63 | agg = ds.count(self_intersect=self_intersect) 64 | self.canvas.line( 65 | self.df, x=self.x, y=list(self.df.columns), agg=agg, axis=1, line_width=line_width, 66 | ) 67 | -------------------------------------------------------------------------------- /benchmarks/benchmarks/shade.py: -------------------------------------------------------------------------------- 1 | import colorcet 2 | import datashader.transfer_functions as tf 3 | import numpy as np 4 | import xarray as xr 5 | 6 | 7 | class Shade: 8 | param_names = ("canvas_size", "how") 9 | params = ([300, 1000], ["linear", "log", "eq_hist"]) 10 | 11 | def setup(self, 
canvas_size, how): 12 | rng = np.random.default_rng(349120) 13 | data = rng.random((canvas_size, canvas_size), dtype=np.float32) 14 | data[data < 0.1] = np.nan # Want some nans in data. 15 | x = np.arange(canvas_size, dtype=np.float32) 16 | y = np.arange(canvas_size, dtype=np.float32) 17 | self.agg = xr.DataArray(data=data, dims=["y", "x"], coords=dict(x=x, y=y)) 18 | 19 | def time_shade(self, canvas_size, how): 20 | tf.shade(self.agg, how=how, cmap=colorcet.fire) 21 | 22 | 23 | class ShadeCategorical: 24 | param_names = ("canvas_size", "how", "category_count") 25 | params = ([300, 1000], ["linear", "log", "eq_hist"], [3, 10, 30, 100]) 26 | 27 | def setup(self, canvas_size, how, category_count): 28 | rng = np.random.default_rng(349120) 29 | data = rng.random((canvas_size, canvas_size, category_count), dtype=np.float32) 30 | data[data < 0.1] = np.nan # Want some nans in data. 31 | x = np.arange(canvas_size, dtype=np.float32) 32 | y = np.arange(canvas_size, dtype=np.float32) 33 | cat = [f"cat{i}" for i in range(category_count)] 34 | self.agg = xr.DataArray(data=data, dims=["y", "x", "cat"], coords=dict(x=x, y=y, cat=cat)) 35 | 36 | random_colors = rng.choice(colorcet.rainbow, category_count) 37 | self.color_key = {k: v for k, v in zip(cat, random_colors)} 38 | 39 | def time_shade_categorical(self, canvas_size, how, category_count): 40 | tf.shade(self.agg, how=how, color_key=self.color_key) 41 | -------------------------------------------------------------------------------- /datashader/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from packaging.version import Version 4 | 5 | from .__version import __version__ # noqa: F401 6 | 7 | from .core import Canvas # noqa (API import) 8 | from .reductions import * # noqa (API import) 9 | from .glyphs import Point # noqa (API import) 10 | from .pipeline import Pipeline # noqa (API import) 11 | from . import transfer_functions as tf # noqa (API import) 12 | from . import data_libraries # noqa (API import) 13 | 14 | # Make RaggedArray pandas extension array available for 15 | # pandas >= 0.24.0 is installed 16 | from pandas import __version__ as pandas_version 17 | if Version(pandas_version) >= Version('0.24.0'): 18 | from . import datatypes # noqa (API import) 19 | 20 | # make pyct's example/data commands available if possible 21 | from functools import partial 22 | try: 23 | from pyct.cmd import copy_examples as _copy, fetch_data as _fetch, examples as _examples 24 | copy_examples = partial(_copy,'datashader') 25 | fetch_data = partial(_fetch,'datashader') 26 | examples = partial(_examples,'datashader') 27 | except ImportError: 28 | def _missing_cmd(*args,**kw): 29 | return("install pyct to enable this command (e.g. `conda install pyct or " 30 | "`pip install pyct[cmd]`)") 31 | _copy = _fetch = _examples = _missing_cmd 32 | def err(): 33 | raise ValueError(_missing_cmd()) 34 | fetch_data = copy_examples = examples = err 35 | del partial, _examples, _copy, _fetch 36 | -------------------------------------------------------------------------------- /datashader/__main__.py: -------------------------------------------------------------------------------- 1 | def main(args=None): 2 | try: 3 | import pyct.cmd 4 | except ImportError: 5 | import sys 6 | from . 
import _missing_cmd 7 | print(_missing_cmd()) 8 | sys.exit(1) 9 | return pyct.cmd.substitute_main('datashader',args=args) 10 | 11 | if __name__ == "__main__": 12 | main() 13 | -------------------------------------------------------------------------------- /datashader/__version.py: -------------------------------------------------------------------------------- 1 | """Define the package version. 2 | 3 | Called __version.py as setuptools_scm will create a _version.py 4 | """ 5 | 6 | import os.path 7 | 8 | PACKAGE = "datashader" 9 | 10 | try: 11 | # For performance reasons on imports, avoid importing setuptools_scm 12 | # if not in a .git folder 13 | if os.path.exists(os.path.join(os.path.dirname(__file__), "..", ".git")): 14 | # If setuptools_scm is installed (e.g. in a development environment with 15 | # an editable install), then use it to determine the version dynamically. 16 | from setuptools_scm import get_version 17 | 18 | # This will fail with LookupError if the package is not installed in 19 | # editable mode or if Git is not installed. 20 | __version__ = get_version(root="..", relative_to=__file__) 21 | else: 22 | raise FileNotFoundError 23 | except (ImportError, LookupError, FileNotFoundError): 24 | # As a fallback, use the version that is hard-coded in the file. 25 | try: 26 | # __version__ was added in _version in setuptools-scm 7.0.0, we rely on 27 | # the hopefully stable version variable. 28 | from ._version import version as __version__ 29 | except (ModuleNotFoundError, ImportError): 30 | # Either _version doesn't exist (ModuleNotFoundError) or version isn't 31 | # in _version (ImportError). ModuleNotFoundError is a subclass of 32 | # ImportError, let's be explicit anyway. 33 | 34 | # Try something else: 35 | from importlib.metadata import PackageNotFoundError, version 36 | 37 | try: 38 | __version__ = version(PACKAGE) 39 | except PackageNotFoundError: 40 | # The user is probably trying to run this without having installed 41 | # the package. 42 | __version__ = "0.0.0+unknown" 43 | 44 | __all__ = ("__version__",) 45 | -------------------------------------------------------------------------------- /datashader/antialias.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from enum import Enum 3 | from typing import NamedTuple, TYPE_CHECKING 4 | 5 | 6 | # Enum used to specify how the second stage aggregation is performed 7 | # for 2-stage antialiased lines. 8 | class AntialiasCombination(Enum): 9 | SUM_1AGG = 1 10 | SUM_2AGG = 2 11 | MIN = 3 12 | MAX = 4 13 | FIRST = 5 14 | LAST = 6 15 | 16 | 17 | class AntialiasStage2(NamedTuple): 18 | """Configuration for second-stage combination of a single antialiased reduction.""" 19 | combination: AntialiasCombination 20 | zero: float 21 | n_reduction: bool = False 22 | categorical: bool = False 23 | 24 | 25 | if TYPE_CHECKING: 26 | UnzippedAntialiasStage2 = \ 27 | tuple[tuple[AntialiasCombination], tuple[float], tuple[bool], tuple[bool]] 28 | 29 | 30 | def two_stage_agg(antialias_stage_2: UnzippedAntialiasStage2 | None): 31 | """Information used to perform the correct stage 2 aggregation.""" 32 | if not antialias_stage_2: 33 | # Not using antialiased lines, doesn't matter what is returned. 34 | return False, False 35 | 36 | aa_combinations = antialias_stage_2[0] 37 | 38 | # A single combination in (SUM_2AGG, FIRST, LAST, MIN) means that a 2-stage 39 | # aggregation will be used, otherwise use a 1-stage aggregation that is 40 | # faster. 
41 | use_2_stage_agg = False 42 | for comb in aa_combinations: 43 | if comb in (AntialiasCombination.SUM_2AGG, AntialiasCombination.MIN, 44 | AntialiasCombination.FIRST, AntialiasCombination.LAST): 45 | use_2_stage_agg = True 46 | break 47 | 48 | # Boolean overwrite flag is used in _full_antialias() is True to overwrite 49 | # pixel values (using max of previous and new values) or False for the more 50 | # complicated correction algorithm. Prefer overwrite=True for speed, but 51 | # any SUM_1AGG implies overwrite=False. 52 | overwrite = True 53 | for comb in aa_combinations: 54 | if comb == AntialiasCombination.SUM_1AGG: 55 | overwrite = False 56 | break 57 | 58 | return overwrite, use_2_stage_agg 59 | -------------------------------------------------------------------------------- /datashader/composite.py: -------------------------------------------------------------------------------- 1 | """ 2 | Binary graphical composition operators 3 | 4 | See https://www.cairographics.org/operators/; more could easily be added from there. 5 | """ 6 | 7 | from __future__ import annotations 8 | 9 | import numba as nb 10 | import numpy as np 11 | import os 12 | 13 | image_operators = ('over', 'add', 'saturate', 'source') 14 | array_operators = ('add_arr', 'max_arr', 'min_arr', 'source_arr') 15 | __all__ = ('composite_op_lookup', 'validate_operator') + image_operators + array_operators 16 | 17 | 18 | def validate_operator(how, is_image): 19 | name = how if is_image else how + '_arr' 20 | if is_image: 21 | if name not in image_operators: 22 | raise ValueError('Operator %r not one of the supported image operators: %s' 23 | % (how, ', '.join(repr(el) for el in image_operators))) 24 | elif name not in array_operators: 25 | raise ValueError('Operator %r not one of the supported array operators: %s' 26 | % (how, ', '.join(repr(el[:-4]) for el in array_operators))) 27 | 28 | 29 | @nb.jit('(uint32,)', nopython=True, nogil=True, cache=True) 30 | def extract_scaled(x): 31 | """Extract components as float64 values in [0.0, 1.0]""" 32 | r = np.float64(( x & 255) / 255) 33 | g = np.float64(((x >> 8) & 255) / 255) 34 | b = np.float64(((x >> 16) & 255) / 255) 35 | a = np.float64(((x >> 24) & 255) / 255) 36 | return r, g, b, a 37 | 38 | 39 | @nb.jit('(float64, float64, float64, float64)', nopython=True, 40 | nogil=True, cache=True) 41 | def combine_scaled(r, g, b, a): 42 | """Combine components in [0, 1] to rgba uint32""" 43 | r2 = min(255, np.uint32(r * 255)) 44 | g2 = min(255, np.uint32(g * 255)) 45 | b2 = min(255, np.uint32(b * 255)) 46 | a2 = min(255, np.uint32(a * 255)) 47 | return np.uint32((a2 << 24) | (b2 << 16) | (g2 << 8) | r2) 48 | 49 | 50 | jit_enabled = os.environ.get('NUMBA_DISABLE_JIT', '0') == '0' 51 | 52 | 53 | if jit_enabled: 54 | extract_scaled.disable_compile() 55 | combine_scaled.disable_compile() 56 | 57 | # Lookup table for storing compositing operators by function name 58 | composite_op_lookup = {} 59 | 60 | 61 | def operator(f): 62 | """Define and register a new image composite operator""" 63 | 64 | if jit_enabled: 65 | f2 = nb.vectorize(f) 66 | f2._compile_for_argtys((nb.types.uint32, nb.types.uint32)) 67 | f2._frozen = True 68 | else: 69 | f2 = np.vectorize(f) 70 | 71 | composite_op_lookup[f.__name__] = f2 72 | return f2 73 | 74 | 75 | @operator 76 | def source(src, dst): 77 | if src & 0xff000000: 78 | return src 79 | else: 80 | return dst 81 | 82 | 83 | @operator 84 | def over(src, dst): 85 | sr, sg, sb, sa = extract_scaled(src) 86 | dr, dg, db, da = extract_scaled(dst) 87 | 88 | factor = 1 - 
sa 89 | a = sa + da * factor 90 | if a == 0: 91 | return np.uint32(0) 92 | r = (sr * sa + dr * da * factor)/a 93 | g = (sg * sa + dg * da * factor)/a 94 | b = (sb * sa + db * da * factor)/a 95 | return combine_scaled(r, g, b, a) 96 | 97 | 98 | @operator 99 | def add(src, dst): 100 | sr, sg, sb, sa = extract_scaled(src) 101 | dr, dg, db, da = extract_scaled(dst) 102 | 103 | a = min(1, sa + da) 104 | if a == 0: 105 | return np.uint32(0) 106 | r = (sr * sa + dr * da)/a 107 | g = (sg * sa + dg * da)/a 108 | b = (sb * sa + db * da)/a 109 | return combine_scaled(r, g, b, a) 110 | 111 | 112 | @operator 113 | def saturate(src, dst): 114 | sr, sg, sb, sa = extract_scaled(src) 115 | dr, dg, db, da = extract_scaled(dst) 116 | 117 | a = min(1, sa + da) 118 | if a == 0: 119 | return np.uint32(0) 120 | factor = min(sa, 1 - da) 121 | r = (factor * sr + dr * da)/a 122 | g = (factor * sg + dg * da)/a 123 | b = (factor * sb + db * da)/a 124 | return combine_scaled(r, g, b, a) 125 | 126 | 127 | 128 | def arr_operator(f): 129 | """Define and register a new array composite operator""" 130 | 131 | if jit_enabled: 132 | f2 = nb.vectorize(f) 133 | f2._compile_for_argtys( 134 | (nb.types.int32, nb.types.int32)) 135 | f2._compile_for_argtys( 136 | (nb.types.int64, nb.types.int64)) 137 | f2._compile_for_argtys( 138 | (nb.types.float32, nb.types.float32)) 139 | f2._compile_for_argtys( 140 | (nb.types.float64, nb.types.float64)) 141 | f2._frozen = True 142 | else: 143 | f2 = np.vectorize(f) 144 | 145 | composite_op_lookup[f.__name__] = f2 146 | return f2 147 | 148 | 149 | @arr_operator 150 | def source_arr(src, dst): 151 | if src: 152 | return src 153 | else: 154 | return dst 155 | 156 | @arr_operator 157 | def add_arr(src, dst): 158 | return src + dst 159 | 160 | @arr_operator 161 | def max_arr(src, dst): 162 | return max([src, dst]) 163 | 164 | @arr_operator 165 | def min_arr(src, dst): 166 | return min([src, dst]) 167 | -------------------------------------------------------------------------------- /datashader/data_libraries/__init__.py: -------------------------------------------------------------------------------- 1 | from . import pandas, xarray # noqa (API import) 2 | 3 | try: 4 | import dask as _dask # noqa (Test dask installed) 5 | from . import dask # noqa (API import) 6 | except ImportError: 7 | pass 8 | 9 | try: 10 | import cudf as _cudf # noqa (Test cudf installed) 11 | import cupy as _cupy # noqa (Test cupy installed) 12 | from . import cudf # noqa (API import) 13 | 14 | import dask_cudf as _dask_cudf # noqa (Test dask_cudf installed) 15 | from . 
import dask_cudf # noqa (API import) 16 | 17 | except Exception: 18 | pass 19 | -------------------------------------------------------------------------------- /datashader/data_libraries/cudf.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from contextlib import suppress 3 | from datashader.data_libraries.pandas import default 4 | from datashader.core import bypixel 5 | 6 | 7 | def cudf_pipeline(df, schema, canvas, glyph, summary, *, antialias=False): 8 | return default(glyph, df, schema, canvas, summary, antialias=antialias, cuda=True) 9 | 10 | 11 | with suppress(ImportError): 12 | import cudf 13 | 14 | cudf_pipeline = bypixel.pipeline.register(cudf.DataFrame)(cudf_pipeline) 15 | -------------------------------------------------------------------------------- /datashader/data_libraries/dask_cudf.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from contextlib import suppress 3 | from datashader.data_libraries.dask import dask_pipeline 4 | from datashader.core import bypixel 5 | 6 | 7 | def dask_cudf_pipeline(df, schema, canvas, glyph, summary, *, antialias=False): 8 | return dask_pipeline(df, schema, canvas, glyph, summary, antialias=antialias, cuda=True) 9 | 10 | 11 | with suppress(ImportError): 12 | import dask_cudf 13 | 14 | dask_cudf_pipeline = bypixel.pipeline.register(dask_cudf.DataFrame)(dask_cudf_pipeline) 15 | -------------------------------------------------------------------------------- /datashader/data_libraries/pandas.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import pandas as pd 4 | 5 | from datashader.core import bypixel 6 | from datashader.compiler import compile_components 7 | from datashader.glyphs.points import _PointLike, _GeometryLike 8 | from datashader.glyphs.area import _AreaToLineLike 9 | from datashader.glyphs.line import LinesXarrayCommonX 10 | from datashader.utils import Dispatcher 11 | 12 | __all__ = () 13 | 14 | 15 | @bypixel.pipeline.register(pd.DataFrame) 16 | def pandas_pipeline(df, schema, canvas, glyph, summary, *, antialias=False): 17 | return glyph_dispatch(glyph, df, schema, canvas, summary, antialias=antialias) 18 | 19 | 20 | glyph_dispatch = Dispatcher() 21 | 22 | 23 | @glyph_dispatch.register(_PointLike) 24 | @glyph_dispatch.register(_GeometryLike) 25 | @glyph_dispatch.register(_AreaToLineLike) 26 | def default(glyph, source, schema, canvas, summary, *, antialias=False, cuda=False): 27 | create, info, append, _, finalize, antialias_stage_2, antialias_stage_2_funcs, _ = \ 28 | compile_components(summary, schema, glyph, antialias=antialias, cuda=cuda, 29 | partitioned=False) 30 | x_mapper = canvas.x_axis.mapper 31 | y_mapper = canvas.y_axis.mapper 32 | extend = glyph._build_extend( 33 | x_mapper, y_mapper, info, append, antialias_stage_2, antialias_stage_2_funcs) 34 | 35 | x_range = canvas.x_range or glyph.compute_x_bounds(source) 36 | y_range = canvas.y_range or glyph.compute_y_bounds(source) 37 | canvas.validate_ranges(x_range, y_range) 38 | 39 | width = canvas.plot_width 40 | height = canvas.plot_height 41 | 42 | x_st = canvas.x_axis.compute_scale_and_translate(x_range, width) 43 | y_st = canvas.y_axis.compute_scale_and_translate(y_range, height) 44 | 45 | x_axis = canvas.x_axis.compute_index(x_st, width) 46 | y_axis = canvas.y_axis.compute_index(y_st, height) 47 | 48 | bases = create((height, width)) 
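# `bases` holds the freshly created aggregation buffer(s) returned by `create`;
# the `extend` call below fills them in, and `finalize` then assembles them
# into the output xarray object.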
49 | 50 | if isinstance(glyph, LinesXarrayCommonX) and summary.uses_row_index(cuda, partitioned=False): 51 | # Need to use a row index and extract.apply() doesn't have enough 52 | # information to determine the coordinate length itself so do so here 53 | # and pass it along as an xarray attribute in the usual manner. 54 | other_dim_index = 1 - glyph.x_dim_index 55 | other_dim_name = source[glyph.y].coords.dims[other_dim_index] 56 | length = len(source[other_dim_name]) 57 | source = source.assign_attrs(_datashader_row_offset=0, _datashader_row_length=length) 58 | 59 | extend(bases, source, x_st + y_st, x_range + y_range) 60 | 61 | return finalize(bases, 62 | cuda=cuda, 63 | coords=dict([(glyph.x_label, x_axis), 64 | (glyph.y_label, y_axis)]), 65 | dims=[glyph.y_label, glyph.x_label], 66 | attrs=dict(x_range=x_range, y_range=y_range)) 67 | -------------------------------------------------------------------------------- /datashader/data_libraries/xarray.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from datashader.glyphs.line import LinesXarrayCommonX 3 | from datashader.glyphs.quadmesh import _QuadMeshLike 4 | from datashader.data_libraries.pandas import default 5 | from datashader.core import bypixel 6 | import xarray as xr 7 | from datashader.utils import Dispatcher 8 | 9 | 10 | try: 11 | import cupy 12 | except Exception: 13 | cupy = None 14 | 15 | glyph_dispatch = Dispatcher() 16 | 17 | 18 | @bypixel.pipeline.register(xr.Dataset) 19 | def xarray_pipeline(xr_ds, schema, canvas, glyph, summary, *, antialias=False): 20 | cuda = False 21 | if cupy: 22 | if isinstance(glyph, LinesXarrayCommonX): 23 | cuda = isinstance(xr_ds[glyph.y].data, cupy.ndarray) 24 | else: 25 | cuda = isinstance(xr_ds[glyph.name].data, cupy.ndarray) 26 | 27 | if not xr_ds.chunks: 28 | return glyph_dispatch( 29 | glyph, xr_ds, schema, canvas, summary, antialias=antialias, cuda=cuda) 30 | else: 31 | from datashader.data_libraries.dask_xarray import dask_xarray_pipeline 32 | return dask_xarray_pipeline( 33 | glyph, xr_ds, schema, canvas, summary, antialias=antialias, cuda=cuda) 34 | 35 | 36 | # Default to default pandas implementation 37 | glyph_dispatch.register(_QuadMeshLike)(default) 38 | glyph_dispatch.register(LinesXarrayCommonX)(default) 39 | -------------------------------------------------------------------------------- /datashader/datashape/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | from . 
import lexer, parser # noqa (API import) 3 | from .coretypes import * # noqa (API import) 4 | from .predicates import * # noqa (API import) 5 | from .typesets import * # noqa (API import) 6 | from .user import * # noqa (API import) 7 | from .type_symbol_table import * # noqa (API import) 8 | from .discovery import discover # noqa (API import) 9 | from .util import * # noqa (API import) 10 | from .promote import promote, optionify # noqa (API import) 11 | from .error import DataShapeSyntaxError # noqa (API import) 12 | -------------------------------------------------------------------------------- /datashader/datashape/dispatch.py: -------------------------------------------------------------------------------- 1 | from multipledispatch import dispatch 2 | from functools import partial 3 | 4 | namespace = {} 5 | 6 | dispatch = partial(dispatch, namespace=namespace) 7 | -------------------------------------------------------------------------------- /datashader/datashape/error.py: -------------------------------------------------------------------------------- 1 | """Error handling""" 2 | 3 | syntax_error = """ 4 | 5 | File {filename}, line {lineno} 6 | {line} 7 | {pointer} 8 | 9 | {error}: {msg} 10 | """ 11 | 12 | class DataShapeSyntaxError(SyntaxError): 13 | """ 14 | Makes datashape parse errors look like Python SyntaxError. 15 | """ 16 | def __init__(self, lexpos, filename, text, msg=None): 17 | self.lexpos = lexpos 18 | self.filename = filename 19 | self.text = text 20 | self.msg = msg or 'invalid syntax' 21 | self.lineno = text.count('\n', 0, lexpos) + 1 22 | # Get the extent of the line with the error 23 | linestart = text.rfind('\n', 0, lexpos) 24 | if linestart < 0: 25 | linestart = 0 26 | else: 27 | linestart = linestart + 1 28 | lineend = text.find('\n', lexpos) 29 | if lineend < 0: 30 | lineend = len(text) 31 | self.line = text[linestart:lineend] 32 | self.col_offset = lexpos - linestart 33 | 34 | def __str__(self): 35 | pointer = ' ' * self.col_offset + '^' 36 | 37 | return syntax_error.format( 38 | filename=self.filename, 39 | lineno=self.lineno, 40 | line=self.line, 41 | pointer=pointer, 42 | msg=self.msg, 43 | error=self.__class__.__name__, 44 | ) 45 | 46 | def __repr__(self): 47 | return str(self) 48 | -------------------------------------------------------------------------------- /datashader/datashape/internal_utils.py: -------------------------------------------------------------------------------- 1 | """ 2 | Utility functions that are unrelated to datashape 3 | 4 | Do not import datashape modules into this module. See util.py in that case 5 | """ 6 | 7 | 8 | import keyword 9 | import re 10 | 11 | 12 | class IndexCallable: 13 | """ Provide getitem syntax for functions 14 | 15 | >>> def inc(x): 16 | ... return x + 1 17 | 18 | >>> I = IndexCallable(inc) 19 | >>> I[3] 20 | 4 21 | """ 22 | __slots__ = 'fn', 23 | 24 | def __init__(self, fn): 25 | self.fn = fn 26 | 27 | def __getitem__(self, key): 28 | return self.fn(key) 29 | 30 | 31 | def remove(predicate, seq): 32 | return filter(lambda x: not predicate(x), seq) 33 | 34 | 35 | # Taken from theano/theano/gof/sched.py 36 | # Avoids licensing issues because this was written by Matthew Rocklin 37 | def reverse_dict(d): 38 | """Reverses direction of dependence dict 39 | 40 | >>> d = {'a': (1, 2), 'b': (2, 3), 'c':()} 41 | >>> reverse_dict(d) # doctest: +SKIP 42 | {1: ('a',), 2: ('a', 'b'), 3: ('b',)} 43 | 44 | :note: dict order are not deterministic. 
As we iterate on the 45 | input dict, it make the output of this function depend on the 46 | dict order. So this function output order should be considered 47 | as undeterministic. 48 | 49 | """ 50 | result = {} 51 | for key in d: 52 | for val in d[key]: 53 | result[val] = result.get(val, tuple()) + (key, ) 54 | return result 55 | 56 | 57 | # Taken from theano/theano/gof/sched.py 58 | # Avoids licensing issues because this was written by Matthew Rocklin 59 | def _toposort(edges): 60 | """ Topological sort algorithm by Kahn [1] - O(nodes + vertices) 61 | 62 | inputs: 63 | edges - a dict of the form {a: {b, c}} where b and c depend on a 64 | outputs: 65 | L - an ordered list of nodes that satisfy the dependencies of edges 66 | 67 | >>> _toposort({1: (2, 3), 2: (3, )}) 68 | [1, 2, 3] 69 | 70 | Closely follows the wikipedia page [2] 71 | 72 | [1] Kahn, Arthur B. (1962), "Topological sorting of large networks", 73 | Communications of the ACM 74 | [2] http://en.wikipedia.org/wiki/Toposort#Algorithms 75 | """ 76 | incoming_edges = reverse_dict(edges) 77 | incoming_edges = dict((k, set(val)) for k, val in incoming_edges.items()) 78 | S = {v for v in edges if v not in incoming_edges} 79 | L = [] 80 | 81 | while S: 82 | n = S.pop() 83 | L.append(n) 84 | for m in edges.get(n, ()): 85 | assert n in incoming_edges[m] 86 | incoming_edges[m].remove(n) 87 | if not incoming_edges[m]: 88 | S.add(m) 89 | if any(incoming_edges.get(v) for v in edges): 90 | raise ValueError("Input has cycles") 91 | return L 92 | 93 | 94 | # Taken from toolz 95 | # Avoids licensing issues because this version was authored by Matthew Rocklin 96 | def groupby(func, seq): 97 | """ Group a collection by a key function 98 | 99 | >>> names = ['Alice', 'Bob', 'Charlie', 'Dan', 'Edith', 'Frank'] 100 | >>> groupby(len, names) # doctest: +SKIP 101 | {3: ['Bob', 'Dan'], 5: ['Alice', 'Edith', 'Frank'], 7: ['Charlie']} 102 | 103 | >>> iseven = lambda x: x % 2 == 0 104 | >>> groupby(iseven, [1, 2, 3, 4, 5, 6, 7, 8]) 105 | {False: [1, 3, 5, 7], True: [2, 4, 6, 8]} 106 | 107 | See Also: 108 | ``countby`` 109 | """ 110 | 111 | d = {} 112 | for item in seq: 113 | key = func(item) 114 | if key not in d: 115 | d[key] = [] 116 | d[key].append(item) 117 | return d 118 | 119 | 120 | def isidentifier(s): 121 | return (keyword.iskeyword(s) or 122 | re.match(r'^[_a-zA-Z][_a-zA-Z0-9]*$', s) is not None) 123 | -------------------------------------------------------------------------------- /datashader/datashape/lexer.py: -------------------------------------------------------------------------------- 1 | """ 2 | Lexer for the datashape grammar. 3 | """ 4 | 5 | 6 | import re 7 | import ast 8 | import collections 9 | 10 | from . import error 11 | 12 | # This is updated to include all the token names from _tokens, 13 | # where e.g. 
_tokens[NAME_LOWER-1] is the entry for NAME_LOWER 14 | __all__ = ['lex', 'Token'] 15 | 16 | def _str_val(s): 17 | # Use the Python parser via the ast module to parse the string, 18 | # since the string_escape and unicode_escape codecs do the wrong thing 19 | return ast.parse('u' + s).body[0].value.value 20 | 21 | # A list of the token names, corresponding regex, and value extraction function 22 | _tokens = [ 23 | ('BOOLEAN', r'True|False', ast.literal_eval), 24 | ('NAME_LOWER', r'[a-z][a-zA-Z0-9_]*', lambda x : x), 25 | ('NAME_UPPER', r'[A-Z][a-zA-Z0-9_]*', lambda x : x), 26 | ('NAME_OTHER', r'_[a-zA-Z0-9_]*', lambda x : x), 27 | ('ASTERISK', r'\*'), 28 | ('COMMA', r','), 29 | ('EQUAL', r'='), 30 | ('COLON', r':'), 31 | ('LBRACKET', r'\['), 32 | ('RBRACKET', r'\]'), 33 | ('LBRACE', r'\{'), 34 | ('RBRACE', r'\}'), 35 | ('LPAREN', r'\('), 36 | ('RPAREN', r'\)'), 37 | ('ELLIPSIS', r'\.\.\.'), 38 | ('RARROW', r'->'), 39 | ('QUESTIONMARK', r'\?'), 40 | ('INTEGER', r'0(?![0-9])|-?[1-9][0-9]*', int), 41 | ('STRING', (r"""(?:"(?:[^"\n\r\\]|(?:\\u[0-9a-fA-F]{4})|(?:\\["bfnrt]))*")|""" + 42 | r"""(?:'(?:[^'\n\r\\]|(?:\\u[0-9a-fA-F]{4})|(?:\\['bfnrt]))*')"""), 43 | _str_val), 44 | ] 45 | 46 | # Dynamically add all the token indices to globals() and __all__ 47 | __all__.extend(tok[0] for tok in _tokens) 48 | globals().update((tok[0], i) for i, tok in enumerate(_tokens, 1)) 49 | 50 | # Regex for skipping whitespace and comments 51 | _whitespace = r'(?:\s|(?:#.*$))*' 52 | 53 | # Compile the token-matching and whitespace-matching regular expressions 54 | _tokens_re = re.compile('|'.join('(' + tok[1] + ')' for tok in _tokens), 55 | re.MULTILINE) 56 | _whitespace_re = re.compile(_whitespace, re.MULTILINE) 57 | 58 | Token = collections.namedtuple('Token', 'id, name, span, val') 59 | 60 | def lex(ds_str): 61 | """A generator which lexes a datashape string into a 62 | sequence of tokens. 63 | 64 | Example 65 | ------- 66 | 67 | import datashape 68 | s = ' -> ... A... "string" 1234 Blah _eil(# comment' 69 | print('lexing %r' % s) 70 | for tok in datashape.lexer.lex(s): 71 | print(tok.id, tok.name, tok.span, repr(tok.val)) 72 | """ 73 | pos = 0 74 | # Skip whitespace 75 | m = _whitespace_re.match(ds_str, pos) 76 | if m: 77 | pos = m.end() 78 | while pos < len(ds_str): 79 | # Try to match a token 80 | m = _tokens_re.match(ds_str, pos) 81 | if m: 82 | # m.lastindex gives us which group was matched, which 83 | # is one greater than the index into the _tokens list. 84 | id = m.lastindex 85 | tokinfo = _tokens[id - 1] 86 | name = tokinfo[0] 87 | span = m.span() 88 | if len(tokinfo) > 2: 89 | val = tokinfo[2](ds_str[span[0]:span[1]]) 90 | else: 91 | val = None 92 | pos = m.end() 93 | yield Token(id, name, span, val) 94 | else: 95 | raise error.DataShapeSyntaxError(pos, '', 96 | ds_str, 97 | 'Invalid DataShape token') 98 | # Skip whitespace 99 | m = _whitespace_re.match(ds_str, pos) 100 | if m: 101 | pos = m.end() 102 | -------------------------------------------------------------------------------- /datashader/datashape/promote.py: -------------------------------------------------------------------------------- 1 | 2 | import numpy as np 3 | from datashader import datashape 4 | 5 | 6 | __all__ = ['promote', 'optionify'] 7 | 8 | 9 | def promote(lhs, rhs, promote_option=True): 10 | """Promote two scalar dshapes to a possibly larger, but compatible type. 
11 | 12 | Examples 13 | -------- 14 | >>> from datashader.datashape import int32, int64, Option, string 15 | >>> x = Option(int32) 16 | >>> y = int64 17 | >>> promote(x, y) 18 | Option(ty=ctype("int64")) 19 | >>> promote(int64, int64) 20 | ctype("int64") 21 | 22 | Don't promote to option types. 23 | >>> promote(x, y, promote_option=False) 24 | ctype("int64") 25 | 26 | Strings are handled differently than NumPy, which promotes to ctype("object") 27 | >>> x = string 28 | >>> y = Option(string) 29 | >>> promote(x, y) == promote(y, x) == Option(string) 30 | True 31 | >>> promote(x, y, promote_option=False) 32 | ctype("string") 33 | 34 | Notes 35 | ---- 36 | Except for ``datashader.datashape.string`` types, this uses ``numpy.result_type`` for 37 | type promotion logic. See the numpy documentation at: 38 | 39 | http://docs.scipy.org/doc/numpy/reference/generated/numpy.result_type.html 40 | """ 41 | if lhs == rhs: 42 | return lhs 43 | left, right = getattr(lhs, 'ty', lhs), getattr(rhs, 'ty', rhs) 44 | if left == right == datashape.string: 45 | # Special case string promotion, since numpy promotes to `object`. 46 | dtype = datashape.string 47 | else: 48 | np_res_type = np.result_type(datashape.to_numpy_dtype(left), 49 | datashape.to_numpy_dtype(right)) 50 | dtype = datashape.CType.from_numpy_dtype(np_res_type) 51 | if promote_option: 52 | dtype = optionify(lhs, rhs, dtype) 53 | return dtype 54 | 55 | 56 | def optionify(lhs, rhs, dshape): 57 | """Check whether a binary operation's dshape came from 58 | :class:`~datashape.coretypes.Option` typed operands and construct an 59 | :class:`~datashape.coretypes.Option` type accordingly. 60 | 61 | Examples 62 | -------- 63 | >>> from datashader.datashape import int32, int64, Option 64 | >>> x = Option(int32) 65 | >>> x 66 | Option(ty=ctype("int32")) 67 | >>> y = int64 68 | >>> y 69 | ctype("int64") 70 | >>> optionify(x, y, int64) 71 | Option(ty=ctype("int64")) 72 | """ 73 | if hasattr(dshape.measure, 'ty'): 74 | return dshape 75 | if hasattr(lhs, 'ty') or hasattr(rhs, 'ty'): 76 | return datashape.Option(dshape) 77 | return dshape 78 | -------------------------------------------------------------------------------- /datashader/datashape/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/datashape/tests/__init__.py -------------------------------------------------------------------------------- /datashader/datashape/tests/test_operations.py: -------------------------------------------------------------------------------- 1 | from datashader import datashape 2 | import pytest 3 | 4 | 5 | def test_scalar_subarray(): 6 | assert datashape.int32.subarray(0) == datashape.int32 7 | with pytest.raises(IndexError): 8 | datashape.int32.subarray(1) 9 | assert datashape.string.subarray(0) == datashape.string 10 | with pytest.raises(IndexError): 11 | datashape.string.subarray(1) 12 | 13 | 14 | def test_array_subarray(): 15 | assert (datashape.dshape('3 * int32').subarray(0) == 16 | datashape.dshape('3 * int32')) 17 | assert (datashape.dshape('3 * int32').subarray(1) == 18 | datashape.DataShape(datashape.int32)) 19 | assert (str(datashape.dshape('3 * var * M * int32').subarray(2)) == 20 | str(datashape.dshape('M * int32'))) 21 | assert (str(datashape.dshape('3 * var * M * float64').subarray(3)) == 22 | str(datashape.float64)) 23 | 24 | 25 | def test_dshape_compare(): 26 | assert datashape.int32 != datashape.dshape('1 * 
int32') 27 | -------------------------------------------------------------------------------- /datashader/datashape/tests/test_predicates.py: -------------------------------------------------------------------------------- 1 | from datashader.datashape.predicates import isfixed, _dimensions, isnumeric, isscalar 2 | from datashader.datashape.coretypes import TypeVar, int32, Categorical 3 | 4 | 5 | def test_isfixed(): 6 | assert not isfixed(TypeVar('M') * int32) 7 | 8 | 9 | def test_isscalar(): 10 | assert isscalar('?int32') 11 | assert isscalar('float32') 12 | assert isscalar(int32) 13 | assert isscalar(Categorical(['a', 'b', 'c'])) 14 | assert not isscalar('{a: int32, b: float64}') 15 | 16 | 17 | def test_option(): 18 | assert _dimensions('?int') == _dimensions('int') 19 | assert _dimensions('3 * ?int') == _dimensions('3 * int') 20 | 21 | 22 | def test_time(): 23 | assert not isnumeric('time') 24 | -------------------------------------------------------------------------------- /datashader/datashape/tests/test_promote.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from datashader.datashape import ( 4 | promote, Option, float64, int64, float32, optionify, 5 | string, datetime_ as datetime, dshape) 6 | 7 | 8 | def test_simple(): 9 | x = int64 10 | y = float32 11 | z = promote(x, y) 12 | assert z == float64 13 | 14 | 15 | def test_option(): 16 | x = int64 17 | y = Option(float32) 18 | z = promote(x, y) 19 | assert z == Option(float64) 20 | 21 | 22 | def test_no_promote_option(): 23 | x = int64 24 | y = Option(float64) 25 | z = promote(x, y, promote_option=False) 26 | assert z == float64 27 | 28 | 29 | def test_option_in_parent(): 30 | x = int64 31 | y = Option(float32) 32 | z = optionify(x, y, y) 33 | assert z == y 34 | 35 | 36 | @pytest.mark.parametrize('x,y,p,r', 37 | [[string, string, True, string], 38 | [string, string, False, string], 39 | 40 | [Option(string), 41 | Option(string), 42 | True, 43 | Option(string)], 44 | 45 | [Option(string), 46 | Option(string), 47 | False, 48 | Option(string)], 49 | 50 | [Option(string), 51 | string, 52 | True, 53 | Option(string)], 54 | 55 | [Option(string), 56 | string, 57 | False, 58 | string], 59 | 60 | [Option(string), 61 | dshape('?string'), 62 | True, 63 | Option(string)], 64 | 65 | [dshape('?string'), 66 | Option(string), 67 | False, 68 | Option(string)], 69 | 70 | [dshape('string'), 71 | Option(string), 72 | True, 73 | Option(string)], 74 | 75 | [dshape('string'), 76 | Option(string), 77 | False, 78 | string]]) 79 | def test_promote_string_with_option(x, y, p, r): 80 | assert (promote(x, y, promote_option=p) == 81 | promote(y, x, promote_option=p) == 82 | r) 83 | 84 | 85 | @pytest.mark.parametrize('x,y,p,r', 86 | [[datetime, datetime, True, datetime], 87 | [datetime, datetime, False, datetime], 88 | 89 | [Option(datetime), 90 | Option(datetime), 91 | True, 92 | Option(datetime)], 93 | 94 | [Option(datetime), 95 | Option(datetime), 96 | False, 97 | Option(datetime)], 98 | 99 | [Option(datetime), 100 | datetime, 101 | True, 102 | Option(datetime)], 103 | 104 | [Option(datetime), 105 | datetime, 106 | False, 107 | datetime], 108 | 109 | [Option(datetime), 110 | dshape('?datetime'), 111 | True, 112 | Option(datetime)], 113 | 114 | [dshape('?datetime'), 115 | Option(datetime), 116 | False, 117 | Option(datetime)], 118 | 119 | [dshape('datetime'), 120 | Option(datetime), 121 | True, 122 | Option(datetime)], 123 | 124 | [dshape('datetime'), 125 | Option(datetime), 126 | False, 127 | 
datetime]]) 128 | def test_promote_datetime_with_option(x, y, p, r): 129 | assert (promote(x, y, promote_option=p) == 130 | promote(y, x, promote_option=p) == 131 | r) 132 | -------------------------------------------------------------------------------- /datashader/datashape/tests/test_str.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import pytest 3 | 4 | from datashader import datashape 5 | from datashader.datashape import dshape, DataShapeSyntaxError 6 | 7 | 8 | class TestDataShapeStr(unittest.TestCase): 9 | def test_primitive_measure_str(self): 10 | self.assertEqual(str(datashape.int8), 'int8') 11 | self.assertEqual(str(datashape.int16), 'int16') 12 | self.assertEqual(str(datashape.int32), 'int32') 13 | self.assertEqual(str(datashape.int64), 'int64') 14 | self.assertEqual(str(datashape.uint8), 'uint8') 15 | self.assertEqual(str(datashape.uint16), 'uint16') 16 | self.assertEqual(str(datashape.uint32), 'uint32') 17 | self.assertEqual(str(datashape.uint64), 'uint64') 18 | self.assertEqual(str(datashape.float32), 'float32') 19 | self.assertEqual(str(datashape.float64), 'float64') 20 | self.assertEqual(str(datashape.string), 'string') 21 | self.assertEqual(str(datashape.String(3)), 'string[3]') 22 | self.assertEqual(str(datashape.String('A')), "string['A']") 23 | 24 | def test_structure_str(self): 25 | self.assertEqual(str(dshape('{x:int32, y:int64}')), 26 | '{x: int32, y: int64}') 27 | 28 | def test_array_str(self): 29 | self.assertEqual(str(dshape('3*5*int16')), 30 | '3 * 5 * int16') 31 | 32 | def test_primitive_measure_repr(self): 33 | self.assertEqual(repr(datashape.int8), 'ctype("int8")') 34 | self.assertEqual(repr(datashape.int16), 'ctype("int16")') 35 | self.assertEqual(repr(datashape.int32), 'ctype("int32")') 36 | self.assertEqual(repr(datashape.int64), 'ctype("int64")') 37 | self.assertEqual(repr(datashape.uint8), 'ctype("uint8")') 38 | self.assertEqual(repr(datashape.uint16), 'ctype("uint16")') 39 | self.assertEqual(repr(datashape.uint32), 'ctype("uint32")') 40 | self.assertEqual(repr(datashape.uint64), 'ctype("uint64")') 41 | self.assertEqual(repr(datashape.float32), 'ctype("float32")') 42 | self.assertEqual(repr(datashape.float64), 'ctype("float64")') 43 | self.assertEqual(repr(datashape.string), 'ctype("string")') 44 | self.assertEqual(repr(datashape.String(3)), 'ctype("string[3]")') 45 | self.assertEqual(repr(datashape.String('A')), 46 | """ctype("string['A']")""") 47 | 48 | def test_structure_repr(self): 49 | self.assertEqual(repr(dshape('{x:int32, y:int64}')), 50 | 'dshape("{x: int32, y: int64}")') 51 | 52 | def test_array_repr(self): 53 | self.assertEqual(repr(dshape('3*5*int16')), 54 | 'dshape("3 * 5 * int16")') 55 | 56 | 57 | @pytest.mark.parametrize('s', 58 | ['{"./abc": int64}', 59 | '{"./a b c": float64}', 60 | '{"./a b\tc": string}', 61 | '{"./a/[0 1 2]/b/\\n": float32}']) 62 | def test_arbitrary_string(s): 63 | ds = dshape(s) 64 | assert dshape(str(ds)) == ds 65 | 66 | 67 | @pytest.mark.parametrize('s', 68 | ['{"/a/b/0/c\v/d": int8}', 69 | '{"/a/b/0/c\n/d": int8}', 70 | '{"/a/b/0/c\r/d": int8}']) 71 | def test_arbitrary_string2(s): 72 | with pytest.raises(DataShapeSyntaxError): 73 | ds = dshape(s) 74 | _ = dshape(str(ds)) 75 | -------------------------------------------------------------------------------- /datashader/datashape/tests/test_typeset.py: -------------------------------------------------------------------------------- 1 | from datashader import datashape 2 | import pytest 3 | 4 | 5 | def 
test_equal(): 6 | assert datashape.integral == datashape.integral 7 | assert datashape.floating != datashape.integral 8 | 9 | 10 | def test_repr(): 11 | assert repr(datashape.integral) == '{integral}' 12 | 13 | 14 | def test_custom_typeset_repr(): 15 | mytypeset = datashape.TypeSet(datashape.int64, datashape.float64) 16 | assert repr(mytypeset).startswith('TypeSet(') 17 | assert repr(mytypeset).endswith('name=None)') 18 | 19 | 20 | def test_register_already_existing_typeset_fails(): 21 | mytypeset = datashape.TypeSet(datashape.int64, datashape.float64, 22 | name='foo') 23 | with pytest.raises(TypeError): 24 | datashape.typesets.register_typeset('foo', mytypeset) 25 | 26 | 27 | def test_getitem(): 28 | assert datashape.typesets.registry['integral'] == datashape.integral 29 | 30 | 31 | def test_getitem_non_existent_typeset(): 32 | with pytest.raises(KeyError): 33 | datashape.typesets.registry['footypeset'] 34 | -------------------------------------------------------------------------------- /datashader/datashape/tests/test_user.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from datashader.datashape.user import issubschema, validate 4 | from datashader.datashape import dshape 5 | from datetime import date, time, datetime 6 | import numpy as np 7 | 8 | 9 | min_np = pytest.mark.skipif( 10 | np.__version__ > '1.14', 11 | reason="issubdtype no longer downcasts" 12 | ) 13 | 14 | 15 | @min_np 16 | def test_validate(): 17 | assert validate(int, 1) 18 | assert validate('int', 1) 19 | assert validate(str, 'Alice') 20 | assert validate(dshape('string'), 'Alice') 21 | assert validate(dshape('int'), 1) 22 | assert validate(dshape('int')[0], 1) 23 | assert validate('real', 2.0) 24 | assert validate('2 * int', (1, 2)) 25 | assert not validate('3 * int', (1, 2)) 26 | assert not validate('2 * int', 2) 27 | 28 | 29 | @min_np 30 | def test_nested_iteratables(): 31 | assert validate('2 * 3 * int', [(1, 2, 3), (4, 5, 6)]) 32 | 33 | 34 | def test_numeric_tower(): 35 | assert validate(np.integer, np.int32(1)) 36 | assert validate(np.number, np.int32(1)) 37 | 38 | 39 | @min_np 40 | def test_validate_dicts(): 41 | assert validate('{x: int, y: int}', {'x': 1, 'y': 2}) 42 | assert not validate('{x: int, y: int}', {'x': 1, 'y': 2.0}) 43 | assert not validate('{x: int, y: int}', {'x': 1, 'z': 2}) 44 | 45 | assert validate('var * {x: int, y: int}', [{'x': 1, 'y': 2}]) 46 | 47 | assert validate('var * {x: int, y: int}', [{'x': 1, 'y': 2}, 48 | {'x': 3, 'y': 4}]) 49 | 50 | 51 | @min_np 52 | def test_tuples_can_be_records_too(): 53 | assert validate('{x: int, y: real}', (1, 2.0)) 54 | assert not validate('{x: int, y: real}', (1.0, 2)) 55 | 56 | 57 | def test_datetimes(): 58 | assert validate('time', time(12, 0, 0)) 59 | assert validate('date', date(1999, 1, 20)) 60 | assert validate('datetime', datetime(1999, 1, 20, 12, 0, 0)) 61 | 62 | 63 | def test_numpy(): 64 | assert validate('2 * int32', np.array([1, 2], dtype='int32')) 65 | 66 | 67 | def test_issubschema(): 68 | assert issubschema('int', 'int') 69 | assert not issubschema('int', 'float32') 70 | 71 | assert issubschema('2 * int', '2 * int') 72 | assert not issubschema('2 * int', '3 * int') 73 | 74 | # assert issubschema('float32', 'real') 75 | 76 | 77 | def test_integration(): 78 | assert validate('{name: string, arrived: date}', 79 | {'name': 'Alice', 'arrived': date(2012, 1, 5)}) 80 | -------------------------------------------------------------------------------- 
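test_user.py above exercises the public ``validate``/``issubschema`` helpers together with ``promote``. A minimal sketch of how they combine, assuming a datashader installation where these imports resolve; the sample values are illustrative and deliberately avoid the numpy-version-dependent cases skipped above:

from datetime import date
import numpy as np
from datashader.datashape import Option, int32, int64, promote
from datashader.datashape.user import issubschema, validate

# String/date measures validate via exact type registration,
# so these hold regardless of numpy version:
assert validate('string', 'Alice')
assert validate('date', date(2012, 1, 5))
assert validate(np.integer, np.int32(1))

# promote follows numpy.result_type, wrapping the result in Option
# when either operand is optional:
assert promote(Option(int32), int64) == Option(int64)

# issubschema compares parsed dshapes; mismatched shapes are falsey:
assert issubschema('2 * int', '2 * int')
assert not issubschema('2 * int', '3 * int')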
/datashader/datashape/tests/test_util.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from datashader import datashape 4 | from datashader.datashape import dshape, has_var_dim, has_ellipsis 5 | 6 | 7 | def test_cat_dshapes(): 8 | # concatenating 1 dshape is a no-op 9 | dslist = [dshape('3 * 10 * int32')] 10 | assert datashape.cat_dshapes(dslist) == dslist[0] 11 | # two dshapes 12 | dslist = [dshape('3 * 10 * int32'), 13 | dshape('7 * 10 * int32')] 14 | assert datashape.cat_dshapes(dslist) == dshape('10 * 10 * int32') 15 | 16 | 17 | def test_cat_dshapes_errors(): 18 | # need at least one dshape 19 | with pytest.raises(ValueError): 20 | datashape.cat_dshapes([]) 21 | 22 | # dshapes need to match after the first dimension 23 | with pytest.raises(ValueError): 24 | datashape.cat_dshapes([dshape('3 * 10 * int32'), 25 | dshape('3 * 1 * int32')]) 26 | 27 | 28 | @pytest.mark.parametrize('ds_pos', 29 | ["... * float32", 30 | "A... * float32", 31 | "var * float32", 32 | "10 * { f0: int32, f1: A... * float32 }", 33 | "{ f0 : { g0 : var * int }, f1: int32 }", 34 | (dshape("var * int32"),)]) 35 | def test_has_var_dim(ds_pos): 36 | assert has_var_dim(dshape(ds_pos)) 37 | 38 | 39 | @pytest.mark.parametrize('ds_neg', 40 | [dshape("float32"), 41 | dshape("10 * float32"), 42 | dshape("10 * { f0: int32, f1: 10 * float32 }"), 43 | dshape("{ f0 : { g0 : 2 * int }, f1: int32 }"), 44 | (dshape("int32"),)]) 45 | def test_not_has_var_dim(ds_neg): 46 | assert not has_var_dim(ds_neg) 47 | 48 | 49 | @pytest.mark.parametrize('ds', 50 | [dshape("... * float32"), 51 | dshape("A... * float32"), 52 | dshape("var * ... * float32"), 53 | dshape("(int32, M... * int16) -> var * int8"), 54 | dshape("(int32, var * int16) -> ... * int8"), 55 | dshape("10 * { f0: int32, f1: A... * float32 }"), 56 | dshape("{ f0 : { g0 : ... * int }, f1: int32 }"), 57 | (dshape("... * int32"),)]) 58 | def test_has_ellipsis(ds): 59 | assert has_ellipsis(ds) 60 | 61 | 62 | @pytest.mark.parametrize('ds', 63 | [dshape("float32"), 64 | dshape("10 * var * float32"), 65 | dshape("M * float32"), 66 | dshape("(int32, M * int16) -> var * int8"), 67 | dshape("(int32, int16) -> var * int8"), 68 | dshape("10 * { f0: int32, f1: 10 * float32 }"), 69 | dshape("{ f0 : { g0 : 2 * int }, f1: int32 }"), 70 | (dshape("M * int32"),)]) 71 | def test_not_has_ellipsis(ds): 72 | assert not has_ellipsis(ds) 73 | -------------------------------------------------------------------------------- /datashader/datashape/type_symbol_table.py: -------------------------------------------------------------------------------- 1 | """ 2 | A symbol table object to hold types for the parser. 3 | """ 4 | 5 | import ctypes 6 | from itertools import chain 7 | 8 | from . 
import coretypes as ct 9 | 10 | __all__ = ['TypeSymbolTable', 'sym'] 11 | 12 | 13 | _is_64bit = (ctypes.sizeof(ctypes.c_void_p) == 8) 14 | 15 | 16 | def _complex(tp): 17 | """Simple temporary type constructor for complex""" 18 | if tp == ct.DataShape(ct.float32): 19 | return ct.complex_float32 20 | elif tp == ct.DataShape(ct.float64): 21 | return ct.complex_float64 22 | else: 23 | raise TypeError( 24 | 'Cannot construct a complex type with real component %s' % tp) 25 | 26 | 27 | def _struct(names, dshapes): 28 | """Simple temporary type constructor for struct""" 29 | return ct.Record(list(zip(names, dshapes))) 30 | 31 | 32 | def _funcproto(args, ret): 33 | """Simple temporary type constructor for funcproto""" 34 | return ct.Function(*chain(args, (ret,))) 35 | 36 | 37 | def _typevar_dim(name): 38 | """Simple temporary type constructor for typevar as a dim""" 39 | # Note: Presently no difference between dim and dtype typevar 40 | return ct.TypeVar(name) 41 | 42 | 43 | def _typevar_dtype(name): 44 | """Simple temporary type constructor for typevar as a dtype""" 45 | # Note: Presently no difference between dim and dtype typevar 46 | return ct.TypeVar(name) 47 | 48 | 49 | def _ellipsis(name): 50 | return ct.Ellipsis(ct.TypeVar(name)) 51 | 52 | # data types with no type constructor 53 | no_constructor_types = [ 54 | ('bool', ct.bool_), 55 | ('int8', ct.int8), 56 | ('int16', ct.int16), 57 | ('int32', ct.int32), 58 | ('int64', ct.int64), 59 | ('intptr', ct.int64 if _is_64bit else ct.int32), 60 | ('int', ct.int32), 61 | ('uint8', ct.uint8), 62 | ('uint16', ct.uint16), 63 | ('uint32', ct.uint32), 64 | ('uint64', ct.uint64), 65 | ('uintptr', ct.uint64 if _is_64bit else ct.uint32), 66 | ('float16', ct.float16), 67 | ('float32', ct.float32), 68 | ('float64', ct.float64), 69 | ('complex64', ct.complex64), 70 | ('complex128', ct.complex128), 71 | ('real', ct.float64), 72 | ('complex', ct.complex_float64), 73 | ('string', ct.string), 74 | ('json', ct.json), 75 | ('date', ct.date_), 76 | ('time', ct.time_), 77 | ('datetime', ct.datetime_), 78 | ('timedelta', ct.timedelta_), 79 | ('null', ct.null), 80 | ('void', ct.void), 81 | ('object', ct.object_), 82 | ] 83 | 84 | # data types with a type constructor 85 | constructor_types = [ 86 | ('complex', _complex), 87 | ('string', ct.String), 88 | ('struct', _struct), 89 | ('tuple', ct.Tuple), 90 | ('funcproto', _funcproto), 91 | ('typevar', _typevar_dtype), 92 | ('option', ct.Option), 93 | ('map', ct.Map), 94 | ('time', ct.Time), 95 | ('datetime', ct.DateTime), 96 | ('timedelta', ct.TimeDelta), 97 | ('units', ct.Units), 98 | ('decimal', ct.Decimal), 99 | ('categorical', ct.Categorical), 100 | ] 101 | 102 | # dim types with no type constructor 103 | dim_no_constructor = [ 104 | ('var', ct.Var()), 105 | ('ellipsis', ct.Ellipsis()), 106 | ] 107 | 108 | # dim types with a type constructor 109 | dim_constructor = [ 110 | ('fixed', ct.Fixed), 111 | ('typevar', _typevar_dim), 112 | ('ellipsis', _ellipsis), 113 | ] 114 | 115 | 116 | class TypeSymbolTable: 117 | 118 | """ 119 | This is a class which holds symbols for types and type constructors, 120 | and is used by the datashape parser to build types during its parsing. 121 | A TypeSymbolTable sym has four tables, as follows: 122 | 123 | sym.dtype 124 | Data type symbols with no type constructor. 125 | sym.dtype_constr 126 | Data type symbols with a type constructor. This may contain 127 | symbols also in sym.dtype, e.g. for 'complex' and 'complex[float64]'. 128 | sym.dim 129 | Dimension symbols with no type constructor. 
130 | sym.dim_constr 131 | Dimension symbols with a type constructor. 132 | """ 133 | __slots__ = ['dtype', 'dtype_constr', 'dim', 'dim_constr'] 134 | 135 | def __init__(self, bare=False): 136 | # Initialize all the symbol tables to empty dicts 137 | self.dtype = {} 138 | self.dtype_constr = {} 139 | self.dim = {} 140 | self.dim_constr = {} 141 | if not bare: 142 | self.add_default_types() 143 | 144 | def add_default_types(self): 145 | """ 146 | Adds all the default datashape types to the symbol table. 147 | """ 148 | self.dtype.update(no_constructor_types) 149 | self.dtype_constr.update(constructor_types) 150 | self.dim.update(dim_no_constructor) 151 | self.dim_constr.update(dim_constructor) 152 | 153 | # Create the default global type symbol table 154 | sym = TypeSymbolTable() 155 | -------------------------------------------------------------------------------- /datashader/datashape/user.py: -------------------------------------------------------------------------------- 1 | from .dispatch import dispatch 2 | from .coretypes import ( 3 | CType, Date, DateTime, DataShape, Record, String, Time, Var, from_numpy, to_numpy_dtype) 4 | from .predicates import isdimension 5 | from .util import dshape 6 | from datetime import date, time, datetime 7 | import numpy as np 8 | 9 | 10 | __all__ = ['validate', 'issubschema'] 11 | 12 | 13 | basetypes = np.generic, int, float, str, date, time, datetime 14 | 15 | 16 | @dispatch(np.dtype, basetypes) 17 | def validate(schema, value): 18 | return np.issubdtype(type(value), schema) 19 | 20 | 21 | @dispatch(CType, basetypes) 22 | def validate(schema, value): # noqa: F811 23 | return validate(to_numpy_dtype(schema), value) 24 | 25 | 26 | @dispatch(DataShape, (tuple, list)) 27 | def validate(schema, value): # noqa: F811 28 | head = schema[0] 29 | return ((len(schema) == 1 and validate(head, value)) 30 | or (isdimension(head) 31 | and (isinstance(head, Var) or int(head) == len(value)) 32 | and all(validate(DataShape(*schema[1:]), item) for item in value))) 33 | 34 | 35 | @dispatch(DataShape, object) 36 | def validate(schema, value): # noqa: F811 37 | if len(schema) == 1: 38 | return validate(schema[0], value) 39 | 40 | 41 | @dispatch(Record, dict) 42 | def validate(schema, d): # noqa: F811 43 | return all(validate(sch, d.get(k)) for k, sch in schema.parameters[0]) 44 | 45 | 46 | @dispatch(Record, (tuple, list)) 47 | def validate(schema, seq): # noqa: F811 48 | return all(validate(sch, item) for (k, sch), item 49 | in zip(schema.parameters[0], seq)) 50 | 51 | 52 | @dispatch(str, object) 53 | def validate(schema, value): # noqa: F811 54 | return validate(dshape(schema), value) 55 | 56 | 57 | @dispatch(type, object) 58 | def validate(schema, value): # noqa: F811 59 | return isinstance(value, schema) 60 | 61 | 62 | @dispatch(tuple, object) 63 | def validate(schemas, value): # noqa: F811 64 | return any(validate(schema, value) for schema in schemas) 65 | 66 | 67 | @dispatch(object, object) 68 | def validate(schema, value): # noqa: F811 69 | return False 70 | 71 | 72 | @validate.register(String, str) 73 | @validate.register(Time, time) 74 | @validate.register(Date, date) 75 | @validate.register(DateTime, datetime) 76 | def validate_always_true(schema, value): 77 | return True 78 | 79 | 80 | @dispatch(DataShape, np.ndarray) 81 | def validate(schema, value): 82 | return issubschema(from_numpy(value.shape, value.dtype), schema) 83 | 84 | 85 | @dispatch(object, object) 86 | def issubschema(a, b): 87 | return issubschema(dshape(a), dshape(b)) 88 | 89 | 90 |
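The stacked ``@dispatch`` definitions in this module rely on ``multipledispatch`` with the shared ``namespace`` dict set up in dispatch.py above: each decorated ``validate``/``issubschema`` registers an additional implementation keyed by argument types rather than shadowing the previous one, hence the ``# noqa: F811`` markers. A minimal, self-contained sketch of the same pattern; ``combine`` and its values are illustrative, not part of datashader:

from functools import partial
from multipledispatch import dispatch as md_dispatch

namespace = {}  # shared registry, mirroring datashader/datashape/dispatch.py
dispatch = partial(md_dispatch, namespace=namespace)

@dispatch(int, int)
def combine(a, b):
    return a + b

@dispatch(str, str)
def combine(a, b):  # noqa: F811 -- registers an overload, not a redefinition
    return a + ' ' + b

assert combine(1, 2) == 3
assert combine('data', 'shape') == 'data shape'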
@dispatch(DataShape, DataShape) 91 | def issubschema(a, b): # noqa: F811 92 | if a == b: 93 | return True 94 | # TODO, handle cases like float < real 95 | # TODO, handle records {x: int, y: int, z: int} < {x: int, y: int} 96 | 97 | return None # We don't know, return something falsey 98 | -------------------------------------------------------------------------------- /datashader/datashape/util/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | from itertools import chain 3 | import operator 4 | 5 | from .. import parser 6 | from .. import type_symbol_table 7 | from ..validation import validate 8 | from .. import coretypes 9 | 10 | 11 | __all__ = 'dshape', 'dshapes', 'has_var_dim', 'has_ellipsis', 'cat_dshapes' 12 | 13 | subclasses = operator.methodcaller('__subclasses__') 14 | 15 | #------------------------------------------------------------------------ 16 | # Utility Functions for DataShapes 17 | #------------------------------------------------------------------------ 18 | 19 | def dshapes(*args): 20 | """ 21 | Parse a bunch of datashapes all at once. 22 | 23 | >>> a, b = dshapes('3 * int32', '2 * var * float64') 24 | """ 25 | return [dshape(arg) for arg in args] 26 | 27 | 28 | def dshape(o): 29 | """ 30 | Parse a datashape. For a thorough description see 31 | https://datashape.readthedocs.io/en/latest/ 32 | 33 | >>> ds = dshape('2 * int32') 34 | >>> ds[1] 35 | ctype("int32") 36 | """ 37 | if isinstance(o, coretypes.DataShape): 38 | return o 39 | if isinstance(o, str): 40 | ds = parser.parse(o, type_symbol_table.sym) 41 | elif isinstance(o, (coretypes.CType, coretypes.String, 42 | coretypes.Record, coretypes.JSON, 43 | coretypes.Date, coretypes.Time, coretypes.DateTime, 44 | coretypes.Unit)): 45 | ds = coretypes.DataShape(o) 46 | elif isinstance(o, coretypes.Mono): 47 | ds = o 48 | elif isinstance(o, (list, tuple)): 49 | ds = coretypes.DataShape(*o) 50 | else: 51 | raise TypeError('Cannot create dshape from object of type %s' % type(o)) 52 | validate(ds) 53 | return ds 54 | 55 | 56 | def cat_dshapes(dslist): 57 | """ 58 | Concatenates a list of dshapes together along 59 | the first axis. Raises an error if there is 60 | a mismatch along another axis or the measures 61 | are different. 62 | 63 | Requires that the leading dimension be a known 64 | size for all data shapes. 65 | TODO: Relax this restriction to support 66 | streaming dimensions. 
67 | 68 | >>> cat_dshapes(dshapes('10 * int32', '5 * int32')) 69 | dshape("15 * int32") 70 | """ 71 | if len(dslist) == 0: 72 | raise ValueError('Cannot concatenate an empty list of dshapes') 73 | elif len(dslist) == 1: 74 | return dslist[0] 75 | 76 | outer_dim_size = operator.index(dslist[0][0]) 77 | inner_ds = dslist[0][1:] 78 | for ds in dslist[1:]: 79 | outer_dim_size += operator.index(ds[0]) 80 | if ds[1:] != inner_ds: 81 | raise ValueError(('The datashapes to concatenate must' 82 | ' all match after' 83 | ' the first dimension (%s vs %s)') % 84 | (inner_ds, ds[1:])) 85 | return coretypes.DataShape(*[coretypes.Fixed(outer_dim_size)] + list(inner_ds)) 86 | 87 | 88 | def collect(pred, expr): 89 | """ Collect terms in expression that match predicate 90 | 91 | >>> from datashader.datashape import Unit, dshape 92 | >>> predicate = lambda term: isinstance(term, Unit) 93 | >>> dshape = dshape('var * {value: int64, loc: 2 * int32}') 94 | >>> sorted(set(collect(predicate, dshape)), key=str) 95 | [Fixed(val=2), ctype("int32"), ctype("int64"), Var()] 96 | >>> from datashader.datashape import var, int64 97 | >>> sorted(set(collect(predicate, [var, int64])), key=str) 98 | [ctype("int64"), Var()] 99 | """ 100 | if pred(expr): 101 | return [expr] 102 | if isinstance(expr, coretypes.Record): 103 | return chain.from_iterable(collect(pred, typ) for typ in expr.types) 104 | if isinstance(expr, coretypes.Mono): 105 | return chain.from_iterable(collect(pred, typ) for typ in expr.parameters) 106 | if isinstance(expr, (list, tuple)): 107 | return chain.from_iterable(collect(pred, item) for item in expr) 108 | 109 | 110 | def has_var_dim(ds): 111 | """Returns True if datashape has a variable dimension 112 | 113 | Note: currently treats variable-length strings as scalars. 114 | 115 | >>> has_var_dim(dshape('2 * int32')) 116 | False 117 | >>> has_var_dim(dshape('var * 2 * int32')) 118 | True 119 | """ 120 | return has((coretypes.Ellipsis, coretypes.Var), ds) 121 | 122 | 123 | def has(typ, ds): 124 | if isinstance(ds, typ): 125 | return True 126 | if isinstance(ds, coretypes.Record): 127 | return any(has(typ, t) for t in ds.types) 128 | if isinstance(ds, coretypes.Mono): 129 | return any(has(typ, p) for p in ds.parameters) 130 | if isinstance(ds, (list, tuple)): 131 | return any(has(typ, item) for item in ds) 132 | return False 133 | 134 | 135 | def has_ellipsis(ds): 136 | """Returns True if the datashape has an ellipsis 137 | 138 | >>> has_ellipsis(dshape('2 * int')) 139 | False 140 | >>> has_ellipsis(dshape('... * int')) 141 | True 142 | """ 143 | return has(coretypes.Ellipsis, ds) 144 | -------------------------------------------------------------------------------- /datashader/datashape/validation.py: -------------------------------------------------------------------------------- 1 | """ 2 | Datashape validation. 3 | """ 4 | 5 | from . import coretypes as T 6 | 7 | 8 | def traverse(f, t): 9 | """ 10 | Map `f` over `t`, calling `f` with type `t` and the result of 11 | mapping `f` over `t`'s parameters. 12 | 13 | Parameters 14 | ---------- 15 | f : callable 16 | t : DataShape 17 | 18 | Returns 19 | ------- 20 | DataShape 21 | """ 22 | if isinstance(t, T.Mono) and not isinstance(t, T.Unit): 23 | return f(t, [traverse(f, p) for p in t.parameters]) 24 | return t 25 | 26 | 27 | def validate(ds): 28 | """ 29 | Validate a datashape to see whether it is well-formed.
30 | 31 | Parameters 32 | ---------- 33 | ds : DataShape 34 | 35 | Examples 36 | -------- 37 | >>> from datashader.datashape import dshape 38 | >>> dshape('10 * int32') 39 | dshape("10 * int32") 40 | >>> dshape('... * int32') 41 | dshape("... * int32") 42 | >>> dshape('... * ... * int32') # doctest: +IGNORE_EXCEPTION_DETAIL 43 | Traceback (most recent call last): 44 | ... 45 | TypeError: Can only use a single wildcard 46 | >>> dshape('T * ... * X * ... * X') # doctest: +IGNORE_EXCEPTION_DETAIL 47 | Traceback (most recent call last): 48 | ... 49 | TypeError: Can only use a single wildcard 50 | >>> dshape('T * ...') # doctest: +IGNORE_EXCEPTION_DETAIL 51 | Traceback (most recent call last): 52 | ... 53 | DataShapeSyntaxError: Expected a dtype 54 | """ 55 | traverse(_validate, ds) 56 | 57 | 58 | def _validate(ds, params): 59 | if isinstance(ds, T.DataShape): 60 | # Check ellipses 61 | ellipses = [x for x in ds.parameters if isinstance(x, T.Ellipsis)] 62 | if len(ellipses) > 1: 63 | raise TypeError("Can only use a single wildcard") 64 | elif isinstance(ds.parameters[-1], T.Ellipsis): 65 | raise TypeError("Measure may not be an Ellipsis (...)") 66 | -------------------------------------------------------------------------------- /datashader/glyphs/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from .points import Point, MultiPointGeometry # noqa (API import) 3 | from .line import ( # noqa (API import) 4 | LineAxis0, 5 | LineAxis0Multi, 6 | LinesAxis1, 7 | LinesAxis1XConstant, 8 | LinesAxis1YConstant, 9 | LinesAxis1Ragged, 10 | LineAxis1Geometry, 11 | LineAxis1GeoPandas, 12 | LinesXarrayCommonX, 13 | ) 14 | from .area import ( # noqa (API import) 15 | AreaToZeroAxis0, 16 | AreaToZeroAxis0Multi, 17 | AreaToZeroAxis1, 18 | AreaToZeroAxis1XConstant, 19 | AreaToZeroAxis1YConstant, 20 | AreaToZeroAxis1Ragged, 21 | AreaToLineAxis0, 22 | AreaToLineAxis0Multi, 23 | AreaToLineAxis1, 24 | AreaToLineAxis1XConstant, 25 | AreaToLineAxis1YConstant, 26 | AreaToLineAxis1Ragged, 27 | ) 28 | from .trimesh import Triangles # noqa (API import) 29 | from .polygon import PolygonGeom # noqa (API import) 30 | from .quadmesh import ( # noqa (API import) 31 | QuadMeshRaster, QuadMeshRectilinear, QuadMeshCurvilinear 32 | ) 33 | from .glyph import Glyph # noqa (API import) 34 | -------------------------------------------------------------------------------- /datashader/pipeline.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from toolz import identity 4 | 5 | from . import transfer_functions as tf 6 | from . import reductions 7 | from . import core 8 | 9 | 10 | class Pipeline: 11 | """A datashading pipeline callback. 12 | 13 | Given a declarative specification, creates a callable with the following 14 | signature: 15 | 16 | ``callback(x_range, y_range, width, height)`` 17 | 18 | where ``x_range`` and ``y_range`` form the bounding box on the viewport, 19 | and ``width`` and ``height`` specify the output image dimensions. 20 | 21 | Parameters 22 | ---------- 23 | df : pandas.DataFrame, dask.DataFrame 24 | glyph : Glyph 25 | The glyph to bin by. 26 | agg : Reduction, optional 27 | The reduction to compute per-pixel. Default is ``count()``. 28 | transform_fn : callable, optional 29 | A callable that takes the computed aggregate as an argument, and 30 | returns another aggregate. 
This can be used to do preprocessing before 31 | passing to the ``color_fn`` function. 32 | color_fn : callable, optional 33 | A callable that takes the output of ``transform_fn``, and returns an 34 | ``Image`` object. Default is ``shade``. 35 | spread_fn : callable, optional 36 | A callable that takes the output of ``color_fn``, and returns another 37 | ``Image`` object. Default is ``dynspread``. 38 | height_scale: float, optional 39 | Factor by which to scale the provided height 40 | width_scale: float, optional 41 | Factor by which to scale the provided width 42 | """ 43 | def __init__(self, df, glyph, agg=reductions.count(), 44 | transform_fn=identity, color_fn=tf.shade, spread_fn=tf.dynspread, 45 | width_scale=1.0, height_scale=1.0): 46 | self.df = df 47 | self.glyph = glyph 48 | self.agg = agg 49 | self.transform_fn = transform_fn 50 | self.color_fn = color_fn 51 | self.spread_fn = spread_fn 52 | self.width_scale = width_scale 53 | self.height_scale = height_scale 54 | 55 | def __call__(self, x_range=None, y_range=None, width=600, height=600): 56 | """Compute an image from the specified pipeline. 57 | 58 | Parameters 59 | ---------- 60 | x_range, y_range : tuple, optional 61 | The bounding box on the viewport, specified as tuples of 62 | ``(min, max)`` 63 | width, height : int, optional 64 | The shape of the image 65 | """ 66 | canvas = core.Canvas(plot_width=int(width*self.width_scale), 67 | plot_height=int(height*self.height_scale), 68 | x_range=x_range, y_range=y_range) 69 | bins = core.bypixel(self.df, canvas, self.glyph, self.agg, 70 | antialias=self.glyph.antialiased) 71 | img = self.color_fn(self.transform_fn(bins)) 72 | return self.spread_fn(img) 73 | -------------------------------------------------------------------------------- /datashader/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/__init__.py -------------------------------------------------------------------------------- /datashader/tests/benchmarks/README.md: -------------------------------------------------------------------------------- 1 | TODO: need to reorganize benchmarking 2 | -------------------------------------------------------------------------------- /datashader/tests/benchmarks/__init__.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | pytestmark = pytest.mark.benchmark 4 | -------------------------------------------------------------------------------- /datashader/tests/benchmarks/test_bundling.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | import numpy as np 4 | import pandas as pd 5 | 6 | from datashader.bundling import directly_connect_edges, hammer_bundle 7 | from datashader.layout import circular_layout, forceatlas2_layout, random_layout 8 | 9 | skimage = pytest.importorskip("skimage") 10 | 11 | 12 | @pytest.fixture 13 | def nodes(): 14 | # Four nodes arranged at the corners of a 200x200 square with one node 15 | # at the center 16 | nodes_df = pd.DataFrame({'id': np.arange(5), 17 | 'x': [0.0, -100.0, 100.0, -100.0, 100.0], 18 | 'y': [0.0, 100.0, 100.0, -100.0, -100.0]}) 19 | nodes_df = nodes_df.set_index('id') 20 | return nodes_df 21 | 22 | 23 | @pytest.fixture 24 | def edges(): 25 | # Four edges originating from the center node and connected to each 26 | # corner 27 | edges_df = pd.DataFrame({'id': np.arange(4), 28 |
'source': np.zeros(4, dtype=int), 29 | 'target': np.arange(1, 5)}) 30 | edges_df = edges_df.set_index('id') 31 | return edges_df 32 | 33 | 34 | @pytest.mark.parametrize('bundle', [directly_connect_edges, hammer_bundle]) 35 | @pytest.mark.parametrize('layout', [random_layout, circular_layout, forceatlas2_layout]) 36 | @pytest.mark.benchmark(group="bundling") 37 | def test_bundle(benchmark, nodes, edges, layout, bundle): 38 | node_positions = layout(nodes, edges) 39 | benchmark(bundle, node_positions, edges) 40 | -------------------------------------------------------------------------------- /datashader/tests/benchmarks/test_canvas.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import pandas as pd 3 | import datashader as ds 4 | 5 | 6 | @pytest.fixture 7 | def time_series(rng): 8 | n = 10**7 9 | signal = rng.normal(0, 0.3, size=n).cumsum() + 50 10 | def noise(var, bias, n): 11 | return rng.normal(bias, var, n) 12 | ys = signal + noise(1, 10*(rng.random() - 0.5), n) 13 | 14 | df = pd.DataFrame({'y': ys}) 15 | df['x'] = df.index 16 | return df 17 | 18 | 19 | @pytest.mark.benchmark(group="canvas") 20 | def test_line(benchmark, time_series): 21 | cvs = ds.Canvas(plot_height=300, plot_width=900) 22 | benchmark(cvs.line, time_series, 'x', 'y') 23 | 24 | 25 | @pytest.mark.benchmark(group="canvas") 26 | def test_points(benchmark, time_series): 27 | cvs = ds.Canvas(plot_height=300, plot_width=900) 28 | benchmark(cvs.points, time_series, 'x', 'y') 29 | 30 | 31 | @pytest.mark.gpu 32 | @pytest.mark.benchmark(group="canvas") 33 | def test_line_gpu(benchmark, time_series): 34 | from cudf import from_pandas 35 | time_series = from_pandas(time_series) 36 | cvs = ds.Canvas(plot_height=300, plot_width=900) 37 | benchmark(cvs.line, time_series, 'x', 'y') 38 | 39 | 40 | @pytest.mark.gpu 41 | @pytest.mark.benchmark(group="canvas") 42 | def test_points_gpu(benchmark, time_series): 43 | from cudf import from_pandas 44 | time_series = from_pandas(time_series) 45 | cvs = ds.Canvas(plot_height=300, plot_width=900) 46 | benchmark(cvs.points, time_series, 'x', 'y') 47 | -------------------------------------------------------------------------------- /datashader/tests/benchmarks/test_draw_line.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import pytest 4 | 5 | import numpy as np 6 | 7 | from datashader.glyphs import Glyph 8 | from datashader.glyphs.line import _build_draw_segment, \ 9 | _build_map_onto_pixel_for_line 10 | from datashader.utils import ngjit 11 | 12 | mapper = ngjit(lambda x: x) 13 | map_onto_pixel = _build_map_onto_pixel_for_line(mapper, mapper) 14 | sx, tx, sy, ty = 1, 0, 1, 0 15 | xmin, xmax, ymin, ymax = 0, 5, 0, 5 16 | 17 | 18 | @pytest.fixture 19 | def draw_line(): 20 | @ngjit 21 | def append(i, x, y, agg): 22 | agg[y, x] += 1 23 | 24 | expand_aggs_and_cols = Glyph._expand_aggs_and_cols(append, 1, False) 25 | return _build_draw_segment(append, map_onto_pixel, expand_aggs_and_cols, 26 | 0, False) 27 | 28 | 29 | @pytest.mark.benchmark(group="draw_line") 30 | def test_draw_line_left_border(benchmark, draw_line): 31 | n = 10**4 32 | x0, y0 = (0, 0) 33 | x1, y1 = (0, n) 34 | 35 | agg = np.zeros((n+1, n+1), dtype='i4') 36 | buffer = np.empty(0) 37 | benchmark(draw_line, 0, sx, tx, sy, ty, xmin, xmax, ymin, ymax, True, True, 38 | x0, x1, y0, y1, 0.0, 0.0, buffer, agg) 39 | 40 | 41 | @pytest.mark.benchmark(group="draw_line") 42 | def test_draw_line_diagonal(benchmark, 
draw_line): 43 | n = 10**4 44 | x0, y0 = (0, 0) 45 | x1, y1 = (n, n) 46 | 47 | agg = np.zeros((n+1, n+1), dtype='i4') 48 | buffer = np.empty(0) 49 | benchmark(draw_line, 0, sx, tx, sy, ty, xmin, xmax, ymin, ymax, True, True, 50 | x0, x1, y0, y1, 0.0, 0.0, buffer, agg) 51 | 52 | 53 | @pytest.mark.benchmark(group="draw_line") 54 | def test_draw_line_offset(benchmark, draw_line): 55 | n = 10**4 56 | x0, y0 = (0, n//4) 57 | x1, y1 = (n, n//4-1) 58 | 59 | agg = np.zeros((n+1, n+1), dtype='i4') 60 | buffer = np.empty(0) 61 | benchmark(draw_line, 0, sx, tx, sy, ty, xmin, xmax, ymin, ymax, True, True, 62 | x0, x1, y0, y1, 0.0, 0.0, buffer, agg) 63 | -------------------------------------------------------------------------------- /datashader/tests/benchmarks/test_extend_line.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pytest 3 | 4 | from datashader.glyphs import Glyph 5 | from datashader.glyphs.line import ( 6 | _build_draw_segment, _build_extend_line_axis0, _build_map_onto_pixel_for_line 7 | ) 8 | from datashader.utils import ngjit 9 | 10 | 11 | @pytest.fixture 12 | def extend_line(): 13 | @ngjit 14 | def append(i, x, y, agg): 15 | agg[y, x] += 1 16 | 17 | mapper = ngjit(lambda x: x) 18 | map_onto_pixel = _build_map_onto_pixel_for_line(mapper, mapper) 19 | expand_aggs_and_cols = Glyph._expand_aggs_and_cols(append, 1, False) 20 | draw_line = _build_draw_segment(append, map_onto_pixel, 21 | expand_aggs_and_cols, 0, False) 22 | return _build_extend_line_axis0(draw_line, expand_aggs_and_cols, None)[0] 23 | 24 | 25 | @pytest.mark.parametrize('high', [0, 10**5]) 26 | @pytest.mark.parametrize('low', [0, -10**5]) 27 | @pytest.mark.benchmark(group="extend_line") 28 | def test_extend_line_uniform(benchmark, extend_line, low, high, rng): 29 | n = 10**6 30 | sx, tx, sy, ty = (1, 0, 1, 0) 31 | xmin, xmax, ymin, ymax = (0, 0, 10**4, 10**4) 32 | 33 | xs = rng.uniform(xmin + low, ymin + high, n) 34 | ys = rng.uniform(xmax + low, ymax + high, n) 35 | 36 | agg = np.zeros((ymin, ymax), dtype='i4') 37 | buffer = np.empty(0) 38 | benchmark( 39 | extend_line, sx, tx, sy, ty, xmin, xmax, ymin, ymax, xs, ys, True, buffer, agg 40 | ) 41 | 42 | 43 | @pytest.mark.benchmark(group="extend_line") 44 | def test_extend_line_normal(benchmark, extend_line, rng): 45 | n = 10**6 46 | sx, tx, sy, ty = (1, 0, 1, 0) 47 | xmin, xmax, ymin, ymax = (0, 0, 10**4, 10**4) 48 | 49 | start = 1456297053 50 | end = start + 60 * 60 * 24 51 | xs = np.linspace(start, end, n) 52 | 53 | signal = rng.normal(0, 0.3, size=n).cumsum() + 50 54 | def noise(var, bias, n): 55 | return rng.normal(bias, var, n) 56 | ys = signal + noise(1, 10*(rng.random() - 0.5), n) 57 | 58 | agg = np.zeros((ymin, ymax), dtype='i4') 59 | buffer = np.empty(0) 60 | benchmark( 61 | extend_line, sx, tx, sy, ty, xmin, xmax, ymin, ymax, xs, ys, True, buffer, agg 62 | ) 63 | -------------------------------------------------------------------------------- /datashader/tests/benchmarks/test_layout.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | import numpy as np 4 | import pandas as pd 5 | 6 | from datashader.layout import circular_layout, forceatlas2_layout, random_layout 7 | 8 | 9 | @pytest.fixture 10 | def nodes(): 11 | # Four nodes arranged at the corners of a 200x200 square with one node 12 | # at the center 13 | nodes_df = pd.DataFrame({'id': np.arange(5), 14 | 'x': [0., -100., 100., -100., 100.], 15 | 'y': [0., 100., 100., -100., -100.]}) 16 | 
return nodes_df.set_index('id') 17 | 18 | 19 | @pytest.fixture 20 | def edges(): 21 | # Four edges originating from the center node and connected to each 22 | # corner 23 | edges_df = pd.DataFrame({'id': np.arange(4), 24 | 'source': np.zeros(4, dtype=np.int64), 25 | 'target': np.arange(1, 5)}) 26 | return edges_df.set_index('id') 27 | 28 | 29 | @pytest.mark.parametrize('layout', [random_layout, circular_layout, forceatlas2_layout]) 30 | @pytest.mark.benchmark(group="layout") 31 | def test_layout(benchmark, nodes, edges, layout): 32 | benchmark(layout, nodes, edges) 33 | -------------------------------------------------------------------------------- /datashader/tests/conftest.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pytest 3 | 4 | CUSTOM_MARKS = {"benchmark", "gpu"} 5 | 6 | 7 | def pytest_addoption(parser): 8 | for marker in sorted(CUSTOM_MARKS): 9 | parser.addoption( 10 | f"--{marker}", 11 | action="store_true", 12 | default=False, 13 | help=f"Run {marker} related tests", 14 | ) 15 | 16 | 17 | def pytest_configure(config): 18 | for marker in sorted(CUSTOM_MARKS): 19 | config.addinivalue_line("markers", f"{marker}: {marker} test marker") 20 | 21 | 22 | def pytest_collection_modifyitems(config, items): 23 | skipped, selected = [], [] 24 | markers = {m for m in CUSTOM_MARKS if config.getoption(f"--{m}")} 25 | empty = not markers 26 | for item in items: 27 | item_marks = set(item.keywords) & CUSTOM_MARKS 28 | if empty and item_marks: 29 | skipped.append(item) 30 | elif empty: 31 | selected.append(item) 32 | elif not empty and item_marks == markers: 33 | selected.append(item) 34 | else: 35 | skipped.append(item) 36 | 37 | config.hook.pytest_deselected(items=skipped) 38 | items[:] = selected 39 | 40 | 41 | @pytest.fixture 42 | def rng(): 43 | return np.random.default_rng(42) 44 | -------------------------------------------------------------------------------- /datashader/tests/data/test_001_antialias_clipped.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_001_antialias_clipped.nc -------------------------------------------------------------------------------- /datashader/tests/data/test_001_antialias_clipped.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_001_antialias_clipped.png -------------------------------------------------------------------------------- /datashader/tests/data/test_001_antialias_normal.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_001_antialias_normal.nc -------------------------------------------------------------------------------- /datashader/tests/data/test_001_antialias_normal.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_001_antialias_normal.png -------------------------------------------------------------------------------- /datashader/tests/data/test_001_noaa_clipped.nc: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_001_noaa_clipped.nc -------------------------------------------------------------------------------- /datashader/tests/data/test_001_noaa_clipped.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_001_noaa_clipped.png -------------------------------------------------------------------------------- /datashader/tests/data/test_001_noaa_normal.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_001_noaa_normal.nc -------------------------------------------------------------------------------- /datashader/tests/data/test_001_noaa_normal.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_001_noaa_normal.png -------------------------------------------------------------------------------- /datashader/tests/data/test_002_antialias_clipped.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_002_antialias_clipped.nc -------------------------------------------------------------------------------- /datashader/tests/data/test_002_antialias_clipped.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_002_antialias_clipped.png -------------------------------------------------------------------------------- /datashader/tests/data/test_002_antialias_normal.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_002_antialias_normal.nc -------------------------------------------------------------------------------- /datashader/tests/data/test_002_antialias_normal.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_002_antialias_normal.png -------------------------------------------------------------------------------- /datashader/tests/data/test_002_noaa_clipped.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_002_noaa_clipped.nc -------------------------------------------------------------------------------- /datashader/tests/data/test_002_noaa_clipped.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_002_noaa_clipped.png -------------------------------------------------------------------------------- /datashader/tests/data/test_002_noaa_normal.nc: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_002_noaa_normal.nc -------------------------------------------------------------------------------- /datashader/tests/data/test_002_noaa_normal.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_002_noaa_normal.png -------------------------------------------------------------------------------- /datashader/tests/data/test_003_antialias_clipped.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_003_antialias_clipped.nc -------------------------------------------------------------------------------- /datashader/tests/data/test_003_antialias_clipped.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_003_antialias_clipped.png -------------------------------------------------------------------------------- /datashader/tests/data/test_003_antialias_normal.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_003_antialias_normal.nc -------------------------------------------------------------------------------- /datashader/tests/data/test_003_antialias_normal.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_003_antialias_normal.png -------------------------------------------------------------------------------- /datashader/tests/data/test_003_noaa_clipped.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_003_noaa_clipped.nc -------------------------------------------------------------------------------- /datashader/tests/data/test_003_noaa_clipped.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_003_noaa_clipped.png -------------------------------------------------------------------------------- /datashader/tests/data/test_003_noaa_normal.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_003_noaa_normal.nc -------------------------------------------------------------------------------- /datashader/tests/data/test_003_noaa_normal.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_003_noaa_normal.png -------------------------------------------------------------------------------- 
/datashader/tests/data/test_004_antialias_clipped.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_004_antialias_clipped.nc -------------------------------------------------------------------------------- /datashader/tests/data/test_004_antialias_clipped.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_004_antialias_clipped.png -------------------------------------------------------------------------------- /datashader/tests/data/test_004_antialias_normal.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_004_antialias_normal.nc -------------------------------------------------------------------------------- /datashader/tests/data/test_004_antialias_normal.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_004_antialias_normal.png -------------------------------------------------------------------------------- /datashader/tests/data/test_004_noaa_clipped.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_004_noaa_clipped.nc -------------------------------------------------------------------------------- /datashader/tests/data/test_004_noaa_clipped.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_004_noaa_clipped.png -------------------------------------------------------------------------------- /datashader/tests/data/test_004_noaa_normal.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_004_noaa_normal.nc -------------------------------------------------------------------------------- /datashader/tests/data/test_004_noaa_normal.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_004_noaa_normal.png -------------------------------------------------------------------------------- /datashader/tests/data/test_005_antialias_clipped.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_005_antialias_clipped.nc -------------------------------------------------------------------------------- /datashader/tests/data/test_005_antialias_clipped.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_005_antialias_clipped.png 
-------------------------------------------------------------------------------- /datashader/tests/data/test_005_antialias_normal.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_005_antialias_normal.nc -------------------------------------------------------------------------------- /datashader/tests/data/test_005_antialias_normal.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_005_antialias_normal.png -------------------------------------------------------------------------------- /datashader/tests/data/test_005_noaa_clipped.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_005_noaa_clipped.nc -------------------------------------------------------------------------------- /datashader/tests/data/test_005_noaa_clipped.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_005_noaa_clipped.png -------------------------------------------------------------------------------- /datashader/tests/data/test_005_noaa_normal.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_005_noaa_normal.nc -------------------------------------------------------------------------------- /datashader/tests/data/test_005_noaa_normal.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_005_noaa_normal.png -------------------------------------------------------------------------------- /datashader/tests/data/test_006_antialias_clipped.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_006_antialias_clipped.nc -------------------------------------------------------------------------------- /datashader/tests/data/test_006_antialias_clipped.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_006_antialias_clipped.png -------------------------------------------------------------------------------- /datashader/tests/data/test_006_antialias_normal.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_006_antialias_normal.nc -------------------------------------------------------------------------------- /datashader/tests/data/test_006_antialias_normal.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_006_antialias_normal.png -------------------------------------------------------------------------------- /datashader/tests/data/test_006_noaa_clipped.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_006_noaa_clipped.nc -------------------------------------------------------------------------------- /datashader/tests/data/test_006_noaa_clipped.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_006_noaa_clipped.png -------------------------------------------------------------------------------- /datashader/tests/data/test_006_noaa_normal.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_006_noaa_normal.nc -------------------------------------------------------------------------------- /datashader/tests/data/test_006_noaa_normal.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_006_noaa_normal.png -------------------------------------------------------------------------------- /datashader/tests/data/test_007_antialias_clipped.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_007_antialias_clipped.nc -------------------------------------------------------------------------------- /datashader/tests/data/test_007_antialias_clipped.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_007_antialias_clipped.png -------------------------------------------------------------------------------- /datashader/tests/data/test_007_antialias_normal.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_007_antialias_normal.nc -------------------------------------------------------------------------------- /datashader/tests/data/test_007_antialias_normal.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_007_antialias_normal.png -------------------------------------------------------------------------------- /datashader/tests/data/test_007_noaa_clipped.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_007_noaa_clipped.nc -------------------------------------------------------------------------------- /datashader/tests/data/test_007_noaa_clipped.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_007_noaa_clipped.png -------------------------------------------------------------------------------- /datashader/tests/data/test_007_noaa_normal.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_007_noaa_normal.nc -------------------------------------------------------------------------------- /datashader/tests/data/test_007_noaa_normal.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/test_007_noaa_normal.png -------------------------------------------------------------------------------- /datashader/tests/data/world.rgb.tif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/datashader/tests/data/world.rgb.tif -------------------------------------------------------------------------------- /datashader/tests/test_colors.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from datashader.colors import rgb, hex_to_rgb 3 | 4 | import pytest 5 | 6 | 7 | def test_hex_to_rgb(): 8 | assert hex_to_rgb('#FAFBFC') == (250, 251, 252) 9 | with pytest.raises(ValueError): 10 | hex_to_rgb('#FFF') 11 | with pytest.raises(ValueError): 12 | hex_to_rgb('FFFFFF') 13 | with pytest.raises(ValueError): 14 | hex_to_rgb('#FFFFFG') 15 | 16 | 17 | def test_rgb(): 18 | assert rgb('#FAFBFC') == (250, 251, 252) 19 | assert rgb('blue') == (0, 0, 255) 20 | assert rgb((255, 255, 255)) == (255, 255, 255) 21 | with pytest.raises(ValueError): 22 | rgb((255, 256, 255)) 23 | with pytest.raises(ValueError): 24 | rgb((-1, 255, 255)) 25 | with pytest.raises(ValueError): 26 | rgb('foobar') 27 | -------------------------------------------------------------------------------- /datashader/tests/test_composite.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | import numpy as np 3 | 4 | from datashader.composite import add, saturate, over, source 5 | 6 | src = np.array([[0x00000000, 0x00ffffff, 0xffffffff], 7 | [0x7dff0000, 0x7d00ff00, 0x7d0000ff], 8 | [0xffff0000, 0xff000000, 0x3a3b3c3d]], dtype='uint32') 9 | 10 | clear = np.uint32(0) 11 | clear_white = np.uint32(0x00ffffff) 12 | white = np.uint32(0xffffffff) 13 | blue = np.uint32(0xffff0000) 14 | half_blue = np.uint32(0x7dff0000) 15 | half_purple = np.uint32(0x7d7d007d) 16 | 17 | 18 | def test_source(): 19 | o = src.copy() 20 | o[0, :2] = clear 21 | np.testing.assert_equal(source(src, clear), o) 22 | o[0, :2] = clear_white 23 | np.testing.assert_equal(source(src, clear_white), o) 24 | o[0, :2] = half_blue 25 | np.testing.assert_equal(source(src, half_blue), o) 26 | 27 | 28 | def test_over(): 29 | o = src.copy() 30 | o[0, 1] = 0 31 | np.testing.assert_equal(over(src, clear), o) 32 | np.testing.assert_equal(over(src, clear_white), o) 33 | o = np.array([[0xffffffff, 0xffffffff, 0xffffffff], 34 | [0xffff8282, 0xff82ff82, 0xff8282ff], 35 | [0xffff0000, 0xff000000, 0xffd2d2d2]]) 36 | 
np.testing.assert_equal(over(src, white), o) 37 | o = np.array([[0xffff0000, 0xffff0000, 0xffffffff], 38 | [0xffff0000, 0xff827d00, 0xff82007d], 39 | [0xffff0000, 0xff000000, 0xffd20d0d]]) 40 | np.testing.assert_equal(over(src, blue), o) 41 | o = np.array([[0x7dff0000, 0x7dff0000, 0xffffffff], 42 | [0xbcff0000, 0xbc56a800, 0xbc5600a8], 43 | [0xffff0000, 0xff000000, 0x9ab51616]]) 44 | np.testing.assert_equal(over(src, half_blue), o) 45 | o = np.array([[0x7d7d007d, 0x7d7d007d, 0xffffffff], 46 | [0xbcd3002a, 0xbc2aa82a, 0xbc2a00d3], 47 | [0xffff0000, 0xff000000, 0x9a641664]]) 48 | np.testing.assert_equal(over(src, half_purple), o) 49 | 50 | 51 | def test_add(): 52 | o = src.copy() 53 | o[0, 1] = 0 54 | np.testing.assert_equal(add(src, clear), o) 55 | np.testing.assert_equal(add(src, clear_white), o) 56 | o = np.array([[0xffffffff, 0xffffffff, 0xffffffff], 57 | [0xffffffff, 0xffffffff, 0xffffffff], 58 | [0xffffffff, 0xffffffff, 0xffffffff]]) 59 | np.testing.assert_equal(add(src, white), o) 60 | o = np.array([[0xffff0000, 0xffff0000, 0xffffffff], 61 | [0xffff0000, 0xffff7d00, 0xffff007d], 62 | [0xffff0000, 0xffff0000, 0xffff0d0d]]) 63 | np.testing.assert_equal(add(src, blue), o) 64 | o = np.array([[0x7dff0000, 0x7dff0000, 0xffffffff], 65 | [0xfaff0000, 0xfa7f7f00, 0xfa7f007f], 66 | [0xffff0000, 0xff7d0000, 0xb7c01313]]) 67 | np.testing.assert_equal(add(src, half_blue), o) 68 | o = np.array([[0x7d7d007d, 0x7d7d007d, 0xffffffff], 69 | [0xfabe003e, 0xfa3e7f3e, 0xfa3e00be], 70 | [0xffff003d, 0xff3d003d, 0xb7681368]]) 71 | np.testing.assert_equal(add(src, half_purple), o) 72 | 73 | 74 | def test_saturate(): 75 | o = src.copy() 76 | o[0, 1] = 0 77 | np.testing.assert_equal(saturate(src, clear), o) 78 | np.testing.assert_equal(saturate(src, clear_white), o) 79 | o = np.full((3, 3), white, dtype='uint32') 80 | np.testing.assert_equal(saturate(src, white), o) 81 | o = np.full((3, 3), blue, dtype='uint32') 82 | np.testing.assert_equal(saturate(src, blue), o) 83 | o = np.array([[0x7dff0000, 0x7dff0000, 0xffff8282], 84 | [0xfaff0000, 0xfa7f7f00, 0xfa7f007f], 85 | [0xffff0000, 0xff7d0000, 0xb7c01313]]) 86 | np.testing.assert_equal(saturate(src, half_blue), o) 87 | o = np.array([[0x7d7d007d, 0x7d7d007d, 0xffbf82bf], 88 | [0xfabe003e, 0xfa3e7f3e, 0xfa3e00be], 89 | [0xffbf003d, 0xff3d003d, 0xb7681368]]) 90 | np.testing.assert_equal(saturate(src, half_purple), o) 91 | -------------------------------------------------------------------------------- /datashader/tests/test_layout.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | import pytest 3 | 4 | import numpy as np 5 | import pandas as pd 6 | 7 | from datashader.bundling import directly_connect_edges, hammer_bundle 8 | from datashader.layout import circular_layout, forceatlas2_layout, random_layout 9 | 10 | skimage = pytest.importorskip("skimage") 11 | 12 | 13 | @pytest.fixture 14 | def nodes(): 15 | # Four nodes arranged at the corners of a 200x200 square with one node 16 | # at the center 17 | nodes_df = pd.DataFrame({'id': np.arange(5), 18 | 'x': [0., -100., 100., -100., 100.], 19 | 'y': [0., 100., 100., -100., -100.]}) 20 | return nodes_df.set_index('id') 21 | 22 | 23 | @pytest.fixture 24 | def nodes_without_positions(): 25 | nodes_df = pd.DataFrame({'id': np.arange(5)}) 26 | return nodes_df.set_index('id') 27 | 28 | 29 | @pytest.fixture 30 | def edges(): 31 | # Four edges originating from the center node and connected to each 32 | # corner 33 | edges_df = pd.DataFrame({'id': 
np.arange(4), 34 | 'source': np.zeros(4, dtype=np.int64), 35 | 'target': np.arange(1, 5)}) 36 | return edges_df.set_index('id') 37 | 38 | 39 | @pytest.fixture 40 | def weighted_edges(): 41 | # Four weighted edges originating from the center node and connected 42 | # to each corner 43 | edges_df = pd.DataFrame({'id': np.arange(4), 44 | 'source': np.zeros(4, dtype=np.int64), 45 | 'target': np.arange(1, 5), 46 | 'weight': np.ones(4)}) 47 | return edges_df.set_index('id') 48 | 49 | 50 | @pytest.mark.parametrize('bundle', [directly_connect_edges, hammer_bundle]) 51 | @pytest.mark.parametrize('layout', [random_layout, circular_layout, forceatlas2_layout]) 52 | def test_renamed_columns(nodes, weighted_edges, bundle, layout): 53 | nodes = nodes.rename(columns={'x': 'xx', 'y': 'yy'}) 54 | edges = weighted_edges.rename(columns={'source': 'src', 'target': 'dst', 'weight': 'w'}) 55 | 56 | node_positions = layout(nodes, edges, x='xx', y='yy', source='src', target='dst', weight='w') 57 | df = bundle(node_positions, edges, x='xx', y='yy', source='src', target='dst', weight='w') 58 | 59 | assert 'xx' in df and 'x' not in df 60 | assert 'yy' in df and 'y' not in df 61 | assert 'w' in df and 'weight' not in df 62 | 63 | 64 | def test_forceatlas2_positioned_nodes_with_unweighted_edges(nodes, edges): 65 | df = forceatlas2_layout(nodes, edges) 66 | assert len(nodes) == len(df) 67 | assert not df.equals(nodes) 68 | 69 | 70 | def test_forceatlas2_positioned_nodes_with_weighted_edges(nodes, weighted_edges): 71 | df = forceatlas2_layout(nodes, weighted_edges) 72 | assert len(nodes) == len(df) 73 | assert not df.equals(nodes) 74 | 75 | 76 | def test_forceatlas2_unpositioned_nodes_with_unweighted_edges(nodes_without_positions, edges): 77 | df = forceatlas2_layout(nodes_without_positions, edges) 78 | assert len(nodes_without_positions) == len(df) 79 | assert not df.equals(nodes_without_positions) 80 | 81 | 82 | def test_forceatlas2_unpositioned_nodes_with_weighted_edges(nodes_without_positions, 83 | weighted_edges): 84 | df = forceatlas2_layout(nodes_without_positions, weighted_edges) 85 | assert len(nodes_without_positions) == len(df) 86 | assert not df.equals(nodes_without_positions) 87 | 88 | 89 | def test_random_layout(nodes_without_positions, edges): 90 | expected_x = [0.511821624, 0.144159612, 0.311831452, 0.827702593, 0.549593687] 91 | expected_y = [0.950463696, 0.948649447, 0.423326448, 0.409199136, 0.027559113] 92 | df = random_layout(nodes_without_positions, edges, seed=1) 93 | 94 | assert np.allclose(df['x'], expected_x) 95 | assert np.allclose(df['y'], expected_y) 96 | 97 | 98 | def test_uniform_circular_layout(nodes_without_positions, edges): 99 | expected_x = [1.0, 0.654508497187, 0.095491502813, 0.095491502813, 0.654508497187] 100 | expected_y = [0.5, 0.975528258148, 0.793892626146, 0.206107373854, 0.024471741852] 101 | 102 | df = circular_layout(nodes_without_positions, edges) 103 | 104 | assert np.allclose(df['x'], expected_x) 105 | assert np.allclose(df['y'], expected_y) 106 | 107 | 108 | def test_random_circular_layout(nodes_without_positions, edges): 109 | expected_x = [0.00137865, 0.97597639, 0.80853537, 0.97420002, 0.31060039] 110 | expected_y = [0.46289541, 0.34687760, 0.89345383, 0.34146188, 0.96273944] 111 | df = circular_layout(nodes_without_positions, edges, uniform=False, seed=1) 112 | 113 | assert np.allclose(df['x'], expected_x) 114 | assert np.allclose(df['y'], expected_y) 115 | -------------------------------------------------------------------------------- 
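The layout and bundling functions exercised above compose into a simple graph-rendering flow: a layout assigns `x`/`y` positions to the nodes, and a bundler converts the edge list into drawable paths. The sketch below is illustrative only (not part of the test suite); it reuses the five-node star graph from the fixtures and assumes the default `source`/`target` column names plus an environment with scikit-image installed, which the `importorskip` above indicates `hammer_bundle` requires:

```python
import numpy as np
import pandas as pd

from datashader.bundling import hammer_bundle
from datashader.layout import forceatlas2_layout

# Five-node star graph: node 0 connected to nodes 1-4, as in the fixtures above.
nodes = pd.DataFrame({'id': np.arange(5)}).set_index('id')
edges = pd.DataFrame({'id': np.arange(4),
                      'source': np.zeros(4, dtype=np.int64),
                      'target': np.arange(1, 5)}).set_index('id')

positions = forceatlas2_layout(nodes, edges)  # adds 'x' and 'y' columns to the nodes
paths = hammer_bundle(positions, edges)       # x/y vertices of the bundled edge paths
print(paths.head())
```

The resulting `paths` DataFrame can then be rasterized with `Canvas.line`, just as the point fixtures elsewhere in the test suite are rasterized with `Canvas.points`.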
/datashader/tests/test_macros.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | import warnings 3 | import pytest 4 | 5 | from datashader.macros import expand_varargs 6 | import inspect 7 | from numba import jit 8 | 9 | 10 | # Example functions to test expand_varargs on 11 | def function_no_vararg(a, b): 12 | return a + b 13 | 14 | 15 | def function_with_vararg(a, b, *others): 16 | return a + b - function_no_vararg(*others) 17 | 18 | 19 | def function_with_unsupported_vararg_use(a, b, *others): 20 | print(others[0]) 21 | function_with_vararg(a, b, *others) 22 | 23 | 24 | @jit(nopython=True, nogil=True) 25 | def function_no_vararg_numba(a, b): 26 | return a + b 27 | 28 | 29 | def function_with_vararg_call_numba(a, b, *others): 30 | return a + b - function_no_vararg_numba(*others) 31 | 32 | 33 | # Helper functions 34 | def get_args(fn): 35 | with warnings.catch_warnings(): 36 | warnings.simplefilter("ignore") 37 | spec = inspect.getfullargspec(fn) 38 | 39 | args = spec.args 40 | if spec.varargs: 41 | args += ['*' + spec.varargs] 42 | 43 | return args 44 | 45 | 46 | # Tests 47 | def test_expand_varargs(): 48 | assert get_args(function_with_vararg) == ['a', 'b', '*others'] 49 | function_with_vararg_expanded = expand_varargs(2)(function_with_vararg) 50 | assert get_args(function_with_vararg_expanded) == ['a', 'b', '_0', '_1'] 51 | 52 | assert (function_with_vararg(1, 2, 3, 4) == 53 | function_with_vararg_expanded(1, 2, 3, 4)) 54 | 55 | 56 | def test_invalid_expand_number(): 57 | with pytest.raises(ValueError) as e: 58 | # User forgets to construct decorator with expand_number 59 | expand_varargs(function_no_vararg) 60 | 61 | assert e.match(r"non\-negative integer") 62 | 63 | 64 | def test_no_varargs_error(): 65 | with pytest.raises(ValueError) as e: 66 | expand_varargs(2)(function_no_vararg) 67 | 68 | assert e.match(r"does not have a variable length positional argument") 69 | 70 | 71 | def test_unsupported_vararg_use(): 72 | with pytest.raises(ValueError) as e: 73 | expand_varargs(2)(function_with_unsupported_vararg_use) 74 | 75 | assert e.match(r"unsupported context") 76 | 77 | 78 | def test_numba_jit_expanded_function(): 79 | jit_fn = jit(nopython=True, nogil=True)( 80 | expand_varargs(2)(function_with_vararg_call_numba) 81 | ) 82 | assert function_with_vararg_call_numba(1, 2, 3, 4) == jit_fn(1, 2, 3, 4) 83 | -------------------------------------------------------------------------------- /datashader/tests/test_mpl_ext.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | import pytest 3 | 4 | pytest.importorskip("matplotlib") 5 | 6 | import matplotlib 7 | matplotlib.use("Agg") 8 | 9 | import matplotlib.pyplot as plt 10 | import numpy as np 11 | import pandas as pd 12 | 13 | import datashader as ds 14 | from datashader.mpl_ext import dsshow 15 | 16 | 17 | df = pd.DataFrame( 18 | { 19 | "x": np.array([0.0] * 10 + [1] * 10), 20 | "y": np.array([0.0] * 5 + [1] * 5 + [0] * 5 + [1] * 5), 21 | } 22 | ) 23 | 24 | 25 | def test_image_initialize(): 26 | plt.figure(dpi=100) 27 | ax = plt.subplot(111) 28 | da = dsshow(df, ds.Point("x", "y"), ax=ax) 29 | 30 | data = da.get_ds_data() 31 | assert data[0, 0] == 5 32 | assert data[0, -1] == 5 33 | assert data[-1, 0] == 5 34 | assert data[-1, -1] == 5 35 | 36 | 37 | def test_image_update(): 38 | fig = plt.figure(dpi=100) 39 | ax = plt.subplot(111) 40 | da = dsshow(df, ds.Point("x", "y"), ax=ax) 41 | 
ax.set_xlim(0, 0.5) 42 | ax.set_ylim(0, 0.5) 43 | fig.canvas.draw() 44 | 45 | data = da.get_ds_data() 46 | assert data[0, 0] == 5 47 | assert data[0, -1] == 0 48 | assert data[-1, 0] == 0 49 | assert data[-1, -1] == 0 50 | -------------------------------------------------------------------------------- /datashader/tests/test_pipeline.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | import numpy as np 3 | import pandas as pd 4 | import pytest 5 | import datashader as ds 6 | import datashader.transfer_functions as tf 7 | 8 | 9 | df = pd.DataFrame({'x': np.array([0.] * 10 + [1] * 10), 10 | 'y': np.array([0.] * 5 + [1] * 5 + [0] * 5 + [1] * 5), 11 | 'f64': np.arange(20, dtype='f8')}) 12 | df.loc[2, 'f64'] = np.nan  # set a NaN in the 'f64' column (row label 2) 13 | 14 | cvs = ds.Canvas(plot_width=2, plot_height=2, x_range=(0, 1), y_range=(0, 1)) 15 | cvs10 = ds.Canvas(plot_width=10, plot_height=10, x_range=(0, 1), y_range=(0, 1)) 16 | 17 | 18 | def test_pipeline(): 19 | pipeline = ds.Pipeline(df, ds.Point('x', 'y')) 20 | img = pipeline((0, 1), (0, 1), 2, 2) 21 | agg = cvs.points(df, 'x', 'y', ds.count()) 22 | assert img.equals(tf.shade(agg)) 23 | 24 | def color_fn(agg): 25 | return tf.shade(agg, 'pink', 'red') 26 | pipeline.color_fn = color_fn 27 | img = pipeline((0, 1), (0, 1), 2, 2) 28 | assert img.equals(color_fn(agg)) 29 | 30 | def transform_fn(agg): 31 | return agg + 1 32 | pipeline.transform_fn = transform_fn 33 | img = pipeline((0, 1), (0, 1), 2, 2) 34 | assert img.equals(color_fn(transform_fn(agg))) 35 | 36 | pipeline = ds.Pipeline(df, ds.Point('x', 'y'), ds.sum('f64')) 37 | img = pipeline((0, 1), (0, 1), 2, 2) 38 | agg = cvs.points(df, 'x', 'y', ds.sum('f64')) 39 | assert img.equals(tf.shade(agg)) 40 | 41 | 42 | @pytest.mark.parametrize("line_width", [0.0, 0.5, 1.0, 2.0]) 43 | def test_pipeline_antialias(line_width): 44 | glyph = ds.glyphs.LineAxis0('x', 'y') 45 | 46 | glyph.set_line_width(line_width=line_width) 47 | assert glyph._line_width == line_width 48 | assert glyph.antialiased == (line_width > 0) 49 | 50 | pipeline = ds.Pipeline(df, glyph) 51 | img = pipeline(width=cvs10.plot_width, height=cvs10.plot_height, 52 | x_range=cvs10.x_range, y_range=cvs10.y_range) 53 | agg = cvs10.line(df, 'x', 'y', agg=ds.reductions.count(), line_width=line_width) 54 | assert img.equals(tf.dynspread(tf.shade(agg))) 55 | -------------------------------------------------------------------------------- /datashader/tests/test_reductions.py: -------------------------------------------------------------------------------- 1 | import datashader as ds 2 | 3 | 4 | def all_subclasses(cls): 5 | items1 = {cls, *cls.__subclasses__()} 6 | items2 = {s for c in cls.__subclasses__() for s in all_subclasses(c)} 7 | return items1 | items2 8 | 9 | 10 | def test_string_output(): 11 | expected = { 12 | "any": "any('col')", 13 | "by": "by(column='col', reduction=count())", 14 | "count": "count()", 15 | "count_cat": "count_cat(column='col')", 16 | "first": "first('col')", 17 | "first_n": "first_n(column='col', n=1)", 18 | "last": "last('col')", 19 | "last_n": "last_n(column='col', n=1)", 20 | "m2": "m2('col')", 21 | "max": "max('col')", 22 | "max_n": "max_n(column='col', n=1)", 23 | "mean": "mean('col')", 24 | "min": "min('col')", 25 | "min_n": "min_n(column='col', n=1)", 26 | "mode": "mode('col')", 27 | "std": "std('col')", 28 | "sum": "sum('col')", 29 | "summary": "summary(a=1)", 30 | "var": "var('col')", 31 | "where": "where(selector=min('col'), lookup_column='col')", 32 | } 33 | 34 | 
count = 0 35 | for red in all_subclasses(ds.reductions.Reduction) | all_subclasses(ds.reductions.summary): 36 | red_name = red.__name__ 37 | if red_name.startswith("_") or "Reduction" in red_name: 38 | continue 39 | elif red_name in ("by", "count_cat"): 40 | assert str(red("col")) == expected[red_name] 41 | elif red_name == "where": 42 | assert str(red(ds.min("col"), "col")) == expected[red_name] 43 | elif red_name == "summary": 44 | assert str(red(a=1)) == expected[red_name] 45 | else: 46 | assert str(red("col")) == expected[red_name] 47 | count += 1 48 | 49 | assert count == 20 # Update if more subclasses are added 50 | -------------------------------------------------------------------------------- /datashader/tests/test_tiles.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import pytest 4 | 5 | import datashader as ds 6 | import datashader.transfer_functions as tf 7 | 8 | from datashader.colors import viridis 9 | 10 | from datashader.tiles import render_tiles 11 | from datashader.tiles import gen_super_tiles 12 | from datashader.tiles import _get_super_tile_min_max 13 | from datashader.tiles import calculate_zoom_level_stats 14 | from datashader.tiles import MercatorTileDefinition 15 | 16 | from datashader.tests.utils import dask_skip 17 | 18 | import numpy as np 19 | import pandas as pd 20 | 21 | TOLERANCE = 0.01 22 | 23 | MERCATOR_CONST = 20037508.34 24 | 25 | df = None 26 | def mock_load_data_func(x_range, y_range): 27 | global df 28 | if df is None: 29 | rng = np.random.default_rng() 30 | xs = rng.normal(loc=0, scale=500000, size=10000000) 31 | ys = rng.normal(loc=0, scale=500000, size=10000000) 32 | df = pd.DataFrame(dict(x=xs, y=ys)) 33 | 34 | return df.loc[df['x'].between(*x_range) & df['y'].between(*y_range)] 35 | 36 | 37 | def mock_rasterize_func(df, x_range, y_range, height, width): 38 | cvs = ds.Canvas(x_range=x_range, y_range=y_range, 39 | plot_height=height, plot_width=width) 40 | agg = cvs.points(df, 'x', 'y') 41 | return agg 42 | 43 | 44 | def mock_shader_func(agg, span=None): 45 | img = tf.shade(agg, cmap=viridis, span=span, how='log') 46 | img = tf.set_background(img, 'black') 47 | return img 48 | 49 | 50 | def mock_post_render_func(img, **kwargs): 51 | ImageDraw = pytest.importorskip("PIL.ImageDraw") 52 | 53 | (x, y) = (5, 5) 54 | info = "x={} / y={} / z={}, w={}, h={}".format(kwargs['x'], 55 | kwargs['y'], 56 | kwargs['z'], 57 | img.width, 58 | img.height) 59 | 60 | draw = ImageDraw.Draw(img) 61 | draw.text((x, y), info, fill='rgb(255, 255, 255)') 62 | return img 63 | 64 | 65 | # TODO: mark with slow_test 66 | @dask_skip 67 | def test_render_tiles(): 68 | pytest.importorskip("PIL") 69 | 70 | full_extent_of_data = (-500000, -500000, 71 | 500000, 500000) 72 | levels = list(range(2)) 73 | output_path = 'test_tiles_output' 74 | results = render_tiles(full_extent_of_data, 75 | levels, 76 | load_data_func=mock_load_data_func, 77 | rasterize_func=mock_rasterize_func, 78 | shader_func=mock_shader_func, 79 | post_render_func=mock_post_render_func, 80 | output_path=output_path) 81 | 82 | assert results 83 | assert isinstance(results, dict) 84 | 85 | for level in levels: 86 | assert level in results 87 | assert isinstance(results[level], dict) 88 | 89 | assert results[0]['success'] 90 | assert results[0]['stats'] 91 | assert results[0]['supertile_count'] 92 | 93 | 94 | def assert_is_numeric(value): 95 | is_int_or_float = isinstance(value, (int, float)) 96 | type_name = type(value).__name__ 97 | 
is_numpy_int_or_float = 'int' in type_name or 'float' in type_name 98 | assert any([is_int_or_float, is_numpy_int_or_float]) 99 | 100 | 101 | 102 | def test_get_super_tile_min_max(): 103 | 104 | tile_info = {'level': 0, 105 | 'x_range': (-MERCATOR_CONST, MERCATOR_CONST), 106 | 'y_range': (-MERCATOR_CONST, MERCATOR_CONST), 107 | 'tile_size': 256, 108 | 'span': (0, 1000)} 109 | 110 | agg = _get_super_tile_min_max(tile_info, mock_load_data_func, mock_rasterize_func) 111 | 112 | result = [np.nanmin(agg.data), np.nanmax(agg.data)] 113 | 114 | assert isinstance(result, list) 115 | assert len(result) == 2 116 | assert_is_numeric(result[0]) 117 | assert_is_numeric(result[1]) 118 | 119 | @dask_skip 120 | def test_calculate_zoom_level_stats_with_fullscan_ranging_strategy(): 121 | full_extent = (-MERCATOR_CONST, -MERCATOR_CONST, 122 | MERCATOR_CONST, MERCATOR_CONST) 123 | level = 0 124 | color_ranging_strategy = 'fullscan' 125 | super_tiles, span = calculate_zoom_level_stats(list(gen_super_tiles(full_extent, level)), 126 | mock_load_data_func, 127 | mock_rasterize_func, 128 | color_ranging_strategy=color_ranging_strategy) 129 | 130 | assert isinstance(span, (list, tuple)) 131 | assert len(span) == 2 132 | assert_is_numeric(span[0]) 133 | assert_is_numeric(span[1]) 134 | 135 | def test_meters_to_tile(): 136 | # Part of NYC (used in taxi demo) 137 | full_extent_of_data = (-8243206.93436, 4968192.04221, -8226510.539480001, 4982886.20438) 138 | xmin, ymin, xmax, ymax = full_extent_of_data 139 | zoom = 12 140 | tile_def = MercatorTileDefinition((xmin, xmax), (ymin, ymax), tile_size=256) 141 | tile = tile_def.meters_to_tile(xmin, ymin, zoom) 142 | assert tile == (1205, 1540) # using Google tile coordinates, not TMS 143 | -------------------------------------------------------------------------------- /datashader/tests/test_utils.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | import numpy as np 3 | from xarray import DataArray 4 | 5 | from datashader.datashape import dshape 6 | from datashader.utils import Dispatcher, apply, calc_res, isreal, orient_array 7 | 8 | 9 | def test_Dispatcher(): 10 | foo = Dispatcher() 11 | foo.register(int, lambda a, b, c=1: a + b + c) 12 | foo.register(float, lambda a, b, c=1: a - b + c) 13 | foo.register(object, lambda a, b, c=1: 10) 14 | 15 | class Bar: 16 | pass 17 | b = Bar() 18 | assert foo(1, 2) == 4 19 | assert foo(1, 2.0, 3.0) == 6.0 20 | assert foo(1.0, 2.0, 3.0) == 2.0 21 | assert foo(b, 2) == 10 22 | 23 | 24 | def test_isreal(): 25 | assert isreal('int32') 26 | assert isreal(dshape('int32')) 27 | assert isreal('?int32') 28 | assert isreal('float64') 29 | assert not isreal('complex64') 30 | assert not isreal('{x: int64, y: float64}') 31 | 32 | 33 | def test_apply(): 34 | def f(a, b, c=1, d=2): 35 | return a + b + c + d 36 | assert apply(f, (1, 2,)) == 6 37 | assert apply(f, (1, 2,), dict(c=3)) == 8 38 | 39 | 40 | def test_calc_res(): 41 | x = [5, 7] 42 | y = [0, 1] 43 | z = [[0, 0], [0, 0]] 44 | dims = ('y', 'x') 45 | 46 | # x and y increasing 47 | xarr = DataArray(z, coords=dict(x=x, y=y), dims=dims) 48 | xres, yres = calc_res(xarr) 49 | assert xres == 2 50 | assert yres == -1 51 | 52 | # x increasing, y decreasing 53 | xarr = DataArray(z, coords=dict(x=x, y=y[::-1]), dims=dims) 54 | xres, yres = calc_res(xarr) 55 | assert xres == 2 56 | assert yres == 1 57 | 58 | # x decreasing, y increasing 59 | xarr = DataArray(z, coords=dict(x=x[::-1], y=y), dims=dims) 60 | xres, yres = 
calc_res(xarr) 61 | assert xres == -2 62 | assert yres == -1 63 | 64 | # x and y decreasing 65 | xarr = DataArray(z, coords=dict(x=x[::-1], y=y[::-1]), dims=dims) 66 | xres, yres = calc_res(xarr) 67 | assert xres == -2 68 | assert yres == 1 69 | 70 | 71 | def test_orient_array(): 72 | x = [5, 7] 73 | y = [0, 1] 74 | z = np.array([[0, 1], [2, 3]]) 75 | dims = ('y', 'x') 76 | 77 | # x and y increasing 78 | xarr = DataArray(z, coords=dict(x=x, y=y), dims=dims) 79 | arr = orient_array(xarr) 80 | assert np.array_equal(arr, z) 81 | 82 | # x increasing, y decreasing 83 | xarr = DataArray(z, coords=dict(x=x, y=y[::-1]), dims=dims) 84 | arr = orient_array(xarr) 85 | assert np.array_equal(arr, z[::-1]) 86 | 87 | # x decreasing, y increasing 88 | xarr = DataArray(z, coords=dict(x=x[::-1], y=y), dims=dims) 89 | arr = orient_array(xarr) 90 | assert np.array_equal(arr, z[:, ::-1]) 91 | 92 | # x and y decreasing 93 | xarr = DataArray(z, coords=dict(x=x[::-1], y=y[::-1]), dims=dims) 94 | arr = orient_array(xarr) 95 | assert np.array_equal(arr, z[::-1, ::-1]) 96 | -------------------------------------------------------------------------------- /datashader/tests/utils.py: -------------------------------------------------------------------------------- 1 | from importlib.util import find_spec 2 | 3 | import pytest 4 | 5 | __all__ = ("DASK_UNAVAILABLE", "dask_skip") 6 | 7 | DASK_UNAVAILABLE = find_spec("dask") is None 8 | 9 | dask_skip = pytest.mark.skipif(DASK_UNAVAILABLE, reason="dask is not available") 10 | -------------------------------------------------------------------------------- /datashader/transfer_functions/_cpu_utils.py: -------------------------------------------------------------------------------- 1 | from datashader.utils import ngjit 2 | 3 | 4 | @ngjit 5 | def masked_clip_2d(data, mask, lower, upper): 6 | """ 7 | Clip the elements of an input array between lower and upper bounds, 8 | skipping over elements that are masked out. 
9 | 10 | Parameters 11 | ---------- 12 | data: np.ndarray 13 | Numeric ndarray that will be clipped in-place 14 | mask: np.ndarray 15 | Boolean ndarray where True values indicate elements that should be 16 | skipped 17 | lower: int or float 18 | Lower bound to clip to 19 | upper: int or float 20 | Upper bound to clip to 21 | 22 | Returns 23 | ------- 24 | None 25 | data array is modified in-place 26 | """ 27 | for i in range(data.shape[0]): 28 | for j in range(data.shape[1]): 29 | if mask[i, j]: 30 | continue 31 | val = data[i, j] 32 | if val < lower: 33 | data[i, j] = lower 34 | elif val > upper: 35 | data[i, j] = upper 36 | -------------------------------------------------------------------------------- /doc/_static/css/custom.css: -------------------------------------------------------------------------------- 1 | :root { 2 | --pst-color-primary: rgb(47,47,47); 3 | --pst-color-link: rgb(119,76,154); 4 | --pst-color-link-hover: rgb(183,133,220); 5 | } 6 | -------------------------------------------------------------------------------- /doc/_static/datashader-logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/doc/_static/datashader-logo.png -------------------------------------------------------------------------------- /doc/_static/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/doc/_static/favicon.ico -------------------------------------------------------------------------------- /doc/_static/logo_horizontal.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/doc/_static/logo_horizontal.png -------------------------------------------------------------------------------- /doc/_static/logo_horizontal_s.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/doc/_static/logo_horizontal_s.png -------------------------------------------------------------------------------- /doc/_static/logo_stacked.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/doc/_static/logo_stacked.png -------------------------------------------------------------------------------- /doc/_static/logo_stacked_s.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/doc/_static/logo_stacked_s.png -------------------------------------------------------------------------------- /doc/about.rst: -------------------------------------------------------------------------------- 1 | About Us 2 | ======== 3 | 4 | Datashader is completely open source, available under a BSD license freely for both commercial and non-commercial use. Datashader was originally developed with the support of `Anaconda Inc. `_, and is now maintained by Anaconda developers and community contributors. 5 | 6 | Datashader is part of the `HoloViz `_ family of tools. 
The `holoviz.org `_ website shows how to use Datashader together with other libraries to solve complex problems, with detailed tutorials and examples. You can see a variety of projects using Datashader at `examples.pyviz.org `_, and you can compare Datashader to other available tools at `pyviz.org `_. 7 | 8 | If you have any questions or usage issues visit the `Datashader Discourse `_ site. If you are interested in contributing to Datashader development to help address some of the `open issues `_, see our `developer instructions `_ to set up your development environment. 9 | 10 | If you like Datashader and have built something you want to share, tweet a link or screenshot of your latest creation at @Datashader, along with any other library you used (@Panel_org, @HoloViews, @BokehPlots, @Matplotlib, etc.). Thanks! 11 | 12 | Datashader is supported and maintained by `Anaconda `_. 13 | -------------------------------------------------------------------------------- /doc/api.rst: -------------------------------------------------------------------------------- 1 | API 2 | === 3 | 4 | Entry Points 5 | ------------ 6 | 7 | .. currentmodule:: datashader 8 | 9 | **Canvas** 10 | 11 | .. autosummary:: 12 | 13 | Canvas 14 | Canvas.line 15 | Canvas.points 16 | Canvas.raster 17 | Canvas.trimesh 18 | Canvas.validate 19 | Canvas.area 20 | Canvas.polygons 21 | Canvas.quadmesh 22 | 23 | .. currentmodule:: datashader 24 | 25 | **Pipeline** 26 | 27 | .. autosummary:: 28 | 29 | Pipeline 30 | 31 | Edge Bundling 32 | ------------- 33 | 34 | .. currentmodule:: datashader.bundling 35 | .. autosummary:: 36 | 37 | directly_connect_edges 38 | hammer_bundle 39 | 40 | Glyphs 41 | ------ 42 | 43 | .. currentmodule:: datashader.glyphs 44 | 45 | .. autosummary:: 46 | 47 | Point 48 | Triangles 49 | PolygonGeom 50 | QuadMeshRaster 51 | QuadMeshRectilinear 52 | QuadMeshCurvilinear 53 | LineAxis0 54 | LineAxis0Multi 55 | LinesAxis1 56 | LinesAxis1XConstant 57 | LinesAxis1YConstant 58 | LinesAxis1Ragged 59 | LineAxis1Geometry 60 | AreaToZeroAxis0 61 | AreaToZeroAxis0Multi 62 | AreaToZeroAxis1 63 | AreaToZeroAxis1XConstant 64 | AreaToZeroAxis1YConstant 65 | AreaToZeroAxis1Ragged 66 | AreaToLineAxis0 67 | AreaToLineAxis0Multi 68 | AreaToLineAxis1 69 | AreaToLineAxis1XConstant 70 | AreaToLineAxis1YConstant 71 | AreaToLineAxis1Ragged 72 | 73 | Reductions 74 | ---------- 75 | 76 | .. currentmodule:: datashader.reductions 77 | .. autosummary:: 78 | 79 | any 80 | count 81 | by 82 | first 83 | last 84 | m2 85 | max 86 | mean 87 | min 88 | mode 89 | std 90 | sum 91 | summary 92 | var 93 | where 94 | 95 | The table below indicates which ``Reduction`` classes are supported on the CPU (e.g. using 96 | ``pandas``), on CPU with Dask (e.g. using ``dask.dataframe``), on the GPU (e.g. using ``cudf``), 97 | and on the GPU with Dask (e.g. using ``dask-cudf``). The final two columns indicate which reductions 98 | support antialiased lines and which can be used as the ``selector`` in a 99 | :class:`~datashader.reductions.where` reduction. 100 | 101 | .. csv-table:: 102 | :file: reduction.csv 103 | :header-rows: 1 104 | 105 | The :class:`~datashader.reductions.mode` reduction is not listed in the table and can only be used 106 | with ``Canvas.raster``. A :class:`~datashader.reductions.by` reduction supports anything that its 107 | contained reduction (that is applied separately to each category) supports. 108 | 109 | **Categorizers** 110 | 111 | .. 
autosummary:: 112 | 113 | category_binning 114 | category_modulo 115 | 116 | Transfer Functions 117 | ------------------ 118 | 119 | .. currentmodule:: datashader.transfer_functions 120 | 121 | **Image** 122 | 123 | .. autosummary:: 124 | 125 | Image 126 | Image.to_bytesio 127 | Image.to_pil 128 | 129 | .. currentmodule:: datashader.transfer_functions 130 | 131 | **Images** 132 | 133 | .. autosummary:: 134 | 135 | Images 136 | Images.cols 137 | 138 | .. currentmodule:: datashader.transfer_functions 139 | 140 | **Other** 141 | 142 | .. autosummary:: 143 | 144 | dynspread 145 | set_background 146 | shade 147 | spread 148 | stack 149 | 150 | Definitions 151 | ----------- 152 | 153 | .. currentmodule:: datashader 154 | .. autoclass:: Canvas 155 | .. autoclass:: Pipeline 156 | 157 | .. currentmodule:: datashader.bundling 158 | .. autoclass:: directly_connect_edges 159 | .. autoclass:: hammer_bundle 160 | 161 | .. currentmodule:: datashader.glyphs 162 | .. autoclass:: Point 163 | .. autoclass:: Triangles 164 | .. autoclass:: PolygonGeom 165 | .. autoclass:: QuadMeshRaster 166 | .. autoclass:: QuadMeshRectilinear 167 | .. autoclass:: QuadMeshCurvilinear 168 | .. autoclass:: LineAxis0 169 | .. autoclass:: LineAxis0Multi 170 | .. autoclass:: LinesAxis1 171 | .. autoclass:: LinesAxis1XConstant 172 | .. autoclass:: LinesAxis1YConstant 173 | .. autoclass:: LinesAxis1Ragged 174 | .. autoclass:: LineAxis1Geometry 175 | .. autoclass:: AreaToZeroAxis0 176 | .. autoclass:: AreaToZeroAxis0Multi 177 | .. autoclass:: AreaToZeroAxis1 178 | .. autoclass:: AreaToZeroAxis1XConstant 179 | .. autoclass:: AreaToZeroAxis1YConstant 180 | .. autoclass:: AreaToZeroAxis1Ragged 181 | .. autoclass:: AreaToLineAxis0 182 | .. autoclass:: AreaToLineAxis0Multi 183 | .. autoclass:: AreaToLineAxis1 184 | .. autoclass:: AreaToLineAxis1XConstant 185 | .. autoclass:: AreaToLineAxis1YConstant 186 | .. autoclass:: AreaToLineAxis1Ragged 187 | 188 | .. currentmodule:: datashader.reductions 189 | .. autoclass:: any 190 | .. autoclass:: count 191 | .. autoclass:: count_cat 192 | .. autoclass:: first 193 | .. autoclass:: last 194 | .. autoclass:: m2 195 | .. autoclass:: max 196 | .. autoclass:: mean 197 | .. autoclass:: min 198 | .. autoclass:: mode 199 | .. autoclass:: std 200 | .. autoclass:: sum 201 | .. autoclass:: summary 202 | .. autoclass:: var 203 | .. autoclass:: where 204 | 205 | .. automodule:: datashader.transfer_functions 206 | :members: 207 | -------------------------------------------------------------------------------- /doc/conf.py: -------------------------------------------------------------------------------- 1 | from nbsite.shared_conf import * 2 | 3 | project = 'Datashader' 4 | copyright_years['start_year'] = '2016' 5 | copyright = copyright_fmt.format(**copyright_years) 6 | description = 'Turns even the largest data into images, accurately.' 7 | 8 | from datashader import __version__ 9 | version = release = base_version(__version__) 10 | 11 | html_static_path += ['_static'] 12 | html_theme = 'pydata_sphinx_theme' 13 | 14 | html_css_files += [ 15 | 'css/custom.css' 16 | ] 17 | 18 | # Somehow .ipynb started to take precedence over .rst. 19 | # This broke the landing page `index.rst` as it embeds `index.ipynb` file. 20 | # Adding `.md` to make our life easier in the future. 
21 | source_suffix = ['.rst', '.md', '.ipynb'] 22 | 23 | html_logo = '_static/logo_horizontal.svg' 24 | html_favicon = '_static/favicon.ico' 25 | 26 | html_theme_options.update({ 27 | 'github_url': 'https://github.com/holoviz/datashader', 28 | 'icon_links': [ 29 | { 30 | 'name': 'Twitter', 31 | 'url': 'https://twitter.com/datashader', 32 | 'icon': 'fa-brands fa-twitter-square', 33 | }, 34 | { 35 | 'name': 'Discourse', 36 | 'url': 'https://discourse.holoviz.org/c/datashader/', 37 | 'icon': 'fa-brands fa-discourse', 38 | }, 39 | { 40 | "name": "HoloViz", 41 | "url": "https://holoviz.org/", 42 | "icon": "_static/holoviz-icon-white.svg", 43 | "type": "local", 44 | }, 45 | { 46 | "name": "Discord", 47 | "url": "https://discord.gg/AXRHnJU6sP", 48 | "icon": "fa-brands fa-discord", 49 | }, 50 | ], 51 | "pygment_dark_style": "material" 52 | }) 53 | 54 | html_context.update({ 55 | # Used to add binder links to the latest released tag. 56 | 'last_release': f'v{release}', 57 | 'github_user': 'holoviz', 58 | 'github_repo': 'datashader', 59 | }) 60 | 61 | extensions += [ 62 | 'sphinx.ext.autosummary', 63 | 'numpydoc', 64 | 'nbsite.analytics', 65 | 'sphinxcontrib.mermaid', 66 | 'sphinx_reredirects', 67 | ] 68 | 69 | myst_fence_as_directive = ["mermaid"] 70 | 71 | nbsite_analytics = { 72 | 'goatcounter_holoviz': True, 73 | } 74 | 75 | nbbuild_cell_timeout = 2000 76 | 77 | redirects = { 78 | 'topics/index': 'https://examples.holoviz.org', 79 | } 80 | 81 | # Datashader uses sphinx.ext.autodoc (e.g. automodule) for its API reference 82 | # and automatically include a module that contains Image. Image inherits 83 | # from xr.DataArray. Datashader uses numpydoc to parse the docstrings. 84 | # It turns out xarray broke numpydoc https://github.com/pydata/xarray/issues/8596 85 | # This is a bad hack to work around this issue. 86 | 87 | import numpydoc.docscrape # noqa 88 | 89 | original_error_location = numpydoc.docscrape.NumpyDocString._error_location 90 | 91 | def patch_error_location(self, msg, error=True): 92 | try: 93 | original_error_location(self, msg, error) 94 | except ValueError as e: 95 | if "site-packages/xarray" in str(e): 96 | return 97 | else: 98 | raise e 99 | 100 | numpydoc.docscrape.NumpyDocString._error_location = patch_error_location 101 | 102 | # Override the Sphinx default title that appends `documentation` 103 | html_title = f'{project} v{version}' 104 | -------------------------------------------------------------------------------- /doc/getting_started/index.rst: -------------------------------------------------------------------------------- 1 | *************** 2 | Getting Started 3 | *************** 4 | 5 | Installation 6 | ------------ 7 | 8 | Datashader supports Python 3.10, 3.11, 3.12 and 3.13 on Linux, Windows, or Mac 9 | and can be installed with conda:: 10 | 11 | conda install datashader 12 | 13 | or with pip:: 14 | 15 | pip install datashader 16 | 17 | For the best performance, we recommend using conda so that you are 18 | sure to get numerical libraries optimized for your platform. 19 | The latest releases are available on the pyviz channel ``conda install -c pyviz datashader`` 20 | and the latest pre-release versions are available on the dev-labeled channel 21 | ``conda install -c pyviz/label/dev datashader``. 
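To confirm that the installation works, you can aggregate a tiny dataset from Python; this is a minimal sanity check using only the public ``Canvas`` API::

    import pandas as pd
    import datashader as ds

    print(ds.__version__)

    # Four points binned onto a 2x2 grid; a working install prints a 2x2 count array
    df = pd.DataFrame({'x': [0.0, 0.0, 1.0, 1.0], 'y': [0.0, 1.0, 0.0, 1.0]})
    cvs = ds.Canvas(plot_width=2, plot_height=2, x_range=(0, 1), y_range=(0, 1))
    print(cvs.points(df, 'x', 'y', ds.count()))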
22 | 23 | Fetching Examples 24 | ----------------- 25 | 26 | Once you've installed datashader as above, you can fetch the examples:: 27 | 28 | datashader examples 29 | cd datashader-examples 30 | 31 | This will create a new directory called `datashader-examples` with all the 32 | data needed to run the examples. 33 | 34 | To run all the examples you will need some extra dependencies. If you installed 35 | datashader **within a conda environment**, with that environment active run:: 36 | 37 | conda env update --file environment.yml 38 | 39 | Otherwise create a new environment:: 40 | 41 | conda env create --name datashader --file environment.yml 42 | conda activate datashader 43 | 44 | Usage 45 | ----- 46 | 47 | .. notebook:: datashader ../../examples/getting_started/index.ipynb 48 | :offset: 0 49 | 50 | .. toctree:: 51 | :titlesonly: 52 | :maxdepth: 2 53 | :hidden: 54 | 55 | Introduction 56 | Pipeline 57 | Interactivity 58 | 59 | If you have any questions, please refer to `FAQ <../FAQ>`_ 60 | and if that doesn't help, feel free to post an 61 | `issue on GitHub `_ or a 62 | `question on discourse `_. 63 | 64 | Developer Instructions 65 | ---------------------- 66 | 67 | 1. Install Python 3 `miniconda `_ or `anaconda `_, if you don't already have it on your system. 68 | 69 | 2. Clone the datashader git repository if you do not already have it:: 70 | 71 | git clone https://github.com/holoviz/datashader.git 72 | 73 | 3. Set up a new conda environment with all of the dependencies needed to run the examples:: 74 | 75 | cd datashader 76 | conda env create --name datashader --file ./examples/environment.yml 77 | conda activate datashader 78 | 79 | 4. Put the datashader directory into the Python path in this environment:: 80 | 81 | pip install --no-deps -e . 82 | -------------------------------------------------------------------------------- /doc/governance/project-doc/CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | For the contributing policy, see [HoloViz/HoloViz - CONTRIBUTING.md](). 4 | 5 | The Datashader Project’s equivalently named documents take precedence over any external materials referenced within this linked document above. 6 | -------------------------------------------------------------------------------- /doc/governance/project-doc/GOVERNANCE.md: -------------------------------------------------------------------------------- 1 | # Governance Policy 2 | 3 | The "Project" is herein defined as the activities related to this specific GitHub repository [Datashader](), within the `HoloViz` GitHub Organization. 4 | 5 | This Project adopts the governance specified by all of the numbered sections of [HoloViz/HoloViz - GOVERNANCE.md](). 6 | 7 | The Datashader Project’s equivalently named documents take precedence over any external materials referenced within this linked document above. 8 | -------------------------------------------------------------------------------- /doc/governance/project-doc/LICENSE.md: -------------------------------------------------------------------------------- 1 | # License 2 | 3 | For the license, see [HoloViz/Datashader - LICENSE.txt](). 4 | -------------------------------------------------------------------------------- /doc/governance/project-doc/MEMBERS.md: -------------------------------------------------------------------------------- 1 | # Maintainers 2 | 3 | For member policy, see the description at the top of [HoloViz/HoloViz - MEMBERS.md]().
4 | 5 | The Datashader Project’s equivalently named documents take precedence over any external materials referenced within this linked document above. 6 | 7 | | **NAME** | **Role** | **GitHub Handle** | 8 | | --- | --- | --- | 9 | | James Bednar | Project Director | [jbednar](https://github.com/jbednar) | 10 | | Philipp Rudiger | Maintainer | [philippjfr](https://github.com/philippjfr) | 11 | | Simon Hansen | Maintainer | [hoxbro](https://github.com/hoxbro) | 12 | -------------------------------------------------------------------------------- /doc/index.rst: -------------------------------------------------------------------------------- 1 | .. Datashader documentation main file 2 | 3 | .. raw:: html 4 | 5 |

6 | 7 | **Accurately render even the largest data** 8 | 9 | New to Datashader? Check out this 10 | `quick video introduction to what it does and how it works `_! 11 | 12 | Datashader is a graphics pipeline system for creating meaningful representations of large 13 | datasets quickly and flexibly. Datashader breaks the creation of images into a series of explicit 14 | steps that allow computations to be done on intermediate representations. This approach allows 15 | accurate and effective visualizations to be produced automatically without trial-and-error parameter 16 | tuning, and also makes it simple for data scientists to focus on particular data and relationships 17 | of interest in a principled way. 18 | 19 | The computation-intensive steps in this process are written in ordinary Python 20 | but transparently compiled to machine code using `Numba `_ and flexibly 21 | distributed across CPU cores and processors using `Dask `_ or GPUs 22 | using `CUDA `_. This approach provides a highly optimized 23 | rendering pipeline that makes it practical to work with extremely large datasets even on standard 24 | hardware, while exploiting distributed and GPU systems when available. 25 | 26 | .. raw:: html 27 | 28 |
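As a concrete sketch of those explicit steps (illustrative only; it assumes nothing beyond a pandas DataFrame with ``x`` and ``y`` columns), the core pipeline is: define a canvas, aggregate the data onto a grid, then shade the aggregate into an image::

    import numpy as np
    import pandas as pd
    import datashader as ds
    import datashader.transfer_functions as tf

    df = pd.DataFrame({'x': np.random.standard_normal(100000),
                       'y': np.random.standard_normal(100000)})

    canvas = ds.Canvas(plot_width=300, plot_height=300)  # raster to aggregate into
    agg = canvas.points(df, 'x', 'y', ds.count())        # aggregate: points -> counts
    img = tf.shade(agg, how='log')                       # colormap: counts -> image

Each intermediate result (here ``agg``) is an ordinary xarray object, which is what makes computations on intermediate representations possible.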
29 | 30 | .. notebook:: datashader ../examples/index.ipynb 31 | :offset: 0 32 | :disable_interactivity_warning: 33 | 34 | .. toctree:: 35 | :hidden: 36 | :maxdepth: 3 37 | 38 | Introduction 39 | Getting Started 40 | User Guide 41 | Examples 42 | Releases 43 | API 44 | FAQ 45 | About 46 | -------------------------------------------------------------------------------- /doc/reduction.csv: -------------------------------------------------------------------------------- 1 | , CPU, CPU + Dask, GPU, GPU + Dask, Antialiasing, Within :class:`~datashader.reductions.where` 2 | :class:`~datashader.reductions.any`, yes, yes, yes, yes, yes, 3 | :class:`~datashader.reductions.by`, yes, yes, yes, yes, yes, 4 | :class:`~datashader.reductions.count`, yes, yes, yes, yes, yes, 5 | :class:`~datashader.reductions.first`, yes, yes, yes, yes, yes, yes 6 | :class:`~datashader.reductions.first_n`, yes, yes, yes, yes, yes, yes 7 | :class:`~datashader.reductions.last`, yes, yes, yes, yes, yes, yes 8 | :class:`~datashader.reductions.last_n`, yes, yes, yes, yes, yes, yes 9 | :class:`~datashader.reductions.max`, yes, yes, yes, yes, yes, yes 10 | :class:`~datashader.reductions.max_n`, yes, yes, yes, yes, yes, yes 11 | :class:`~datashader.reductions.mean`, yes, yes, yes, yes, yes, 12 | :class:`~datashader.reductions.min`, yes, yes, yes, yes, yes, yes 13 | :class:`~datashader.reductions.min_n`, yes, yes, yes, yes, yes, yes 14 | :class:`~datashader.reductions.std`, yes, yes, yes, yes, , 15 | :class:`~datashader.reductions.sum`, yes, yes, yes, yes, yes, 16 | :class:`~datashader.reductions.var`, yes, yes, yes, yes, , 17 | -------------------------------------------------------------------------------- /doc/releases.rst: -------------------------------------------------------------------------------- 1 | Releases 2 | ======== 3 | 4 | .. include:: ../CHANGELOG.rst 5 | -------------------------------------------------------------------------------- /doc/user_guide/index.rst: -------------------------------------------------------------------------------- 1 | ********** 2 | User Guide 3 | ********** 4 | 5 | 6 | The User Guide explains key concepts in detail. 7 | 8 | New users may prefer to start with the introduction in our `Getting Started <../getting_started/index.html>`_. guide. 9 | 10 | To see examples of what can be done with Datashader, see `Topics `_. 11 | 12 | Contents: 13 | 14 | `1. Plotting Pitfalls `_ 15 | Explains how Datashader avoids pitfalls encountered when plotting big datasets using techniques designed for small ones. 16 | 17 | `2. Points `_ 18 | Plotting x,y locations and scatterplots. 19 | 20 | `3. Timeseries `_ 21 | Plotting timeseries and other curves. 22 | 23 | `4. Trajectories `_ 24 | Plotting trajectories (e.g. connected GPS points) in a plane. 25 | 26 | `5. Grids `_ 27 | Plotting 2D-gridded data (rasters and quadmeshes). 28 | 29 | `6. Trimesh `_ 30 | Plotting irregular triangular grids (trimeshes). 31 | 32 | `7. Networks `_ 33 | Plotting network graphs 34 | 35 | `8. Polygons `_ 36 | Using Datashader for geographic and other spatial applications. 37 | 38 | `9. Extending `_ 39 | Extending datashader with new components and functionality. 40 | 41 | `10. Performance `_ 42 | Hints for getting good performance out of Datashader in your applications. 43 | 44 | `11. Geography `_ 45 | Pointers to using Datashader for geographic and other spatial applications. 46 | 47 | `12. Inspection Reductions `_ 48 | Using reduction to inspect rather than aggregate data. 49 | 50 | `13. 
GeoPandas `_ 51 | GeoPandas support in Datashader. 52 | 53 | .. toctree:: 54 | :hidden: 55 | :maxdepth: 3 56 | 57 | Plotting Pitfalls 58 | Points 59 | Timeseries 60 | Trajectories 61 | Grids 62 | Trimesh 63 | Networks 64 | Polygons 65 | Extending 66 | Performance 67 | Geography 68 | Inspection Reductions 69 | GeoPandas 70 | -------------------------------------------------------------------------------- /examples/README.md: -------------------------------------------------------------------------------- 1 | # Datashader Examples 2 | 3 | The best way to understand how Datashader works is to try out our 4 | extensive set of examples. [Datashader.org](https://datashader.org) 5 | includes static versions of the 6 | [getting started guide](https://datashader.org/getting_started), 7 | [user manual](https://datashader.org/user_guide), and 8 | [topic examples](https://datashader.org/topics), but for the full 9 | experience with dynamic updating you will need to install them on a 10 | live server. 11 | 12 | These instructions assume you are using 13 | [conda](https://conda.io/docs/install/quick.html), but they can be 14 | adapted as needed to use [pip](https://pip.pypa.io/en/stable/installing/) 15 | and [virtualenv](https://virtualenv.pypa.io) if desired. 16 | 17 | To get started, first go to your home directory and 18 | download the current list of everything needed for the examples: 19 | 20 | - Download the [conda ds environment file](https://raw.githubusercontent.com/holoviz/datashader/main/examples/environment.yml) and save it as `environment.yml`. 21 | 22 | Then run the following commands in your terminal (command) prompt, from wherever you saved `environment.yml`: 23 | 24 | ```bash 25 | 1. conda env create --file environment.yml 26 | 2. conda activate ds 27 | 3. datashader examples 28 | 4. cd datashader-examples 29 | ``` 30 | 31 | Step 1 will read `environment.yml`, create a new Conda environment 32 | named `ds`, and install all of the libraries needed into that environment 33 | (including datashader itself). It will use Python 3.6 by default, but 34 | you can edit that file to specify a different Python version if you 35 | prefer (which may require changing some of the dependencies in some 36 | cases). 37 | 38 | Step 2 will activate the `ds` environment, using it for all subsequent 39 | commands. You will need to re-run step 2 after closing your terminal or 40 | rebooting your machine, if you want to use anything in the `ds` environment. 41 | For older versions of conda, you may instead need to do `source activate ds` 42 | (Mac/Linux) or `activate ds` (Windows). 43 | 44 | Step 3 will copy the datashader examples from wherever Conda placed 45 | them into a subdirectory `datashader-examples`, and will then download 46 | the sample data required for the examples. (`datashader examples` is 47 | a shorthand for `datashader copy-examples --path datashader-examples 48 | && datashader fetch-data --path datashader-examples`.) 49 | 50 | The total download size is currently about 4GB to transfer, requiring 51 | about 10GB on disk when unpacked, which can take some time depending on 52 | the speed of your connection. The files involved are specified in the 53 | text file `datasets.yml` in the `datashader-examples` directory, and 54 | you are welcome to edit that file or to download the individual files 55 | specified therein manually if you prefer, as long as you put them into 56 | a subdirectory `data/` so the examples can find them.
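If you do download files by hand, here is a minimal sketch for just the NYC taxi
dataset, using the URL listed in `datasets.yml` (and assuming `curl` and `unzip`
are available on your system):

```bash
mkdir -p datashader-examples/data
cd datashader-examples/data
curl -LO http://s3.amazonaws.com/datashader-data/nyc_taxi.zip
unzip nyc_taxi.zip   # extracts nyc_taxi.csv where the examples expect it
```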
Once these 57 | steps have completed, you will be ready to run any of the examples 58 | listed on [datashader.org](https://datashader.org). 59 | 60 | 61 | ## Notebooks 62 | 63 | Most of the examples are in the form of runnable Jupyter 64 | notebooks. Once you have obtained the notebooks and the data they 65 | require, you can run them on your own system using Jupyter: 66 | 67 | ``` 68 | cd datashader-examples 69 | jupyter notebook 70 | ``` 71 | 72 | If you want the generated notebooks to work without an internet connection or 73 | with an unreliable connection (e.g. if you see `Loading BokehJS ...` but never 74 | `BokehJS successfully loaded`), then restart the Jupyter notebook server using: 75 | 76 | ``` 77 | BOKEH_RESOURCES=inline jupyter notebook --NotebookApp.iopub_data_rate_limit=100000000 78 | ``` 79 | 80 | See `dashboard.ipynb` in this directory for a Datashader dashboard for viewing data. 81 | -------------------------------------------------------------------------------- /examples/assets/images/airport_connections.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/examples/assets/images/airport_connections.png -------------------------------------------------------------------------------- /examples/assets/images/chesapeake_farout.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/examples/assets/images/chesapeake_farout.png -------------------------------------------------------------------------------- /examples/assets/images/chesbay_detail.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/examples/assets/images/chesbay_detail.png -------------------------------------------------------------------------------- /examples/assets/images/dashboard.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/examples/assets/images/dashboard.png -------------------------------------------------------------------------------- /examples/assets/images/ds_hv_bokeh.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/examples/assets/images/ds_hv_bokeh.png -------------------------------------------------------------------------------- /examples/assets/images/ds_hv_bokeh2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/examples/assets/images/ds_hv_bokeh2.png -------------------------------------------------------------------------------- /examples/assets/images/fire.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/examples/assets/images/fire.png -------------------------------------------------------------------------------- /examples/assets/images/hot.png: --------------------------------------------------------------------------------
https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/examples/assets/images/hot.png -------------------------------------------------------------------------------- /examples/assets/images/houston_district29.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/examples/assets/images/houston_district29.png -------------------------------------------------------------------------------- /examples/assets/images/jet.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/examples/assets/images/jet.png -------------------------------------------------------------------------------- /examples/assets/images/landsat.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/examples/assets/images/landsat.png -------------------------------------------------------------------------------- /examples/assets/images/nyc_buildings.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/examples/assets/images/nyc_buildings.png -------------------------------------------------------------------------------- /examples/assets/images/nyc_pickups_vs_dropoffs.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/examples/assets/images/nyc_pickups_vs_dropoffs.jpg -------------------------------------------------------------------------------- /examples/assets/images/nyc_races.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/examples/assets/images/nyc_races.jpg -------------------------------------------------------------------------------- /examples/assets/images/nyc_taxi-paramnb.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/examples/assets/images/nyc_taxi-paramnb.png -------------------------------------------------------------------------------- /examples/assets/images/nyc_taxi_100k.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/examples/assets/images/nyc_taxi_100k.png -------------------------------------------------------------------------------- /examples/assets/images/parambokeh.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/examples/assets/images/parambokeh.png -------------------------------------------------------------------------------- /examples/assets/images/pcap.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/examples/assets/images/pcap.png 
-------------------------------------------------------------------------------- /examples/assets/images/pipeline.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/examples/assets/images/pipeline.png -------------------------------------------------------------------------------- /examples/assets/images/pipeline2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/examples/assets/images/pipeline2.png -------------------------------------------------------------------------------- /examples/assets/images/rainbow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/examples/assets/images/rainbow.png -------------------------------------------------------------------------------- /examples/assets/images/rainbow4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/examples/assets/images/rainbow4.png -------------------------------------------------------------------------------- /examples/assets/images/sym_attractors.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/examples/assets/images/sym_attractors.jpg -------------------------------------------------------------------------------- /examples/assets/images/uk_researchers.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/examples/assets/images/uk_researchers.png -------------------------------------------------------------------------------- /examples/assets/images/usa_census.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/holoviz/datashader/82a57c1351b67cbe574df082031665c962139c55/examples/assets/images/usa_census.jpg -------------------------------------------------------------------------------- /examples/conftest.py: -------------------------------------------------------------------------------- 1 | from importlib.util import find_spec 2 | from packaging.version import Version 3 | 4 | collect_ignore_glob = [ 5 | "tiling.ipynb", 6 | ] 7 | 8 | if find_spec("geopandas") is None: 9 | collect_ignore_glob += [ 10 | "user_guide/13_Geopandas.ipynb", 11 | ] 12 | 13 | if find_spec("spatialpandas") is None: 14 | collect_ignore_glob += [ 15 | "user_guide/7_Networks.ipynb", 16 | "user_guide/8_Polygons.ipynb", 17 | ] 18 | 19 | if find_spec("dask") is not None: 20 | import dask 21 | 22 | # Spatialpandas does not support dask-expr, which is 23 | # only available from this version. 
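    # (i.e., with dask>=2025.1 the expr-based DataFrame implementation is
    # always in use, so the spatialpandas-based Polygons notebook cannot
    # run there and is skipped.)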
24 |         if Version(dask.__version__).release >= (2025, 1, 0): 25 |             collect_ignore_glob += [ 26 |                 "user_guide/8_Polygons.ipynb", 27 |             ] 28 | 29 | 30 | def pytest_sessionfinish(session, exitstatus): 31 |     # Can be removed when spatialpandas works with dask-expr 32 | 33 |     from pytest import ExitCode 34 | 35 |     if exitstatus == ExitCode.NO_TESTS_COLLECTED: 36 |         session.exitstatus = ExitCode.OK 37 | -------------------------------------------------------------------------------- /examples/datasets.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | data: 4 | 5 |   - url: http://s3.amazonaws.com/datashader-data/nyc_taxi.zip 6 |     title: 'NYC Taxi Data' 7 |     files: 8 |       - nyc_taxi.csv 9 | -------------------------------------------------------------------------------- /examples/environment.yml: -------------------------------------------------------------------------------- 1 | name: ds 2 | channels: 3 | - conda-forge 4 | 5 | dependencies: 6 | - bokeh 7 | - cartopy 8 | - colorcet 9 | - graphviz 10 | - python-graphviz 11 | - dask 12 | - datashader 13 | - distributed 14 | - fastparquet 15 | - holoviews 16 | - ipython 17 | - jupyter 18 | - matplotlib 19 | - networkx>=2.0 20 | - numba 21 | - numpy 22 | - pandas >=0.24.1 23 | - param 24 | - python-snappy 25 | - python 26 | - rasterio 27 | - requests 28 | - scikit-image 29 | - scipy 30 | - shapely 31 | - snappy 32 | - statsmodels 33 | - streamz 34 | - xarray 35 | - ipympl 36 | -------------------------------------------------------------------------------- /examples/getting_started/index.ipynb: -------------------------------------------------------------------------------- 1 | { 2 |  "cells": [ 3 |   { 4 |    "cell_type": "markdown", 5 |    "metadata": {}, 6 |    "source": [ 7 |     "Detailed Datashader documentation is contained in the [User Guide](../user_guide/index.ipynb), and\n", 8 |     "the [Topics](https://datashader.org/topics/index.html) pages show examples of what you can do with Datashader. But to get started quickly, check out the introductory guide sections in order; it should take around 1 hour in total.\n", 9 |     "\n", 10 |     "* [1. Introduction](1_Introduction.ipynb)\n", 11 |     "   Simple self-contained example to show how Datashader works.\n", 12 |     "\n", 13 |     "* [2. Pipeline](2_Pipeline.ipynb)\n", 14 |     "   Detailed step-by-step explanation of how Datashader turns your data into an image.\n", 15 |     "\n", 16 |     "* [3. Interactivity](3_Interactivity.ipynb)\n", 17 |     "   Embedding images into rich, interactive plots in a web browser."
18 | ] 19 | } 20 | ], 21 | "metadata": { 22 | "language_info": { 23 | "name": "python", 24 | "pygments_lexer": "ipython3" 25 | } 26 | }, 27 | "nbformat": 4, 28 | "nbformat_minor": 2 29 | } 30 | -------------------------------------------------------------------------------- /examples/index.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "For concreteness, here's an example of what Datashader code looks like:" 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "```python\n", 15 | "import datashader as ds, pandas as pd, colorcet\n", 16 | "df = pd.read_csv('census.csv')\n", 17 | "cvs = ds.Canvas(plot_width=850, plot_height=500)\n", 18 | "agg = cvs.points(df, 'longitude', 'latitude')\n", 19 | "img = ds.tf.shade(agg, cmap=colorcet.fire, how='log')\n", 20 | "```" 21 | ] 22 | }, 23 | { 24 | "cell_type": "markdown", 25 | "metadata": {}, 26 | "source": [ 27 | "This code reads a data file into a Pandas dataframe `df`, and then projects the fields `longitude` and `latitude` onto the x and y dimensions of an 850x500 grid, aggregating it by count. The results are rendered into an image where the minimum count will be plotted in black, the maximum in white, and with brighter colors ranging logarithmically in between.\n", 28 | "\n", 29 | "With code just like the above, you can plot 300 million points of data (one\n", 30 | "per person in the USA) from the 2010 census without any parameter tuning:\n", 31 | "\n", 32 | "\"image\"\n", 33 | "\n", 34 | "\"image\"\n", 35 | "\n", 36 | "Or you can plot attractors with 10 million points each, using random colormaps:\n", 37 | "\n", 38 | "\"image\"\n", 39 | "\n", 40 | "See the [topics page](topics) for these and many other examples.\n", 41 | "\n", 42 | "\n", 43 | "\n", 44 | "## Installation\n", 45 | "\n", 46 | "Please follow the instructions on [Getting Started](https://datashader.org/getting_started)\n", 47 | "if you want to reproduce the specific examples on this website, or follow the instructions at [HoloViz.org](https://holoviz.org) if you want to try out Datashader together with related plotting tools.\n", 48 | "\n", 49 | "\n", 50 | "\n", 51 | "## Other resources\n", 52 | "\n", 53 | "You can see Datashader in action in the [2019 HoloViz SciPy tutorial](https://www.youtube.com/watch?v=7deGS4IPAQ0) (3 hours!),\n", 54 | "listen to the [Open Source Directions](https://www.youtube.com/watch?v=6m3CFbKmK_c) episode from July 2019, or\n", 55 | "see how it is used in many of the projects at [examples.pyviz.org](https://examples.pyviz.org).\n", 56 | "\n", 57 | "Some of the original ideas for Datashader were developed under the\n", 58 | "name Abstract Rendering, which is described in a [2014 SPIE VDA paper](https://dx.doi.org/10.1117/12.2041200).\n", 59 | "\n", 60 | "The source code for datashader is maintained on [Github](https://github.com/holoviz/datashader), and\n", 61 | "is documented using the API link on this page.\n", 62 | "\n", 63 | "We recommend the [Getting Started Guide](getting_started) to learn\n", 64 | "the basic concepts and start using Datashader as quickly as possible.\n", 65 | "\n", 66 | "The [User Guide](user_guide) covers specific topics in more detail.\n", 67 | "\n", 68 | "The [API](api.html) is the definitive guide to each part of\n", 69 | "Datashader, but the same information is available more conveniently via\n", 70 | "the `help()` command as needed when using each 
component.\n", 71 | "\n", 72 | "Please feel free to report [issues](https://github.com/holoviz/datashader/issues) or [contribute code](https://help.github.com/articles/about-pull-requests). You are also welcome to chat with the developers on [gitter](https://gitter.im/pyviz/pyviz), but please use the official channels for reporting issues or making feature requests so that they are captured appropriately.\n" 73 | ] 74 | } 75 | ], 76 | "metadata": { 77 | "language_info": { 78 | "name": "python", 79 | "pygments_lexer": "ipython3" 80 | } 81 | }, 82 | "nbformat": 4, 83 | "nbformat_minor": 2 84 | } 85 | -------------------------------------------------------------------------------- /examples/pcap_to_parquet.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | """ 4 | Convert PCAP output to undirected graph and save in Parquet format. 5 | """ 6 | 7 | from __future__ import annotations 8 | 9 | import re 10 | import socket 11 | import struct 12 | import sys 13 | 14 | import fastparquet as fp 15 | import numpy as np 16 | import pandas as pd 17 | 18 | 19 | def ip_to_integer(s): 20 | return struct.unpack("!I", socket.inet_aton(s))[0] 21 | 22 | 23 | def get_ip_protocol(s): 24 | if "tcp" in s: 25 | return "tcp" 26 | if "UDP" in s: 27 | return "udp" 28 | if "EIGRP" in s: 29 | return "eigrp" 30 | if "ICMP" in s: 31 | return "icmp" 32 | return None 33 | 34 | 35 | def to_parquet(filename, prefix="maccdc2012"): 36 | with open(filename) as f: 37 | traffic = {} 38 | nodes = set() 39 | 40 | for line in f.readlines(): 41 | if "unreachable" in line: 42 | continue 43 | fields = line.split() 44 | if not fields: 45 | continue 46 | if fields[1] != "IP": 47 | continue 48 | protocol = get_ip_protocol(line) 49 | if protocol not in ("tcp", "udp", "eigrp", "icmp"): 50 | continue 51 | try: 52 | addresses = [] 53 | 54 | # Extract source IP address and convert to integer 55 | m = re.match(r'(?P
<address>\d+\.\d+\.\d+\.\d+)', fields[2]) 56 |                 if not m: 57 |                     continue 58 |                 addresses.append(ip_to_integer(m.group('address'))) 59 | 60 |                 # Extract target IP address and convert to integer 61 |                 m = re.match(r'(?P<address>
\d+\.\d+\.\d+\.\d+)', fields[4]) 62 | if not m: 63 | continue 64 | addresses.append(ip_to_integer(m.group('address'))) 65 | 66 | nodes = nodes.union(addresses) 67 | src, dst = sorted(addresses) 68 | key = (protocol, src, dst) 69 | 70 | # Extract packet size 71 | nbytes = int(fields[-1]) 72 | 73 | if key in traffic: 74 | traffic[key] += nbytes 75 | else: 76 | traffic[key] = nbytes 77 | except: 78 | pass 79 | 80 | nodes = dict([(node, i) for i, node in enumerate(sorted(nodes))]) 81 | 82 | edges = [] 83 | for key in traffic: 84 | edge = [nodes[key[1]], nodes[key[2]], key[0], traffic[key]] 85 | edges.append(edge) 86 | 87 | nodes_df = pd.DataFrame(np.arange(len(nodes)), columns=['id']) 88 | nodes_df = nodes_df.set_index('id') 89 | 90 | edges_df = pd.DataFrame(np.array(edges), columns=['source', 'target', 'protocol', 'weight']) 91 | edges_df['source'] = pd.to_numeric(edges_df['source']) 92 | edges_df['target'] = pd.to_numeric(edges_df['target']) 93 | edges_df['weight'] = pd.to_numeric(edges_df['weight']) 94 | edges_df['protocol'] = edges_df['protocol'].astype('category') 95 | 96 | fp.write('{}_nodes.parq'.format(prefix), nodes_df) 97 | fp.write('{}_edges.parq'.format(prefix), edges_df) 98 | 99 | if __name__ == '__main__': 100 | if len(sys.argv) > 2: 101 | to_parquet(sys.argv[1], prefix=sys.argv[2]) 102 | else: 103 | to_parquet(sys.argv[1]) 104 | -------------------------------------------------------------------------------- /examples/raster.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | if __name__ == "__main__": 4 | from bokeh.io import curdoc 5 | from bokeh.plotting import Figure 6 | from bokeh.models import ColumnDataSource, CustomJS 7 | from bokeh.tile_providers import get_provider 8 | 9 | import rasterio as rio 10 | import datashader as ds 11 | import datashader.transfer_functions as tf 12 | from datashader.colors import Hot 13 | 14 | def on_dims_change(attr, old, new): 15 | update_image() 16 | 17 | def update_image(): 18 | 19 | global dims, raster_data 20 | 21 | dims_data = dims.data 22 | 23 | if not dims_data['width'] or not dims_data['height']: 24 | return 25 | 26 | xmin = max(dims_data['xmin'][0], raster_data.bounds.left) 27 | ymin = max(dims_data['ymin'][0], raster_data.bounds.bottom) 28 | xmax = min(dims_data['xmax'][0], raster_data.bounds.right) 29 | ymax = min(dims_data['ymax'][0], raster_data.bounds.top) 30 | 31 | canvas = ds.Canvas(plot_width=dims_data['width'][0], 32 | plot_height=dims_data['height'][0], 33 | x_range=(xmin, xmax), 34 | y_range=(ymin, ymax)) 35 | 36 | agg = canvas.raster(raster_data) 37 | img = tf.shade(agg, cmap=Hot, how='linear') 38 | 39 | new_data = {} 40 | new_data['image'] = [img.data] 41 | new_data['x'] = [xmin] 42 | new_data['y'] = [ymin] 43 | new_data['dh'] = [ymax - ymin] 44 | new_data['dw'] = [xmax - xmin] 45 | image_source.stream(new_data, 1) 46 | 47 | # load nyc taxi data 48 | path = './data/projected.tif' 49 | raster_data = rio.open(path) 50 | 51 | # manage client-side dimensions 52 | dims = ColumnDataSource(data=dict(width=[], height=[], xmin=[], xmax=[], ymin=[], ymax=[])) 53 | dims.on_change('data', on_dims_change) 54 | dims_jscode = """ 55 | var update_dims = function () { 56 | var new_data = { 57 | height: [plot.frame.height], 58 | width: [plot.frame.width], 59 | xmin: [plot.x_range.start], 60 | ymin: [plot.y_range.start], 61 | xmax: [plot.x_range.end], 62 | ymax: [plot.y_range.end] 63 | }; 64 | dims.data = new_data; 65 | }; 66 | 67 | if (typeof throttle != 
'undefined' && throttle != null) { 68 |             clearTimeout(throttle); 69 |         } 70 | 71 |         throttle = setTimeout(update_dims, 100, "replace"); 72 |     """ 73 | 74 |     # Create plot ------------------------------- 75 |     xmin = -8240227.037 76 |     ymin = 4974203.152 77 |     xmax = -8231283.905 78 |     ymax = 4979238.441 79 | 80 |     path = './data/projected.tif' 81 | 82 |     fig = Figure(x_range=(xmin, xmax), 83 |                  y_range=(ymin, ymax), 84 |                  plot_height=600, 85 |                  plot_width=900, 86 |                  tools='pan,wheel_zoom') 87 |     fig.background_fill_color = 'black' 88 |     fig.add_tile(get_provider("STAMEN_TONER"), alpha=0) # used to set axis ranges 89 |     fig.x_range.callback = CustomJS(code=dims_jscode, args=dict(plot=fig, dims=dims)) 90 |     fig.y_range.callback = CustomJS(code=dims_jscode, args=dict(plot=fig, dims=dims)) 91 |     fig.axis.visible = False 92 |     fig.grid.grid_line_alpha = 0 93 |     fig.min_border_left = 0 94 |     fig.min_border_right = 0 95 |     fig.min_border_top = 0 96 |     fig.min_border_bottom = 0 97 | 98 |     image_source = ColumnDataSource(dict(image=[], x=[], y=[], dw=[], dh=[])) 99 |     fig.image_rgba(source=image_source, 100 |                    image='image', 101 |                    x='x', 102 |                    y='y', 103 |                    dw='dw', 104 |                    dh='dh', 105 |                    dilate=False) 106 | 107 |     curdoc().add_root(fig) 108 | -------------------------------------------------------------------------------- /examples/taxi_preprocessing_example.py: -------------------------------------------------------------------------------- 1 | """Download data needed for the examples""" 2 | 3 | from __future__ import annotations 4 | 5 | if __name__ == "__main__": 6 | 7 |     from os import path, makedirs, remove 8 |     from download_sample_data import bar as progressbar 9 | 10 |     import pandas as pd 11 |     import numpy as np 12 |     import sys 13 | 14 |     try: 15 |         import requests 16 |     except ImportError: 17 |         print('Download script requires the requests package: conda install requests') 18 |         sys.exit(1) 19 | 20 |     def _download_dataset(url): 21 |         r = requests.get(url, stream=True) 22 |         output_path = path.split(url)[1] 23 |         with open(output_path, 'wb') as f: 24 |             total_length = int(r.headers.get('content-length')) 25 |             for chunk in progressbar(r.iter_content(chunk_size=1024), expected_size=(total_length/1024) + 1): 26 |                 if chunk: 27 |                     f.write(chunk) 28 |                     f.flush() 29 | 30 |     examples_dir = path.dirname(path.realpath(__file__)) 31 |     data_dir = path.join(examples_dir, 'data') 32 |     if not path.exists(data_dir): 33 |         makedirs(data_dir) 34 | 35 |     # Taxi data 36 |     def latlng_to_meters(df, lat_name, lng_name):  # project lat/lon degrees to Web Mercator meters, in place 37 |         lat = df[lat_name] 38 |         lng = df[lng_name] 39 |         origin_shift = 2 * np.pi * 6378137 / 2.0  # half the Earth's equatorial circumference, in meters 40 |         mx = lng * origin_shift / 180.0 41 |         my = np.log(np.tan((90 + lat) * np.pi / 360.0)) / (np.pi / 180.0) 42 |         my = my * origin_shift / 180.0 43 |         df.loc[:, lng_name] = mx 44 |         df.loc[:, lat_name] = my 45 | 46 |     taxi_path = path.join(data_dir, 'nyc_taxi.csv') 47 |     if not path.exists(taxi_path): 48 |         print("Downloading Taxi Data...") 49 |         url = ('https://storage.googleapis.com/tlc-trip-data/2015/' 50 |                'yellow_tripdata_2015-01.csv') 51 | 52 |         _download_dataset(url) 53 |         df = pd.read_csv('yellow_tripdata_2015-01.csv') 54 | 55 |         print('Filtering Taxi Data') 56 |         df = df.loc[(df.pickup_longitude < -73.75) & 57 |                     (df.pickup_longitude > -74.15) & 58 |                     (df.dropoff_longitude < -73.75) & 59 |                     (df.dropoff_longitude > -74.15) & 60 |                     (df.pickup_latitude > 40.68) & 61 |                     (df.pickup_latitude < 40.84) & 62 |                     (df.dropoff_latitude > 40.68) & 63 |                     (df.dropoff_latitude < 40.84)].copy() 64 | 65 |         print('Reprojecting Taxi Data') 66 |         latlng_to_meters(df, 'pickup_latitude', 
'pickup_longitude') 67 |         latlng_to_meters(df, 'dropoff_latitude', 'dropoff_longitude') 68 |         df.rename(columns={'pickup_longitude': 'pickup_x', 'dropoff_longitude': 'dropoff_x', 69 |                            'pickup_latitude': 'pickup_y', 'dropoff_latitude': 'dropoff_y'}, 70 |                   inplace=True) 71 |         df.to_csv(taxi_path, index=False) 72 |         remove('yellow_tripdata_2015-01.csv') 73 | 74 | 75 |     print("\nAll data downloaded.") 76 | -------------------------------------------------------------------------------- /examples/user_guide/11_Geography.ipynb: -------------------------------------------------------------------------------- 1 | { 2 |  "cells": [ 3 |   { 4 |    "cell_type": "markdown", 5 |    "metadata": {}, 6 |    "source": [ 7 |     "Datashader is a general-purpose tool for rasterizing (and re-rasterizing) data of many different types. To make it easier to apply this general functionality to the particular domain of geoscience, there are some geospatial-specific utilities that can be used:\n", 8 |     "\n", 9 |     "* [Surface toolset](https://github.com/makepath/xarray-spatial/blob/master/examples/user_guide/1_Surface.ipynb)\n", 10 |     "* [Proximity (Distance) toolset](https://github.com/makepath/xarray-spatial/blob/master/examples/user_guide/2_Proximity.ipynb)\n", 11 |     "* [Zonal Statistics](https://github.com/makepath/xarray-spatial/blob/master/examples/user_guide/3_Zonal.ipynb)\n", 12 |     "* [Classification toolset](https://github.com/makepath/xarray-spatial/blob/master/examples/user_guide/5_Classification.ipynb)\n", 13 |     "* [Remote Sensing](https://github.com/makepath/xarray-spatial/blob/master/examples/user_guide/6_Remote_Sensing.ipynb)\n", 14 |     "* [Pathfinding](https://github.com/makepath/xarray-spatial/blob/master/examples/user_guide/7_Pathfinding.ipynb)\n", 15 |     "\n", 16 |     "This functionality is provided in the [xarray-spatial](https://github.com/makepath/xarray-spatial) library.\n", 17 |     "You can check out its [example notebooks](https://github.com/makepath/xarray-spatial/tree/master/examples/user_guide) to see how to use the functions.\n", 18 |     "\n", 19 |     "See also [GeoViews](https://geoviews.org), which is designed to work with Datashader to provide a large range of additional geospatial functionality." 20 |    ] 21 |   } 22 |  ], 23 |  "metadata": { 24 |   "language_info": { 25 |    "name": "python", 26 |    "pygments_lexer": "ipython3" 27 |   } 28 |  }, 29 |  "nbformat": 4, 30 |  "nbformat_minor": 4 31 | } 32 | -------------------------------------------------------------------------------- /examples/user_guide/2_Points.ipynb: -------------------------------------------------------------------------------- 1 | { 2 |  "cells": [ 3 |   { 4 |    "cell_type": "markdown", 5 |    "metadata": {}, 6 |    "source": [ 7 |     "*This notebook is under construction; please see the [pipeline](../getting_started/2_Pipeline.ipynb) and [nyc_taxi](https://examples.pyviz.org/nyc_taxi/nyc_taxi.html) notebooks for extensive examples of working with points. For now, this section only includes information about spatially indexed datasets.*" 8 |    ] 9 |   }, 10 |   { 11 |    "cell_type": "markdown", 12 |    "metadata": {}, 13 |    "source": [ 14 |     "# Spatial indexing\n", 15 |     "\n", 16 |     "In most cases, Datashader must iterate through your entire dataset to render any plot, because it cannot assume the datapoints have been sorted in any particular order. Thus, the aggregation performance is dependent on the number of datapoints in your entire dataframe, not just those in the current viewport (x and y range). If you have a large dataset covering a wide area and you want to support fast local operations (e.g. 
if you have data at a global level but analysis is typically done in small local regions), you may want to use [spatialpandas](https://github.com/holoviz/spatialpandas), which is designed to work with Datashader and supports spatial indexing using [Hilbert curves](https://en.wikipedia.org/wiki/Hilbert_curve). Please see the [spatialpandas Overview](https://github.com/holoviz/spatialpandas/blob/main/examples/Overview.ipynb) notebook.\n", 17 | "\n" 18 | ] 19 | } 20 | ], 21 | "metadata": { 22 | "language_info": { 23 | "name": "python", 24 | "pygments_lexer": "ipython3" 25 | } 26 | }, 27 | "nbformat": 4, 28 | "nbformat_minor": 2 29 | } 30 | -------------------------------------------------------------------------------- /pixi.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "datashader" 3 | channels = ["pyviz/label/dev", "conda-forge"] 4 | platforms = ["linux-64", "osx-arm64", "osx-64", "win-64"] 5 | 6 | [tasks] 7 | download-data = "python scripts/download_data.py" 8 | install = "python -m pip install --no-deps --disable-pip-version-check -e ." 9 | 10 | [activation.env] 11 | PYTHONIOENCODING = "utf-8" 12 | 13 | [environments] 14 | test-310 = ["py310", "test-core", "test", "example", "test-example", "test-unit-task"] 15 | test-311 = ["py311", "test-core", "test", "example", "test-example", "test-unit-task"] 16 | test-312 = ["py312", "test-core", "test", "example", "test-example", "test-unit-task"] 17 | test-313 = ["py313", "test-core", "test", "example", "test-example", "test-unit-task"] 18 | test-core = ["py313", "test-core", "test-unit-task"] 19 | test-gpu = ["py312", "test-core", "test-gpu"] 20 | docs = ["py311", "example", "doc"] 21 | build = ["py311", "build"] 22 | lint = ["py311", "lint"] 23 | 24 | [dependencies] 25 | numba = "*" 26 | colorcet = "*" 27 | multipledispatch = "*" 28 | numpy = "*" 29 | pandas = "*" 30 | param = "*" 31 | pip = "*" 32 | pyct = "*" 33 | requests = "*" 34 | scipy = "*" 35 | setuptools = "*" # distutils for pyct 36 | toolz = "*" 37 | xarray = "*" 38 | 39 | [feature.py310.dependencies] 40 | python = "3.10.*" 41 | 42 | [feature.py311.dependencies] 43 | python = "3.11.*" 44 | 45 | [feature.py312.dependencies] 46 | python = "3.12.*" 47 | 48 | [feature.py312.activation.env] 49 | COVERAGE_CORE = "sysmon" 50 | 51 | [feature.py313.dependencies] 52 | python = "3.13.*" 53 | 54 | [feature.py313.activation.env] 55 | COVERAGE_CORE = "sysmon" 56 | 57 | [feature.example.dependencies] 58 | bokeh = ">3.1" 59 | bokeh_sampledata = "*" 60 | dask-core = "*" 61 | dask-geopandas = "*" 62 | fastparquet = "*" 63 | geodatasets = "*" 64 | geopandas-base = "*" 65 | graphviz = "*" 66 | holoviews = "*" 67 | matplotlib-base = ">=3.3" 68 | networkx = "*" 69 | panel = ">1.1" 70 | pillow = "*" 71 | pyogrio = "*" 72 | pyproj = "*" 73 | python-graphviz = "*" 74 | python-snappy = "*" 75 | rasterio = "*" 76 | scikit-image = "*" 77 | shapely = ">=2.0.0" 78 | spatialpandas = "*" 79 | streamz = "*" 80 | 81 | # ============================================= 82 | # =================== TESTS =================== 83 | # ============================================= 84 | [feature.test-core.dependencies] 85 | psutil = "*" 86 | pytest = "*" 87 | pytest-benchmark = "*" 88 | pytest-cov = "*" 89 | pytest-github-actions-annotate-failures = "*" 90 | pytest-xdist = "*" 91 | 92 | [feature.test-unit-task.tasks] # So it is not showing up in the test-gpu environment 93 | test-unit = 'pytest datashader -n logical --dist loadgroup --benchmark-skip' 94 | 
test-unit-nojit = { cmd = 'pytest datashader -k "not test_tiles" -n logical --dist loadgroup --benchmark-skip', env = { NUMBA_DISABLE_JIT = '1' } } 95 | test-benchmark = 'pytest datashader/tests --benchmark' 96 | 97 | [feature.test.dependencies] 98 | bokeh_sampledata = "*" 99 | dask-core = "*" 100 | dask-geopandas = "*" 101 | geodatasets = "*" 102 | geopandas-base = "*" 103 | netcdf4 = "*" 104 | pyarrow = "*" 105 | pillow = "*" 106 | pyogrio = "*" 107 | rasterio = "*" 108 | rioxarray = "*" 109 | scikit-image = "*" 110 | shapely = ">=2.0.0" 111 | spatialpandas = "*" 112 | 113 | [feature.test-example.dependencies] 114 | nbval = "*" 115 | 116 | [feature.test-example.tasks] 117 | test-example = 'pytest -n logical --dist loadscope --nbval-lax examples --benchmark-skip' 118 | 119 | [feature.test-gpu] 120 | channels = ["rapidsai"] 121 | platforms = ["linux-64"] 122 | 123 | [feature.test-gpu.activation.env] 124 | NUMBA_CUDA_LOW_OCCUPANCY_WARNINGS = '0' 125 | 126 | [feature.test-gpu.dependencies] 127 | cuda-version = "12.2.*" 128 | cudf = "25.04.*" 129 | cupy = "*" 130 | dask-cudf = "*" 131 | dask-expr = "*" 132 | librmm = { version = "*", channel = "rapidsai" } 133 | rmm = { version = "*", channel = "rapidsai" } 134 | 135 | [feature.test-gpu.tasks] 136 | test-gpu = "python -m pytest datashader/tests -n logical --dist loadgroup --gpu --benchmark-skip" 137 | test-benchmark = 'pytest datashader/tests --benchmark --gpu' 138 | 139 | # ============================================= 140 | # =================== DOCS ==================== 141 | # ============================================= 142 | [feature.doc.dependencies] 143 | nbsite = ">=0.8.4,<0.9.0" 144 | numpydoc = "*" 145 | sphinxcontrib-mermaid = "*" 146 | sphinx-reredirects = "*" 147 | 148 | [feature.doc.tasks] 149 | _docs-generate-rst = 'nbsite generate-rst --org holoviz --project-name datashader' 150 | _docs-generate = 'nbsite build --what=html --output=builtdocs --org holoviz --project-name datashader' 151 | 152 | [feature.doc.tasks.docs-build] 153 | depends-on = ['_docs-generate-rst', '_docs-generate'] 154 | 155 | # ============================================= 156 | # ================== BUILD ==================== 157 | # ============================================= 158 | [feature.build.dependencies] 159 | python-build = "*" 160 | conda-build = "*" 161 | 162 | [feature.build.tasks] 163 | build-conda = 'bash scripts/conda/build.sh' 164 | build-pip = 'python -m build .' 
165 | 166 | [feature.build.activation.env] 167 | MPLBACKEND = "Agg" 168 | 169 | # ============================================= 170 | # =================== LINT ==================== 171 | # ============================================= 172 | [feature.lint.dependencies] 173 | pre-commit = "*" 174 | 175 | [feature.lint.tasks] 176 | lint = 'pre-commit run --all-files' 177 | lint-install = 'pre-commit install' 178 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["hatchling", "hatch-vcs"] 3 | build-backend = "hatchling.build" 4 | 5 | [project] 6 | name = "datashader" 7 | dynamic = ["version"] 8 | description = "Data visualization toolchain based on aggregating into a grid" 9 | readme = "README.md" 10 | license = { text = "New BSD" } 11 | requires-python = ">=3.10" 12 | authors = [{ name = "HoloViz developers", email = "developers@holoviz.org" }] 13 | maintainers = [{ name = "HoloViz developers", email = "developers@holoviz.org" }] 14 | classifiers = [ 15 | "License :: OSI Approved :: BSD License", 16 | "Development Status :: 5 - Production/Stable", 17 | "Programming Language :: Python :: 3", 18 | "Programming Language :: Python :: 3.10", 19 | "Programming Language :: Python :: 3.11", 20 | "Programming Language :: Python :: 3.12", 21 | "Programming Language :: Python :: 3.13", 22 | "Operating System :: OS Independent", 23 | "Intended Audience :: Science/Research", 24 | "Intended Audience :: Developers", 25 | "Natural Language :: English", 26 | "Topic :: Scientific/Engineering", 27 | "Topic :: Scientific/Engineering :: Visualization", 28 | "Topic :: Software Development :: Libraries", 29 | ] 30 | dependencies = [ 31 | 'colorcet', 32 | 'multipledispatch', 33 | 'numba', 34 | 'numpy', 35 | 'pandas', 36 | 'param', 37 | 'pyct', 38 | 'requests', 39 | 'scipy', 40 | 'toolz', 41 | 'packaging', 42 | 'xarray', 43 | ] 44 | 45 | [project.urls] 46 | Homepage = "https://datashader.org" 47 | Source = "https://github.com/holoviz/datashader" 48 | HoloViz = "https://holoviz.org/" 49 | 50 | [project.optional-dependencies] 51 | tests = ["pytest"] 52 | 53 | [project.scripts] 54 | datashader = "datashader.__main:main" 55 | 56 | [tool.hatch.version] 57 | source = "vcs" 58 | raw-options = { version_scheme = "no-guess-dev" } 59 | 60 | [tool.hatch.build.targets.wheel] 61 | include = ["datashader"] 62 | 63 | [tool.hatch.build.targets.sdist] 64 | include = ["datashader", "scripts"] 65 | 66 | [tool.hatch.build.targets.sdist.force-include] 67 | examples = "datashader/examples" 68 | 69 | [tool.hatch.build.hooks.vcs] 70 | version-file = "datashader/_version.py" 71 | 72 | [tool.codespell] 73 | ignore-words-list = "trough,thi,ser" 74 | 75 | [tool.ruff] 76 | line-length = 100 77 | fix = true 78 | 79 | [tool.ruff.lint] 80 | select = [ 81 | "E", 82 | "F", 83 | "NPY", 84 | "UP", 85 | "W", 86 | ] 87 | ignore = [ 88 | "UP038", # isinstance and issubclass uses a |-separated union 89 | # The following should be enabled in the future 90 | "UP030", # format-literals 91 | "UP031", # printf-string-formatting 92 | "UP032", # f-string 93 | ] 94 | 95 | [tool.ruff.lint.per-file-ignores] 96 | "test_mpl_ext.py" = ["E402"] # Module level import not at top of file 97 | 98 | [tool.pytest.ini_options] 99 | addopts = [ 100 | "--pyargs", 101 | "--doctest-modules", 102 | "--doctest-ignore-import-errors", 103 | "--strict-config", 104 | "--strict-markers", 105 | "--color=yes", 106 | ] 107 | 
minversion = "7" 108 | xfail_strict = true 109 | log_cli_level = "INFO" 110 | filterwarnings = [ 111 | "error", 112 | "ignore:Passing a (SingleBlockManager|BlockManager) to (Series|GeoSeries|DataFrame|GeoDataFrame) is deprecated:DeprecationWarning", # https://github.com/holoviz/spatialpandas/issues/137 113 | "ignore:Accessing the underlying geometries through the `.data`:DeprecationWarning:dask_geopandas.core", # https://github.com/geopandas/dask-geopandas/issues/264 114 | # 2024-11 115 | "ignore:numpy.ndarray size changed, may indicate binary incompatibility:RuntimeWarning", # https://github.com/pydata/xarray/issues/7259 116 | "ignore:\\s*Dask dataframe query planning is disabled because dask-expr is not installed:FutureWarning", # https://github.com/holoviz/spatialpandas/issues/146 117 | "ignore:The legacy Dask DataFrame implementation is deprecated:FutureWarning", # https://github.com/holoviz/spatialpandas/issues/146 118 | # 2025-04 119 | "ignore:The 'shapely.geos' module is deprecated, and will be removed in a future version:DeprecationWarning", 120 | ] 121 | -------------------------------------------------------------------------------- /scripts/conda/build.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -euxo pipefail 4 | 5 | PACKAGE="datashader" 6 | 7 | python -m build --sdist . 8 | 9 | VERSION=$(python -c "import $PACKAGE; print($PACKAGE._version.__version__)") 10 | export VERSION 11 | 12 | conda build scripts/conda/recipe --no-anaconda-upload --no-verify -c conda-forge --package-format 1 13 | 14 | mv "$CONDA_PREFIX/conda-bld/noarch/$PACKAGE-$VERSION-py_0.tar.bz2" dist 15 | -------------------------------------------------------------------------------- /scripts/conda/recipe/meta.yaml: -------------------------------------------------------------------------------- 1 | {% set pyproject = load_file_data('../../../pyproject.toml', from_recipe_dir=True) %} 2 | {% set project = pyproject['project'] %} 3 | 4 | package: 5 | name: {{ project["name"] }} 6 | version: {{ VERSION }} 7 | 8 | source: 9 | url: ../../../dist/{{ project["name"] }}-{{ VERSION }}.tar.gz 10 | 11 | build: 12 | noarch: python 13 | script: {{ PYTHON }} -m pip install --no-deps -vv . 
14 | entry_points: 15 | {% for group,epoints in project.get("entry_points",{}).items() %} 16 | {% for entry_point in epoints %} 17 | - {{ entry_point }} 18 | {% endfor %} 19 | {% endfor %} 20 | 21 | requirements: 22 | build: 23 | - python {{ project['requires-python'] }} 24 | {% for dep in pyproject['build-system']['requires'] %} 25 | - {{ dep }} 26 | {% endfor %} 27 | run: 28 | - python {{ project['requires-python'] }} 29 | {% for dep in project.get('dependencies', []) %} 30 | - {{ dep if dep != 'dask' else 'dask-core'}} 31 | {% endfor %} 32 | 33 | test: 34 | imports: 35 | - {{ project["name"] }} 36 | commands: 37 | - pip check 38 | requires: 39 | - pip 40 | 41 | about: 42 | home: {{ project['urls']['Homepage'] }} 43 | summary: {{ project['description'] }} 44 | license: {{ project['license']['text'] }} 45 | -------------------------------------------------------------------------------- /scripts/download_data.py: -------------------------------------------------------------------------------- 1 | from contextlib import suppress 2 | 3 | import pyct.cmd 4 | from packaging.version import Version 5 | 6 | pyct.cmd.fetch_data(name="data", path="examples", datasets="datasets.yml") 7 | 8 | 9 | with suppress(ImportError): 10 | import bokeh 11 | 12 | # Replaced with bokeh_sampledata in 3.5 13 | if Version(bokeh.__version__) < Version("3.5"): 14 | import bokeh.sampledata 15 | 16 | bokeh.sampledata.download() 17 | 18 | 19 | with suppress(ImportError): 20 | import geodatasets as gds 21 | 22 | gds.get_path("geoda.natregimes") 23 | gds.get_path("nybb") 24 | gds.get_path('geoda health') 25 | -------------------------------------------------------------------------------- /scripts/filetimes/filetimes.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # Usage: 4 | # conda env create -f filetimes.yml 5 | # source activate filetimes 6 | # mkdir times 7 | # python -c "import filetimes as ft ; ft.p.base='census' ; ft.p.x='easting' ; ft.p.y='northing' ; ft.p.categories=['race']; ft.DD_FORCE_LOAD=True; ft.DEBUG=True; ft.timed_write('data/tinycensus.csv',dftype='pandas',fsize='double')" 8 | # # (dftype can also be 'dask', fsize can also be 'single') 9 | # ./filetimes.sh times/tinycensus 10 | # # (add a second argument to filetimes.sh to set the caching mode) 11 | # # (add a third argument to filetimes.sh to set the ft.DEBUG variable) 12 | # 13 | # More examples of filetimes.sh: 14 | # 1) Use no caching, but enable DEBUG messages: 15 | # ./filetimes.sh times/tinycensus '' debug 16 | # 2) Use "persist" caching mode: 17 | # ./filetimes.sh times/tinycensus persist 18 | # 3) Use "cachey" caching mode (force-loads dask dataframes), enable DEBUG messages: 19 | # ./filetimes.sh times/tinycensus cachey debug 20 | 21 | timer=/usr/bin/time 22 | timer="" # External timing disabled to avoid unhelpful "Command terminated abnormally" messages 23 | 24 | # Display each command if a third argument is provided 25 | test -n "$3" && set -x 26 | 27 | ${timer} python filetimes.py ${1}.parq dask census easting northing race ${3:+--debug} ${2:+--cache=$2} 28 | ${timer} python filetimes.py ${1}.snappy.parq dask census easting northing race ${3:+--debug} ${2:+--cache=$2} 29 | ${timer} python filetimes.py ${1}.gz.parq dask census easting northing race ${3:+--debug} ${2:+--cache=$2} 30 | ${timer} python filetimes.py ${1}.h5 dask census easting northing race ${3:+--debug} ${2:+--cache=$2} 31 | ${timer} python filetimes.py ${1}.csv dask census easting northing race ${3:+--debug} 
${2:+--cache=$2} 32 | ${timer} python filetimes.py ${1}.feather dask census easting northing race ${3:+--debug} ${2:+--cache=$2} 33 | 34 | ${timer} python filetimes.py ${1}.parq pandas census easting northing race ${3:+--debug} ${2:+--cache=$2} 35 | ${timer} python filetimes.py ${1}.snappy.parq pandas census easting northing race ${3:+--debug} ${2:+--cache=$2} 36 | ${timer} python filetimes.py ${1}.gz.parq pandas census easting northing race ${3:+--debug} ${2:+--cache=$2} 37 | ${timer} python filetimes.py ${1}.h5 pandas census easting northing race ${3:+--debug} ${2:+--cache=$2} 38 | ${timer} python filetimes.py ${1}.csv pandas census easting northing race ${3:+--debug} ${2:+--cache=$2} 39 | ${timer} python filetimes.py ${1}.feather pandas census easting northing race ${3:+--debug} ${2:+--cache=$2} 40 | -------------------------------------------------------------------------------- /scripts/filetimes/filetimes.yml: -------------------------------------------------------------------------------- 1 | name: filetimes 2 | dependencies: 3 | - bokeh 4 | - matplotlib 5 | - jupyter 6 | - bokeh::datashader=0.4.0=py35_0 7 | - conda-forge::feather-format=0.3.1=py35_1 8 | - dask=0.14.3=py35_0 9 | - numba::numba=0.33.0=np112py35_0 10 | - numexpr=2.6.2=np112py35_0 11 | - numpy=1.12.1=py35_0 12 | - pandas=0.19.2=np112py35_1 13 | - pytest 14 | - python=3.5.2=0 15 | - conda-forge::python-snappy=0.5.1=py35_0 16 | - snappy=1.1.4=1 17 | - conda-forge::fastparquet=0.0.6=py35_1 18 | - bccp::cachey==0.1.1 19 | - bloscpack==0.10.0 20 | - blosc==1.9.2 21 | - pytables==3.4.2 22 | - pip: 23 | - castra==0.1.7 24 | --------------------------------------------------------------------------------