├── .github
├── FUNDING.yml
├── ISSUE_TEMPLATE
│ └── bug_report.md
└── workflows
│ ├── codeql.yml
│ ├── deploy-mkdocs.yml
│ ├── lint.yml
│ └── test.yml
├── .gitignore
├── CHANGELOG.md
├── CONTRIBUTING.md
├── LICENSE
├── Makefile
├── README.md
├── examples
├── README.md
├── basic_example.ipynb
├── dash_apps
│ ├── 01_minimal_global.py
│ ├── 02_minimal_cache.py
│ ├── 03_minimal_cache_dynamic.py
│ ├── 04_minimal_cache_overview.py
│ ├── 05_cache_overview_subplots.py
│ ├── 06_cache_overview_range_buttons.py
│ ├── 11_sine_generator.py
│ ├── 12_file_selector.py
│ ├── 13_coarse_fine.py
│ └── utils
│ │ ├── callback_helpers.py
│ │ └── graph_construction.py
├── data
│ ├── df_gusb.parquet
│ ├── df_pc_test.parquet
│ └── processed_gsr.parquet
├── datashader.ipynb
├── example_utils
│ └── loglttb.py
├── figurewidget_example.ipynb
├── helper.py
├── other_apps
│ └── streamlit_app.py
├── other_examples.ipynb
└── requirements.txt
├── mkdocs.yml
├── mkdocs
├── FAQ.md
├── dash_app_integration.md
├── gen_ref_pages.py
├── getting_started.md
├── index.md
└── static
│ ├── annotate_twitter.gif
│ ├── basic_example.gif
│ ├── basic_example_overview.gif
│ ├── datashader.png
│ ├── figurewidget.gif
│ ├── icon.png
│ ├── logo.png
│ ├── logo.svg
│ ├── minmax_operator.png
│ ├── skin_conductance_interleave_gaps_false.png
│ └── skin_conductance_interleave_gaps_true.png
├── plotly_resampler
├── __init__.py
├── aggregation
│ ├── __init__.py
│ ├── aggregation_interface.py
│ ├── aggregators.py
│ ├── gap_handler_interface.py
│ ├── gap_handlers.py
│ └── plotly_aggregator_parser.py
├── figure_resampler
│ ├── __init__.py
│ ├── assets
│ │ └── coarse_fine.js
│ ├── figure_resampler.py
│ ├── figure_resampler_interface.py
│ ├── figurewidget_resampler.py
│ ├── jupyter_dash_persistent_inline_output.py
│ └── utils.py
└── registering.py
├── poetry.lock
├── pyproject.toml
└── tests
├── __init__.py
├── conftest.py
├── fr_selenium.py
├── test_aggregators.py
├── test_composability.py
├── test_figure_resampler.py
├── test_figure_resampler_selenium.py
├── test_figurewidget_resampler.py
├── test_multiple_axes.py
├── test_plotly_express.py
├── test_rangeslider.py
├── test_registering.py
├── test_serialization.py
├── test_utils.py
└── utils.py
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | # These are supported funding model platforms
2 |
3 | github: [jonasvdd, jvdd]
4 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Help us improve this toolkit by reporting a bug!
4 | title: "[BUG]"
5 | labels: bug
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Describe the bug** :crayon:
11 | >A clear and concise description of what the bug is.
12 |
13 | **Reproducing the bug** :mag:
14 | > Please provide steps & minimal viable code to reproduce the behavior.
15 | > Giving this information makes it tremendously easier to work on your issue!
16 |
17 | **Expected behavior** :wrench:
18 | > Please give a clear and concise description of what you expected to happen.
19 |
20 | **Screenshots** :camera_flash:
21 | > If applicable, add screenshots to help explain your problem.
22 |
23 | **Environment information**: (please complete the following information)
24 | - OS:
25 | - Python environment:
26 | - Python version:
27 | - plotly-resampler environment: e.g.: Jupyter(lab), Dash web app (which browser)
28 | - plotly-resampler version:
29 |
30 |
31 | **Additional context**
32 | Add any other context about the problem here.
33 |
--------------------------------------------------------------------------------
/.github/workflows/codeql.yml:
--------------------------------------------------------------------------------
1 | name: "CodeQL"
2 |
3 | on:
4 | push:
5 | branches: [ "main" ]
6 | pull_request:
7 | branches: [ "main" ]
8 | schedule:
9 | - cron: '00 00 * * 1'
10 |
11 | jobs:
12 | analyze:
13 | name: Analyze
14 | # Runner size impacts CodeQL analysis time. To learn more, please see:
15 | # - https://gh.io/recommended-hardware-resources-for-running-codeql
16 | # - https://gh.io/supported-runners-and-hardware-resources
17 | # - https://gh.io/using-larger-runners
18 | # Consider using larger runners for possible analysis time improvements.
19 | runs-on: 'ubuntu-latest'
20 | timeout-minutes: 360
21 | permissions:
22 | actions: read
23 | contents: read
24 | security-events: write
25 |
26 | strategy:
27 | fail-fast: false
28 | matrix:
29 | language: [ 'python' ]
30 | # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby', 'swift' ]
31 | # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
32 |
33 | steps:
34 | - name: Checkout repository
35 | uses: actions/checkout@v3
36 |
37 | - name: Initialize CodeQL
38 | uses: github/codeql-action/init@v2
39 | with:
40 | languages: ${{ matrix.language }}
41 | # If you wish to specify custom queries, you can do so here or in a config file.
42 | # By default, queries listed here will override any specified in a config file.
43 | # Prefix the list here with "+" to use these queries and those in the config file.
44 |
45 | # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
46 | # queries: security-extended,security-and-quality
47 |
48 | - name: Perform CodeQL Analysis
49 | uses: github/codeql-action/analyze@v2
50 | with:
51 | category: "/language:${{matrix.language}}"
52 |
--------------------------------------------------------------------------------
/.github/workflows/deploy-mkdocs.yml:
--------------------------------------------------------------------------------
1 | name: Deploy docs
2 |
3 | on:
4 | push:
5 | tags:
6 | - "*"
7 |
8 | jobs:
9 | deploy:
10 | name: Deploy
11 | runs-on: ubuntu-latest
12 |
13 | steps:
14 | - name: Checkout repo
15 | uses: actions/checkout@v3
16 |
17 | - name: Set up Python
18 | uses: actions/setup-python@v4
19 | with:
20 | python-version: 3.8
21 |
22 | - name: Install Poetry
23 | uses: snok/install-poetry@v1
24 |
25 | - name: Cache Poetry
26 | id: cached-poetry-dependencies
27 | uses: actions/cache@v3
28 | with:
29 | path: ~/.cache/pypoetry/virtualenvs
30 | key: ${{ runner.os }}-poetry-${{ hashFiles('poetry.lock') }}-python-3.8
31 |
32 | - run: poetry --version
33 |
34 | - name: Install dependencies
35 | run: poetry install
36 |
37 | - name: fetch from gh-pages
38 | run: git fetch origin gh-pages --depth=1
39 |
40 | - name: Configure Git user
41 | run: |
42 | git config --local user.email "github-actions[bot]@users.noreply.github.com"
43 | git config --local user.name "github-actions[bot]"
44 |
45 | - name: Deploy with mike
46 | run: |
47 | poetry run mike deploy -b gh-pages --update-aliases --push "${{ github.ref_name }}" "latest"
48 |
--------------------------------------------------------------------------------
/.github/workflows/lint.yml:
--------------------------------------------------------------------------------
1 | name: Lint
2 |
3 | on:
4 |   push:
5 |     branches: [ main ]
6 |   pull_request:
7 |     branches: [ main ]
8 |
9 | jobs:
10 |   lint:
11 |     runs-on: ubuntu-latest
12 |     steps:
13 |       - uses: actions/checkout@v4
14 |         with:
15 |           lfs: true
16 |       - name: Set up Python 3.10
17 |         uses: actions/setup-python@v4
18 |         with:
19 |           python-version: '3.10'
20 |       - name: Install Poetry
21 |         uses: snok/install-poetry@v1
22 |       - name: Cache poetry
23 |         id: cached-poetry-dependencies
24 |         uses: actions/cache@v3
25 |         with:
26 |           path: ~/.cache/pypoetry/virtualenvs
   |           # FIX: this job defines no `matrix`, so `matrix.python-version`
   |           # always expanded to an empty string, yielding a misleading cache
   |           # key ("...-python-"). Pin the version literally to match the
   |           # setup-python step above.
27 |           key: ${{ runner.os }}-poetry-${{ hashFiles('poetry.lock') }}-python-3.10
28 |       - run: poetry --version
29 |       - name: Install dependencies
30 |         run: poetry install --all-extras
31 |         # Do not use caching (anymore)
32 |         # if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
33 |       - name: Lint
34 |         run: make lint
35 |
--------------------------------------------------------------------------------
/.github/workflows/test.yml:
--------------------------------------------------------------------------------
1 | # This workflow will install Python dependencies, run tests and lint with a variety of Python versions
2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
3 |
4 | name: Unit tests
5 |
6 | on:
7 |   push:
8 |     branches: [ main ]
9 |     paths:
10 |       - "tests/**"
11 |       - "plotly_resampler/**"
12 |       - "poetry.lock"
13 |   pull_request:
14 |     branches: [ main ]
15 |     paths:
16 |       - "tests/**"
17 |       - "plotly_resampler/**"
18 |       - "poetry.lock"
19 |
20 | jobs:
21 |   test:
22 |     runs-on: ${{ matrix.os }}
23 |     strategy:
24 |       fail-fast: false
25 |       matrix:
26 |         os: ['windows-latest', 'macOS-latest', 'ubuntu-latest']
27 |         python-version: ['3.8', '3.9', '3.10', '3.11', '3.12', '3.13']
28 |         exclude:
29 |           - os: ubuntu-latest
30 |             python-version: '3.12'
31 |     defaults:
32 |       run:
33 |         shell: bash
34 |
35 |     steps:
36 |       - uses: actions/checkout@v4
37 |         with:
38 |           lfs: true
39 |       - name: Set up Python ${{ matrix.python-version }}
40 |         uses: actions/setup-python@v4
41 |         with:
42 |           python-version: ${{ matrix.python-version }}
43 |
44 |       - uses: nanasess/setup-chromedriver@v2
45 |         # with:
46 |           # Optional: do not specify to match Chrome's version
47 |           # chromedriver-version: '114.0.5735.90'
48 |
49 |       - name: Install Poetry
50 |         uses: snok/install-poetry@v1
51 |         with:
52 |           version: 1.5.1
53 |       - name: Cache poetry
54 |         id: cached-poetry-dependencies
55 |         uses: actions/cache@v3
56 |         with:
57 |           path: ~/.cache/pypoetry/virtualenvs
58 |           key: ${{ runner.os }}-poetry-${{ hashFiles('poetry.lock') }}-python-${{ matrix.python-version }}
59 |       - run: poetry --version
60 |       - name: Install dependencies
61 |         run: poetry install --all-extras
62 |         # Do not use caching (anymore)
63 |         # if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
64 |
65 |       # - name: Lint with flake8
66 |       #   run: |
67 |       #     # stop the build if there are Python syntax errors or undefined names
68 |       #     flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
69 |       #     # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
70 |       #     flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
71 |       - name: Test with pytest
72 |         run: |
73 |           poetry run pytest --cov=plotly_resampler --junitxml=junit/test-results-${{ matrix.python-version }}.xml --cov-report=xml tests
74 |       - name: Upload pytest test results
75 |         # Use always() to always run this step to publish test results when there are test failures
   |         # FIX: `${{ ... }}` expressions cannot be nested inside one another in
   |         # GitHub Actions; build the artifact file name with format() inside the
   |         # single outer expression instead.
76 |         if: ${{ always() && hashFiles(format('junit/test-results-{0}.xml', matrix.python-version)) != '' }}
77 |         uses: actions/upload-artifact@v4
78 |         with:
79 |           name: pytest-results-${{ matrix.python-version }}-${{ matrix.os }}-${{ github.run_number }}
80 |           path: junit/test-results-${{ matrix.python-version }}.xml
81 |           overwrite: true
82 |           retention-days: 7
83 |           compression-level: 5
84 |
85 |       - name: Upload coverage to Codecov
86 |         uses: codecov/codecov-action@v3
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | venv*
2 | .cache_datasets/
3 | *.ruff*
4 | *.DS_Store
5 |
6 | # Sphinx documentation
7 | *_build/
8 | *_autosummary/
9 |
10 | # Byte-compiled / optimized / DLL files
11 | __pycache__/
12 | *.py[cod]
13 | *$py.class
14 |
15 | # C extensions
16 | *.so
17 |
18 | # Distribution / packaging
19 | .Python
20 | build/
21 | develop-eggs/
22 | dist/
23 | downloads/
24 | eggs/
25 | .eggs/
26 | lib/
27 | lib64/
28 | parts/
29 | sdist/
30 | var/
31 | wheels/
32 | share/python-wheels/
33 | *.egg-info/
34 | .installed.cfg
35 | *.egg
36 | MANIFEST
37 |
38 | # PyInstaller
39 | # Usually these files are written by a python script from a template
40 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
41 | *.manifest
42 | *.spec
43 |
44 | # Installer logs
45 | pip-log.txt
46 | pip-delete-this-directory.txt
47 |
48 | # Unit test / coverage reports
49 | htmlcov/
50 | .tox/
51 | .nox/
52 | .coverage
53 | .coverage.*
54 | .cache
55 | nosetests.xml
56 | coverage.xml
57 | *.cover
58 | *.py,cover
59 | .hypothesis/
60 | .pytest_cache/
61 | cover/
62 |
63 | # Translations
64 | *.mo
65 | *.pot
66 |
67 | # Django stuff:
68 | *.log
69 | local_settings.py
70 | db.sqlite3
71 | db.sqlite3-journal
72 |
73 | # Flask stuff:
74 | instance/
75 | .webassets-cache
76 |
77 | # Scrapy stuff:
78 | .scrapy
79 |
80 | # Sphinx documentation
81 | docs/_build/
82 |
83 | # PyBuilder
84 | .pybuilder/
85 | target/
86 |
87 | # Jupyter Notebook
88 | .ipynb_checkpoints
89 | .virtual_documents
90 |
91 | # IPython
92 | profile_default/
93 | ipython_config.py
94 |
95 | # pyenv
96 | # For a library or package, you might want to ignore these files since the code is
97 | # intended to run in multiple environments; otherwise, check them in:
98 | # .python-version
99 |
100 | # pipenv
101 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
102 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
103 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
104 | # install all needed dependencies.
105 | #Pipfile.lock
106 |
107 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
108 | __pypackages__/
109 |
110 | # Celery stuff
111 | celerybeat-schedule
112 | celerybeat.pid
113 |
114 | # SageMath parsed files
115 | *.sage.py
116 |
117 | # Environments
118 | .env
119 | .venv
120 | env/
121 | venv/
122 | ENV/
123 | env.bak/
124 | venv.bak/
125 |
126 | # Spyder project settings
127 | .spyderproject
128 | .spyproject
129 |
130 | # Rope project settings
131 | .ropeproject
132 |
133 | # mkdocs documentation
134 | /site
135 |
136 | # mypy
137 | .mypy_cache/
138 | .dmypy.json
139 | dmypy.json
140 |
141 | # Pyre type checker
142 | .pyre/
143 |
144 | # pytype static type analyzer
145 | .pytype/
146 |
147 | # Cython debug symbols
148 | cython_debug/
149 |
150 |
151 | # pycharm
152 | .idea
153 | .vscode
154 |
155 | # sphinx-docs
156 | sphinx/_build
157 | sphinx/_autosummary
158 |
159 | # dash apps
160 | file_system_backend/
161 | file_system_store/
162 |
163 |
164 | # testing
165 | bugs/
166 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # How to contribute
2 |
3 | First of all, thank you for considering contributing to `plotly-resampler`.
4 | It's people like you that will help make `plotly-resampler` a great toolkit. 🤝
5 |
6 | As usual, contributions are managed through GitHub Issues and Pull Requests.
7 | We invite you to use GitHub's [Issues](https://github.com/predict-idlab/plotly-resampler/issues) to report bugs, request features, or ask questions about the project. To ask use-specific questions, please use the [Discussions](https://github.com/predict-idlab/plotly-resampler/discussions) instead.
8 |
9 | If you are new to GitHub, you can read more about how to contribute [here](https://docs.github.com/en/get-started/quickstart/contributing-to-projects).
10 |
11 | ## How to develop locally
12 |
13 | *Note: this guide is tailored to developers using linux*
14 |
15 | The following steps assume that your console is at the root folder of this repository.
16 |
17 | ### Create a new (poetry) Python environment
18 |
19 | It is best practice to use a new Python environment when starting on a new project.
20 |
21 | We describe two options;
22 |
23 |
24 | Advised option: using poetry shell
25 | For dependency management we use poetry (read more below).
26 | Hence, we advise to use poetry shell to create a Python environment for this project.
27 |
28 | 1. Install poetry: https://python-poetry.org/docs/#installation
29 | (If necessary add poetry to the PATH)
30 | 2. Create & activate a new python environment: poetry shell
31 |
32 | After the poetry shell command your python environment is activated.
33 |
34 |
35 |
36 | Alternative option: using python-venv
37 | As alternative option, you can create a Python environment by using python-venv
38 |
39 | 1. Create a new Python environment: python -m venv venv
40 | 2. Activate this environment; source venv/bin/activate
41 |
42 |
43 | Make sure that this environment is activated when developing (e.g., installing dependencies, running tests).
44 |
45 |
46 | ### Installing & building the dependencies
47 |
48 | We use [`poetry`](https://python-poetry.org/) as dependency manager for this project.
49 | - The dependencies for installation & development are written in the [`pyproject.toml`](pyproject.toml) file (which is quite similar to a requirements.txt file).
50 | - To ensure that package versions are consistent with everyone who works on this project poetry uses a [`poetry.lock`](poetry.lock) file (read more [here](https://python-poetry.org/docs/basic-usage/#installing-with-poetrylock)).
51 |
52 | To install the requirements
53 | ```sh
54 | pip install poetry # install poetry (if you do use the venv option)
55 | poetry install --all-extras # install all the dependencies
56 | ```
57 |
58 | ### Formatting the code
59 |
60 | We use [`black`](https://github.com/psf/black) and [`ruff`](https://github.com/charliermarsh/ruff) to format the code.
61 |
62 | To format the code, run the following command (more details in the [`Makefile`](Makefile)):
63 | ```sh
64 | make format
65 | ```
66 |
67 | ### Checking the linting
68 |
69 | We use [`ruff`](https://github.com/charliermarsh/ruff) to check the linting.
70 |
71 | To check the linting, run the following command (more details in the [`Makefile`](Makefile)):
72 | ```sh
73 | make lint
74 | ```
75 |
76 | ### Running the tests (& code coverage)
77 |
78 | You can run the tests with the following code (more details in the [`Makefile`](Makefile)):
79 |
80 | ```sh
81 | make test
82 | ```
83 |
84 | To get the selenium tests working you should have Google Chrome installed.
85 |
86 | If you want to visually follow the selenium tests;
87 | * change the `TESTING_LOCAL` variable in [`tests/conftest.py`](tests/conftest.py) to `True`
88 |
89 | ### Generating the docs
90 |
91 | When you've added or updated a feature; it is always a good practice to alter the
92 | documentation and [changelog.md](CHANGELOG.md).
93 |
94 | The current listing below gives you the provided steps to regenerate the documentation.
95 |
96 | 1. Make sure that your python env is active (e.g., by running `poetry shell`)
97 | 2. Build the documentation with `mkdocs` (see the `docs` target in the [`Makefile`](Makefile)) by running from the repository root:
98 | ```bash
99 | make docs
100 | ```
101 |
102 | ---
103 |
104 | Bonus points for contributions that include a performance analysis with a benchmark script and profiling output (please report on the GitHub issue).
105 |
106 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2023 Jonas Van Der Donckt, Jeroen Van Der Donckt, Emiel Deprost.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | black = black plotly_resampler examples tests
2 |
3 | .PHONY: format
4 | format:
5 | ruff --fix plotly_resampler tests
6 | $(black)
7 |
8 | .PHONY: lint
9 | lint:
10 | poetry run ruff plotly_resampler tests
11 | poetry run $(black) --check --diff
12 |
13 | .PHONY: test
14 | test:
15 | poetry run pytest --cov-report term-missing --cov=plotly_resampler tests
16 |
17 | .PHONY: docs
18 | docs:
19 | poetry run mkdocs build -c -f mkdocs.yml
20 |
21 | .PHONY: clean
22 | clean:
23 | rm -rf `find . -name __pycache__`
24 | rm -rf .cache
25 | rm -rf .pytest_cache
26 | rm -rf *.egg-info
27 | rm -f .coverage
28 | rm -rf build
29 |
30 | rm -f `find . -type f -name '*.py[co]' `
31 | rm -f `find . -type f -name '*~' `
32 | rm -f `find . -type f -name '.*~' `
33 | rm -f `find . -type f -name '*.cpython-*' `
34 |
--------------------------------------------------------------------------------
/examples/README.md:
--------------------------------------------------------------------------------
1 | # plotly-resampler examples
2 |
3 | This directory contains several examples, highlighting the applicability of plotly-resampler for various use cases.
4 |
5 |
6 | ## Prerequisites
7 |
8 | To successfully run these examples, make sure that you've installed all the [requirements](requirements.txt) by running:
9 | ```bash
10 | pip install -r requirements.txt
11 | ```
12 |
13 | ## 1. Example notebooks
14 | ### 1.1 basic examples
15 |
16 | The [basic example notebook](basic_example.ipynb) covers most use-cases in which plotly-resampler will be employed. It serves as an ideal starting point for data-scientists who want to use plotly-resampler in their day-to-day jupyter environments.
17 |
18 | Additionally, this notebook also shows some more advanced functionalities, such as:
19 | * Retaining (a static) plotly-resampler figure in your notebook
20 | * How to utilize an x-axis overview (i.e., a rangeslider) to navigate through your time series
21 | * Showing how to style the marker color and size of plotly-resampler figures
22 | * Adjusting trace data of plotly-resampler figures at runtime
23 | * How to add (shaded) confidence bounds to your time series
24 | * The flexibility of configuring different aggregation-algorithms and number of shown samples per trace
25 | * How plotly-resampler can be used for logarithmic x-axes and an implementation of a logarithmic aggregation algorithm, i.e., [LogLTTB](example_utils/loglttb.py)
26 | * Using different `fill_value` for gap handling of filled area plots.
27 | * Using multiple y-axes in a single subplot (see the [other_examples](other_examples.ipynb))
28 |
29 | **Note**: the basic example notebook requires `plotly-resampler>=0.9.0rc3`.
30 |
31 | ### 1.2 Figurewidget example
32 |
33 | The [figurewidget example notebook](figurewidget_example.ipynb) utilizes the `FigureWidgetResampler` wrapper to create a `go.FigureWidget` with dynamic aggregation functionality. A major advantage of this approach is that this does not create a web application, avoiding starting an application on a port (and forwarding that port when working remotely).
34 |
35 | Additionally, this notebook highlights how to use the `FigureWidget`'s on-click callback to utilize plotly for large **time series annotation**.
36 |
37 | ## 2. Dash apps
38 |
39 | The [dash_apps](dash_apps/) folder contains example dash apps in which `plotly-resampler` is integrated
40 |
41 | | | description |
42 | |------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
43 | | **minimal examples** | |
44 | | [global variable](dash_apps/01_minimal_global.py) | *bad practice*: minimal example in which a global `FigureResampler` variable is used |
45 | | [server side caching](dash_apps/02_minimal_cache.py) | *good practice*: minimal example in which we perform server side caching of the `FigureResampler` variable |
46 | | [runtime graph construction](dash_apps/03_minimal_cache_dynamic.py) | minimal example where graphs are constructed based on user interactions at runtime. [Pattern matching callbacks](https://dash.plotly.com/pattern-matching-callbacks) are used to construct these plotly-resampler graphs dynamically. Again, server side caching is performed. |
47 | | [xaxis overview (rangeslider)](dash_apps/04_minimal_cache_overview.py) | minimal example where a linked xaxis overview is shown below the `FigureResampler` figure. This xaxis rangeslider utilizes [clientside callbacks](https://dash.plotly.com/clientside-callbacks) to realize this behavior. |
48 | | [xaxis overview (subplots)](dash_apps/05_cache_overview_subplots.py) | example where a linked xaxis overview is shown below the `FigureResampler` figure (with subplots). |
49 | | [overview range selector button](dash_apps/06_cache_overview_range_buttons.py) | example where (i) a linked xaxis overview is shown below the `FigureResampler` figure, and (ii) a rangeselector along with a reset axis button is utilized to zoom in on specific window sizes. |
50 | | **advanced apps** | |
51 | | [dynamic sine generator](dash_apps/11_sine_generator.py) | exponential sine generator which uses [pattern matching callbacks](https://dash.plotly.com/pattern-matching-callbacks) to remove and construct plotly-resampler graphs dynamically |
52 | | [file visualization](dash_apps/12_file_selector.py) | load and visualize multiple `.parquet` files with plotly-resampler |
53 | | [dynamic static graph](dash_apps/13_coarse_fine.py) | Visualization dashboard in which a dynamic (i.e., plotly-resampler graph) and a coarse, static graph (i.e., go.Figure) are shown (made for [this issue](https://github.com/predict-idlab/plotly-resampler/issues/56)). Graph interaction events on the coarse graph update the dynamic graph. |
54 |
55 | ## 3. Other apps
56 |
57 | The [other_apps](other_apps/) folder contains examples of `plotly-resampler` being *integrated* in other apps / frameworks
58 |
59 | | app-name | description |
60 | | --- | --- |
61 | | [streamlit integration](other_apps/streamlit_app.py) | visualize a large noisy sine in a [streamlit](https://streamlit.io/) app |
--------------------------------------------------------------------------------
/examples/dash_apps/01_minimal_global.py:
--------------------------------------------------------------------------------
1 | """Minimal dash app example.
2 |
3 | Click on a button, and see a plotly-resampler graph of two noisy sinusoids.
4 | No dynamic graph construction / pattern matching callbacks are needed.
5 |
6 | This example uses a global FigureResampler object, which is considered a bad practice.
7 | source: https://dash.plotly.com/sharing-data-between-callbacks:
8 |
9 | Dash is designed to work in multi-user environments where multiple people view the
10 | application at the same time and have independent sessions.
11 | If your app uses and modifies a global variable, then one user's session could set
12 | the variable to some value which would affect the next user's session.
13 |
14 | """
15 |
16 | import numpy as np
17 | import plotly.graph_objects as go
18 | from dash import Dash, Input, Output, callback_context, dcc, html, no_update
19 |
20 | from plotly_resampler import FigureResampler
21 |
22 | # Data that will be used for the plotly-resampler figures
23 | x = np.arange(2_000_000)
24 | noisy_sin = (3 + np.sin(x / 200) + np.random.randn(len(x)) / 10) * x / 1_000
25 |
26 |
27 | # --------------------------------------Globals ---------------------------------------
28 | app = Dash(__name__)
29 | fig: FigureResampler = FigureResampler()
30 | # NOTE: in this example, this reference to a FigureResampler is essential to preserve
31 | # throughout the whole dash app! If your dash app wants to create a new go.Figure(),
32 | # you should not construct a new FigureResampler object, but replace the figure of this
33 | # FigureResampler object by using the FigureResampler.replace() method.
34 |
35 | app.layout = html.Div(
36 | [
37 | html.H1("plotly-resampler global variable", style={"textAlign": "center"}),
38 | html.Button("plot chart", id="plot-button", n_clicks=0),
39 | html.Hr(),
40 | # The graph object - which we will empower with plotly-resampler
41 | dcc.Graph(id="graph-id"),
42 | ]
43 | )
44 |
45 |
46 | # ------------------------------------ DASH logic -------------------------------------
47 | # The callback used to construct and store the graph's data on the serverside
48 | @app.callback(
49 | Output("graph-id", "figure"),
50 | Input("plot-button", "n_clicks"),
51 | prevent_initial_call=True,
52 | )
53 | def plot_graph(n_clicks):
54 | ctx = callback_context
55 | if len(ctx.triggered) and "plot-button" in ctx.triggered[0]["prop_id"]:
56 | # Note how the replace method is used here on the global figure object
57 | global fig
58 | if len(fig.data):
59 | # Replace the figure with an empty one to clear the graph
60 | fig.replace(go.Figure())
61 | fig.add_trace(go.Scattergl(name="log"), hf_x=x, hf_y=noisy_sin * 0.9999995**x)
62 | fig.add_trace(go.Scattergl(name="exp"), hf_x=x, hf_y=noisy_sin * 1.000002**x)
63 | return fig
64 | else:
65 | return no_update
66 |
67 |
68 | # The plotly-resampler callback to update the graph after a relayout event (= zoom/pan)
69 | fig.register_update_graph_callback(app=app, graph_id="graph-id")
70 |
71 | # --------------------------------- Running the app ---------------------------------
72 | if __name__ == "__main__":
73 | app.run_server(debug=True, port=9023)
74 |
--------------------------------------------------------------------------------
/examples/dash_apps/02_minimal_cache.py:
--------------------------------------------------------------------------------
1 | """Minimal dash app example.
2 |
3 | Click on a button, and see a plotly-resampler graph of two noisy sinusoids.
4 | No dynamic graph construction / pattern matching callbacks are needed.
5 |
6 | This example uses the dash-extensions its ServerSide functionality to cache
7 | the FigureResampler per user/session on the server side. This way, no global figure
8 | variable is used and shows the best practice of using plotly-resampler within dash-apps.
9 |
10 | """
11 |
12 | import numpy as np
13 | import plotly.graph_objects as go
14 | from dash import Input, Output, State, callback_context, dcc, html, no_update
15 | from dash_extensions.enrich import DashProxy, Serverside, ServersideOutputTransform
16 |
17 | from plotly_resampler import FigureResampler
18 | from plotly_resampler.aggregation import MinMaxLTTB
19 |
# ----------------------------------- Data & globals -----------------------------------
# High-frequency data shared by every session's plotly-resampler figure.
x = np.arange(2_000_000)
noisy_sin = (3 + np.sin(x / 200) + np.random.randn(len(x)) / 10) * x / 1_000

# DashProxy + ServersideOutputTransform enable the `Serverside(...)` caching used below.
app = DashProxy(__name__, transforms=[ServersideOutputTransform()])

_layout_children = [
    html.H1("plotly-resampler + dash-extensions", style={"textAlign": "center"}),
    html.Button("plot chart", id="plot-button", n_clicks=0),
    html.Hr(),
    # The graph object - which we will empower with plotly-resampler
    dcc.Graph(id="graph-id"),
    # Note: the dcc.Store component links the server-side cached FigureResampler
    # object to this session
    dcc.Loading(dcc.Store(id="store")),
]
app.layout = html.Div(_layout_children)
39 |
40 |
# ------------------------------------ DASH logic -------------------------------------
# The callback used to construct and store the FigureResampler on the serverside
@app.callback(
    [Output("graph-id", "figure"), Output("store", "data")],
    Input("plot-button", "n_clicks"),
    prevent_initial_call=True,
)
def plot_graph(n_clicks):
    """Build the FigureResampler figure and cache it on the server side."""
    triggered = callback_context.triggered
    # Guard: only react to an actual plot-button click
    if not triggered or "plot-button" not in triggered[0]["prop_id"]:
        return no_update

    fig: FigureResampler = FigureResampler(
        go.Figure(), default_downsampler=MinMaxLTTB(parallel=True)
    )

    # Figure construction logic
    fig.add_trace(go.Scattergl(name="log"), hf_x=x, hf_y=noisy_sin * 0.9999995**x)
    fig.add_trace(go.Scattergl(name="exp"), hf_x=x, hf_y=noisy_sin * 1.000002**x)

    # Returned twice: once to render client-side, once wrapped for the server cache
    return fig, Serverside(fig)
62 |
63 |
# The plotly-resampler callback to update the graph after a relayout event (= zoom/pan)
# As we use the figure again as output, we need to set: allow_duplicate=True
@app.callback(
    Output("graph-id", "figure", allow_duplicate=True),
    Input("graph-id", "relayoutData"),
    State("store", "data"),  # The server side cached FigureResampler per session
    prevent_initial_call=True,
    memoize=True,
)
def update_fig(relayoutdata: dict, fig: FigureResampler):
    """Patch the cached figure's data to match the new zoom/pan range."""
    if fig is not None:
        return fig.construct_update_data_patch(relayoutdata)
    # No cached figure for this session yet -> nothing to update
    return no_update
77 |
78 |
# --------------------------------- Running the app ---------------------------------
if __name__ == "__main__":
    # Development server only; serve behind a production WSGI server when deployed.
    app.run_server(debug=True, port=9023)
82 |
--------------------------------------------------------------------------------
/examples/dash_apps/03_minimal_cache_dynamic.py:
--------------------------------------------------------------------------------
1 | """Minimal dynamic dash app example.
2 |
3 | Click on a button, and draw a new plotly-resampler graph of two noisy sinusoids.
4 | This example uses pattern-matching callbacks to update dynamically constructed graphs.
5 | The plotly-resampler graphs themselves are cached on the server side.
6 |
7 | The main difference between this example and 02_minimal_cache.py is that here, we want
8 | to cache using a dcc.Store that is not yet available on the client side. As a result we
9 | split up our logic into two callbacks: (1) the callback used to construct the necessary
10 | components and send them to the client-side, and (2) the callback used to construct the
11 | actual plotly-resampler graph and cache it on the server side. These two callbacks are
12 | chained together using the dcc.Interval component.
13 |
14 | """
15 |
16 | from typing import List
17 | from uuid import uuid4
18 |
19 | import numpy as np
20 | import plotly.graph_objects as go
21 | from dash import MATCH, Input, Output, State, dcc, html, no_update
22 | from dash_extensions.enrich import (
23 | DashProxy,
24 | Serverside,
25 | ServersideOutputTransform,
26 | Trigger,
27 | TriggerTransform,
28 | )
29 |
30 | from plotly_resampler import FigureResampler
31 | from plotly_resampler.aggregation import MinMaxLTTB
32 |
# Data that will be used for the plotly-resampler figures
x = np.arange(2_000_000)
noisy_sin = (3 + np.sin(x / 200) + np.random.randn(len(x)) / 10) * x / 1_000

# --------------------------------------Globals ---------------------------------------
# TriggerTransform enables the `Trigger` argument used by `construct_display_graph`;
# ServersideOutputTransform enables the `Serverside(...)` caching.
app = DashProxy(__name__, transforms=[ServersideOutputTransform(), TriggerTransform()])

app.layout = html.Div(
    [
        html.Div(children=[html.Button("Add Chart", id="add-chart", n_clicks=0)]),
        # Dynamically constructed graphs are appended to this (initially empty) div
        html.Div(id="container", children=[]),
    ]
)
46 |
47 |
# ------------------------------------ DASH logic -------------------------------------
# This method adds the needed components to the front-end, but does not yet contain the
# FigureResampler graph construction logic.
@app.callback(
    Output("container", "children"),
    Input("add-chart", "n_clicks"),
    State("container", "children"),
    prevent_initial_call=True,
)
def add_graph_div(n_clicks: int, div_children: List[html.Div]):
    """Append an (empty) graph + store + one-shot interval to the container."""
    uid = str(uuid4())
    graph = dcc.Graph(id={"type": "dynamic-graph", "index": uid}, figure=go.Figure())
    # Note: the dcc.Store component will be used to link the server side cached
    # FigureResampler object to this graph
    store = dcc.Loading(dcc.Store(id={"type": "store", "index": uid}))
    # This one-shot dcc.Interval makes sure that the `construct_display_graph`
    # callback is fired once, right after these components are added to the
    # session its front-end
    interval = dcc.Interval(
        id={"type": "interval", "index": uid}, max_intervals=1, interval=1
    )
    div_children.append(html.Div(children=[graph, store, interval]))
    return div_children
75 |
76 |
# This method constructs the FigureResampler graph and caches it on the server side
@app.callback(
    Output({"type": "dynamic-graph", "index": MATCH}, "figure"),
    Output({"type": "store", "index": MATCH}, "data"),
    State("add-chart", "n_clicks"),
    Trigger({"type": "interval", "index": MATCH}, "n_intervals"),
    prevent_initial_call=True,
)
def construct_display_graph(n_clicks) -> tuple:
    """Create the FigureResampler for the newly added graph and cache it.

    Fired once per graph by its one-shot dcc.Interval. Returns a
    ``(figure, Serverside(figure))`` tuple: the first element renders
    client-side, the second fills the graph's server-side dcc.Store.
    (The original ``-> FigureResampler`` annotation was incorrect: the
    function returns a 2-tuple, matching the two callback Outputs.)
    """
    fig = FigureResampler(
        go.Figure(),
        default_n_shown_samples=2_000,
        default_downsampler=MinMaxLTTB(parallel=True),
    )

    # Figure construction logic based on a state variable, in our case n_clicks
    sigma = n_clicks * 1e-6
    fig.add_trace(dict(name="log"), hf_x=x, hf_y=noisy_sin * (1 - sigma) ** x)
    fig.add_trace(dict(name="exp"), hf_x=x, hf_y=noisy_sin * (1 + sigma) ** x)
    fig.update_layout(title=f"graph - {n_clicks}", title_x=0.5)

    return fig, Serverside(fig)
99 |
100 |
# The plotly-resampler callback to update the graph after a relayout event (= zoom/pan)
# As we use the figure again as output, we need to set: allow_duplicate=True
@app.callback(
    Output({"type": "dynamic-graph", "index": MATCH}, "figure", allow_duplicate=True),
    Input({"type": "dynamic-graph", "index": MATCH}, "relayoutData"),
    State({"type": "store", "index": MATCH}, "data"),
    prevent_initial_call=True,
    memoize=True,
)
def update_fig(relayoutdata: dict, fig: FigureResampler):
    """Send a data patch for the cached figure after a zoom/pan relayout."""
    if fig is None:
        # No cached figure yet for this graph -> nothing to update
        return no_update
    return fig.construct_update_data_patch(relayoutdata)
114 |
115 |
# --------------------------------- Running the app ---------------------------------
if __name__ == "__main__":
    # Development server only; serve behind a production WSGI server when deployed.
    app.run_server(debug=True, port=9023)
119 |
--------------------------------------------------------------------------------
/examples/dash_apps/04_minimal_cache_overview.py:
--------------------------------------------------------------------------------
1 | """Minimal dash app example.
2 |
3 | Click on a button, and see a plotly-resampler graph of two sinusoids.
4 | In addition, another graph is shown, which is an overview of the main graph.
5 | This other graph is bidirectionally linked to the main graph; when you select a region
6 | in the overview graph, the main graph will zoom in on that region and vice versa.
7 |
8 | This example uses the dash-extensions its ServersideOutput functionality to cache
9 | the FigureResampler per user/session on the server side. This way, no global figure
10 | variable is used and shows the best practice of using plotly-resampler within dash-apps.
11 |
12 | """
13 |
14 | import dash
15 | import numpy as np
16 | import plotly.graph_objects as go
17 | from dash import Input, Output, State, callback_context, dcc, html, no_update
18 | from dash_extensions.enrich import DashProxy, Serverside, ServersideOutputTransform
19 |
20 | # The overview figure requires clientside callbacks, whose JavaScript code is located
21 | # in the assets folder. We need to tell dash where to find this folder.
22 | from plotly_resampler import ASSETS_FOLDER, FigureResampler
23 |
# -------------------------------- Data and constants ---------------------------------
# Data that will be used for the plotly-resampler figures
x = np.arange(2_000_000)
noisy_sin = (3 + np.sin(x / 200) + np.random.randn(len(x)) / 10) * x / 1_000

# The ids of the components used in the app (we put them here to avoid typos)
GRAPH_ID = "graph-id"
OVERVIEW_GRAPH_ID = "overview-graph"
STORE_ID = "store"


# --------------------------------------Globals ---------------------------------------
# NOTE: Remark how the assets folder is passed to the Dash(proxy) application and how
# the lodash script is included as an external script.
# (lodash is presumably required by the clientside overview JS — confirm against
# the assets folder's scripts)
app = DashProxy(
    __name__,
    transforms=[ServersideOutputTransform()],
    assets_folder=ASSETS_FOLDER,
    external_scripts=["https://cdn.jsdelivr.net/npm/lodash/lodash.min.js"],
)

app.layout = html.Div(
    [
        html.H1("plotly-resampler + dash-extensions", style={"textAlign": "center"}),
        html.Button("plot chart", id="plot-button", n_clicks=0),
        html.Hr(),
        # The graph, overview graph, and serverside store for the FigureResampler graph
        dcc.Graph(id=GRAPH_ID),
        dcc.Graph(id=OVERVIEW_GRAPH_ID),
        dcc.Loading(dcc.Store(id=STORE_ID)),
    ]
)
56 |
57 |
# ------------------------------------ DASH logic -------------------------------------
# --- construct and store the FigureResampler on the serverside ---
@app.callback(
    [
        Output(GRAPH_ID, "figure"),
        Output(OVERVIEW_GRAPH_ID, "figure"),
        Output(STORE_ID, "data"),
    ],
    Input("plot-button", "n_clicks"),
    prevent_initial_call=True,
)
def plot_graph(_):
    """Construct the main + overview figures and cache the FigureResampler.

    Returns (main figure, overview figure, Serverside-cached FigureResampler).
    The ``global app`` statement of the original was dead code (``app`` is
    never reassigned here) and has been removed.
    """
    ctx = callback_context
    # Guard: only react to an actual plot-button click
    if not len(ctx.triggered) or "plot-button" not in ctx.triggered[0]["prop_id"]:
        return no_update

    fig: FigureResampler = FigureResampler(create_overview=True)

    # Figure construction logic
    fig.add_trace(go.Scattergl(name="log"), hf_x=x, hf_y=noisy_sin * 0.9999995**x)
    fig.add_trace(go.Scattergl(name="exp"), hf_x=x, hf_y=noisy_sin * 1.000002**x)

    fig.update_layout(legend=dict(orientation="h", yanchor="bottom", y=1.02))
    fig.update_layout(margin=dict(b=10), template="plotly_white")

    # The overview (coarse) figure is derived from the main figure
    coarse_fig = fig._create_overview_figure()
    return fig, coarse_fig, Serverside(fig)
86 |
87 |
# --- Clientside callbacks used to bidirectionally link the overview and main graph ---
# main graph relayout (zoom/pan) -> update the overview graph's selection.
# The graph's "id" property appears to serve only as a dummy output (clientside
# callbacks require an Output); the actual update happens inside the JS function.
app.clientside_callback(
    dash.ClientsideFunction(namespace="clientside", function_name="main_to_coarse"),
    dash.Output(OVERVIEW_GRAPH_ID, "id", allow_duplicate=True),
    dash.Input(GRAPH_ID, "relayoutData"),
    [dash.State(OVERVIEW_GRAPH_ID, "id"), dash.State(GRAPH_ID, "id")],
    prevent_initial_call=True,
)

# overview graph selection -> zoom the main graph to the selected region
app.clientside_callback(
    dash.ClientsideFunction(namespace="clientside", function_name="coarse_to_main"),
    dash.Output(GRAPH_ID, "id", allow_duplicate=True),
    dash.Input(OVERVIEW_GRAPH_ID, "selectedData"),
    [dash.State(GRAPH_ID, "id"), dash.State(OVERVIEW_GRAPH_ID, "id")],
    prevent_initial_call=True,
)
104 |
105 |
# ------ FigureResampler update callback ------


# The plotly-resampler callback to update the graph after a relayout event (= zoom/pan)
# As we use the figure again as output, we need to set: allow_duplicate=True
@app.callback(
    Output(GRAPH_ID, "figure", allow_duplicate=True),
    Input(GRAPH_ID, "relayoutData"),
    State(STORE_ID, "data"),  # The server side cached FigureResampler per session
    prevent_initial_call=True,
)
def update_fig(relayoutdata: dict, fig: FigureResampler):
    """Patch the main graph's data after a zoom/pan, using the cached figure."""
    if fig is not None:
        return fig.construct_update_data_patch(relayoutdata)
    # No cached figure for this session yet -> nothing to update
    return no_update
121 |
122 |
# --------------------------------- Running the app ---------------------------------
if __name__ == "__main__":
    # use_reloader=False: run in a single process (no hot-reload)
    app.run_server(debug=True, port=9023, use_reloader=False)
126 |
--------------------------------------------------------------------------------
/examples/dash_apps/05_cache_overview_subplots.py:
--------------------------------------------------------------------------------
1 | """Minimal dash app example.
2 |
3 | Click on a button, and see a plotly-resampler graph of an exponential and log curve
4 | (and combinations thereof) spread over 4 subplots.
5 | In addition, another graph is shown below, which is an overview of subplot columns from
6 | the main graph. This other graph is bidirectionally linked to the main graph; when you
7 | select a region in the overview graph, the main graph will zoom in on that region and
8 | vice versa.
9 |
10 | This example uses the dash-extensions its ServersideOutput functionality to cache
11 | the FigureResampler per user/session on the server side. This way, no global figure
12 | variable is used and shows the best practice of using plotly-resampler within dash-apps.
13 |
14 | """
15 |
16 | import dash
17 | import numpy as np
18 | import plotly.graph_objects as go
19 | from dash import Input, Output, State, callback_context, dcc, html, no_update
20 | from dash_extensions.enrich import DashProxy, Serverside, ServersideOutputTransform
21 | from plotly.subplots import make_subplots
22 |
23 | # The overview figure requires clientside callbacks, whose JavaScript code is located
24 | # in the assets folder. We need to tell dash where to find this folder.
25 | from plotly_resampler import ASSETS_FOLDER, FigureResampler
26 | from plotly_resampler.aggregation import MinMaxLTTB
27 |
# -------------------------------- Data and constants ---------------------------------
# Data that will be used for the plotly-resampler figures
x = np.arange(2_000_000)
noisy_sin = (3 + np.sin(x / 200) + np.random.randn(len(x)) / 10) * x / 1_000

# The ids of the components used in the app (we put them here to avoid typos)
GRAPH_ID = "graph-id"
OVERVIEW_GRAPH_ID = "overview-graph"
STORE_ID = "store"


# --------------------------------------Globals ---------------------------------------
# NOTE: Remark how the assets folder is passed to the Dash(proxy) application and how
# the lodash script is included as an external script.
# (lodash is presumably required by the clientside overview JS — confirm against
# the assets folder's scripts)
app = DashProxy(
    __name__,
    transforms=[ServersideOutputTransform()],
    assets_folder=ASSETS_FOLDER,
    external_scripts=["https://cdn.jsdelivr.net/npm/lodash/lodash.min.js"],
)

app.layout = html.Div(
    [
        html.H1("plotly-resampler + dash-extensions", style={"textAlign": "center"}),
        html.Button("plot chart", id="plot-button", n_clicks=0),
        html.Hr(),
        # The graph, overview graph, and serverside store for the FigureResampler graph
        dcc.Graph(id=GRAPH_ID),
        dcc.Graph(id=OVERVIEW_GRAPH_ID),
        dcc.Loading(dcc.Store(id=STORE_ID)),
    ]
)
60 |
61 |
# ------------------------------------ DASH logic -------------------------------------
# --- construct and store the FigureResampler on the serverside ---
@app.callback(
    [
        Output(GRAPH_ID, "figure"),
        Output(OVERVIEW_GRAPH_ID, "figure"),
        Output(STORE_ID, "data"),
    ],
    Input("plot-button", "n_clicks"),
    prevent_initial_call=True,
)
def plot_graph(_):
    """Construct a 2x2 subplot figure + its overview and cache the resampler.

    Returns (main figure, overview figure, Serverside-cached FigureResampler).
    The ``global app`` statement of the original was dead code (``app`` is
    never reassigned here) and has been removed.
    """
    ctx = callback_context
    # Guard: only react to an actual plot-button click
    if not len(ctx.triggered) or "plot-button" not in ctx.triggered[0]["prop_id"]:
        return no_update

    # NOTE: remark how the `overview_row_idxs` argument specifies the row indices
    # (start at 0) of the subplots that will be used to construct the overview
    # graph. In this list the position of the values indicate the column index of
    # the subplot. In this case, the overview graph will show for the first column
    # the second subplot row (1), and for the second column the first subplot row
    # (0).
    fig: FigureResampler = FigureResampler(
        make_subplots(
            rows=2, cols=2, shared_xaxes="columns", horizontal_spacing=0.03
        ),
        create_overview=True,
        overview_row_idxs=[1, 0],
        default_downsampler=MinMaxLTTB(parallel=True),
    )

    # Figure construction logic
    # fmt: off
    log = noisy_sin * 0.9999995**x
    exp = noisy_sin * 1.000002**x
    fig.add_trace(go.Scattergl(name="log", legend='legend1'), hf_x=x, hf_y=log)
    fig.add_trace(go.Scattergl(name="exp", legend='legend1'), hf_x=x, hf_y=exp)

    fig.add_trace(go.Scattergl(name="-log", legend='legend2'), hf_x=x, hf_y=-exp, row=1, col=2)

    fig.add_trace(go.Scattergl(name="log", legend='legend3'), hf_x=x, hf_y=-log, row=2, col=1)
    fig.add_trace(go.Scattergl(name="3-exp", legend='legend3'), hf_x=x, hf_y=3 - exp, row=2, col=1)

    fig.add_trace(go.Scattergl(name="log", legend='legend4'), hf_x=x, hf_y=log**2, row=2, col=2)

    # fmt: on
    # One horizontal legend per subplot, positioned just above/inside its subplot
    fig.update_layout(
        legend1=dict(orientation="h", yanchor="bottom", y=1.02),
        legend2=dict(orientation="h", yanchor="bottom", y=1.02, x=0.52),
        legend3=dict(orientation="h", y=0.51, x=0),
        legend4=dict(orientation="h", y=0.51, x=0.52),
    )
    fig.update_layout(margin=dict(b=10), template="plotly_white")

    coarse_fig = fig._create_overview_figure()
    return fig, coarse_fig, Serverside(fig)
119 |
120 |
# --- Clientside callbacks used to bidirectionally link the overview and main graph ---
# main graph relayout (zoom/pan) -> update the overview graph's selection.
app.clientside_callback(
    dash.ClientsideFunction(namespace="clientside", function_name="main_to_coarse"),
    dash.Output(
        OVERVIEW_GRAPH_ID, "id", allow_duplicate=True
    ),  # TODO -> look for clean output
    dash.Input(GRAPH_ID, "relayoutData"),
    [dash.State(OVERVIEW_GRAPH_ID, "id"), dash.State(GRAPH_ID, "id")],
    prevent_initial_call=True,
)

# overview graph selection -> zoom the main graph to the selected region
app.clientside_callback(
    dash.ClientsideFunction(namespace="clientside", function_name="coarse_to_main"),
    dash.Output(GRAPH_ID, "id", allow_duplicate=True),
    dash.Input(OVERVIEW_GRAPH_ID, "selectedData"),
    [dash.State(GRAPH_ID, "id"), dash.State(OVERVIEW_GRAPH_ID, "id")],
    prevent_initial_call=True,
)
139 |
140 |
# --- FigureResampler update callback ---


# The plotly-resampler callback to update the graph after a relayout event (= zoom/pan)
# As we use the figure again as output, we need to set: allow_duplicate=True
@app.callback(
    Output(GRAPH_ID, "figure", allow_duplicate=True),
    Input(GRAPH_ID, "relayoutData"),
    State(STORE_ID, "data"),  # The server side cached FigureResampler per session
    prevent_initial_call=True,
)
def update_fig(relayoutdata, fig: FigureResampler):
    """Patch the main graph's data after a zoom/pan, using the cached figure."""
    if fig is not None:
        return fig.construct_update_data_patch(relayoutdata)
    # No cached figure for this session yet -> nothing to update
    return no_update
156 |
157 |
# --------------------------------- Running the app ---------------------------------
if __name__ == "__main__":
    # use_reloader=False: run in a single process (no hot-reload)
    app.run_server(debug=True, port=9023, use_reloader=False)
161 |
--------------------------------------------------------------------------------
/examples/dash_apps/06_cache_overview_range_buttons.py:
--------------------------------------------------------------------------------
1 | """Minimal dash app example.
2 |
3 | Click on a button, and see a plotly-resampler graph of an exponential and log curve is
4 | shown. In addition, another graph is shown below, which is an overview of the main
5 | graph. This other graph is bidirectionally linked to the main graph; when you
6 | select a region in the overview graph, the main graph will zoom in on that region and
7 | vice versa.
8 |
9 | On the left top of the main graph, you can see a range selector. This range selector
10 | allows to zoom in with a fixed time range.
11 |
12 | Lastly, there is a button present to reset the axes of the main graph. This button
13 | replaces the default reset axis button as the default button removes the spikes.
14 | (specifically, the `xaxis.showspikes` and `yaxis.showspikes` are set to False; This is
15 | most likely a bug in plotly-resampler, but I have not yet found out why).
16 |
17 | This example uses the dash-extensions its ServersideOutput functionality to cache
18 | the FigureResampler per user/session on the server side. This way, no global figure
19 | variable is used and shows the best practice of using plotly-resampler within dash-apps.
20 |
21 | """
22 |
23 | import dash
24 | import numpy as np
25 | import pandas as pd
26 | import plotly.graph_objects as go
27 | from dash import Input, Output, State, callback_context, dcc, html, no_update
28 | from dash_extensions.enrich import DashProxy, Serverside, ServersideOutputTransform
29 |
30 | # The overview figure requires clientside callbacks, whose JavaScript code is located
31 | # in the assets folder. We need to tell dash where to find this folder.
32 | from plotly_resampler import ASSETS_FOLDER, FigureResampler
33 | from plotly_resampler.aggregation import MinMaxLTTB
34 |
# -------------------------------- Data and constants ---------------------------------
# Data that will be used for the plotly-resampler figures
x = np.arange(2_000_000)
# Datetime x-axis (1-minute period) — used so the xaxis rangeselector buttons
# ("1 week", "1 month", ...) defined in `plot_graph` have meaning
x_time = pd.date_range("2020-01-01", periods=len(x), freq="1min")
noisy_sin = (3 + np.sin(x / 200) + np.random.randn(len(x)) / 10) * x / 1_000

# The ids of the components used in the app (we put them here to avoid typos later on)
GRAPH_ID = "graph-id"
OVERVIEW_GRAPH_ID = "overview-graph"
STORE_ID = "store"
PLOT_BTN_ID = "plot-button"

# --------------------------------------Globals ---------------------------------------
# NOTE: Remark how
# (1) the assets folder is passed to the Dash(proxy) application
# (2) the lodash script is included as an external script.
app = DashProxy(
    __name__,
    transforms=[ServersideOutputTransform()],
    assets_folder=ASSETS_FOLDER,
    external_scripts=["https://cdn.jsdelivr.net/npm/lodash/lodash.min.js"],
)

# Construct the app layout
app.layout = html.Div(
    [
        html.H1("plotly-resampler + dash-extensions", style={"textAlign": "center"}),
        html.Button("plot chart", id=PLOT_BTN_ID, n_clicks=0),
        html.Hr(),
        # The graph, overview graph, and serverside store for the FigureResampler graph
        dcc.Graph(
            id=GRAPH_ID,
            # NOTE: we remove the reset scale button as it removes the spikes and
            # we provide our own reset-axis button upon graph construction
            config={"modeBarButtonsToRemove": ["resetscale"]},
        ),
        dcc.Graph(id=OVERVIEW_GRAPH_ID, config={"displayModeBar": False}),
        dcc.Loading(dcc.Store(id=STORE_ID)),
    ]
)
75 |
76 |
# ------------------------------------ DASH logic -------------------------------------
# --- construct and store the FigureResampler on the serverside ---
@app.callback(
    [
        Output(GRAPH_ID, "figure"),
        Output(OVERVIEW_GRAPH_ID, "figure"),
        Output(STORE_ID, "data"),
    ],
    Input(PLOT_BTN_ID, "n_clicks"),
    prevent_initial_call=True,
)
def plot_graph(_):
    """Build the rangeselector-equipped main figure + overview and cache it.

    Returns (main figure, overview figure, Serverside-cached FigureResampler).
    """
    ctx = callback_context
    # Guard: only react to an actual plot-button click
    if not len(ctx.triggered) or PLOT_BTN_ID not in ctx.triggered[0]["prop_id"]:
        return no_update

    # 1. Create the figure and add data
    fig = FigureResampler(
        # fmt: off
        go.Figure(layout=dict(
            # dragmode="pan",
            hovermode="x unified",
            # Fixed-range zoom buttons (rendered at the top-left of the graph)
            xaxis=dict(rangeselector=dict(buttons=list([
                dict(count=7, label="1 week", step="day", stepmode="backward"),
                dict(count=1, label="1 month", step="month", stepmode="backward"),
                dict(count=2, label="2 months", step="month", stepmode="backward"),
                dict(count=1, label="1 year", step="year", stepmode="backward"),
            ]))),
        )),
        # fmt: on
        default_downsampler=MinMaxLTTB(parallel=True),
        create_overview=True,
    )

    # Figure construction logic
    log = noisy_sin * 0.9999995**x
    exp = noisy_sin * 1.000002**x
    fig.add_trace(go.Scattergl(name="log"), hf_x=x_time, hf_y=log)
    fig.add_trace(go.Scattergl(name="exp"), hf_x=x_time, hf_y=exp)

    fig.update_layout(
        legend=dict(orientation="h", yanchor="bottom", y=1.02, xanchor="right", x=1)
    )
    fig.update_layout(
        margin=dict(b=10),
        template="plotly_white",
        height=650,  # , hovermode="x unified",
        # https://plotly.com/python/custom-buttons/
        # Custom "reset axes" button — replaces the `resetscale` modebar button
        # that was removed in the dcc.Graph config (see module docstring)
        updatemenus=[
            dict(
                type="buttons",
                x=0.45,
                xanchor="left",
                y=1.09,
                yanchor="top",
                buttons=[
                    dict(
                        label="reset axes",
                        method="relayout",
                        args=[
                            {
                                "xaxis.autorange": True,
                                "yaxis.autorange": True,
                                "xaxis.showspikes": True,
                                # NOTE(review): xaxis spikes are restored but yaxis
                                # spikes are explicitly disabled — confirm this
                                # asymmetry is intentional (the module docstring
                                # suggests restoring spikes was the goal)
                                "yaxis.showspikes": False,
                            }
                        ],
                    ),
                ],
            )
        ],
    )
    # fig.update_traces(xaxis="x")
    # fig.update_xaxes(showspikes=True, spikemode="across", spikesnap="cursor")

    coarse_fig = fig._create_overview_figure()
    return fig, coarse_fig, Serverside(fig)
154 |
155 |
# --- Clientside callbacks used to bidirectionally link the overview and main graph ---
# main graph relayout (zoom/pan) -> update the overview graph's selection.
app.clientside_callback(
    dash.ClientsideFunction(namespace="clientside", function_name="main_to_coarse"),
    dash.Output(
        OVERVIEW_GRAPH_ID, "id", allow_duplicate=True
    ),  # TODO -> look for clean output
    dash.Input(GRAPH_ID, "relayoutData"),
    [dash.State(OVERVIEW_GRAPH_ID, "id"), dash.State(GRAPH_ID, "id")],
    prevent_initial_call=True,
)

# overview graph selection -> zoom the main graph to the selected region
app.clientside_callback(
    dash.ClientsideFunction(namespace="clientside", function_name="coarse_to_main"),
    dash.Output(GRAPH_ID, "id", allow_duplicate=True),
    dash.Input(OVERVIEW_GRAPH_ID, "selectedData"),
    [dash.State(GRAPH_ID, "id"), dash.State(OVERVIEW_GRAPH_ID, "id")],
    prevent_initial_call=True,
)
174 |
175 |
# --- FigureResampler update callback ---
# The plotly-resampler callback to update the graph after a relayout event (= zoom/pan)
# As we use the figure again as output, we need to set: allow_duplicate=True
@app.callback(
    Output(GRAPH_ID, "figure", allow_duplicate=True),
    Input(GRAPH_ID, "relayoutData"),
    State(STORE_ID, "data"),  # The server side cached FigureResampler per session
    prevent_initial_call=True,
)
def update_fig(relayoutdata, fig: FigureResampler):
    """Patch the main graph's data after a zoom/pan, using the cached figure."""
    if fig is not None:
        return fig.construct_update_data_patch(relayoutdata)
    # No cached figure for this session yet -> nothing to update
    return no_update
189 |
190 |
if __name__ == "__main__":
    # Start the app
    # NOTE: unlike the other examples, this one uses `app.run` and port 8055
    app.run(debug=True, host="localhost", port=8055, use_reloader=False)
194 |
--------------------------------------------------------------------------------
/examples/dash_apps/11_sine_generator.py:
--------------------------------------------------------------------------------
1 | """Dash runtime sine generator app example.
2 |
3 | In this example, users can configure parameters of a sine wave and then generate the
4 | sine-wave graph at runtime using the create-new-graph button. There is also an option
5 | to remove the graph.
6 |
7 | This app uses server side caching of the FigureResampler object. As it uses the same
8 | concepts of the 03_minimal_cache_dynamic.py example, the runtime graph construction
9 | callback is again split up into two callbacks: (1) the callback used to construct the
10 | necessary components and send them to the front-end and (2) the callback used to
11 | construct the plotly-resampler figure and cache it on the server side.
12 |
13 | """
14 |
15 | from uuid import uuid4
16 |
17 | import dash_bootstrap_components as dbc
18 | import numpy as np
19 | import plotly.graph_objects as go
20 | from dash import MATCH, Input, Output, State, callback_context, dcc, html, no_update
21 | from dash_extensions.enrich import (
22 | DashProxy,
23 | Serverside,
24 | ServersideOutputTransform,
25 | Trigger,
26 | TriggerTransform,
27 | )
28 |
29 | from plotly_resampler import FigureResampler
30 |
# --------------------------------------Globals ---------------------------------------
# suppress_callback_exceptions=True: graphs (and their callback targets) are added
# to the layout at runtime, so callbacks reference ids not present at startup
app = DashProxy(
    __name__,
    suppress_callback_exceptions=True,
    external_stylesheets=[dbc.themes.LUX],
    transforms=[ServersideOutputTransform(), TriggerTransform()],
)

# -------- Construct the app layout --------
app.layout = html.Div(
    [
        html.Div(html.H1("Exponential sine generator"), style={"textAlign": "center"}),
        html.Hr(),
        dbc.Row(
            [
                # Left column (md=2): the sine-wave parameter form + add/remove buttons
                dbc.Col(
                    dbc.Form(
                        [
                            dbc.Label("#datapoints:", style={"margin-left": "10px"}),
                            html.Br(),
                            dcc.Input(
                                id="nbr-datapoints",
                                placeholder="n",
                                type="number",
                                style={"margin-left": "10px"},
                            ),
                            *([html.Br()] * 2),
                            dbc.Label("exponent:", style={"margin-left": "10px"}),
                            html.Br(),
                            dcc.Input(
                                id="expansion-factor",
                                placeholder="pow",
                                type="number",
                                min=0.95,
                                max=1.00001,
                                style={"margin-left": "10px"},
                            ),
                            *([html.Br()] * 2),
                            dbc.Button(
                                "Create new graph",
                                id="add-graph-btn",
                                color="primary",
                                style={
                                    "textalign": "center",
                                    "width": "max-content",
                                    "margin-left": "10px",
                                },
                            ),
                            *([html.Br()] * 2),
                            dbc.Button(
                                "Remove last graph",
                                id="remove-graph-btn",
                                color="danger",
                                style={
                                    "textalign": "center",
                                    "width": "max-content",
                                    "margin-left": "10px",
                                },
                            ),
                        ],
                    ),
                    style={"align": "top"},
                    md=2,
                ),
                # Right column (md=10): container to which graphs are appended
                dbc.Col(html.Div(id="graph-container"), md=10),
            ],
        ),
    ]
)
100 |
101 |
# ------------------------------------ DASH logic -------------------------------------
# This method adds the needed components to the front-end, but does not yet contain the
# FigureResampler graph construction logic.
@app.callback(
    Output("graph-container", "children"),
    Input("add-graph-btn", "n_clicks"),
    Input("remove-graph-btn", "n_clicks"),
    [
        State("nbr-datapoints", "value"),
        State("expansion-factor", "value"),
        State("graph-container", "children"),
    ],
    prevent_initial_call=True,
)
def add_or_remove_graph(add_graph, remove_graph, n, exp, gc_children):
    """Append a new (empty) graph container, or drop the most recent one."""
    # Guard: no remove click and the add-inputs are incomplete -> nothing to do
    if remove_graph is None and (add_graph is None or n is None or exp is None):
        return no_update

    if gc_children is None:
        gc_children = []

    # Was the remove button the trigger?
    pressed = [t["prop_id"] for t in callback_context.triggered]
    if any("remove-graph" in prop for prop in pressed):
        # Drop the last graph (no-op when there are none)
        return gc_children[:-1] if len(gc_children) else no_update

    # Otherwise: append a fresh graph + store + one-shot interval
    uid = str(uuid4())
    gc_children.append(
        html.Div(
            children=[
                # Note: we also add a dcc.Store component, which will be used to
                # link the server side cached FigureResampler object
                dcc.Graph(
                    id={"type": "dynamic-graph", "index": uid}, figure=go.Figure()
                ),
                dcc.Loading(dcc.Store(id={"type": "store", "index": uid})),
                # This one-shot dcc.Interval fires `construct_display_graph` once,
                # right after these components are added to the session its front-end
                dcc.Interval(
                    id={"type": "interval", "index": uid}, max_intervals=1, interval=1
                ),
            ],
        )
    )
    return gc_children
148 |
149 |
150 | # This method constructs the FigureResampler graph and caches it on the server side
@app.callback(
    Output({"type": "dynamic-graph", "index": MATCH}, "figure"),
    Output({"type": "store", "index": MATCH}, "data"),
    State("nbr-datapoints", "value"),
    State("expansion-factor", "value"),
    State("add-graph-btn", "n_clicks"),
    Trigger({"type": "interval", "index": MATCH}, "n_intervals"),
    prevent_initial_call=True,
)
def construct_display_graph(n, exp, n_added_graphs) -> FigureResampler:
    """Construct an exponentially-scaled noisy sine FigureResampler graph.

    The figure is returned to the front-end AND cached on the server side
    (wrapped in ``Serverside``, ending up in the matching dcc.Store).
    """
    # Noisy sine whose amplitude grows as `exp ** x` (state variables)
    x = np.arange(n)
    expansion_scaling = exp**x
    y = (
        np.sin(x / 200) * expansion_scaling
        + np.random.randn(n) / 10 * expansion_scaling
    )

    fr = FigureResampler(go.Figure(), verbose=True)
    fr.add_trace(go.Scattergl(name="sin"), hf_x=x, hf_y=y)

    # Styling: horizontal legend on top, centered title with the parameters
    fr.update_layout(
        height=350,
        showlegend=True,
        legend=dict(orientation="h", y=1.12, xanchor="right", x=1),
        template="plotly_white",
        title=f"graph {n_added_graphs} - n={n:,} pow={exp}",
        title_x=0.5,
    )

    return fr, Serverside(fr)
181 |
182 |
183 | # --- FigureResampler update callback ---
184 |
185 |
186 | # The plotly-resampler callback to update the graph after a relayout event (= zoom/pan)
187 | # As we use the figure again as output, we need to set: allow_duplicate=True
@app.callback(
    Output({"type": "dynamic-graph", "index": MATCH}, "figure", allow_duplicate=True),
    Input({"type": "dynamic-graph", "index": MATCH}, "relayoutData"),
    State({"type": "store", "index": MATCH}, "data"),
    prevent_initial_call=True,
    memoize=True,
)
def update_fig(relayoutdata: dict, fig: FigureResampler):
    """Resample the cached figure after a relayout (zoom/pan) event."""
    # No server-side cached figure (yet) -> leave the front-end untouched
    if fig is None:
        return no_update
    return fig.construct_update_data_patch(relayoutdata)
199 |
200 |
201 | # --------------------------------- Running the app ---------------------------------
if __name__ == "__main__":
    # use_reloader=False prevents the script from being executed twice in debug mode
    app.run_server(debug=True, port=9023, use_reloader=False)
204 |
--------------------------------------------------------------------------------
/examples/dash_apps/12_file_selector.py:
--------------------------------------------------------------------------------
1 | """Dash file parquet visualization app example.
2 |
In this use case, we have dropdowns which allow the end-user to select multiple
parquet files, which are visualized using FigureResampler after clicking on a button.
5 |
6 | """
7 |
8 | __author__ = "Jonas Van Der Donckt"
9 |
10 | from pathlib import Path
11 | from typing import List
12 |
13 | import dash_bootstrap_components as dbc
14 | import plotly.graph_objects as go
15 |
16 | from dash import callback_context, dcc, html, no_update
17 | from dash_extensions.enrich import Output, Input, State
18 | from dash_extensions.enrich import DashProxy, Serverside, ServersideOutputTransform
19 | from utils.callback_helpers import get_selector_states, multiple_folder_file_selector
20 | from utils.graph_construction import visualize_multiple_files
21 |
22 | from plotly_resampler import FigureResampler
23 |
24 | # --------------------------------------Globals ---------------------------------------
# The ServersideOutputTransform is what allows caching the FigureResampler
# object on the server (the dcc.Store then only holds a reference to it)
app = DashProxy(
    __name__,
    external_stylesheets=[dbc.themes.LUX],
    transforms=[ServersideOutputTransform()],
)

# --------- File selection configurations ---------
name_folder_list = [
    {
        # the key-string below is the title which will be shown in the dash app
        "example data": {"folder": Path(__file__).parent.parent.joinpath("data")},
        "other folder": {"folder": Path(__file__).parent.parent.joinpath("data")},
    },
    # NOTE: A new item on this level creates a new file-selector card.
    # { "PC data": { "folder": Path("/home/jonas/data/wesad/empatica/") } }
    # TODO: change the folder path above to a location where you have some
    # `.parquet` files stored on your machine.
]
43 |
44 |
45 | # --------- DASH layout logic ---------
def serve_layout() -> dbc.Container:
    """Construct the app's layout.

    Returns
    -------
    dbc.Container
        A Container withholding the layout.

    """
    title = dbc.Container(
        html.H1("Data loading and visualization dashboard"),
        style={"textAlign": "center"},
    )
    # File selection layout (+ assign callbacks)
    selector_col = dbc.Col(multiple_folder_file_selector(app, name_folder_list), md=2)
    # The graph and the dcc.Store (for serialization)
    graph_col = dbc.Col(
        [
            dcc.Graph(id="graph-id", figure=go.Figure()),
            dcc.Loading(dcc.Store(id="store")),
        ],
        md=10,
    )
    return dbc.Container(
        [
            title,
            html.Hr(),
            dbc.Row([selector_col, graph_col], align="center"),
        ],
        fluid=True,
    )
80 |
81 |
82 | app.layout = serve_layout()
83 |
84 |
85 | # ------------------------------------ DASH logic -------------------------------------
@app.callback(
    [Output("graph-id", "figure"), Output("store", "data")],
    [Input("plot-button", "n_clicks"), *get_selector_states(len(name_folder_list))],
    prevent_initial_call=True,
)
def plot_graph(n_clicks, *folder_list):
    """Visualize the selected parquet files once the plot button is clicked.

    Parameters
    ----------
    n_clicks:
        The number of plot-button clicks (the triggering Input).
    *folder_list:
        Alternating (folder, selected-files) State values of the selectors.
    """
    # `folder_list` alternates folder and file values -> iterate pairwise
    it = iter(folder_list)
    file_list: List[Path] = []
    for folder, files in zip(it, it):
        if not all((folder, files)):
            # Skip selector pairs without both a folder and a file selection
            continue
        for file in files:
            file_list.append(Path(folder).joinpath(file))

    # Only construct the figure when the plot button triggered this callback
    ctx = callback_context
    if (
        len(ctx.triggered)
        and "plot-button" in ctx.triggered[0]["prop_id"]
        and len(file_list)
    ):
        fig: FigureResampler = visualize_multiple_files(file_list)
        # Cache the FigureResampler on the server via the Serverside wrapper
        return fig, Serverside(fig)

    # BUG FIX: the original implicitly returned None when the ctx check failed,
    # which is an invalid return value for a multi-output callback.
    return no_update, no_update
108 |
109 |
110 | # --------- FigureResampler update callback ---------
111 |
112 |
113 | # The plotly-resampler callback to update the graph after a relayout event (= zoom/pan)
114 | # As we use the figure again as output, we need to set: allow_duplicate=True
@app.callback(
    Output("graph-id", "figure", allow_duplicate=True),
    Input("graph-id", "relayoutData"),
    State("store", "data"),  # The server side cached FigureResampler per session
    prevent_initial_call=True,
)
def update_fig(relayoutdata: dict, fig: FigureResampler):
    """Resample the cached figure after a relayout (zoom/pan) event."""
    if fig is not None:
        return fig.construct_update_data_patch(relayoutdata)
    # No cached figure (yet) -> leave the front-end untouched
    return no_update
125 |
126 |
127 | # --------------------------------- Running the app ---------------------------------
if __name__ == "__main__":
    # use_reloader=False prevents the script from being executed twice in debug mode
    app.run_server(debug=True, port=9023, use_reloader=False)
130 |
--------------------------------------------------------------------------------
/examples/dash_apps/13_coarse_fine.py:
--------------------------------------------------------------------------------
1 | """Dash file parquet visualization app example with a coarse and fine-grained view.
2 |
In this use case, we have dropdowns which allow end-users to select multiple
parquet files, which are visualized using FigureResampler after clicking on a button.

There are two graphs displayed: a coarse and a dynamic graph. Interactions with the
coarse graph will affect the dynamic graph's shown range. Note that the autosize
of the coarse graph is not linked.
9 |
10 | TODO: add an rectangle on the coarse graph
11 |
12 | """
13 |
14 | from __future__ import annotations
15 |
16 | __author__ = "Jonas Van Der Donckt"
17 |
18 | from pathlib import Path
19 | from typing import List
20 |
21 | import dash_bootstrap_components as dbc
22 | import plotly.graph_objects as go
23 | from dash import Input, Output, State, callback_context, dcc, html, no_update
24 | from dash_extensions.enrich import DashProxy, Serverside, ServersideOutputTransform
25 | from utils.callback_helpers import get_selector_states, multiple_folder_file_selector
26 | from utils.graph_construction import visualize_multiple_files
27 |
28 | from plotly_resampler import FigureResampler
29 |
30 | # --------------------------------------Globals ---------------------------------------
app = DashProxy(
    __name__,
    suppress_callback_exceptions=False,
    external_stylesheets=[dbc.themes.LUX],
    # ServersideOutputTransform allows caching the FigureResampler object on
    # the server (the dcc.Store then only holds a reference to it)
    transforms=[ServersideOutputTransform()],
)

# --------- File selection configurations ---------
name_folder_list = [
    {
        # the key-string below is the title which will be shown in the dash app
        "example data": {"folder": Path(__file__).parent.parent.joinpath("data")},
        "other folder": {"folder": Path(__file__).parent.parent.joinpath("data")},
    },
    # NOTE: A new item on this level creates a new file-selector card.
    # { "PC data": { "folder": Path("/home/jonas/data/wesad/empatica/") } }
    # TODO: change the folder path above to a location where you have some
    # `.parquet` files stored on your machine.
]
50 |
51 |
52 | # --------- DASH layout logic ---------
def serve_layout() -> dbc.Container:
    """Constructs the app's layout.

    Returns
    -------
    dbc.Container
        A Container withholding the layout.

    """
    return dbc.Container(
        [
            dbc.Container(
                html.H1("Data visualization - coarse & dynamic graph"),
                style={"textAlign": "center"},
            ),
            html.Hr(),
            dbc.Row(
                [
                    # Add file selection layout (+ assign callbacks);
                    # multi=False -> one file per selector pair
                    dbc.Col(
                        multiple_folder_file_selector(
                            app, name_folder_list, multi=False
                        ),
                        md=2,
                    ),
                    # Add the graphs and the dcc.Store (for serialization)
                    dbc.Col(
                        [
                            # The coarse graph; its relayout events drive the
                            # shown range of the dynamic graph below
                            dcc.Graph(
                                id="coarse-graph",
                                figure=go.Figure(),
                                config={"modeBarButtonsToAdd": ["drawrect"]},
                            ),
                            html.Br(),
                            dcc.Graph(id="plotly-resampler-graph", figure=go.Figure()),
                            dcc.Loading(dcc.Store(id="store")),
                        ],
                        md=10,
                    ),
                ],
                align="center",
            ),
        ],
        fluid=True,
    )
99 |
100 |
101 | app.layout = serve_layout()
102 |
103 |
104 | # ------------------------------------ DASH logic -------------------------------------
105 | # --------- graph construction logic + callback ---------
@app.callback(
    [
        Output("coarse-graph", "figure"),
        Output("plotly-resampler-graph", "figure"),
        Output("store", "data"),
    ],
    [Input("plot-button", "n_clicks"), *get_selector_states(len(name_folder_list))],
    prevent_initial_call=True,
)
def construct_plot_graph(n_clicks, *folder_list):
    """Construct the coarse & dynamic graphs for the selected parquet files.

    Parameters
    ----------
    n_clicks:
        The number of plot-button clicks (the triggering Input).
    *folder_list:
        Alternating (folder, selected-file) State values of the selectors.
    """
    # `folder_list` alternates folder and file values -> iterate pairwise
    it = iter(folder_list)
    file_list: List[Path] = []
    for folder, files in zip(it, it):
        if not all((folder, files)):
            continue
        # BUG FIX: the original used `file_list` (the, initially empty,
        # accumulator) in the else-branch, silently discarding the selection;
        # it must fall back to `files` itself.
        files = [files] if not isinstance(files, list) else files
        for file in files:
            file_list.append(Path(folder).joinpath(file))

    ctx = callback_context
    if (
        len(ctx.triggered)
        and "plot-button" in ctx.triggered[0]["prop_id"]
        and len(file_list)
    ):
        # Create two graphs, a dynamic plotly-resampler graph and a coarse graph
        dynamic_fig: FigureResampler = visualize_multiple_files(file_list)
        coarse_fig: go.Figure = go.Figure(
            FigureResampler(dynamic_fig, default_n_shown_samples=3_000)
        )

        coarse_fig.update_layout(title="coarse view", height=250)
        coarse_fig.update_layout(margin=dict(l=0, r=0, b=0, t=40, pad=10))
        coarse_fig.update_layout(showlegend=False)
        # BUG FIX: `dict.update` returns None, so the original assignment
        # (`coarse_fig._config = coarse_fig._config.update(...)`) wiped the
        # config. Merge into a new dict instead.
        coarse_fig._config = {
            **(coarse_fig._config or {}),
            "modeBarButtonsToAdd": ["drawrect"],
        }

        dynamic_fig._global_n_shown_samples = 1000
        dynamic_fig.update_layout(title="dynamic view", height=450)
        dynamic_fig.update_layout(margin=dict(l=0, r=0, b=40, t=40, pad=10))
        dynamic_fig.update_layout(
            legend=dict(
                orientation="h", y=-0.11, xanchor="right", x=1, font_size=18
            )
        )

        return coarse_fig, dynamic_fig, Serverside(dynamic_fig)

    # Explicitly prevent the update (the original implicitly returned None
    # when the plot button did not trigger the callback)
    return no_update
154 |
155 |
156 | # Register the graph update callbacks to the layout
157 | # As we use the figure again as output, we need to set: allow_duplicate=True
@app.callback(
    Output("plotly-resampler-graph", "figure", allow_duplicate=True),
    Input("coarse-graph", "relayoutData"),
    Input("plotly-resampler-graph", "relayoutData"),
    State("store", "data"),
    prevent_initial_call=True,
)
def update_dynamic_fig(
    coarse_grained_relayout: dict | None,
    fine_grained_relayout: dict | None,
    fr_fig: FigureResampler,
):
    """Update the dynamic graph from either graph's relayout event."""
    # When the (cached) figure does not exist -> do nothing
    if fr_fig is None:
        return no_update

    # Dispatch on the component that triggered this callback
    trigger_id = callback_context.triggered[0].get("prop_id", "").split(".")[0]
    relayout_per_trigger = {
        "plotly-resampler-graph": fine_grained_relayout,
        "coarse-graph": coarse_grained_relayout,
    }
    if trigger_id in relayout_per_trigger:
        return fr_fig.construct_update_data_patch(relayout_per_trigger[trigger_id])

    return no_update
182 |
183 |
184 | # --------------------------------- Running the app ---------------------------------
if __name__ == "__main__":
    # use_reloader=False prevents the script from being executed twice in debug mode
    app.run_server(debug=True, port=9023, use_reloader=False)
187 |
--------------------------------------------------------------------------------
/examples/dash_apps/utils/callback_helpers.py:
--------------------------------------------------------------------------------
"""Dash helper functions for constructing a file selector.
"""
3 |
4 | __author__ = "Jonas Van Der Donckt"
5 |
6 | import itertools
7 | from pathlib import Path
8 | from typing import Dict, List
9 |
10 | import dash_bootstrap_components as dbc
11 | from dash import Input, Output, State, dcc, html
12 | from functional import seq
13 |
14 |
15 | def _update_file_widget(folder):
16 | if folder is None:
17 | return []
18 | return [
19 | {"label": filename, "value": filename}
20 | for filename in sorted(
21 | set(
22 | list(
23 | seq(Path(folder).iterdir())
24 | .filter(lambda x: x.is_file() and x.name.endswith("parquet"))
25 | .map(lambda x: x.name)
26 | )
27 | )
28 | )
29 | ]
30 |
31 |
def _register_selection_callbacks(app, ids=None):
    """Register, per selector id, the callback that refreshes the file options
    whenever the corresponding folder dropdown changes."""
    # Default to a single, unsuffixed selector pair
    for selector_id in [""] if ids is None else ids:
        app.callback(
            Output(f"file-selector{selector_id}", "options"),
            [Input(f"folder-selector{selector_id}", "value")],
        )(_update_file_widget)
41 |
42 |
def multiple_folder_file_selector(
    app, name_folders_list: List[Dict[str, dict]], multi=True
) -> dbc.Card:
    """Construct a folder & file selector card.

    Creates a `dbc.Card` component holding a (folder, file) dropdown pair per
    entry of ``name_folders_list`` and a "create figure" button, and registers
    the callbacks that populate the file options of each pair.

    Parameters
    ----------
    app:
        The dash application.
    name_folders_list: List[Dict[str, dict]]
        A list of dicts with as key the display-name and as value a dict
        holding the corresponding folder path (under its "folder" key).
    multi: bool, optional
        Whether multiple files can be selected per file dropdown,
        by default True.

    Returns
    -------
    A bootstrap card component
    """
    selector = dbc.Card(
        [
            dbc.Card(
                [
                    dbc.Col(
                        [
                            dbc.Label("folder"),
                            dcc.Dropdown(
                                id=f"folder-selector{i}",
                                options=[
                                    {"label": l, "value": str(f["folder"])}
                                    for (l, f) in name_folders.items()
                                ],
                                clearable=False,
                            ),
                            dbc.Label("file"),
                            dcc.Dropdown(
                                id=f"file-selector{i}",
                                # options are filled in by the callback
                                # registered below
                                options=[],
                                clearable=True,
                                multi=multi,
                            ),
                            html.Br(),
                        ]
                    ),
                ]
            )
            # 1-based ids -> must match the callback registration below
            for i, name_folders in enumerate(name_folders_list, 1)
        ]
        + [
            dbc.Card(
                dbc.Col(
                    [
                        html.Br(),
                        dbc.Button(
                            "create figure",
                            id="plot-button",
                            color="primary",
                        ),
                    ],
                    style={"textAlign": "center"},
                ),
            )
        ],
        body=True,
    )

    _register_selection_callbacks(app=app, ids=range(1, len(name_folders_list) + 1))
    return selector
110 |
111 |
def get_selector_states(n: int) -> List[State]:
    """Return the folder/file selector State fields for ``n`` selector pairs.

    Parameters
    ----------
    n: int
        The number of folder selectors

    """
    # One (folder, file) State pair per 1-based selector id
    states: List[State] = []
    for i in range(1, n + 1):
        states.append(State(f"folder-selector{i}", "value"))
        states.append(State(f"file-selector{i}", "value"))
    return states
132 |
--------------------------------------------------------------------------------
/examples/dash_apps/utils/graph_construction.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 | from typing import List, Union
3 |
4 | import pandas as pd
5 | import plotly.graph_objects as go
6 | from plotly.subplots import make_subplots
7 |
8 | from plotly_resampler import FigureResampler
9 | from plotly_resampler.aggregation import MinMaxLTTB
10 |
11 |
12 | # --------- graph construction logic + callback ---------
def visualize_multiple_files(file_list: List[Union[str, Path]]) -> FigureResampler:
    """Create FigureResampler where each subplot row represents all signals from a file.

    Parameters
    ----------
    file_list: List[Union[str, Path]]
        The parquet files to visualize; each file gets its own subplot row.

    Returns
    -------
    FigureResampler
        A newly constructed FigureResampler figure with one subplot row per
        file (a new figure is created on every call).

    """
    fig = FigureResampler(
        make_subplots(rows=len(file_list), shared_xaxes=False),
        default_downsampler=MinMaxLTTB(parallel=True),
    )
    # 350px per row, capped at a total height of 900px
    fig.update_layout(height=min(900, 350 * len(file_list)))

    for i, f in enumerate(file_list, 1):
        df = pd.read_parquet(f)  # TODO: replace with more generic data loading code
        if "timestamp" in df.columns:
            df = df.set_index("timestamp")

        # NOTE(review): columns are added in reversed order — presumably to
        # control the trace/legend ordering; confirm before changing.
        for c in df.columns[::-1]:
            fig.add_trace(go.Scattergl(name=c), hf_x=df.index, hf_y=df[c], row=i, col=1)
    return fig
40 |
--------------------------------------------------------------------------------
/examples/data/df_gusb.parquet:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/predict-idlab/plotly-resampler/560e6b98c944cd65143580d5a40b177af57354ff/examples/data/df_gusb.parquet
--------------------------------------------------------------------------------
/examples/data/df_pc_test.parquet:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/predict-idlab/plotly-resampler/560e6b98c944cd65143580d5a40b177af57354ff/examples/data/df_pc_test.parquet
--------------------------------------------------------------------------------
/examples/data/processed_gsr.parquet:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/predict-idlab/plotly-resampler/560e6b98c944cd65143580d5a40b177af57354ff/examples/data/processed_gsr.parquet
--------------------------------------------------------------------------------
/examples/example_utils/loglttb.py:
--------------------------------------------------------------------------------
1 | """An (non-optimized) python implementation of the LTTB algorithm that utilizes
2 | log-scale buckets.
3 | """
4 |
5 | import numpy as np
6 | from plotly_resampler.aggregation.aggregation_interface import DataPointSelector
7 | from typing import Union
8 |
9 |
class LogLTTB(DataPointSelector):
    """LTTB variant that places its buckets on a log scale of the x-values.

    Non-optimized, pure-Python example implementation of the LTTB point
    selector with log-scale buckets (see the module docstring).
    """

    @staticmethod
    def _argmax_area(prev_x, prev_y, avg_next_x, avg_next_y, x_bucket, y_bucket) -> int:
        """Vectorized triangular area argmax computation.

        Parameters
        ----------
        prev_x : float
            The previous selected point its x value.
        prev_y : float
            The previous selected point its y value.
        avg_next_x : float
            The x mean of the next bucket
        avg_next_y : float
            The y mean of the next bucket
        x_bucket : np.ndarray
            All x values in the bucket
        y_bucket : np.ndarray
            All y values in the bucket

        Returns
        -------
        int
            The index of the point with the largest triangular area.
        """
        # Twice the triangle area spanned by (prev, candidate, next-average);
        # the constant factor is irrelevant for the argmax.
        return np.abs(
            x_bucket * (prev_y - avg_next_y)
            + y_bucket * (avg_next_x - prev_x)
            + (prev_x * avg_next_y - avg_next_x * prev_y)
        ).argmax()

    def _arg_downsample(
        self, x: Union[np.ndarray, None], y: np.ndarray, n_out: int, **kwargs
    ) -> np.ndarray:
        """Downsample to `n_out` points using the log variant of the LTTB algorithm.

        Parameters
        ----------
        x : np.ndarray
            The x-values of the data.
        y : np.ndarray
            The y-values of the data.
        n_out : int
            The number of points to downsample to.

        Returns
        -------
        np.ndarray
            The indices of the downsampled data.
        """
        # We need a valid x array to determine the x-range
        assert x is not None, "x cannot be None for this downsampler"

        # the log function to use
        lf = np.log1p

        # Log-spaced bucket boundaries mapped back onto sample indices;
        # np.unique also sorts and drops duplicate boundaries (which occur
        # when several log-spaced values land between the same two x samples).
        # NOTE(review): the boundaries use np.exp on log1p-transformed
        # endpoints (not np.expm1) — the inverse is off by one x-unit;
        # confirm whether this asymmetry is intended.
        offset = np.unique(
            np.searchsorted(
                x, np.exp(np.linspace(lf(x[0]), lf(x[-1]), n_out + 1)).astype(np.int64)
            )
        )

        # Construct the output array; first and last data point always kept
        sampled_x = np.empty(len(offset) + 1, dtype="int64")
        sampled_x[0] = 0
        sampled_x[-1] = x.shape[0] - 1

        # Convert x & y to int if it is boolean (arithmetic below needs numbers)
        if x.dtype == np.bool_:
            x = x.astype(np.int8)
        if y.dtype == np.bool_:
            y = y.astype(np.int8)

        # `a` tracks the index of the previously selected point
        a = 0
        for i in range(len(offset) - 2):
            a = (
                self._argmax_area(
                    prev_x=x[a],
                    prev_y=y[a],
                    avg_next_x=np.mean(x[offset[i + 1] : offset[i + 2]]),
                    avg_next_y=y[offset[i + 1] : offset[i + 2]].mean(),
                    x_bucket=x[offset[i] : offset[i + 1]],
                    y_bucket=y[offset[i] : offset[i + 1]],
                )
                + offset[i]
            )
            sampled_x[i + 1] = a

        # ------------ EDGE CASE ------------
        # next-average of last bucket = last point
        sampled_x[-2] = (
            self._argmax_area(
                prev_x=x[a],
                prev_y=y[a],
                avg_next_x=x[-1],  # last point
                avg_next_y=y[-1],
                x_bucket=x[offset[-2] : offset[-1]],
                y_bucket=y[offset[-2] : offset[-1]],
            )
            + offset[-2]
        )
        return sampled_x
112 |
--------------------------------------------------------------------------------
/examples/helper.py:
--------------------------------------------------------------------------------
1 | from typing import Union
2 |
3 | import pandas as pd
4 |
5 |
def groupby_consecutive(
    df: Union[pd.Series, pd.DataFrame], col_name: str = None
) -> pd.DataFrame:
    """Merge consecutive `col_name` values in a single dataframe.

    This is especially useful if you want to represent sparse data in a more
    compact format.

    Parameters
    ----------
    df : Union[pd.Series, pd.DataFrame]
        Must be time-indexed!
    col_name : str, optional
        If a DataFrame is passed, you will need to specify the `col_name` on
        which the consecutive-grouping will need to take place.

    Returns
    -------
    pd.DataFrame
        A new `DataFrame` view, with columns:
        [`start`, `end`, `n_consecutive`, `col_name`], representing the
        start- and endtime of the consecutive range, the number of consecutive
        samples, and the col_name's consecutive values.
    """
    # isinstance instead of `type(...) ==` (also accepts Series subclasses)
    if isinstance(df, pd.Series):
        col_name = df.name
        df = df.to_frame()

    assert col_name in df.columns

    # A new group starts at every position where the value changes
    df_cum = (
        (df[col_name].diff(1) != 0)
        .astype("int")
        .cumsum()
        .rename("value_grp")
        .to_frame()
    )
    df_cum["sequence_idx"] = df.index
    df_cum[col_name] = df[col_name]

    # Group once and reuse (the original recomputed the groupby four times)
    grouped = df_cum.groupby("value_grp")
    df_grouped = pd.DataFrame(
        {
            "start": grouped["sequence_idx"].first(),
            "end": grouped["sequence_idx"].last(),
            "n_consecutive": grouped.size(),
            col_name: grouped[col_name].first(),
        }
    ).reset_index(drop=True)
    # Start of the next group; the last group falls back to its own end time
    df_grouped["next_start"] = df_grouped.start.shift(-1).fillna(df_grouped["end"])
    return df_grouped
56 |
--------------------------------------------------------------------------------
/examples/other_apps/streamlit_app.py:
--------------------------------------------------------------------------------
1 | """Minimal streamlit app example.
2 |
3 | This example shows how to integrate plotly-resampler in a streamlit app.
The following three steps are required:
5 | 1. use FigureResampler
6 | 2. run the visualization (which is a dash app) in a (sub)process on a certain port
7 | 3. add as iframe component to streamlit
8 |
9 | To run this example execute the following command:
10 | $ streamlit run streamlit_app.py
11 |
12 | Note: to have colored traces in the streamlit app, you should always include the
13 | following code: `import plotly.io as pio; pio.templates.default = "plotly"`
14 |
15 | """
16 |
17 | __author__ = "Jeroen Van Der Donckt"
18 |
# Explicitly set pio.templates in order to have colored traces in the streamlit app!
# -> https://discuss.streamlit.io/t/streamlit-overrides-colours-of-plotly-chart/34943/5
import plotly.io as pio

pio.templates.default = "plotly"

# 0. Create a noisy sine wave
import numpy as np
import plotly.graph_objects as go

from plotly_resampler import FigureResampler

x = np.arange(1_000_000)
noisy_sin = (3 + np.sin(x / 200) + np.random.randn(len(x)) / 10) * x / 1_000

### 1. Use FigureResampler
fig = FigureResampler(default_n_shown_samples=2_000)
fig.add_trace(go.Scattergl(name="noisy sine", showlegend=True), hf_x=x, hf_y=noisy_sin)
fig.update_layout(height=700)

### 2. Run the visualization (which is a dash app) in a (sub)process on a certain port
# Note: starting a process allows executing code after `.show_dash` is called
from multiprocessing import Process

port = 9022
# BUG FIX: `Process.start()` returns None, so the original
# `proc = Process(...).start()` left `proc` bound to None. Keep the Process
# handle so it can later be joined/terminated if needed.
proc = Process(target=fig.show_dash, kwargs=dict(mode="external", port=port))
proc.start()

# Deleting the lines below this and running this file will result in a classic running dash app
# Note: for just a dash app it is not even necessary to execute .show_dash in a (sub)process

### 3. Add as iframe component to streamlit
import streamlit.components.v1 as components

components.iframe(f"http://localhost:{port}", height=700)
53 |
--------------------------------------------------------------------------------
/examples/requirements.txt:
--------------------------------------------------------------------------------
1 | pyfunctional>=1.4.3
2 | dash-bootstrap-components>=1.2.0
3 | dash-extensions==1.0.20 # fixated on this version as more recent versions do not work
4 | ipywidgets>=7.7.0
5 | memory-profiler>=0.60.0
6 | line-profiler>=3.5.1
7 | pyarrow>=17.0.0
8 | kaleido>=0.2.1
9 | flask-cors>=3.0.10
--------------------------------------------------------------------------------
/mkdocs.yml:
--------------------------------------------------------------------------------
1 | site_name: Plotly Resampler Documentation
2 | site_url: https://predict-idlab.github.io/plotly-resampler/
3 | repo_url: https://github.com/predict-idlab/plotly-resampler
4 | repo_name: plotly-resampler
5 | site_description: Documentation for the Plotly Resampler; a wrapper for plotly Figures to visualize large time-series data.
6 | site_author:
7 | docs_dir: mkdocs
8 |
9 | nav:
10 | - "Get started 🚀": "getting_started.md"
11 | - "Dash apps 🤝": "dash_app_integration.md"
12 | - "API 📖": "api/"
13 | - "FAQ ❓": "FAQ.md"
14 |
15 | markdown_extensions:
16 | - admonition
17 | - pymdownx.details
18 | - pymdownx.superfences
19 | - pymdownx.highlight
20 | - pymdownx.inlinehilite
21 | - attr_list
22 | - sane_lists
23 | - smarty
24 | - toc:
25 | permalink: true
26 | - pymdownx.emoji:
27 | emoji_index: !!python/name:materialx.emoji.twemoji
28 | emoji_generator: !!python/name:materialx.emoji.to_svg
29 |
30 | plugins:
31 | - mike
32 | - search
33 | - gen-files:
34 | scripts:
35 | - mkdocs/gen_ref_pages.py
36 | - literate-nav:
37 | nav_file: SUMMARY.md
38 | - section-index
39 | - mkdocstrings:
40 | default_handler: python
41 | handlers:
42 | python:
43 | paths: [plotly_resampler]
44 | options:
45 | docstring_style: numpy
46 |
47 | theme:
48 | name: material
49 | locale: en
50 | logo: static/icon.png
51 | features:
52 | - navigation.tabs
53 | - navigation.path
54 | - content.code.copy
55 | palette:
56 | # Palette toggle for light mode
57 | - scheme: default
58 | primary: teal
    toggle:
      icon: material/weather-night
      name: Switch to dark mode
63 |
64 | # Palette toggle for dark mode
65 | - scheme: slate
66 | primary: black
67 | toggle:
68 | icon: material/weather-sunny
69 | name: Switch to light mode
70 | icon:
71 | repo: fontawesome/brands/github
72 |
73 | extra:
74 | version:
75 | provider: mike
76 |
--------------------------------------------------------------------------------
/mkdocs/FAQ.md:
--------------------------------------------------------------------------------
1 | # FAQ ❓
2 |
3 | ??? abstract "What does the orange `~time|number` suffix in legend name indicate?"
4 |
5 | This tilde suffix is only shown when the data is aggregated and represents the _mean aggregation bin size_
6 | which is the mean index-range difference between two consecutive aggregated samples.
7 |
8 | > - for _time-indexed data_: the mean time-range between 2 consecutive (sampled) samples.
9 | > - for _numeric-indexed data_: the mean numeric range between 2 consecutive (sampled) samples.
10 |
11 | When the index is a range-index; the mean aggregation bin size represents the mean downsample ratio; i.e.,
12 | the mean number of samples that are aggregated into one sample.
13 |
14 | ??? abstract "What is the difference between plotly-resampler figures and plain plotly figures?"
15 |
16 | plotly-resampler can be thought of as wrapper around plain plotly figures
17 | which adds line-chart visualization scalability by dynamically aggregating the data of the figures w.r.t.
18 | the front-end view. plotly-resampler thus adds dynamic aggregation functionality to plain plotly figures.
19 |
20 | **important to know**:
21 |
22 | - `show` _always_ returns a static html view of the figure, i.e., no dynamic aggregation can be performed on that view.
23 | - To have dynamic aggregation:
24 | - with `FigureResampler`, you need to call `show_dash` (or output the object in a cell via `IPython.display`) ->
25 | which spawns a dash-web app, and the dynamic aggregation is realized with dash callback
26 | - with `FigureWidgetResampler`, you need to use `IPython.display` on the object,
27 | which uses widget-events to realize dynamic aggregation (via the running IPython kernel).
28 |
29 | **other changes of plotly-resampler figures w.r.t. vanilla plotly**:
30 |
31 | - double-clicking within a line-chart area does not Reset Axes, as it results in an “Autoscale” event.
32 | We decided to implement an Autoscale event as updating your y-range such that it shows all the data that
33 | is in your x-range
    - **Note**: vanilla Plotly figures' Autoscale results in Reset Axes behavior;
      in our opinion this did not make a lot of sense.
      We have therefore overridden this behavior in plotly-resampler.
37 |
38 | ??? abstract "My `FigureResampler.show_dash` keeps hanging (indefinitely) with the error message: `OSError: Port already in use`"
39 |
40 | !!! info "Disclaimer"
41 | Since v0.9.0 we use Dash instead of JupyterDash for Jupyter integration which should have resolved this issue!
42 |
43 |
44 | Plotly-resampler its `FigureResampler.show_dash` method leverages the [jupyterdash](https://github.com/plotly/jupyter-dash)
45 | toolkit to easily allow integration of dash apps in notebooks.
46 | However, there is a [known issue](https://github.com/plotly/jupyter-dash/pull/105) with jupyterDash that causes the `FigureResampler.show_dash`
47 | method to hang when the port is already in use. In a future Pull-Request they will hopefully fix this issue.
48 | We internally track this [issue](https://github.com/predict-idlab/plotly-resampler/issues/123) as well -
49 | please comment there if you want to provide feedback.
50 |
51 | In the meantime, you can use the following workaround (if you do not care about the [Werkzeug security issue](https://github.com/predict-idlab/plotly-resampler/pull/174)):
52 | `pip install werkzeug==2.1.2`.
53 |
54 | ??? abstract "What is the difference in approach between plotly-resampler and datashader?"
55 |
56 | [Datashader](https://datashader.org/getting_started/Introduction.html) is a highly scalable
57 | [open-source](https://github.com/holoviz/datashader) library for analyzing and visualizing large datasets.
58 | More specifically, datashader _“rasterizes”_ or _“aggregates”_ datasets into regular grids
59 | that can be analyzed further or viewed as **images**.
60 |
61 | **The main differences are**:
62 |
63 | Datashader can deal with various kinds of data (e.g., location related data, point clouds),
64 | whereas plotly-resampler is more tailored towards time-series data visualizations.
65 | Furthermore, datashader outputs a **rasterized image/array** encompassing all traces their data,
66 | whereas plotly-resampler outputs an **aggregated series** per trace.
67 | Thus, datashader is more suited for analyzing data where you do not want to pin-out a certain series/trace.
68 |
69 | In our opinion, datashader truly shines (for the time series use case) when:
70 |
71 | - you want a global, overlaying view of all your traces
72 | - you want to visualize a large number of time series in a single plot (many traces)
73 | - there is a lot of noise on your high-frequency data and you want to uncover the underlying pattern
74 | - you want to render all data points in your visualization
75 |
76 | In our opinion, plotly-resampler shines when:
77 |
78 | - you need the capabilities to interact with the traces (e.g., hovering, toggling traces, hovertext per trace)
79 | - you want to use a less complex (but more restricted) visualization interface (as opposed to holoviews), i.e., plotly
80 | - you want to make existing plotly time-series figures more scalable and efficient
81 | - to build scalable Dash apps for time-series data visualization
82 |
83 | Furthermore combined with holoviews, datashader can also be employed in an interactive manner, see the example below.
84 |
85 | ```python
86 | from holoviews.operation.datashader import datashade
87 | import datashader as ds
88 | import holoviews as hv
89 | import numpy as np
90 | import pandas as pd
91 | import panel as pn
92 |
93 | hv.extension("bokeh")
94 | pn.extension(comms='ipywidgets')
95 |
96 | # Create the dummy dataframe
97 | n = 1_000_000
98 | x = np.arange(n)
99 | noisy_sine = (np.sin(x / 3_000) + (np.random.randn(n) / 10)) * x / 5_000
100 | df = pd.DataFrame(
101 | {"ns": noisy_sine, "ns_abs": np.abs(noisy_sine),}
102 | )
103 |
104 | # Visualize interactively with datashader
105 | opts = hv.opts.RGB(width=800, height=400)
106 | ndoverlay = hv.NdOverlay({c:hv.Curve((df.index, df[c])) for c in df.columns})
107 | datashade(ndoverlay, cnorm='linear', aggregator=ds.count(), line_width=3).opts(opts)
108 | ```
109 |
110 | 
111 |
112 | ??? abstract "Pandas or numpy datetime works much slower than unix epoch timestamps?"
113 |
114 | This stems from the plotly scatter(gl) constructor being much slower for non-numeric data.
115 | Plotly performs a different serialization for datetime arrays (which are interpreted as object arrays).
116 | However, plotly-resampler should not be limited by this - to avoid this issue,
117 | add your datetime data as _hf_x_ to your plotly-resampler `FigureResampler.add_trace`
118 | (or `FigureWidgetResampler.add_trace`) method. This avoids adding (& serializing) _all_ the data to the scatter object,
119 | since plotly-resampler will pass the aggregated data to the scatter object.
120 |
121 | Some illustration:
122 |
123 | ```python
124 | import plotly.graph_objects as go
125 | import pandas as pd
126 | import numpy as np
127 | from plotly_resampler import FigureResampler
128 |
129 | # Create the dummy dataframe
130 | y = np.arange(1_000_000)
131 | x = pd.date_range(start="2020-01-01", periods=len(y), freq="1s")
132 |
133 | # Create the plotly-resampler figure
134 | fig = FigureResampler()
135 | # fig.add_trace(go.Scatter(x=x, y=y)) # This is slow
136 | fig.add_trace(go.Scatter(), hf_x=x, hf_y=y) # This is fast
137 |
138 | # ... (add more traces, etc.)
139 | ```
140 |
--------------------------------------------------------------------------------
/mkdocs/dash_app_integration.md:
--------------------------------------------------------------------------------
1 | # Dash apps 🤝
2 |
3 | This documentation page describes how you can integrate `plotly-resampler` in a [dash](https://dash.plotly.com/) application.
4 |
5 | Examples of dash apps with `plotly-resampler` can be found in the
6 | [examples folder](https://github.com/predict-idlab/plotly-resampler/tree/main/examples) of the GitHub repository.
7 |
8 | ## Registering callbacks in a new dash app
9 |
10 | When you add a `FigureResampler` figure in a basic dash app, you should:
11 |
12 | - Register the [`FigureResampler`][figure_resampler.FigureResampler] figure its callbacks to the dash app.
13 | - The id of the [dcc.Graph](https://dash.plotly.com/dash-core-components/graph) component that contains the
14 | [`FigureResampler`][figure_resampler.FigureResampler] figure should be passed to the
15 | [`register_update_graph_callback`][figure_resampler.FigureResampler.register_update_graph_callback] method.
16 |
17 | **Code illustration**:
18 |
19 | ```python
20 | # Construct the to-be resampled figure
21 | fig = FigureResampler(px.line(...))
22 |
23 | # Construct app & its layout
24 | app = dash.Dash(__name__)
25 | app.layout = html.Div(children=[dcc.Graph(id="graph-id", figure=fig)])
26 |
27 | # Register the callback
28 | fig.register_update_graph_callback(app, "graph-id")
29 |
30 | # start the app
31 | app.run_server(debug=True)
32 | ```
33 |
34 | !!! warning
35 |
36 | The above example serves as an illustration, but uses a _global variable_ to store the `FigureResampler` instance;
37 | this is not a good practice. Ideally you should cache the `FigureResampler` per session on the server side.
38 | In the [examples folder](https://github.com/predict-idlab/plotly-resampler/tree/main/examples),
39 | we provide several dash app examples where we perform server side caching of such figures.
40 |
--------------------------------------------------------------------------------
/mkdocs/gen_ref_pages.py:
--------------------------------------------------------------------------------
1 | """Generate the code reference pages."""
2 |
3 | from pathlib import Path
4 |
5 | import mkdocs_gen_files
6 |
7 | SRC_DIR = "plotly_resampler"
8 | API_DIR = "api"
9 |
10 | nav = mkdocs_gen_files.nav.Nav()
11 |
12 | for path in sorted(Path(SRC_DIR).rglob("*.py")):
13 | module_path = path.relative_to(SRC_DIR).with_suffix("")
14 | doc_path = path.relative_to(SRC_DIR).with_suffix(".md")
15 | full_doc_path = Path(API_DIR, doc_path)
16 |
17 | parts = tuple(module_path.parts)
18 |
19 | if parts[-1] == "__init__":
20 | parts = parts[:-1]
21 | doc_path = doc_path.with_name("index.md")
22 | full_doc_path = full_doc_path.with_name("index.md")
23 | elif parts[-1] == "__main__":
24 | continue
25 |
26 | if len(parts) == 0:
27 | continue
28 |
29 | nav[parts] = doc_path.as_posix()
30 |
31 | with mkdocs_gen_files.open(full_doc_path, "w") as fd:
32 | identifier = ".".join(parts)
33 | print("::: " + identifier, file=fd)
34 |
35 | mkdocs_gen_files.set_edit_path(full_doc_path, path)
36 |
37 | with mkdocs_gen_files.open(API_DIR + "/SUMMARY.md", "w") as nav_file:
38 | nav_file.writelines(nav.build_literate_nav())
39 |
--------------------------------------------------------------------------------
/mkdocs/getting_started.md:
--------------------------------------------------------------------------------
1 | # Get started 🚀
2 |
3 | The `plotly-resampler` package offers two primary modules:
4 |
5 | - [`figure_resampler`][figure_resampler]: a wrapper for _plotly.graph_objects Figures_,
6 | coupling dynamic resampling functionality with the _Figure_.
7 | - [`aggregation`][aggregation]: This module contains interfaces for the various aggregation methods implemented in [tsdownsample](https://github.com/predict-idlab/tsdownsample).
8 |
9 | ## Installation ⚙️
10 |
11 | Install via [pip](https://pypi.org/project/plotly-resampler/):
12 |
13 | ```
14 | pip install plotly-resampler
15 | ```
16 |
17 | ## Usage 📈
18 |
19 | Plotly-Resampler facilitates dynamic resampling in two ways:
20 |
21 | - **Automatic Approach** (low code overhead)
22 | - utilize the [`register_plotly_resampler`][registering.register_plotly_resampler] function
23 | - steps:
24 | 1. Import and invoke [`register_plotly_resampler`][registering.register_plotly_resampler]
25 | 2. That's it! 🎉
Just proceed with your standard graph construction workflow.
26 | - Upon invoking [`register_plotly_resampler`][registering.register_plotly_resampler], all new defined plotly graph objects are transformed into either
27 | [`FigureResampler`][figure_resampler.FigureResampler] or
28 | [`FigureWidgetResampler`][figure_resampler.FigureWidgetResampler] object. The `mode` parameter in this method determines which resampling Figure type is used.
29 |
30 | - **Manual Approach** (data aggregation configurability, graph construction speedups)
31 | 1. By utilizing [Dash](https://github.com/plotly/dash) callbacks to augment a `go.Figure` with dynamic aggregation functionality.
32 | - steps:
33 | 1. wrap the plotly Figure with [`FigureResampler`][figure_resampler.FigureResampler]
34 | 2. call [`.show_dash()`][figure_resampler.FigureResampler.show_dash] on the Figure
35 | !!! note
36 | This is particularly advantageous when working with Dash or outside Jupyter environments.
37 |
38 | 2. By utilizing [FigureWidget.layout.on_change](https://plotly.com/python-api-reference/generated/plotly.html?highlight=on_change#plotly.basedatatypes.BasePlotlyType.on_change)
39 | , when a `go.FigureWidget` is used within a `.ipynb` environment.
40 | - steps:
41 | 1. wrap your plotly Figure
42 | (can be a `go.Figure` with [`FigureWidgetResampler`][figure_resampler.FigureWidgetResampler])
43 | 2. output the `FigureWidgetResampler` instance in a cell
44 |
45 | !!! note
46 | This is especially useful when developing in `jupyter` environments and when **you cannot open/forward a network-port.**
47 |
48 |
49 | !!! tip
50 |
51 | For **significantly faster initial loading** of the Figure, we advise to
52 |
53 | 1. wrap the constructor of the plotly Figure with either [`FigureResampler`][figure_resampler.FigureResampler] or [`FigureWidgetResampler`][figure_resampler.FigureWidgetResampler]
54 | 2. add the trace data as `hf_x` and `hf_y`
55 |
56 | !!! note
57 |
58 | Any plotly Figure can be wrapped with dynamic aggregation functionality! 🎉
59 | But **only** `go.Scatter/go.Scattergl` traces **will be resampled**!
60 |
61 | ## Examples ✅
62 |
63 | ### register_plotly_resampler
64 |
65 | ```python
66 | import plotly.graph_objects as go; import numpy as np
67 | from plotly_resampler import register_plotly_resampler, unregister_plotly_resampler
68 |
69 | # Call the register function once and all Figures/FigureWidgets will be wrapped
70 | # according to the register_plotly_resampler its `mode` argument
71 | register_plotly_resampler(mode='auto')
72 |
73 | x = np.arange(1_000_000)
74 | noisy_sin = (3 + np.sin(x / 200) + np.random.randn(len(x)) / 10) * x / 1_000
75 |
76 |
77 | # when working in an IPython environment, this will automatically be a
78 | # FigureWidgetResampler else, this will be a FigureResampler
79 | f = go.Figure()
80 | f.add_trace({"y": noisy_sin + 2, "name": "yp2"})
81 | f
82 |
83 | # to undo the wrapping, call the unregister_plotly_resampler function
84 | ```
85 |
86 | ### FigureResampler
87 |
88 | ```python
89 | # NOTE: this example works in a notebook environment
90 | import plotly.graph_objects as go; import numpy as np
91 | from plotly_resampler import FigureResampler
92 |
93 | x = np.arange(1_000_000)
94 | sin = (3 + np.sin(x / 200) + np.random.randn(len(x)) / 10) * x / 1_000
95 |
96 | fig = FigureResampler(go.Figure())
97 | fig.add_trace(go.Scattergl(name='noisy sine', showlegend=True), hf_x=x, hf_y=sin)
98 |
99 | fig.show_dash(mode='inline')
100 | ```
101 |
102 | ### Overview
103 |
104 | In the example below, we demonstrate the (x-axis) `overview` feature of plotly-resampler.
105 | For more information you can check out the [examples](https://github.com/predict-idlab/plotly-resampler/tree/main/examples) to find dash apps and in-notebook use-cases.
106 |
107 | !!! Note:
108 | - This overview is only available for the `FigureResampler` and not for the `FigureWidgetResampler`.
109 | - As a recent and experimental feature, user feedback is crucial. Please report any issues encountered!
110 |
111 |
112 | 
113 |
114 | ### FigureWidget
115 |
116 | The gif below demonstrates the example usage of [`FigureWidgetResampler`][figure_resampler.FigureWidgetResampler], where `JupyterLab` is used as the environment and the `FigureWidgetResampler`.
117 | Note how (i) the figure output is redirected into a new view, and (ii) how you are able to dynamically add traces!
118 |
119 | 
120 |
121 | Furthermore, plotly’s `FigureWidget` allows to conveniently add callbacks to for example click events. This allows creating a high-frequency time series annotation app in a couple of lines; as shown in the gif below and in this [notebook](https://github.com/predict-idlab/plotly-resampler/blob/main/examples/figurewidget_example.ipynb).
122 |
123 | 
124 |
125 | ## Important considerations & tips 🚨
126 |
127 | - When running the code on a server, you should forward the port of the
128 | [`FigureResampler.show_dash`][figure_resampler.FigureResampler.show_dash] method to your local machine.
129 | **Note** that you can add dynamic aggregation to plotly figures with the
130 | [`FigureWidgetResampler`][figure_resampler.FigureWidgetResampler] wrapper without needing to forward a port!
131 | - In general, when using downsampling one should be aware of (possible) [aliasing](https://en.wikipedia.org/wiki/Aliasing) effects.
132 | The `[R]` in the legend indicates when the corresponding trace is resampled (and thus possibly distorted).
133 | The `~ delta` suffix in the legend represents the mean index delta for consecutive aggregated data points.
134 | - The plotly **autoscale** event (triggered by the autoscale button or a double-click within the graph),
135 | **does not reset the axes but autoscales the current graph-view of plotly-resampler figures**.
136 | This design choice was made as it seemed more intuitive for the developers to support this behavior
137 | with double-click than the default axes-reset behavior.
138 | The graph axes can of course be reset by using the reset_axis button.
139 | If you want to give feedback and discuss this further with the developers, see this issue [#49](https://github.com/predict-idlab/plotly-resampler/issues/49).
140 |
141 | ### Dynamically adjusting the scatter data 🔩
142 |
143 | The raw high-frequency trace data of plotly-resampler figures can be adjusted using the `hf_data` property.
144 |
145 | Working example ⬇️:
146 |
147 | ```python
148 | import plotly.graph_objects as go; import numpy as np
149 | from plotly_resampler import FigureResampler
150 | # Note: a FigureWidgetResampler can be used here as well
151 |
152 | # Construct the hf-data
153 | x = np.arange(1_000_000)
154 | sin = (3 + np.sin(x / 200) + np.random.randn(len(x)) / 10) * x / 1_000
155 |
156 | fig = FigureResampler(go.Figure())
157 | fig.add_trace(go.Scattergl(name='noisy sine', showlegend=True), hf_x=x, hf_y=sin)
158 | fig.show_dash(mode='inline')
159 |
160 | # After some time -> update the hf_data y property of the trace
161 | # As we only have 1 trace, this needs to be mapped
162 | fig.hf_data[-1]['y'] = - sin ** 2
163 | ```
164 |
165 | !!! note
166 |
167 | _hf_data_ only contains high-frequency traces (i.e., traces that are aggregated).
168 | To add non high-frequency traces (i.e., traces with fewer data points than _max_n_samples_),
169 | you need to set the `limit_to_view` argument to _True_ when adding the corresponding trace with the
170 | [`add_trace`][figure_resampler.figure_resampler_interface.AbstractFigureAggregator.add_trace] function.
171 |
172 | !!! tip
173 |
174 | The `FigureWidgetResampler` graph will not be automatically redrawn after adjusting the fig its _hf_data_ property.
175 | The redrawing can be triggered by manually calling either:
176 |
177 | - [`FigureWidgetResampler.reload_data`][figure_resampler.FigureWidgetResampler.reload_data],
178 | which keeps the current-graph range.
179 | - [`FigureWidgetResampler.reset_axes`][figure_resampler.FigureWidgetResampler.reset_axes],
180 | which performs a graph update.
181 |
182 | ### Plotly-resampler & not high-frequency traces 🔍
183 |
184 | !!! tip
185 |
186 | In the _Skin conductance example_ of the [basic_example.ipynb](https://github.com/predict-idlab/plotly-resampler/tree/main/examples),
187 | we deal with such low-frequency traces.
188 |
189 | The `add_trace` method has arguments which allow us to deal with low-frequency traces.
190 |
191 | #### Use-cases
192 |
193 | - **not resampling** trace data: To achieve this, set:
194 |
195 | - `#!python max_n_samples = len(hf_x)`
196 |
197 | - **not resampling** trace data, but **slicing to the view**: To achieve this, set:
198 | - `#!python max_n_samples = len(hf_x)`
199 | - `#!python limit_to_view = True`
200 |
201 | !!! note
202 |
203 | For, **irregularly sampled traces** which are **filled** (e.g. _colored background_ signal quality trace of the skin conductance example),
204 | it is important that you set `gap_handler` to `NoGapHandler` for that trace.
205 |
206 | Otherwise, when you leave `gap_handler` to `MedDiffGapHandler`, you may get weird background shapes such as ⬇️:
207 | 
208 |
209 | When `gap_handler` is set to `NoGapHandler` you get ⬇️:
210 | 
211 |
--------------------------------------------------------------------------------
/mkdocs/index.md:
--------------------------------------------------------------------------------
1 | # Welcome to plotly-resampler's documentation!
2 |
3 | This is the documentation of `plotly-resampler`;
4 | a wrapper for plotly Figures to **visualize large time-series** data.
5 |
6 | 
7 |
8 | As shown in the demo above, `plotly-resampler` maintains its interactiveness on large data by applying front-end
9 | **resampling respective to the view**.
10 |
11 | [:fontawesome-solid-download: PyPI](https://pypi.org/project/plotly-resampler/){ .md-button .md-button--primary }
12 | [:simple-github: Github](https://github.com/predict-idlab/plotly-resampler){ .md-button .md-button--primary }
13 | [:simple-doi: DOI](https://doi.org/10.48550/arXiv.2206.08703){ .md-button .md-button--primary }
14 |
--------------------------------------------------------------------------------
/mkdocs/static/annotate_twitter.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/predict-idlab/plotly-resampler/560e6b98c944cd65143580d5a40b177af57354ff/mkdocs/static/annotate_twitter.gif
--------------------------------------------------------------------------------
/mkdocs/static/basic_example.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/predict-idlab/plotly-resampler/560e6b98c944cd65143580d5a40b177af57354ff/mkdocs/static/basic_example.gif
--------------------------------------------------------------------------------
/mkdocs/static/basic_example_overview.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/predict-idlab/plotly-resampler/560e6b98c944cd65143580d5a40b177af57354ff/mkdocs/static/basic_example_overview.gif
--------------------------------------------------------------------------------
/mkdocs/static/datashader.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/predict-idlab/plotly-resampler/560e6b98c944cd65143580d5a40b177af57354ff/mkdocs/static/datashader.png
--------------------------------------------------------------------------------
/mkdocs/static/figurewidget.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/predict-idlab/plotly-resampler/560e6b98c944cd65143580d5a40b177af57354ff/mkdocs/static/figurewidget.gif
--------------------------------------------------------------------------------
/mkdocs/static/icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/predict-idlab/plotly-resampler/560e6b98c944cd65143580d5a40b177af57354ff/mkdocs/static/icon.png
--------------------------------------------------------------------------------
/mkdocs/static/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/predict-idlab/plotly-resampler/560e6b98c944cd65143580d5a40b177af57354ff/mkdocs/static/logo.png
--------------------------------------------------------------------------------
/mkdocs/static/minmax_operator.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/predict-idlab/plotly-resampler/560e6b98c944cd65143580d5a40b177af57354ff/mkdocs/static/minmax_operator.png
--------------------------------------------------------------------------------
/mkdocs/static/skin_conductance_interleave_gaps_false.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/predict-idlab/plotly-resampler/560e6b98c944cd65143580d5a40b177af57354ff/mkdocs/static/skin_conductance_interleave_gaps_false.png
--------------------------------------------------------------------------------
/mkdocs/static/skin_conductance_interleave_gaps_true.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/predict-idlab/plotly-resampler/560e6b98c944cd65143580d5a40b177af57354ff/mkdocs/static/skin_conductance_interleave_gaps_true.png
--------------------------------------------------------------------------------
/plotly_resampler/__init__.py:
--------------------------------------------------------------------------------
1 | """**plotly_resampler**: visualizing large sequences."""
2 |
3 | import contextlib
4 |
5 | from .aggregation import LTTB, EveryNthPoint, MinMaxLTTB
6 | from .figure_resampler import ASSETS_FOLDER, FigureResampler, FigureWidgetResampler
7 | from .registering import register_plotly_resampler, unregister_plotly_resampler
8 |
9 | __docformat__ = "numpy"
10 | __author__ = "Jonas Van Der Donckt, Jeroen Van Der Donckt, Emiel Deprost"
11 | __version__ = "0.11.0rc1"
12 |
13 | __all__ = [
14 | "__version__",
15 | "FigureResampler",
16 | "FigureWidgetResampler",
17 | "ASSETS_FOLDER",
18 | "MinMaxLTTB",
19 | "LTTB",
20 | "EveryNthPoint",
21 | "register_plotly_resampler",
22 | "unregister_plotly_resampler",
23 | ]
24 |
25 |
26 | # Enable ipywidgets on google colab!
27 | with contextlib.suppress(ImportError, ModuleNotFoundError):
28 | import sys
29 |
30 | if "google.colab" in sys.modules:
31 | from google.colab import output
32 |
33 | output.enable_custom_widget_manager()
34 |
--------------------------------------------------------------------------------
/plotly_resampler/aggregation/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Compatible implementation for various downsample methods and open interface to
3 | other downsample methods.
4 |
5 | """
6 |
7 | __author__ = "Jonas Van Der Donckt"
8 |
9 |
10 | from .aggregation_interface import AbstractAggregator
11 | from .aggregators import (
12 | LTTB,
13 | EveryNthPoint,
14 | FuncAggregator,
15 | MinMaxAggregator,
16 | MinMaxLTTB,
17 | MinMaxOverlapAggregator,
18 | )
19 | from .gap_handler_interface import AbstractGapHandler
20 | from .gap_handlers import MedDiffGapHandler, NoGapHandler
21 | from .plotly_aggregator_parser import PlotlyAggregatorParser
22 |
23 | __all__ = [
24 | "AbstractAggregator",
25 | "AbstractGapHandler",
26 | "PlotlyAggregatorParser",
27 | "LTTB",
28 | "MinMaxLTTB",
29 | "EveryNthPoint",
30 | "FuncAggregator",
31 | "MedDiffGapHandler",
32 | "MinMaxAggregator",
33 | "MinMaxOverlapAggregator",
34 | "NoGapHandler",
35 | ]
36 |
--------------------------------------------------------------------------------
/plotly_resampler/aggregation/aggregation_interface.py:
--------------------------------------------------------------------------------
1 | """AbstractAggregator interface-class, subclassed by concrete aggregators."""
2 |
3 | from __future__ import annotations
4 |
5 | __author__ = "Jonas Van Der Donckt"
6 |
7 | import re
8 | from abc import ABC, abstractmethod
9 | from typing import List, Optional, Tuple
10 |
11 | import numpy as np
12 |
13 |
class AbstractAggregator(ABC):
    """Base class bundling dtype validation for the x and y arrays of aggregators."""

    def __init__(
        self,
        x_dtype_regex_list: Optional[List[str]] = None,
        y_dtype_regex_list: Optional[List[str]] = None,
        **downsample_kwargs,
    ):
        """Constructor of AbstractSeriesAggregator.

        Parameters
        ----------
        x_dtype_regex_list: List[str], optional
            Regexes describing the supported dtypes for the x array; None (default)
            means that any dtype is accepted.
        y_dtype_regex_list: List[str], optional
            Regexes describing the supported dtypes for the y array; None (default)
            means that any dtype is accepted.
        downsample_kwargs: dict
            Additional keyword arguments passed to the downsample method.

        """
        self.x_dtype_regex_list = x_dtype_regex_list
        self.y_dtype_regex_list = y_dtype_regex_list
        self.downsample_kwargs = downsample_kwargs

    @staticmethod
    def _check_n_out(n_out: int) -> None:
        """Validate that ``n_out`` is a strictly positive integer."""
        assert isinstance(n_out, (int, np.integer))
        assert n_out > 0

    @staticmethod
    def _process_args(*args) -> Tuple[np.ndarray | None, np.ndarray]:
        """Split the positional args into ``(x, y)``.

        When a single array is passed, it is interpreted as y and x is None.
        """
        assert len(args) in [1, 2], "Must pass either 1 or 2 arrays"
        if len(args) == 1:
            return None, args[0]
        return args

    @staticmethod
    def _check_arr(arr: np.ndarray, regex_list: Optional[List[str]] = None):
        """Validate that ``arr`` is a 1D numpy array with a supported dtype."""
        assert isinstance(arr, np.ndarray), f"Expected np.ndarray, got {type(arr)}"
        assert arr.ndim == 1
        AbstractAggregator._supports_dtype(arr, regex_list)

    def _check_x_y(self, x: np.ndarray | None, y: np.ndarray) -> None:
        """Validate the (optional) x array and the y array."""
        if x is not None:
            # x must itself be a valid array and be aligned with y.
            self._check_arr(x, self.x_dtype_regex_list)
            assert x.shape == y.shape, "x and y must have the same shape"
        self._check_arr(y, self.y_dtype_regex_list)

    @staticmethod
    def _supports_dtype(arr: np.ndarray, dtype_regex_list: Optional[List[str]] = None):
        """Raise a ValueError when ``arr.dtype`` matches none of the given regexes."""
        if dtype_regex_list is None:
            # No constraints configured -> every dtype is accepted.
            return

        dtype_str = str(arr.dtype)
        if any(re.match(pattern, dtype_str) for pattern in dtype_regex_list):
            return

        raise ValueError(
            f"{arr.dtype} doesn't match with any regex in {dtype_regex_list}"
        )
84 |
85 |
class DataAggregator(AbstractAggregator, ABC):
    """Implementation of the AbstractAggregator interface for data aggregation.

    DataAggregator differs from DataPointSelector in that it doesn't select data
    points, but rather aggregates the data (e.g., mean).
    As such, the `_aggregate` method is responsible for aggregating the data, and thus
    returns a tuple of the aggregated x and y values.

    Concrete implementations of this class must implement the `_aggregate` method, and
    have full responsibility on how they deal with other high-frequency properties,
    such as `hovertext`, `marker_size`, `marker_color`, etc ...
    """

    @abstractmethod
    def _aggregate(
        self,
        x: np.ndarray | None,
        y: np.ndarray,
        n_out: int,
    ) -> Tuple[np.ndarray, np.ndarray]:
        # Note: this method can utilize the self.downsample_kwargs property
        raise NotImplementedError

    def aggregate(
        self,
        *args,
        n_out: int,
    ) -> Tuple[np.ndarray, np.ndarray]:
        """Aggregate the data.

        Parameters
        ----------
        x, y: np.ndarray
            The x and y data of the to-be-aggregated series.
            The x array is optional (i.e., if only 1 array is passed, it is assumed to
            be the y array).
            The array(s) must be 1-dimensional, and have the same length (if x is
            passed).
            These cannot be passed as keyword arguments, as they are positional-only.
        n_out: int
            The number of samples which the downsampled series should contain.
            This should be passed as a keyword argument.

        Returns
        -------
        Tuple[np.ndarray, np.ndarray]
            The aggregated x and y data, respectively.

        """
        # Validate n_out with the shared checker so that DataAggregator enforces the
        # same contract (a strictly positive integer) as
        # DataPointSelector.arg_downsample; previously only `n_out is not None` was
        # asserted here.
        self._check_n_out(n_out)

        # Parse the positional args into x (optional) and y, using the helper
        # inherited from AbstractAggregator (previously this reached through the
        # sibling DataPointSelector class for no reason).
        x, y = self._process_args(*args)

        # Check x and y
        self._check_x_y(x, y)

        return self._aggregate(x=x, y=y, n_out=n_out)
144 |
145 |
class DataPointSelector(AbstractAggregator, ABC):
    """Implementation of the AbstractAggregator interface for data point selection.

    In contrast with DataAggregator (which computes new, aggregated values such as a
    mean), a DataPointSelector picks a subset of the original data points (e.g.,
    first, last, min, max, etc ...). The `_arg_downsample` method therefore returns
    the index positions of the selected data points.

    This class utilizes the `arg_downsample` method to compute the index positions.
    """

    @abstractmethod
    def _arg_downsample(
        self,
        x: np.ndarray | None,
        y: np.ndarray,
        n_out: int,
    ) -> np.ndarray:
        # Concrete selectors may consult self.downsample_kwargs in here.
        raise NotImplementedError

    def arg_downsample(
        self,
        *args,
        n_out: int,
    ) -> np.ndarray:
        """Compute the index positions for the downsampled representation.

        Parameters
        ----------
        x, y: np.ndarray
            The x and y data of the to-be-aggregated series. When a single array is
            passed, it is interpreted as y (and x defaults to None). The array(s)
            must be 1-dimensional and of equal length (when x is passed), and cannot
            be given as keyword arguments.
        n_out: int
            The number of samples which the downsampled series should contain.
            This should be passed as a keyword argument.

        Returns
        -------
        np.ndarray
            The index positions of the selected data points.

        """
        # Validate the requested output size and the input arrays.
        self._check_n_out(n_out)
        x, y = self._process_args(*args)
        self._check_x_y(x, y)

        # With n_out or fewer samples there is nothing to select: keep every index.
        if len(y) <= n_out:
            return np.arange(len(y))

        # Otherwise, delegate the actual selection to the concrete implementation.
        return self._arg_downsample(x=x, y=y, n_out=n_out)
208 |
--------------------------------------------------------------------------------
/plotly_resampler/aggregation/gap_handler_interface.py:
--------------------------------------------------------------------------------
1 | """AbstractGapHandler interface-class, subclassed by concrete gap handlers."""
2 |
3 | from __future__ import annotations
4 |
5 | __author__ = "Jeroen Van Der Donckt"
6 |
7 | from abc import ABC, abstractmethod
8 | from typing import Optional, Tuple
9 |
10 | import numpy as np
11 |
12 |
class AbstractGapHandler(ABC):
    """Base class for gap handlers.

    A gap handler detects gaps in the (aggregated) x data and expands the
    aggregated y data with a fill value at those positions.
    """

    def __init__(self, fill_value: Optional[float] = None):
        """Constructor of AbstractGapHandler.

        Parameters
        ----------
        fill_value: float, optional
            The value with which the gaps are filled, by default None.
            Note that setting this value to 0 for filled area plots is
            particularly useful.

        """
        self.fill_value = fill_value

    @abstractmethod
    def _get_gap_mask(self, x_agg: np.ndarray) -> Optional[np.ndarray]:
        """Return a boolean mask flagging the positions where a gap occurs.

        Subclasses implement their concrete gap-detection strategy here; for
        custom gap handling, override this method.

        Parameters
        ----------
        x_agg: np.ndarray
            The x array, used to determine where the gaps are.

        Returns
        -------
        Optional[np.ndarray]
            Boolean mask with True at the gap positions, or None when no gaps
            were found.

        """
        pass

    def insert_fill_value_between_gaps(
        self,
        x_agg: np.ndarray,
        y_agg: np.ndarray,
        idxs: np.ndarray,
    ) -> Tuple[np.ndarray, np.ndarray]:
        """Insert the fill_value in the y_agg array where there are gaps.

        The gaps are located via the `_get_gap_mask` method (which operates on
        the x_agg array).

        Parameters
        ----------
        x_agg: np.ndarray
            The x array, used to determine where the gaps are.
        y_agg: np.ndarray
            The y array; an expanded copy (with fill_values at the gaps) is
            returned.
        idxs: np.ndarray
            The index array. Relevant for aggregators that perform data point
            selection (e.g., max, min, etc.) - it is expanded with repeated
            indices at the gap positions.

        Returns
        -------
        Tuple[np.ndarray, np.ndarray]
            The expanded y_agg array and the expanded idxs array respectively.

        """
        gap_mask = self._get_gap_mask(x_agg)
        if gap_mask is None:
            # No gaps detected -> return the inputs untouched.
            return y_agg, idxs

        # Gap positions are duplicated (repeat count 2); every other position
        # is kept exactly once.
        repeats = np.where(gap_mask, 2, 1)

        # Expand both the index and the aggregated y array accordingly.
        idx_exp_nan = np.repeat(idxs, repeats)
        y_agg_exp_nan = np.repeat(y_agg, repeats)

        # Only float arrays can hold NaN values (the default fill_value).
        y_dtype = y_agg_exp_nan.dtype
        if np.issubdtype(y_dtype, np.integer) or np.issubdtype(y_dtype, np.bool_):
            y_agg_exp_nan = y_agg_exp_nan.astype("float")

        # In the expanded array, each gap position is shifted right by the
        # number of gaps that precede it - hence the np.arange offset.
        fill_positions = np.flatnonzero(gap_mask) + np.arange(gap_mask.sum())
        y_agg_exp_nan[fill_positions] = self.fill_value

        return y_agg_exp_nan, idx_exp_nan
104 |
--------------------------------------------------------------------------------
/plotly_resampler/aggregation/gap_handlers.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """Compatible implementation for various gap handling methods."""
3 |
4 | from __future__ import annotations
5 |
6 | __author__ = "Jeroen Van Der Donckt"
7 |
8 | from typing import Optional, Tuple
9 |
10 | import numpy as np
11 |
12 | from plotly_resampler.aggregation.gap_handler_interface import AbstractGapHandler
13 |
14 |
class NoGapHandler(AbstractGapHandler):
    """Gap handler that never flags a gap (i.e., gap handling is disabled)."""

    def _get_gap_mask(self, x_agg: np.ndarray) -> Optional[np.ndarray]:
        # None signals "no gaps found" to insert_fill_value_between_gaps.
        return None
20 |
21 |
class MedDiffGapHandler(AbstractGapHandler):
    """Gap handling based on the median diff of the x_agg array."""

    def _calc_med_diff(self, x_agg: np.ndarray) -> Tuple[float, np.ndarray]:
        """Calculate (an approximation of) the median diff of the x_agg array.

        The median is preferred over the mean as it is more robust to outliers:
        large gaps produce large diff values which would skew a mean far more
        than a median.

        For larger arrays a divide and conquer heuristic is used:
        1. divide the array into `n_blocks` blocks (with `n_blocks` = 128)
        2. calculate the mean of each block
        3. calculate the median of those means
        => This approximates the median of the full array well, while being
        much faster than a full median computation.
        """
        # remark: thanks to the prepend -> x_diff.shape === len(s)
        x_diff = np.diff(x_agg, prepend=x_agg[0])

        n_blocks = 128
        if x_agg.shape[0] > 5 * n_blocks:
            # Reshape the diffs into n_blocks equally sized blocks (any
            # trailing remainder is dropped) and take the median of the
            # per-block means.
            blck_size = x_diff.shape[0] // n_blocks
            block_view: np.ndarray = x_diff[: blck_size * n_blocks].reshape(
                n_blocks, -1
            )
            med_diff = np.median(np.mean(block_view, axis=1))
        else:
            # Small array -> the exact median is cheap enough.
            med_diff = np.median(x_diff)

        return med_diff, x_diff

    def _get_gap_mask(self, x_agg: np.ndarray) -> Optional[np.ndarray]:
        """Return a boolean mask flagging the positions where a gap occurs.

        Parameters
        ----------
        x_agg: np.ndarray
            The x array, used to determine where the gaps are.

        Returns
        -------
        Optional[np.ndarray]
            Boolean mask with True at the gap positions, or None when no gaps
            were found.

        """
        med_diff, x_diff = self._calc_med_diff(x_agg)

        # A sample is considered to follow a gap when its diff exceeds 4.1x
        # the (approximate) median diff - an empirically determined threshold.
        gap_mask = x_diff > 4.1 * med_diff
        if not gap_mask.any():
            return None
        return gap_mask
86 |
--------------------------------------------------------------------------------
/plotly_resampler/aggregation/plotly_aggregator_parser.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import bisect
4 | from typing import Tuple, Union
5 |
6 | import numpy as np
7 | import pandas as pd
8 | import pytz
9 |
10 | from .aggregation_interface import DataAggregator, DataPointSelector
11 | from .gap_handler_interface import AbstractGapHandler
12 | from .gap_handlers import NoGapHandler
13 |
14 |
class PlotlyAggregatorParser:
    """Stateless helper that slices, aggregates, and gap-handles the
    high-frequency data of a single trace (stored in a ``hf_trace_data`` dict)."""

    @staticmethod
    def parse_hf_data(
        hf_data: np.ndarray | pd.Categorical | pd.Series | pd.Index,
    ) -> np.ndarray | pd.Categorical:
        """Parse the high-frequency data to a numpy array.

        Categorical data (pandas):
        - pd.Series with categorical dtype -> calling .values will return a
          pd.Categorical
        - pd.CategoricalIndex -> calling .values returns a pd.Categorical
        - pd.Categorical: has no .values attribute -> will not be parsed

        Note: a pd.RangeIndex is returned as None (it is not backed by an
        in-memory array); callers handle this case explicitly.
        """
        if isinstance(hf_data, pd.RangeIndex):
            return None
        if isinstance(hf_data, (pd.Series, pd.Index)):
            return hf_data.values
        return hf_data

    @staticmethod
    def to_same_tz(
        ts: Union[pd.Timestamp, None], reference_tz: Union[pytz.BaseTzInfo, None]
    ) -> Union[pd.Timestamp, None]:
        """Adjust `ts` its timezone to the `reference_tz`.

        A None `ts` passes through unchanged; a tz-aware `ts` is verified to
        share the reference timezone (or at least the same utc-offset), and a
        naive `ts` is localized into the reference timezone.
        """
        if ts is None:
            return None
        elif reference_tz is not None:
            if ts.tz is not None:
                # compare if these two have the same timezone / offset
                try:
                    assert ts.tz.__str__() == reference_tz.__str__()
                except AssertionError:
                    assert ts.utcoffset() == reference_tz.utcoffset(ts.tz_convert(None))
                return ts
            else:  # localize -> time remains the same
                return ts.tz_localize(reference_tz)
        elif reference_tz is None and ts.tz is not None:
            return ts.tz_localize(None)
        return ts

    @staticmethod
    def get_start_end_indices(hf_trace_data, axis_type, start, end) -> Tuple[int, int]:
        """Get the start & end indices of the high-frequency data."""
        # Base case: no hf data, or both start & end are None
        if not len(hf_trace_data["x"]):
            return 0, 0
        elif start is None and end is None:
            return 0, len(hf_trace_data["x"])

        # NOTE: as we use bisect right for the end index, we do not need to add a
        # small epsilon to the end value
        start = hf_trace_data["x"][0] if start is None else start
        end = hf_trace_data["x"][-1] if end is None else end

        # NOTE: we must verify this before check if the x is a range-index
        if axis_type == "log":
            start, end = 10**start, 10**end

        # We can compute the start & end indices directly when it is a RangeIndex
        if isinstance(hf_trace_data["x"], pd.RangeIndex):
            x_start = hf_trace_data["x"].start
            x_step = hf_trace_data["x"].step
            # Map the axis values onto positional indices (value -> position).
            start_idx = int(max((start - x_start) // x_step, 0))
            end_idx = int((end - x_start) // x_step)
            return start_idx, end_idx
        # TODO: this can be performed as-well for a fixed frequency range-index w/ freq

        if axis_type == "date":
            start, end = pd.to_datetime(start), pd.to_datetime(end)
            # convert start & end to the same timezone
            if isinstance(hf_trace_data["x"], pd.DatetimeIndex):
                tz = hf_trace_data["x"].tz
                try:
                    assert start.tz.__str__() == end.tz.__str__()
                except (TypeError, AssertionError):
                    # This fix is needed for DST (when the timezone is not fixed)
                    assert start.tz_localize(None) == start.tz_convert(tz).tz_localize(
                        None
                    )
                    assert end.tz_localize(None) == end.tz_convert(tz).tz_localize(None)

                start = PlotlyAggregatorParser.to_same_tz(start, tz)
                end = PlotlyAggregatorParser.to_same_tz(end, tz)

        # Search the index-positions
        start_idx = bisect.bisect_left(hf_trace_data["x"], start)
        end_idx = bisect.bisect_right(hf_trace_data["x"], end)
        return start_idx, end_idx

    @staticmethod
    def _handle_gaps(
        hf_trace_data: dict,
        hf_x: np.ndarray,
        agg_x: np.ndarray,
        agg_y: np.ndarray,
        indices: np.ndarray,
    ) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
        """Handle the gaps in the aggregated data.

        Returns:
            - agg_x: the aggregated x-values
            - agg_y: the aggregated y-values
            - indices: the indices of the hf_data data that were aggregated

        """
        gap_handler: AbstractGapHandler = hf_trace_data["gap_handler"]
        downsampler = hf_trace_data["downsampler"]

        # TODO check for trace mode (markers, lines, etc.) and only perform the
        # gap insertion methodology when the mode is lines.
        # if trace.get("connectgaps") != True and
        if (
            isinstance(gap_handler, NoGapHandler)
            # rangeIndex | datetimeIndex with freq -> equally spaced x; so no gaps
            or isinstance(hf_trace_data["x"], pd.RangeIndex)
            or (
                isinstance(hf_trace_data["x"], pd.DatetimeIndex)
                and hf_trace_data["x"].freq is not None
            )
        ):
            return agg_x, agg_y, indices

        # Interleave the gaps
        # View the data as an int64 when we have a DatetimeIndex
        # We only want to detect gaps, so we only want to compare values.
        agg_x_parsed = PlotlyAggregatorParser.parse_hf_data(agg_x)
        xdt = agg_x_parsed.dtype
        if np.issubdtype(xdt, np.timedelta64) or np.issubdtype(xdt, np.datetime64):
            agg_x_parsed = agg_x_parsed.view("int64")

        agg_y, indices = gap_handler.insert_fill_value_between_gaps(
            agg_x_parsed, agg_y, indices
        )
        if isinstance(downsampler, DataPointSelector):
            # indices point into hf_x -> re-select the (expanded) x values
            agg_x = hf_x[indices]
        elif isinstance(downsampler, DataAggregator):
            # The indices are in this case a repeat
            agg_x = agg_x[indices]

        return agg_x, agg_y, indices

    @staticmethod
    def aggregate(
        hf_trace_data: dict,
        start_idx: int,
        end_idx: int,
    ) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
        """Aggregate the data in `hf_trace_data` between `start_idx` and `end_idx`.

        Returns:
            - x: the aggregated x-values
            - y: the aggregated y-values
            - indices: the indices of the hf_data data that were aggregated

        These indices are useful to select the corresponding hf_data from
        non `x` and `y` data (e.g. `text`, `marker_size`, `marker_color`).

        """
        hf_x = hf_trace_data["x"][start_idx:end_idx]
        hf_y = hf_trace_data["y"][start_idx:end_idx]

        # No downsampling needed ; we show the raw data as is, but with gap-detection
        if (end_idx - start_idx) <= hf_trace_data["max_n_samples"]:
            indices = np.arange(len(hf_y))  # no downsampling - all values are selected
            if len(indices):
                return PlotlyAggregatorParser._handle_gaps(
                    hf_trace_data, hf_x=hf_x, agg_x=hf_x, agg_y=hf_y, indices=indices
                )
            else:
                return hf_x, hf_y, indices

        downsampler = hf_trace_data["downsampler"]

        hf_x_parsed = PlotlyAggregatorParser.parse_hf_data(hf_x)
        hf_y_parsed = PlotlyAggregatorParser.parse_hf_data(hf_y)

        if isinstance(downsampler, DataPointSelector):
            s_v = hf_y_parsed
            if isinstance(s_v, pd.Categorical):  # pd.Categorical (has no .values)
                s_v = s_v.codes
            indices = downsampler.arg_downsample(
                hf_x_parsed,
                s_v,
                n_out=hf_trace_data["max_n_samples"],
                **hf_trace_data.get("downsampler_kwargs", {}),
            )
            if isinstance(hf_trace_data["x"], pd.RangeIndex):
                # we avoid slicing the default pd.RangeIndex (as this is not an
                # in-memory array) - this proves to be faster than slicing the index.
                # Bug fix: the global position of a selected sample is
                # (start_idx + indices); reconstructing its axis value requires
                # multiplying that whole position by the RangeIndex step.
                # (Previously start_idx was not multiplied by the step, which
                # yielded wrong values for any RangeIndex with step != 1.)
                agg_x = (
                    hf_trace_data["x"].start
                    + (start_idx + indices.astype(hf_trace_data["x"].dtype))
                    * hf_trace_data["x"].step
                )
            else:
                agg_x = hf_x[indices]
            agg_y = hf_y[indices]
        elif isinstance(downsampler, DataAggregator):
            agg_x, agg_y = downsampler.aggregate(
                hf_x_parsed,
                hf_y_parsed,
                n_out=hf_trace_data["max_n_samples"],
                **hf_trace_data.get("downsampler_kwargs", {}),
            )
            if isinstance(hf_trace_data["x"], pd.RangeIndex):
                # we avoid slicing the default pd.RangeIndex (as this is not an
                # in-memory array) - this proves to be faster than slicing the index.
                # Bug fix: same positional -> value reconstruction as above;
                # (start_idx + agg_x) must be scaled by the RangeIndex step.
                agg_x = (
                    hf_trace_data["x"].start
                    + (start_idx + agg_x) * hf_trace_data["x"].step
                )
            # The indices are just the range of the aggregated data
            indices = np.arange(len(agg_x))
        else:
            raise ValueError(
                "Invalid downsampler instance, must be either a "
                + f"DataAggregator or a DataPointSelector, got {type(downsampler)}"
            )

        return PlotlyAggregatorParser._handle_gaps(
            hf_trace_data, hf_x=hf_x, agg_x=agg_x, agg_y=agg_y, indices=indices
        )
236 |
--------------------------------------------------------------------------------
/plotly_resampler/figure_resampler/__init__.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-
"""
Module holding wrappers for the plotly ``go.Figure`` and ``go.FigureWidget`` class
which allow bookkeeping and back-end based resampling of high-frequency sequential
data.

!!! tip

    The term `high-frequency` actually refers to very large amounts of sequential
    data.

"""

from .figure_resampler import ASSETS_FOLDER, FigureResampler
from .figurewidget_resampler import FigureWidgetResampler

# The public API of this subpackage.
__all__ = [
    "FigureResampler",
    "ASSETS_FOLDER",
    "FigureWidgetResampler",
]
21 |
--------------------------------------------------------------------------------
/plotly_resampler/figure_resampler/assets/coarse_fine.js:
--------------------------------------------------------------------------------
/**
 * Locate the inner plotly plot element of the dash-graph component whose DOM
 * id contains `gdID`.
 *
 * @param {string} gdID (a substring of) the dash component id of the graph
 * @returns {Element} the js-plotly-plot element of that graph
 * @throws {Error} when no matching plotly element can be found
 */
function getGraphDiv(gdID) {
    const candidates = document?.querySelectorAll('div[id*="' + gdID + '"][class*="dash-graph"]');
    const plotDiv = candidates?.[0]?.getElementsByClassName("js-plotly-plot")?.[0];
    if (!_.isElement(plotDiv)) {
        throw new Error(`Invalid gdID '${gdID}'`);
    }
    return plotDiv;
}
9 |
/**
 * Collect the unique x/y axis key pairs used by the graphDiv data.
 *
 * @param {object} data The data of the graphDiv
 * @returns {Array} An array containing all the unique axis keys of the graphDiv data
 *      [{x: x[ID], y: y[ID]}, {x: x[ID], y: y[ID]}]
 */
const getXYAxisKeys = (data) =>
    _.chain(data)
        .map((trace) => ({ x: trace.xaxis || "x", y: trace.yaxis || "y" }))
        .uniqWith(_.isEqual)
        .value();
22 |
/**
 * Return the orthogonal axis key paired with `anchor` in `keys`.
 *
 * @param {Array} keys array of {x, y} axis-key pairs (see getXYAxisKeys)
 * @param {string} anchor an axis key such as "x2" or "y"
 * @returns {string} the axis key of the orthogonal axis in the same pair
 */
const getAnchorT = (keys, anchor) => {
    const axisLetter = anchor.slice(0, 1); // "x" or "y"
    const orthogonalLetter = { x: "y", y: "x" }[axisLetter];
    const matchingPair = _.chain(keys)
        .filter((obj) => obj[axisLetter] == anchor)
        .value()[0];
    return matchingPair[orthogonalLetter];
};
31 |
/**
 * Get the corresponding axis name of the anchors
 *
 * @param {object} layout the layout of the graphDiv
 * @returns {object} An object containing the anchor and its orthogonal axis name e.g.
 *      {x[ID]: yaxis[ID], y[ID]: xaxis[ID]}
 */
const getLayoutAxisAnchors = (layout) => {
    const anchorEntries = _.chain(layout)
        .map((value, key) => {
            if (key.includes("axis")) return { [value.anchor]: key };
        })
        .without(undefined)
        .value();
    const layout_axis_anchors = Object.assign({}, ...anchorEntries);
    // Edge case for non "make_subplot" figures, i.e. figures constructed with
    // go.Figure: such layouts have a single axis pair without explicit anchor.
    if (_.size(layout_axis_anchors) == 1 && _.has(layout_axis_anchors, undefined)) {
        return { x: "yaxis", y: "xaxis" };
    }
    return layout_axis_anchors;
};
56 |
/**
 * Compare the equality of two arrays up to a certain decimal point precision.
 *
 * @param {*} objValueArr An array with numeric values
 * @param {*} othValueArr An array with numeric values
 * @returns {boolean} true when all values are equal (to 5 decimal points)
 */
function rangeCustomizer(objValueArr, othValueArr) {
    const pairwiseEqual = _.zipWith(objValueArr, othValueArr, (objValue, othValue) => {
        if (_.isNumber(objValue) && _.isNumber(othValue)) {
            return _.round(objValue, 5) === _.round(othValue, 5);
        }
        // Non-numeric input: warn loudly; the undefined return makes the
        // overall comparison evaluate to false.
        alert(`not a number ${objValue} type:${typeof objValue} | ${othValue} type:${typeof othValue}`);
    });
    return _.every(pairwiseEqual);
}
76 |
// Register the dash clientside callbacks that link the coarse (overview)
// figure with the main (detail) figure.
window.dash_clientside = Object.assign({}, window.dash_clientside, {
    clientside: {
        /**
         * Propagate a box-selection on the coarse figure to the axis ranges of
         * the main figure (via Plotly.relayout).
         *
         * @param {object} selectedData the selectedData event payload of the coarse figure
         * @param {string} mainFigID the dash component id of the main figure
         * @param {string} coarseFigID the dash component id of the coarse figure
         * @returns {string} the (unchanged) main figure id
         */
        coarse_to_main: function (selectedData, mainFigID, coarseFigID) {
            // Base case: no selection range -> nothing to propagate
            if (!selectedData.range) {
                return mainFigID;
            }

            // Bug fix: these were assigned without declaration (implicit
            // globals, which also throws in strict mode) -> declared `const`.
            const main_graphDiv = getGraphDiv(mainFigID);
            const coarse_graphDiv = getGraphDiv(coarseFigID);

            const coarse_xy_axiskeys = getXYAxisKeys(coarse_graphDiv.data);
            const main_xy_axiskeys = getXYAxisKeys(main_graphDiv.data);
            const layout_axis_anchors = getLayoutAxisAnchors(main_graphDiv.layout);

            // Use the maingraphDiv its layout to obtain a list of a list of all shared (x)axis names
            // in practice, these are the xaxis names that are linked to each other (i.e. the inner list is the
            // xaxis names of the subplot columns)
            // e.g.: [ [xaxis1, xaxis2], [xaxis3, xaxis4] ]
            let shared_axes_list = _.chain(main_graphDiv.layout)
                .map((value, key) => {
                    if (value.matches) return { anchor: value.matches, match: [key] };
                })
                .without(undefined)
                // groupby same anchor and concat the match arrays
                .groupBy("anchor")
                .map(
                    _.spread((...values) => {
                        return _.mergeWith(...values, (objValue, srcValue) => {
                            if (_.isArray(objValue)) return objValue.concat(srcValue);
                        });
                    })
                )
                // add the axis string to the match array and return the match array
                .map((m_obj) => {
                    const anchorT = getAnchorT(main_xy_axiskeys, m_obj.anchor);
                    let axis_str = layout_axis_anchors[anchorT];
                    m_obj.match.push(axis_str);
                    return m_obj.match;
                })
                .value();

            const relayout = {};

            // Quick inline function to set the relayout range values
            const setRelayoutRangeValues = (axisStr, values) => {
                for (let rangeIdx = 0; rangeIdx < 2; rangeIdx++) {
                    relayout[axisStr + `.range[${rangeIdx}]`] = values[rangeIdx];
                }
            };

            // iterate over the selected data range
            for (const anchor_key in selectedData.range) {
                const selected_range = selectedData.range[anchor_key];
                // Obtain the anchor key of the orthogonal axis (x or y), based on the coarse graphdiv anchor pairs
                const anchorT = getAnchorT(coarse_xy_axiskeys, anchor_key);
                const axisStr = layout_axis_anchors[anchorT];
                const mainLayoutRange = main_graphDiv.layout[axisStr].range;
                const coarseFigRange = coarse_graphDiv.layout[axisStr].range;

                if (!_.isEqual(selected_range, mainLayoutRange)) {
                    const shared_axis_match = _.chain(shared_axes_list)
                        .filter((arr) => arr.includes(axisStr))
                        .value()[0];
                    // Skip y-axes whose selection (approximately) equals the
                    // coarse figure its full range.
                    if (axisStr.includes("yaxis") && _.isEqualWith(selected_range, coarseFigRange, rangeCustomizer)) {
                        continue;
                    }

                    if (shared_axis_match) {
                        // Relayout every axis that is linked ("matches") to this axis
                        shared_axis_match.forEach((axisMStr) => {
                            setRelayoutRangeValues(axisMStr, selected_range);
                        });
                    } else {
                        setRelayoutRangeValues(axisStr, selected_range);
                    }
                }
            }

            if (Object.keys(relayout).length > 0) {
                Plotly.relayout(main_graphDiv, relayout);
            }
            return mainFigID;
        },

        /**
         * Mirror the main figure its current view as selection rectangle(s) on
         * the coarse figure.
         *
         * @param {object} mainRelayout the relayout event payload of the main figure
         * @param {string} coarseFigID the dash component id of the coarse figure
         * @param {string} mainFigID the dash component id of the main figure
         * @returns {string} the (unchanged) coarse figure id
         */
        main_to_coarse: function (mainRelayout, coarseFigID, mainFigID) {
            const coarse_graphDiv = getGraphDiv(coarseFigID);
            const main_graphDiv = getGraphDiv(mainFigID);

            const coarse_xy_axiskeys = getXYAxisKeys(coarse_graphDiv.data);
            const layout_axis_anchors = getLayoutAxisAnchors(coarse_graphDiv.layout);

            const currentSelections = coarse_graphDiv.layout.selections;
            const update = { selections: currentSelections || [] };

            // Construct the selection rectangle for a given axis pair & ranges
            const getUpdateObj = (xy_pair, x_range, y_range) => {
                return {
                    type: "rect",
                    xref: xy_pair.x,
                    yref: xy_pair.y,
                    line: { width: 1, color: "#352F44", dash: "solid" },
                    x0: x_range[0],
                    x1: x_range[1],
                    y0: y_range[0],
                    y1: y_range[1],
                };
            };

            // Base case; no selections yet on the coarse graph
            if (!currentSelections) {
                // if current selections is None
                coarse_xy_axiskeys.forEach((xy_pair) => {
                    // If else handles the edge case of a figure without subplots
                    const x_axis_key = _.has(layout_axis_anchors, xy_pair.y) ? layout_axis_anchors[xy_pair.y] : "xaxis";
                    const y_axis_key = _.has(layout_axis_anchors, xy_pair.x) ? layout_axis_anchors[xy_pair.x] : "yaxis";
                    const x_range = main_graphDiv.layout[x_axis_key].range;
                    const y_range = main_graphDiv.layout[y_axis_key].range;

                    update["selections"].push(getUpdateObj(xy_pair, x_range, y_range));
                });
                Plotly.relayout(coarse_graphDiv, update);
                return coarseFigID;
            }

            // Alter the selections based on the relayout
            let performed_update = false;

            for (let i = 0; i < coarse_xy_axiskeys.length; i++) {
                const xy_pair = coarse_xy_axiskeys[i];
                // If else handles the edge case of a figure without subplots
                const x_axis_key = _.has(layout_axis_anchors, xy_pair.y) ? layout_axis_anchors[xy_pair.y] : "xaxis";
                const y_axis_key = _.has(layout_axis_anchors, xy_pair.x) ? layout_axis_anchors[xy_pair.x] : "yaxis";

                let x_range = main_graphDiv.layout[x_axis_key].range;
                let y_range = main_graphDiv.layout[y_axis_key].range;
                // If the y-axis autorange is true, we alter the y-range to the coarse graphdiv its y-range
                if (main_graphDiv.layout[y_axis_key]["autorange"] === true) {
                    y_range = coarse_graphDiv.layout[y_axis_key].range;
                }
                if (
                    mainRelayout[x_axis_key + ".autorange"] === true &&
                    mainRelayout[y_axis_key + ".autorange"] === true
                ) {
                    performed_update = true;
                    if (
                        // NOTE: for some reason, showspikes info is only available for the xaxis & yaxis keys
                        _.has(mainRelayout, "xaxis.showspikes") &&
                        _.has(mainRelayout, "yaxis.showspikes")
                    ) {
                        // reset axis -> we use the coarse graphDiv layout
                        x_range = coarse_graphDiv.layout[x_axis_key].range;
                    }
                } else if (mainRelayout[x_axis_key + ".range[0]"] || mainRelayout[y_axis_key + ".range[0]"]) {
                    // a specific range is set
                    performed_update = true;
                }

                update["selections"][i] = getUpdateObj(xy_pair, x_range, y_range);
            }
            if (performed_update) {
                Plotly.relayout(coarse_graphDiv, update);
            }
            return coarseFigID;
        },
    },
});
243 |
--------------------------------------------------------------------------------
/plotly_resampler/figure_resampler/utils.py:
--------------------------------------------------------------------------------
1 | """Utility functions for the figure_resampler submodule."""
2 |
3 | import math
4 |
5 | import pandas as pd
6 | from plotly.basedatatypes import BaseFigure
7 |
8 | try: # Fails when IPywidgets is not installed
9 | from plotly.basewidget import BaseFigureWidget
10 | except (ImportError, ModuleNotFoundError):
11 | BaseFigureWidget = type(None)
12 |
13 | from typing import Any
14 |
15 | ### Checks for the figure type
16 |
17 |
def is_figure(figure: Any) -> bool:
    """Check if the figure is a plotly go.Figure or a FigureResampler.

    !!! note

        ``isinstance(figure, go.Figure)`` is deliberately not used here, as it
        would not work once go.Figure is decorated (after executing the
        ``register_plotly_resampler`` function).

    Parameters
    ----------
    figure : Any
        The figure to check.

    Returns
    -------
    bool
        True if the figure is a plotly go.Figure or a FigureResampler.
    """
    if not isinstance(figure, BaseFigure):
        return False
    # Widget-based figures are excluded explicitly.
    return not isinstance(figure, BaseFigureWidget)
38 |
39 |
def is_figurewidget(figure: Any) -> bool:
    """Check if the figure is a plotly go.FigureWidget or a FigureWidgetResampler.

    !!! note

        This method does not use isinstance(figure, go.FigureWidget) as this will not
        work when go.FigureWidget is decorated (after executing the
        ``register_plotly_resampler`` function).

    Parameters
    ----------
    figure : Any
        The figure to check.

    Returns
    -------
    bool
        True if the figure is a plotly go.FigureWidget or a FigureWidgetResampler.
    """
    return isinstance(figure, BaseFigureWidget)
60 |
61 |
def is_fr(figure: Any) -> bool:
    """Check if the figure is a FigureResampler.

    !!! note

        This method will not return True when the figure is a plain plotly
        go.Figure.

    Parameters
    ----------
    figure : Any
        The figure to check.

    Returns
    -------
    bool
        True if the figure is a FigureResampler.
    """
    # local import - presumably to avoid a circular import at module load time
    from plotly_resampler import FigureResampler

    return isinstance(figure, FigureResampler)
82 |
83 |
def is_fwr(figure: Any) -> bool:
    """Check if the figure is a FigureWidgetResampler.

    !!! note

        This method will not return True when the figure is a plain plotly
        go.FigureWidget.

    Parameters
    ----------
    figure : Any
        The figure to check.

    Returns
    -------
    bool
        True if the figure is a FigureWidgetResampler.
    """
    # local import - presumably to avoid a circular import at module load time
    from plotly_resampler import FigureWidgetResampler

    return isinstance(figure, FigureWidgetResampler)
104 |
105 |
106 | ### Rounding functions for bin size
107 |
108 |
def timedelta_to_str(td: pd.Timedelta) -> str:
    """Construct a tight string representation for the given timedelta arg.

    Parameters
    ----------
    td: pd.Timedelta
        The timedelta for which the string representation is constructed

    Returns
    -------
    str:
        The tight string bounds of format '$d-$h$m$s.$ms'.
        If the timedelta is negative, the string starts with 'NEG'.

    """
    pieces = []

    # Edge case if we deal with a negative timedelta
    if td < pd.Timedelta(0):
        td = -td
        pieces.append("NEG")

    # Note: the components must be taken AFTER the negation above
    comps = td.components
    if comps.days > 0:
        pieces.append(f"{comps.days}D")
        # add a separator only when a sub-day (h/m/s/ms) component follows
        if any((comps.hours, comps.minutes, comps.seconds, comps.milliseconds)):
            pieces.append("_")

    if comps.hours > 0:
        pieces.append(f"{comps.hours}h")
    if comps.minutes > 0:
        pieces.append(f"{comps.minutes}m")
    if comps.seconds > 0:
        if comps.milliseconds:
            # fractional seconds, trailing zeros stripped (e.g. "1.5s")
            frac = str(comps.milliseconds / 1000).split(".")[-1].rstrip("0")
            pieces.append(f"{comps.seconds}.{frac}s")
        else:
            pieces.append(f"{comps.seconds}s")
    elif comps.milliseconds > 0:
        pieces.append(f"{comps.milliseconds}ms")
    if comps.microseconds > 0:
        pieces.append(f"{comps.microseconds}us")
    if comps.nanoseconds > 0:
        pieces.append(f"{comps.nanoseconds}ns")
    return "".join(pieces)
156 |
157 |
def round_td_str(td: pd.Timedelta) -> str:
    """Round a timedelta to the nearest unit and convert to a string.

    Parameters
    ----------
    td : pd.Timedelta
        The timedelta to round.

    Returns
    -------
    str
        The rounded timedelta as a string.
        If the timedelta is == 0, None is returned.

    !!! info "See Also"
        [`timedelta_to_str`][figure_resampler.utils.timedelta_to_str]

    """
    # Try the units from coarse to fine; the first unit for which td exceeds
    # 95% of one such unit determines the rounding granularity.
    for unit in ("D", "h", "min", "s", "ms", "us", "ns"):
        if td > 0.95 * pd.Timedelta(f"1{unit}"):
            return timedelta_to_str(td.round(unit))
179 |
180 |
def round_number_str(number: float) -> str:
    """Round a number to the nearest unit and convert to a string.

    Parameters
    ----------
    number : float
        The number to round.

    Returns
    -------
    str
        The rounded number as a string (with a 'T'/'B'/'M'/'k' suffix for
        large magnitudes). If the number is == 0, None is returned.

    """
    sign = "-" if number < 0 else ""
    number = abs(number)
    if number > 0.95:
        for unit, scaling in [
            ("T", int(1e12)),  # Trillion
            ("B", int(1e9)),  # Billion
            ("M", int(1e6)),  # Million
            ("k", int(1e3)),  # Thousand
        ]:
            if number / scaling > 0.95:
                # Bug fix: the sign was previously dropped in this branch
                # (e.g. -2e6 was rendered as "2M" instead of "-2M").
                return f"{sign}{round(number / scaling)}{unit}"
        return sign + str(round(number))
    if number > 0:  # avoid log10(0)
        # we have a number between 0-0.95 -> round till nearest non-zero digit
        return sign + str(round(number, 1 + abs(int(math.log10(number)))))
211 |
--------------------------------------------------------------------------------
/plotly_resampler/registering.py:
--------------------------------------------------------------------------------
1 | """Register plotly-resampler to (un)wrap plotly-graph-objects."""
2 |
3 | __author__ = "Jeroen Van Der Donckt, Jonas Van Der Donckt, Emiel Deprost"
4 |
5 | from functools import wraps
6 |
7 | import plotly
8 |
9 | from plotly_resampler import FigureResampler, FigureWidgetResampler
10 | from plotly_resampler.figure_resampler.figure_resampler_interface import (
11 | AbstractFigureAggregator,
12 | )
13 |
# Prefix added to a wrapped constructor's __name__ to mark it as wrapped.
WRAPPED_PREFIX = "[Plotly-Resampler]__"
# The plotly modules whose Figure/FigureWidget constructors get (un)wrapped.
PLOTLY_MODULES = [
    plotly.graph_objs,
    plotly.graph_objects,
]  # wait for this PR https://github.com/plotly/plotly.py/pull/3779
# Maps each plotly constructor name onto its default plotly-resampler wrapper class.
PLOTLY_CONSTRUCTOR_WRAPPER = {
    "Figure": FigureResampler,
    "FigureWidget": FigureWidgetResampler,
}
23 |
24 |
def _already_wrapped(constr):
    # A wrapped constructor is recognized by the prefix in its __name__.
    return constr.__name__[: len(WRAPPED_PREFIX)] == WRAPPED_PREFIX
27 |
28 |
def _get_plotly_constr(constr):
    """Return the constructor of the underlying plotly graph object and thus omit the
    possibly wrapped [`AbstractFigureAggregator`][figure_resampler.figure_resampler_interface.AbstractFigureAggregator]
    instance.

    Parameters
    ----------
    constr : callable
        The constructor of a instantiated plotly-object.

    Returns
    -------
    callable
        The constructor of a ``go.FigureWidget`` or a ``go.Figure``.
    """
    # A wrapped constructor keeps the original one in ``__wrapped__``
    # (set by functools.wraps in _register_wrapper).
    return constr.__wrapped__ if _already_wrapped(constr) else constr
47 |
48 |
49 | ### Registering the wrappers
50 |
51 |
52 | def _is_ipython_env():
53 | """Check if we are in an IPython environment (with a kernel)."""
54 | try:
55 | from IPython import get_ipython
56 |
57 | return "IPKernelApp" in get_ipython().config
58 | except (ImportError, AttributeError):
59 | return False
60 |
61 |
def _register_wrapper(
    module: type,
    constr_name: str,
    pr_class: AbstractFigureAggregator,
    **aggregator_kwargs,
):
    """Replace ``module.<constr_name>`` with a constructor that wraps the created
    plotly figure in ``pr_class`` (forwarding ``aggregator_kwargs`` to it)."""
    # Always start from the plain plotly constructor (idempotent registration).
    original_constr = _get_plotly_constr(getattr(module, constr_name))

    @wraps(original_constr)
    def resampling_constr(*args, **kwargs):
        # Build the plain plotly figure first, then wrap it in the
        # plotly-resampler class.
        return pr_class(original_constr(*args, **kwargs), **aggregator_kwargs)

    # Mark the constructor as wrapped so it can be detected & unwrapped later.
    resampling_constr.__name__ = WRAPPED_PREFIX + constr_name
    setattr(module, constr_name, resampling_constr)
80 |
81 |
def register_plotly_resampler(mode="auto", **aggregator_kwargs):
    """Register plotly-resampler to plotly.graph_objects.

    This function results in the use of plotly-resampler under the hood.

    !!! note
        We advise to use mode= ``widget`` when working in an IPython based environment
        as this will just behave as a ``go.FigureWidget``, but with dynamic aggregation.
        When using mode= ``auto`` or ``figure``; most figures will be wrapped as
        [`FigureResampler`][figure_resampler.FigureResampler], on which
        [`show_dash`][figure_resampler.FigureResampler.show_dash] needs to be called.

    !!! note
        This function is mostly useful for notebooks. For dash-apps, we advise to look
        at the dash app examples on [GitHub](https://github.com/predict-idlab/plotly-resampler/tree/main/examples#2-dash-apps)

    Parameters
    ----------
    mode : str, optional
        The mode of the plotly-resampler.
        Possible values are: 'auto', 'figure', 'widget', None.
        If 'auto' is used, the mode is determined based on the environment; if it is in
        an IPython environment, the mode is 'widget', otherwise it is 'figure'.
        If 'figure' is used, all plotly figures are wrapped as FigureResampler objects.
        If 'widget' is used, all plotly figure widgets are wrapped as
        FigureWidgetResampler objects (we advise to use this mode in IPython environment
        with a kernel).
        If None is used, wrapping is done as expected (go.Figure -> FigureResampler,
        go.FigureWidget -> FigureWidgetResampler).
    aggregator_kwargs : dict, optional
        The keyword arguments to pass to the plotly-resampler decorator its constructor.
        See more details in [`FigureResampler`][figure_resampler.FigureResampler] and
        [`FigureWidgetResampler`][figure_resampler.FigureWidgetResampler].

    """
    # Resolve the (loop-invariant) mode override once, instead of re-evaluating
    # it (incl. the _is_ipython_env() call) for every constructor name.
    if (mode == "auto" and _is_ipython_env()) or mode == "widget":
        forced_class = FigureWidgetResampler
    elif mode == "figure":
        forced_class = FigureResampler
    else:
        # default mode -> wrap according to PLOTLY_CONSTRUCTOR_WRAPPER
        forced_class = None

    for constr_name, pr_class in PLOTLY_CONSTRUCTOR_WRAPPER.items():
        for module in PLOTLY_MODULES:
            _register_wrapper(
                module, constr_name, forced_class or pr_class, **aggregator_kwargs
            )
126 |
127 |
128 | ### Unregistering the wrappers
129 |
130 |
def _unregister_wrapper(module: type, constr_name: str):
    """Restore the original plotly constructor on ``module``; a no-op when the
    current constructor is not wrapped."""
    current_constr = getattr(module, constr_name)
    if _already_wrapped(current_constr):
        # functools.wraps stored the original constructor in __wrapped__
        setattr(module, constr_name, current_constr.__wrapped__)
136 |
137 |
def unregister_plotly_resampler():
    """Unregister plotly-resampler from plotly.graph_objects."""
    # Restore every wrapped constructor in every plotly module.
    for module in PLOTLY_MODULES:
        for constr_name in PLOTLY_CONSTRUCTOR_WRAPPER:
            _unregister_wrapper(module, constr_name)
143 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.poetry]
2 | name = "plotly-resampler" # Do not forget to update the __init__.py __version__ variable
3 | version = "0.11.0rc1"
4 | description = "Visualizing large time series with plotly"
5 | authors = ["Jonas Van Der Donckt", "Jeroen Van Der Donckt", "Emiel Deprost"]
6 | readme = "README.md"
7 | license = "MIT"
8 | repository = "https://github.com/predict-idlab/plotly-resampler"
9 | documentation = "https://predict-idlab.github.io/plotly-resampler/latest"
10 | keywords = ["time-series", "visualization", "resampling", "plotly", "plotly-dash"]
11 | packages = [
12 | { include = "plotly_resampler" }
13 | ]
14 | include = [
15 | # C extensions must be included in the wheel distributions
16 | {path = "plotly_resampler/aggregation/algorithms/*.so", format = "wheel"},
17 | {path = "plotly_resampler/aggregation/algorithms/*.pyd", format = "wheel"}
18 | ]
19 | classifiers = [
20 | "Development Status :: 5 - Production/Stable",
21 | "Intended Audience :: Developers",
22 | "License :: OSI Approved :: MIT License",
23 | "Programming Language :: Python",
24 | "Programming Language :: Python :: 3",
25 | "Programming Language :: Python :: 3.8",
26 | "Programming Language :: Python :: 3.9",
27 | "Programming Language :: Python :: 3.10",
28 | "Programming Language :: Python :: 3.11",
29 | "Programming Language :: Python :: 3.12",
30 | "Programming Language :: Python :: 3.13",
31 | "Operating System :: POSIX :: Linux",
32 | "Operating System :: MacOS :: MacOS X",
33 | "Operating System :: Microsoft :: Windows",
34 | ]
35 |
36 | [tool.poetry.dependencies]
37 | python = "^3.8"
38 | plotly = ">=5.5.0,<7.0.0"
39 | dash = ">=2.11.0" # from dash 2.11, jupyter support is included
40 | pandas =[
41 | { version = ">=1", python = "<3.13" },
42 | { version = ">=2.2.3", python = ">=3.13" }
43 | ]
44 | numpy = [
45 | { version = ">=1.14", python = "<3.11" },
46 | { version = ">=1.24", python = ">=3.11,<3.13" },
47 | { version = ">=2.0", python = ">=3.13" }
48 | ]
49 | orjson = "^3.10.0" # Faster json serialization (from 3.10 onwards f16 is supported)
50 | # Optional dependencies
51 | Flask-Cors = { version = "^4.0.2", optional = true }
52 | # Lock kaleido dependency until https://github.com/plotly/Kaleido/issues/156 is resolved
53 | kaleido = {version = "0.2.1", optional = true}
54 | tsdownsample = ">=0.1.3"
55 |
56 | [tool.poetry.extras]
57 | # Optional dependencies
58 | inline_persistent = ["kaleido", "Flask-Cors", "ipython"]
59 |
60 | [tool.poetry.dev-dependencies]
61 | pytest = "^7.2.0"
62 | pytest-cov = "^3.0.0"
63 | selenium = "4.2.0"
64 | pytest-selenium = "^2.0.1"
blinker= "1.7.0" # we need version 1.7.0 (otherwise we get a blinker._saferef module not found error)
66 | selenium-wire = "^5.0"
67 | pyarrow = [
68 | {version = ">=15.0", python = "<3.13"},
69 | {version = ">=18.0", python = ">=3.13"},
70 | ]
71 | ipywidgets = "^7.7.1" # needs to be v7 in order to support serialization
72 | memory-profiler = "^0.60.0"
73 | line-profiler = "^4.0"
74 | ruff = "^0.0.262"
75 | black = "^24.3.0"
76 | pytest-lazy-fixture = "^0.6.3"
77 | # yep = "^0.4" # c code profiling
78 | mkdocs = "^1.5.3"
79 | mkdocstrings = "^0.20.0"
80 | mkdocstrings-python = "^1.7.3"
81 | griffe = ">=0.32.0"
82 | mkdocs-gen-files = "^0.5.0"
83 | mike = "^1.1.2"
84 | mkdocs-material = "^9.1.18"
85 | mkdocs-literate-nav = "^0.6.0"
86 | mkdocs-section-index = "^0.3.5"
87 | cffi = ">=1.16"
88 | anywidget = "^0.9.13"
89 |
90 | # Linting
91 | [tool.ruff]
92 | select = ["E", "F", "I"]
93 | line-length = 88
94 | ignore = ["E501"] # Never enforce `E501` (line length violations).
95 | [tool.ruff.per-file-ignores]
96 | "tests/test_registering.py" = ["F401", "F811"]
97 | "tests/test_serialization.py" = ["F401", "F811"]
98 |
99 | # Formatting
100 | [tool.black]
101 | line-length = 88
102 |
103 | [build-system]
104 | requires = ["poetry-core"]
105 | build-backend = "poetry.core.masonry.api"
106 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/predict-idlab/plotly-resampler/560e6b98c944cd65143580d5a40b177af57354ff/tests/__init__.py
--------------------------------------------------------------------------------
/tests/fr_selenium.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | Selenium wrapper class withholding methods for testing the plotly FigureResampler.
4 |
5 | .. note::
6 | Headless mode is enabled by default.
7 |
8 | """
9 |
10 | from __future__ import annotations
11 |
12 | __author__ = "Jonas Van Der Donckt, Jeroen Van Der Donckt"
13 |
14 | import json
15 | import time
16 | from typing import List, Union
17 |
18 | from selenium.webdriver.common.action_chains import ActionChains
19 | from selenium.webdriver.common.by import By
20 | from selenium.webdriver.support import expected_conditions as EC
21 | from selenium.webdriver.support.ui import WebDriverWait
22 | from seleniumwire import webdriver
23 | from seleniumwire.request import Request
24 |
25 | # Note: this will be used to add more waiting time to windows & mac os tests as
26 | # - on these OS's serialization of the figure is necessary (to start the dash app in a
27 | # multiprocessing.Process)
28 | # https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods
29 | # - on linux, the browser (i.e., sending & getting requests) goes a lot faster
30 | from .utils import not_on_linux
31 |
32 | # https://www.blazemeter.com/blog/improve-your-selenium-webdriver-tests-with-pytest
33 | # and create a parameterized driver.get method
34 |
35 |
class RequestParser:
    """Static helpers to filter & validate the dash-callback requests that
    selenium-wire captured during a single figure-resampler GUI interaction."""

    @staticmethod
    def filter_callback_requests(requests: List[Request]) -> List[Request]:
        """Retain only the POST requests targeting the dash update-component
        endpoint (i.e., the actual callback requests)."""
        valid_requests = []
        for r in requests:
            if r.method.upper() != "POST":
                # note; the `_reload_hash` GET request will thus be filtered out
                continue

            if not r.url.endswith("_dash-update-component"):
                continue

            valid_requests.append(r)
        return valid_requests

    # NOTE: decorated with @staticmethod (the methods below take no `self` and
    # are always invoked class-qualified, e.g. RequestParser.assert_...)
    @staticmethod
    def assert_fetch_data_request(
        data_request: Request, relayout_keys: List[str], n_updated_traces: int
    ):
        """Withholds checks for the relayout-data fetch request

        Parameters
        ----------
        data_request : Request
            The relayout data fetch request, with
            * Request body: the relayout changes
            * Response body: a list of dicts with first item
        relayout_keys : List[str]
            The expected keys to be found in the relayout command
        n_updated_traces : int
            The expected amount of traces which will be updated.

        """
        fetch_data_body = json.loads(data_request.body)
        assert "inputs" in fetch_data_body and len(fetch_data_body["inputs"]) == 1
        # verify that the request is triggered by the relayoutData
        figure_id = "resample-figure"
        assert fetch_data_body["inputs"][0]["id"] == figure_id
        assert fetch_data_body["inputs"][0]["property"] == "relayoutData"
        assert all(k in fetch_data_body["inputs"][0]["value"] for k in relayout_keys)
        # verify that the response is a list of dicts
        fetch_data_response_body = json.loads(data_request.response.body)["response"]
        # convert the updateData to a list of dicts
        updateData = fetch_data_response_body[figure_id]["figure"]["operations"]
        updated_traces = list(set(d["location"][1] for d in updateData))

        updated_x_keys = set(
            map(
                lambda d: d["location"][1],
                (filter(lambda x: x["location"][-1] == "x", updateData)),
            )
        )
        updated_y_keys = set(
            map(
                lambda d: d["location"][1],
                (filter(lambda x: x["location"][-1] == "y", updateData)),
            )
        )

        assert n_updated_traces == len(updated_traces)

        # verify that there are x and y updates for each trace
        assert len(updated_x_keys) == len(updated_y_keys) == n_updated_traces

    @staticmethod
    def assert_front_end_relayout_request(relayout_request: Request):
        """Check that a pure front-end relayout request did not trigger any
        trace-data update (its updateData must be a single empty dict)."""
        relayout_body = json.loads(relayout_request.body)
        assert "inputs" in relayout_body and len(relayout_body["inputs"]) == 1
        assert relayout_body["inputs"][0]["id"] == "resample-figure"
        assert relayout_body["inputs"][0]["property"] == "relayoutData"
        assert all(
            k in relayout_body["inputs"][0]["value"]
            for k in ["annotations", "template", "title", "legend", "xaxis", "yaxis"]
        )

        relayout_response_body = json.loads(relayout_request.response.body)["response"]
        # the relayout response its updateData should be an empty dict
        # { "response": { "trace-updater": { "updateData": [ {} ] } } }
        updateData = relayout_response_body["trace-updater"]["updateData"]
        assert len(updateData) == 1
        assert updateData[0] == {}

    @staticmethod
    def browser_independent_single_callback_request_assert(
        fr: FigureResamplerGUITests, relayout_keys: List[str], n_updated_traces: int
    ):
        """Verifies the callback requests on a browser-independent manner

        fr: FigureResamplerGUITests
            used for determining the browser-type.
        relayout_keys : List[str]
            The expected keys to be found in the relayout command
        n_updated_traces : int
            The expected amount of traces which will be updated.

        """
        # First, filter the requests to only retain the relevant ones
        requests = RequestParser.filter_callback_requests(fr.get_requests())

        browser_name = fr.driver.capabilities["browserName"]
        if "firefox" in browser_name:
            # There are 2 requests which are send
            # 1. first: changed-layout to server -> new data to back-end request
            # 2. the front-end relayout request
            assert len(requests) >= 1, f"len(requests) = {len(requests)}"
            if len(requests) == 2:
                fetch_data_request, relayout_request = requests
                # RequestParser.assert_front_end_relayout_request(relayout_request)
            else:
                fetch_data_request = requests[0]

        elif "chrome" in browser_name:
            # for some, yet unknown reason, chrome does not seem to capture the
            # second front-end request.
            assert len(requests) == 1, f"len(requests) = {len(requests)}"
            fetch_data_request = requests[0]
        else:
            raise ValueError(f"invalid browser name {browser_name}")

        # Validate the update-data-callback request
        RequestParser.assert_fetch_data_request(
            fetch_data_request,
            relayout_keys=relayout_keys,
            n_updated_traces=n_updated_traces,
        )
160 |
161 |
class FigureResamplerGUITests:
    """Wrapper for performing figure-resampler GUI."""

    def __init__(self, driver: webdriver, port: int):
        """Construct a GUI-test wrapper around a running figure-resampler dash app.

        Parameters
        ----------
        driver : webdriver
            The selenium(-wire) webdriver (Firefox or Chrome) used to interact
            with the dash app.
        port : int
            The localhost port on which the figure-resampler dash app is served.

        """
        self.port = port
        self.driver: Union[webdriver.Firefox, webdriver.Chrome] = driver
        # tracks whether the driver already navigated to the dash-app page
        self.on_page = False

    def go_to_page(self):
        """Navigate to FigureResampler page."""
        time.sleep(1)
        self.driver.get("http://localhost:{}".format(self.port))
        self.on_page = True
        if not_on_linux():
            time.sleep(7)  # bcs serialization of multiprocessing
        max_nb_tries = 3
        # retry a few times: the dash app may still be starting up
        for _ in range(max_nb_tries):
            try:
                self.driver.find_element_by_id("resample-figure")
                break
            except Exception:
                time.sleep(5)

    def clear_requests(self, sleep_time_s=1):
        """Drop all requests captured so far (after a short settle period)."""
        time.sleep(sleep_time_s)
        del self.driver.requests

    def get_requests(self, delete: bool = True):
        """Return the captured requests; clears them afterwards when ``delete``."""
        if not_on_linux():
            time.sleep(2)  # bcs slower browser
        requests = self.driver.requests
        if delete:
            self.clear_requests()

        return requests

    def drag_and_zoom(self, div_classname, x0=0.25, x1=0.5, y0=0.25, y1=0.5):
        """
        Drags and zooms the div with the given classname.

        Parameters
        ----------
        div_classname : str
            The classname of the div to be dragged and zoomed.
        x0 : float, default: 0.25
            The relative x-coordinate of the upper left corner of the div.
        x1 : float, default: 0.5
            The relative x-coordinate of the lower right corner of the div.
        y0 : float, default: 0.25
            The relative y-coordinate of the upper left corner of the div.
        y1 : float, default: 0.5
            The relative y-coordinate of the lower right corner of the div.

        """
        if not self.on_page:
            self.go_to_page()

        WebDriverWait(self.driver, 3).until(
            EC.presence_of_element_located((By.CLASS_NAME, div_classname))
        )

        subplot = self.driver.find_element(By.CLASS_NAME, div_classname)
        size = subplot.size
        w, h = size["width"], size["height"]

        # translate the relative coordinates into a click-hold-move-release chain
        actions = ActionChains(self.driver)
        actions.move_to_element_with_offset(subplot, xoffset=w * x0, yoffset=h * y0)
        actions.click_and_hold()
        actions.pause(0.2)
        actions.move_by_offset(xoffset=w * (x1 - x0), yoffset=h * (y1 - y0))
        actions.pause(0.2)
        actions.release()
        actions.pause(0.2)
        actions.perform()

    def _get_modebar_btns(self):
        # Return all modebar buttons of the figure (navigating first if needed).
        if not self.on_page:
            self.go_to_page()

        WebDriverWait(self.driver, 3).until(
            EC.presence_of_element_located((By.CLASS_NAME, "modebar-group"))
        )
        return self.driver.find_elements(By.CLASS_NAME, "modebar-btn")

    def autoscale(self):
        """Click the 'Autoscale' modebar button (if present)."""
        for btn in self._get_modebar_btns():
            data_title = btn.get_attribute("data-title")
            if data_title == "Autoscale":
                ActionChains(self.driver).move_to_element(btn).click().perform()
                return

    def reset_axes(self):
        """Click the 'Reset axes' modebar button (if present)."""
        for btn in self._get_modebar_btns():
            data_title = btn.get_attribute("data-title")
            if data_title == "Reset axes":
                ActionChains(self.driver).move_to_element(btn).click().perform()
                return

    def click_legend_item(self, legend_name):
        """Click the first legend item whose text contains ``legend_name``."""
        WebDriverWait(self.driver, 3).until(
            EC.presence_of_element_located((By.CLASS_NAME, "modebar-group"))
        )
        for legend_item in self.driver.find_elements(By.CLASS_NAME, "legendtext"):
            if legend_name in legend_item.get_attribute("data-unformatted"):
                # move to the center of the item and click it
                (
                    ActionChains(self.driver)
                    .move_to_element(legend_item)
                    .pause(0.1)
                    .click()
                    .perform()
                )
                return

    # ------------------------------ DATA MODEL METHODS ------------------------------
    def __del__(self):
        # Close the browser window when this wrapper is garbage-collected.
        self.driver.close()
293 |
--------------------------------------------------------------------------------
/tests/test_multiple_axes.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import plotly.graph_objects as go
3 | import pytest
4 | from plotly.subplots import make_subplots
5 |
6 | from plotly_resampler import FigureResampler, FigureWidgetResampler
7 | from plotly_resampler.aggregation import MinMaxLTTB
8 |
9 |
@pytest.mark.parametrize("fig_type", [FigureResampler, FigureWidgetResampler])
def test_multiple_axes_figure(fig_type):
    """Four traces sharing one x-axis, each drawn on its own (overlayed) y-axis."""
    idx = np.arange(200_000)
    sin = 3 + np.sin(idx / 200) + np.random.randn(len(idx)) / 30

    fig = fig_type(
        default_n_shown_samples=2000, default_downsampler=MinMaxLTTB(parallel=True)
    )

    # all traces will be plotted against the same x-axis
    # note: the first added trace its yaxis will be used as reference
    for trace_name, y_axis, hf_y in [
        ("orig", "y1", sin),
        ("negative", "y2", -sin),
        ("sqrt(orig)", "y3", np.sqrt(sin * 10)),
        ("orig**2", "y4", (sin - 3) ** 2),
    ]:
        fig.add_trace(
            go.Scatter(name=trace_name, yaxis=y_axis, line_width=1),
            hf_x=idx,
            hf_y=hf_y,
        )

    # in order for autoshift to work, you need to set x-anchor to free;
    # anchor=free + overlaying + autoshift places each extra axis automatically
    # without overlapping any other axes
    overlay_axis = dict(anchor="free", overlaying="y1", autoshift=True)
    fig.update_layout(
        # NOTE: you can use the domain key to set the x-axis range (if you want to
        # display the legend on the right instead of the top as done here)
        xaxis=dict(domain=[0, 1]),
        yaxis=dict(title="orig"),
        yaxis2=dict(title="negative", side="left", **overlay_axis),
        yaxis3=dict(title="sqrt(orig)", side="right", **overlay_axis),
        yaxis4=dict(title="orig ** 2", side="right", **overlay_axis),
    )

    # Update layout properties
    fig.update_layout(
        title_text="multiple y-axes example",
        height=600,
        legend=dict(orientation="h", yanchor="bottom", y=1.02, xanchor="right", x=1),
        template="plotly_white",
    )

    # Test: check whether a single update triggers all traces to be updated
    out = fig._construct_update_data({"xaxis.range[0]": 0, "xaxis.range[1]": 50_000})
    assert len(out) == 5
87 |
88 |
@pytest.mark.parametrize("fig_type", [FigureResampler, FigureWidgetResampler])
def test_multiple_axes_subplot_rows(fig_type):
    """Multiple overlayed y-axes on the second row of a 2x1 subplot figure."""
    idx = np.arange(200_000)
    sin = 3 + np.sin(idx / 200) + np.random.randn(len(idx)) / 30

    # create a figure with 2 rows and 1 column
    # NOTE: we don't pass the "yaxis" argument to the scatter objects here;
    # the row/col arguments determine the axis
    fig = fig_type(make_subplots(rows=2, cols=1, shared_xaxes=True))
    for trace_name, hf_y in [
        ("orig", sin),
        ("-orig", -sin),
        ("sqrt", np.sqrt(sin * 10)),
        ("orig**2", (sin - 3) ** 2),
    ]:
        fig.add_trace(go.Scatter(name=trace_name), hf_x=idx, hf_y=hf_y, row=2, col=1)

    # NOTE: because of the row and col specification, the yaxis is automatically set to y2
    for i, data in enumerate(fig.data[1:], 3):
        data.update(yaxis=f"y{i}")

    # add the original signal to the first row subplot
    fig.add_trace(go.Scatter(name="orig"), row=1, col=1, hf_x=idx, hf_y=sin)

    # in order for autoshift to work, you need to set x-anchor to free
    overlay_axis = dict(anchor="free", overlaying="y2", autoshift=True)
    fig.update_layout(
        xaxis2=dict(domain=[0, 1], anchor="y2"),
        yaxis2=dict(title="orig"),
        yaxis3=dict(title="-orig", side="left", **overlay_axis),
        yaxis4=dict(title="sqrt(orig)", side="right", **overlay_axis),
        yaxis5=dict(title="orig ** 2", side="right", **overlay_axis),
    )

    # Update layout properties
    fig.update_layout(
        title_text="multiple y-axes example",
        height=800,
        legend=dict(orientation="h", yanchor="bottom", y=1.02, xanchor="right", x=1),
        template="plotly_white",
    )

    # Test: check whether a single update triggers all traces to be updated
    out = fig._construct_update_data(
        {
            "xaxis.range[0]": 0,
            "xaxis.range[1]": 50_000,
            "xaxis2.range[0]": 0,
            "xaxis2.range[1]": 50_000,
        }
    )
    assert len(out) == 6
162 |
163 |
@pytest.mark.parametrize("fig_type", [FigureResampler, FigureWidgetResampler])
def test_multiple_axes_subplot_cols(fig_type):
    """Multiple overlayed y-axes on the second column of a 1x2 subplot figure."""
    idx = np.arange(200_000)
    sin = 3 + np.sin(idx / 200) + np.random.randn(len(idx)) / 30

    # Create a figure with 1 row and 2 columns
    fig = fig_type(make_subplots(rows=1, cols=2))
    for trace_name, hf_y in [
        ("orig", sin),
        ("-orig", -sin),
        ("sqrt", np.sqrt(sin * 10)),
        ("orig**2", (sin - 3) ** 2),
    ]:
        fig.add_trace(go.Scatter(name=trace_name), hf_x=idx, hf_y=hf_y, row=1, col=2)

    # NOTE: because of the row & col specification, the yaxis is automatically set to y2
    for i, data in enumerate(fig.data[1:], 3):
        data.update(yaxis=f"y{i}")

    fig.add_trace(go.Scatter(name="orig"), row=1, col=1, hf_x=idx, hf_y=sin)

    # In order for autoshift to work, you need to set x-anchor to free
    overlay_axis = dict(anchor="free", overlaying="y2", autoshift=True)
    fig.update_layout(
        xaxis=dict(domain=[0, 0.4]),
        xaxis2=dict(domain=[0.56, 1]),
        yaxis2=dict(title="orig"),
        yaxis3=dict(title="-orig", side="left", **overlay_axis),
        yaxis4=dict(title="sqrt(orig)", side="right", **overlay_axis),
        yaxis5=dict(title="orig ** 2", side="right", **overlay_axis),
    )

    # Update layout properties
    fig.update_layout(
        title_text="multiple y-axes example",
        height=300,
        legend=dict(orientation="h", yanchor="bottom", y=1.02, xanchor="right", x=1),
        template="plotly_white",
    )

    # zooming on the first column only updates its single trace (+ x-range entry)
    out = fig._construct_update_data(
        {
            "xaxis.range[0]": 0,
            "xaxis.range[1]": 50_000,
        }
    )
    assert len(out) == 2

    # zooming on the second column updates its four traces (+ x-range entry)
    out = fig._construct_update_data(
        {
            "xaxis2.range[0]": 0,
            "xaxis2.range[1]": 50_000,
        }
    )
    assert len(out) == 5
239 |
--------------------------------------------------------------------------------
/tests/test_plotly_express.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import pandas as pd
3 | import plotly.express as px
4 |
5 | from plotly_resampler import register_plotly_resampler, unregister_plotly_resampler
6 |
7 |
def test_px_hoverlabel_figureResampler():
    """Custom hover data must survive aggregation when px figures get wrapped."""
    labels = list(range(0, 3))
    N = 60_000
    x = np.arange(N)
    y = np.random.normal(size=N)
    label = np.random.randint(low=labels[0], high=labels[-1] + 1, size=N).astype(str)
    description = np.random.randint(low=3, high=5, size=N)

    df = pd.DataFrame.from_dict(
        {"x": x, "y": y, "label": label, "description": description}
    ).sort_values(by=["x"])

    shared_px_kwargs = dict(x="x", y="y", color="label", hover_data=["description"])

    # Without resampler, shows correct hover data
    fig = px.scatter(df, title="Without resampler", **shared_px_kwargs)

    # With resampler, shows incorrect hover data
    register_plotly_resampler(mode="auto", default_n_shown_samples=1000)
    fig2 = px.scatter(df, title="With resampler", **shared_px_kwargs)

    # verify whether the selected has the same y and customdata as the original
    for idx in range(len(fig.data)):
        trc_orig = fig.data[idx]
        trc_res = fig2.data[idx]

        # map each aggregated x back onto its position in the original trace
        agg_indices = np.searchsorted(trc_orig["x"], trc_res["x"]).ravel()
        for k in ["customdata", "y"]:
            assert all(trc_orig[k].ravel()[agg_indices] == trc_res[k].ravel())

    unregister_plotly_resampler()
56 |
--------------------------------------------------------------------------------
/tests/test_rangeslider.py:
--------------------------------------------------------------------------------
1 | """Code which tests the overview functionality."""
2 |
3 | __author__ = "Jonas Van Der Donckt"
4 |
5 | import numpy as np
6 | import plotly.graph_objects as go
7 | import pytest
8 | from plotly.subplots import make_subplots
9 | from pytest_lazyfixture import lazy_fixture as lf
10 |
11 | from plotly_resampler import FigureResampler
12 | from plotly_resampler.aggregation import (
13 | EveryNthPoint,
14 | MedDiffGapHandler,
15 | MinMaxLTTB,
16 | NoGapHandler,
17 | )
18 |
19 |
@pytest.mark.parametrize("figure_class", [go.Figure, make_subplots])
@pytest.mark.parametrize(
    "series", [lf("float_series"), lf("cat_series"), lf("bool_series")]
)
def test_overview_figure_type(figure_class, series):
    """Test the overview functionality (i.e., whether the overview figure can be
    constructed)"""
    fig = FigureResampler(figure_class(), create_overview=True)
    # add the same series once as a plain trace and once via hf_x / hf_y
    fig.add_trace(go.Scatter(x=series.index, y=series))
    fig.add_trace({}, hf_x=series.index, hf_y=series)

    # both traces must end up in the constructed overview figure
    overview_fig = fig._create_overview_figure()
    assert len(overview_fig["data"]) == 2
35 |
36 |
@pytest.mark.parametrize("n_cols", [1, 2, 3])
def test_valid_row_indices_subplots(n_cols):
    """Valid (incl. partially-None) overview row indices must be accepted."""

    def _overview_fig(row_idxs):
        # helper: build a 3-row figure with the given overview row indices
        return FigureResampler(
            make_subplots(rows=3, cols=n_cols, shared_xaxes="columns"),
            create_overview=True,
            overview_row_idxs=row_idxs,
        )

    fig = _overview_fig(None)
    fig._create_overview_figure()
    # by default, the overview row indices should be the first row of each subplot col
    assert fig._overview_row_idxs == [0] * n_cols

    # random but valid row indices should not crash
    _overview_fig(
        [np.random.randint(0, 2) for _ in range(n_cols)]
    )._create_overview_figure()

    # By adding None values, we can skip certain subplot columns
    row_idxs = [np.random.randint(0, 2) for _ in range(n_cols)]
    for _ in range(np.random.randint(0, n_cols)):
        row_idxs[np.random.randint(0, n_cols)] = None
    _overview_fig(row_idxs)._create_overview_figure()
67 |
68 |
@pytest.mark.parametrize("n_cols", [1, 2, 3])
def test_invalid_row_indices_subplots(n_cols):
    """Invalid overview row indices must be rejected at construction time."""
    invalid_row_idx_lists = [
        # row index 3 is out of range (rows are 0-indexed, so valid: [0, 1, 2])
        [3] * n_cols,
        # one entry too few: the overview would miss a subplot column
        [0] * (n_cols - 1),
    ]
    for row_idxs in invalid_row_idx_lists:
        with pytest.raises(AssertionError):
            FigureResampler(
                make_subplots(rows=3, cols=n_cols, shared_xaxes="columns"),
                create_overview=True,
                overview_row_idxs=row_idxs,
            )
86 |
87 |
@pytest.mark.parametrize("overview_kwargs", [{"height": 80}])
@pytest.mark.parametrize("series", [lf("float_series")])
def test_overview_kwargs(overview_kwargs, series):
    """Layout kwargs passed via ``overview_kwargs`` must end up in the overview
    figure's layout."""
    fig = FigureResampler(
        go.Figure(), create_overview=True, overview_kwargs=overview_kwargs
    )
    fig.add_trace(go.Scatter(x=series.index, y=series))

    overview_layout = fig._create_overview_figure().layout
    for key, value in overview_kwargs.items():
        assert overview_layout[key] == value
101 |
102 |
@pytest.mark.parametrize("figure_class", [go.Figure, make_subplots])
@pytest.mark.parametrize(
    "series", [lf("float_series"), lf("cat_series"), lf("bool_series")]
)
@pytest.mark.parametrize("default_n_samples", [500, 1000, 1500])
def test_coarse_figure_aggregation(figure_class, series, default_n_samples):
    """The overview (coarse) figure aggregates each trace to 3x the default
    number of shown samples."""
    fig = FigureResampler(
        figure_class(), create_overview=True, default_n_shown_samples=default_n_samples
    )
    # One trace added as a plotly object, one via the hf_* keyword path
    fig.add_trace(go.Scatter(x=series.index, y=series))
    fig.add_trace({}, hf_x=series.index, hf_y=series)

    expected_n = 3 * default_n_samples
    for trace in fig._create_overview_figure().data:
        assert len(trace.y) == expected_n
120 |
121 |
@pytest.mark.parametrize("aggregator", [MinMaxLTTB, EveryNthPoint])
def test_overview_figure_gap_handler_similarity(aggregator):
    """Test whether the same gap handlers as those used in the figure are used in the
    overview figure"""
    fig = FigureResampler(create_overview=True, default_downsampler=aggregator())

    # create uneven data which contains exactly 4 gaps
    N = 20_000
    x = np.arange(N)
    # Draw 4 DISTINCT, non-zero gap positions. The previous
    # `np.random.randint(0, N, size=4)` was flaky: a duplicate index merges two
    # gaps into one, and shifting at index 0 moves the whole array without
    # creating any gap - either way the fixed `== 4` assertions below break.
    for idx in np.random.choice(np.arange(1, N), size=4, replace=False):
        # integer bounds (// instead of /) - randint expects ints
        x[idx:] += np.random.randint(N // 10, N // 5)
    y = np.random.normal(size=N)

    fig.add_trace(go.Scatter(x=x, y=y), gap_handler=NoGapHandler())
    fig.add_trace({}, hf_x=x, hf_y=y, gap_handler=MedDiffGapHandler())
    fig.add_trace({}, hf_x=x, hf_y=y, gap_handler=MedDiffGapHandler(fill_value=42))

    overview_fig = fig._create_overview_figure()
    assert len(overview_fig.data) == 3
    # NoGapHandler inserts nothing; MedDiffGapHandler inserts one NaN (or the
    # configured fill_value) per detected gap
    assert np.isnan(overview_fig.data[0]["y"]).sum() == 0
    assert np.isnan(overview_fig.data[1]["y"]).sum() == 4
    assert (overview_fig.data[2]["y"] == 42).sum() == 4
144 |
--------------------------------------------------------------------------------
/tests/test_registering.py:
--------------------------------------------------------------------------------
1 | from inspect import isfunction
2 |
3 | import numpy as np
4 | import plotly.express as px
5 | import plotly.graph_objects as go
6 |
7 | from plotly_resampler import FigureResampler, FigureWidgetResampler
8 | from plotly_resampler.figure_resampler.figure_resampler_interface import (
9 | AbstractFigureAggregator,
10 | )
11 | from plotly_resampler.registering import (
12 | _get_plotly_constr,
13 | register_plotly_resampler,
14 | unregister_plotly_resampler,
15 | )
16 |
17 | from .conftest import registering_cleanup
18 |
19 |
def test_get_plotly_const(registering_cleanup):
    """_get_plotly_constr must always resolve to the plain plotly constructors,
    registered or not."""

    def _assert_plain_constructors():
        # _get_plotly_constr unwraps to the original (non-aggregating) classes
        assert not issubclass(_get_plotly_constr(go.Figure), AbstractFigureAggregator)
        assert not issubclass(
            _get_plotly_constr(go.FigureWidget), AbstractFigureAggregator
        )

    def _assert_unregistered():
        # Plain plotly constructors are classes, not wrapper functions
        assert not (isfunction(go.Figure) or isfunction(go.FigureWidget))
        assert not issubclass(go.Figure, AbstractFigureAggregator)
        assert not issubclass(go.FigureWidget, AbstractFigureAggregator)
        _assert_plain_constructors()

    # The resampler classes themselves are always aggregators (the basics)
    assert issubclass(FigureResampler, AbstractFigureAggregator)
    assert issubclass(FigureWidgetResampler, AbstractFigureAggregator)

    # Nothing registered yet
    _assert_unregistered()

    register_plotly_resampler()
    # Registration swaps the constructors for wrapper functions ...
    assert isfunction(go.Figure) and isfunction(go.FigureWidget)
    assert isinstance(go.Figure(), AbstractFigureAggregator)
    assert isinstance(go.FigureWidget(), AbstractFigureAggregator)
    assert issubclass(FigureResampler, AbstractFigureAggregator)
    assert issubclass(FigureWidgetResampler, AbstractFigureAggregator)
    # ... but _get_plotly_constr still yields the plain plotly classes
    _assert_plain_constructors()

    unregister_plotly_resampler()
    _assert_unregistered()
47 |
48 |
def test_register_and_unregister_graph_objects(registering_cleanup):
    """Registering must swap the ``plotly.graph_objects`` constructors and
    unregistering must restore them."""
    import plotly.graph_objects as go_

    def _assert_plain():
        # Constructors are the original classes and produce plain figures
        assert not (isfunction(go_.Figure) or isfunction(go_.FigureWidget))
        assert not isinstance(go_.Figure(), AbstractFigureAggregator)
        assert not isinstance(go_.FigureWidget(), AbstractFigureAggregator)

    _assert_plain()  # is unregistered now

    register_plotly_resampler()
    assert isfunction(go_.Figure) and isfunction(go_.FigureWidget)
    fig = go_.Figure()
    assert isinstance(fig, AbstractFigureAggregator)
    assert isinstance(fig, FigureResampler)
    assert not isinstance(fig, FigureWidgetResampler)
    fig = go_.FigureWidget()
    assert isinstance(fig, AbstractFigureAggregator)
    assert isinstance(fig, FigureWidgetResampler)
    assert not isinstance(fig, FigureResampler)

    unregister_plotly_resampler()
    _assert_plain()
76 |
77 |
def test_register_and_unregister_graph_objs(registering_cleanup):
    """Same as the graph_objects test, but for the ``plotly.graph_objs`` alias
    module."""
    import plotly.graph_objs as go_

    def _assert_plain():
        # Constructors are the original classes and produce plain figures
        assert not (isfunction(go_.Figure) or isfunction(go_.FigureWidget))
        assert not isinstance(go_.Figure(), AbstractFigureAggregator)
        assert not isinstance(go_.FigureWidget(), AbstractFigureAggregator)

    _assert_plain()  # is unregistered now

    register_plotly_resampler()
    assert isfunction(go_.Figure) and isfunction(go_.FigureWidget)
    fig = go_.Figure()
    assert isinstance(fig, AbstractFigureAggregator)
    assert isinstance(fig, FigureResampler)
    assert not isinstance(fig, FigureWidgetResampler)
    fig = go_.FigureWidget()
    assert isinstance(fig, AbstractFigureAggregator)
    assert isinstance(fig, FigureWidgetResampler)
    assert not isinstance(fig, FigureResampler)

    unregister_plotly_resampler()
    _assert_plain()
105 |
106 |
def test_registering_modes(registering_cleanup):
    """Each registering mode determines which resampler class the patched
    go.Figure / go.FigureWidget constructors produce."""
    mode_expectations = [
        # (mode, expected class for go.Figure(), expected class for go.FigureWidget())
        ("auto", FigureResampler, FigureWidgetResampler),  # the default
        ("figure", FigureResampler, FigureResampler),
        ("widget", FigureWidgetResampler, FigureWidgetResampler),
    ]
    for mode, expected_fig_cls, expected_widget_cls in mode_expectations:
        register_plotly_resampler(mode=mode)
        assert isinstance(go.Figure(), expected_fig_cls)
        assert isinstance(go.FigureWidget(), expected_widget_cls)
122 |
123 |
def test_registering_plotly_express_and_kwargs(registering_cleanup):
    """Registration must wrap plotly-express figures and honor the kwargs
    passed to register_plotly_resampler."""

    def _assert_plain_px(n):
        # Unregistered: px output is untouched and keeps all samples
        fig = px.scatter(y=np.arange(n))
        assert not isinstance(fig, AbstractFigureAggregator)
        assert len(fig.data) == 1
        assert len(fig.data[0].y) == n

    def _assert_resampled_px(n, n_shown, show_dash_kwargs):
        # Registered: px output is a FigureResampler with aggregated data
        fig = px.scatter(y=np.arange(n))
        assert isinstance(fig, FigureResampler)
        assert fig._show_dash_kwargs == show_dash_kwargs
        assert len(fig.data) == 1
        assert len(fig.data[0].y) == n_shown
        assert len(fig.hf_data) == 1
        assert len(fig.hf_data[0]["y"]) == n

    _assert_plain_px(500)  # is unregistered now

    register_plotly_resampler(
        default_n_shown_samples=50, show_dash_kwargs=dict(mode="inline", port=8051)
    )
    _assert_resampled_px(500, 50, dict(mode="inline", port=8051))

    # Re-registering with defaults resets the kwargs and sample count
    register_plotly_resampler()
    _assert_resampled_px(5000, 1000, dict())

    unregister_plotly_resampler()
    _assert_plain_px(500)
156 |
157 |
def test_compasibility_when_registered(registering_cleanup):
    """(Nested) resampler wrapping must behave the same before and after
    registering plotly-resampler."""
    fr = FigureResampler
    fwr = FigureWidgetResampler

    def _assert_wrapped(f, expected_cls):
        # Wrapping keeps the single trace, its aggregation, and the hf data
        assert isinstance(f, expected_cls)
        assert len(f.data) == 1
        assert len(f.data[0].y) == 1000
        assert len(f.hf_data) == 1
        assert len(f.hf_data[0]["y"]) == 1005

    def _check_composability():
        fig_orig_1 = px.scatter(y=np.arange(1_005))
        fig_orig_2 = go.FigureWidget({"type": "scatter", "y": np.arange(1_005)})
        for fig in [fig_orig_1, fig_orig_2]:
            # The outermost wrapper determines the resulting class, no matter
            # how deeply fr / fwr are nested inside
            fr_wrapped = [
                fr(fig),
                fr(fwr(fig)),
                fr(fr(fr(fr(fwr(fwr(fr(fwr(fr(fig))))))))),
            ]
            for f in fr_wrapped:
                _assert_wrapped(f, FigureResampler)

            fwr_wrapped = [
                fwr(fig),
                fwr(fr(fig)),
                fwr(fwr(fwr(fwr(fr(fr(fwr(fr(fwr(fig))))))))),
            ]
            for f in fwr_wrapped:
                _assert_wrapped(f, FigureWidgetResampler)

    _check_composability()  # unregistered
    register_plotly_resampler()
    _check_composability()  # registered
209 |
--------------------------------------------------------------------------------
/tests/test_utils.py:
--------------------------------------------------------------------------------
1 | import pandas as pd
2 | import plotly.graph_objects as go
3 | import pytest
4 |
5 | from plotly_resampler import FigureResampler, FigureWidgetResampler
6 | from plotly_resampler.figure_resampler.utils import (
7 | is_figure,
8 | is_figurewidget,
9 | is_fr,
10 | is_fwr,
11 | round_number_str,
12 | round_td_str,
13 | timedelta_to_str,
14 | )
15 |
16 |
@pytest.mark.parametrize(
    "obj",
    [
        go.Figure(),
        go.Figure({"type": "scatter", "y": [1, 2, 3]}),
        FigureResampler(),
        FigureResampler({"type": "scatter", "y": [1, 2, 3]}),
    ],
)
def test_is_figure(obj):
    """is_figure must accept plain go.Figure and FigureResampler instances."""
    assert is_figure(obj)
28 |
29 |
@pytest.mark.parametrize(
    "obj",
    [
        go.FigureWidget(),
        None,
        {"type": "scatter", "y": [1, 2, 3]},
        go.Scatter(y=[1, 2, 3]),
        FigureWidgetResampler(),
        FigureWidgetResampler({"type": "scatter", "y": [1, 2, 3]}),
    ],
)
def test_not_is_figure(obj):
    """is_figure must reject widgets, traces, dicts, and None."""
    assert not is_figure(obj)
43 |
44 |
def test_is_fr():
    """is_fr must accept only FigureResampler instances."""
    fig_dict = {"type": "scatter", "y": [1, 2, 3]}
    # positives: FigureResampler, empty or dict-constructed
    assert is_fr(FigureResampler())
    assert is_fr(FigureResampler(fig_dict))
    # negatives: anything that is not a FigureResampler
    non_fr_objects = [
        go.Figure(),
        go.Figure(fig_dict),
        go.FigureWidget(),
        None,
        fig_dict,
        go.Scatter(y=[1, 2, 3]),
        FigureWidgetResampler(),
        FigureWidgetResampler(fig_dict),
    ]
    for obj in non_fr_objects:
        assert not is_fr(obj)
57 |
58 |
def test_is_figurewidget():
    """is_figurewidget must accept go.FigureWidget and FigureWidgetResampler."""
    fig_dict = {"type": "scatter", "y": [1, 2, 3]}
    # positives: widget (sub)classes, empty or dict-constructed
    widget_objects = [
        go.FigureWidget(),
        go.FigureWidget(fig_dict),
        FigureWidgetResampler(),
        FigureWidgetResampler(fig_dict),
    ]
    for obj in widget_objects:
        assert is_figurewidget(obj)
    # negatives: non-widget figures, traces, dicts, None
    non_widget_objects = [
        go.Figure(),
        None,
        fig_dict,
        go.Scatter(y=[1, 2, 3]),
        FigureResampler(),
        FigureResampler(fig_dict),
    ]
    for obj in non_widget_objects:
        assert not is_figurewidget(obj)
71 |
72 |
def test_is_fwr():
    """is_fwr must accept only FigureWidgetResampler instances."""
    fig_dict = {"type": "scatter", "y": [1, 2, 3]}
    # positives: FigureWidgetResampler, empty or dict-constructed
    assert is_fwr(FigureWidgetResampler())
    assert is_fwr(FigureWidgetResampler(fig_dict))
    # negatives: plain widgets/figures, traces, dicts, None, FigureResampler
    non_fwr_objects = [
        go.FigureWidget(),
        go.FigureWidget(fig_dict),
        go.Figure(),
        None,
        fig_dict,
        go.Scatter(y=[1, 2, 3]),
        FigureResampler(),
        FigureResampler(fig_dict),
    ]
    for obj in non_fwr_objects:
        assert not is_fwr(obj)
85 |
86 |
def test_timedelta_to_str():
    """Check round_td_str / timedelta_to_str formatting, including rounding
    behavior, the NEG prefix, and the zero case.

    Note: the original test asserted ``round_td_str(pd.Timedelta("14.4ms"))``
    twice; the duplicate was removed in the table-driven rewrite below.
    """
    assert timedelta_to_str(pd.Timedelta("1W")) == "7D"
    # negative timedeltas are prefixed with NEG
    assert timedelta_to_str(pd.Timedelta("1W") * -1) == "NEG7D"
    assert timedelta_to_str(pd.Timedelta("1s 114ms")) == "1.114s"

    round_cases = [
        ("1W", "7D"),
        ("951ms", "1s"),  # > 950ms rounds up to the next unit
        ("950ms", "950ms"),
        ("949ms", "949ms"),
        ("501ms", "501ms"),
        ("500ms", "500ms"),
        ("14.4ms", "14ms"),  # rounds down
        ("14.6ms", "15ms"),  # rounds up
        ("1h 14.4us", "1h"),  # small residue is dropped
        ("1128.9us", "1ms"),
        ("128.9us", "129us"),
        ("14ns", "14ns"),
    ]
    for td_str, expected in round_cases:
        assert round_td_str(pd.Timedelta(td_str)) == expected
    # zero should return None
    assert round_td_str(pd.Timedelta("0ns")) is None
106 |
107 |
def test_round_int_str():
    """Check round_number_str across magnitudes (fractions through trillions),
    the sign handling, and the zero case."""
    cases = [
        (0.951, "1"),
        (0.95, "0.9"),
        (0.949, "0.9"),
        (0.00949, "0.009"),
        (0.00950, "0.009"),
        (0.00951, "0.01"),
        (0.0044, "0.004"),
        (0.00451, "0.005"),
        (0.0001, "0.0001"),
        (0.00001, "1e-05"),  # very small values use scientific notation
        (0.000000321, "3e-07"),
        (12_000, "12k"),
        (13_340, "13k"),
        (13_540, "14k"),
        (559_540, "560k"),
        (949_000, "949k"),
        (950_000, "950k"),
        (950_001, "1M"),  # above 950k the next suffix kicks in
        (1_950_001, "2M"),
        (111_950_001, "112M"),
        (950_001_001, "1B"),
        (1_950_001_001, "2B"),
        (111_950_001_001, "112B"),
        (950_001_001_001, "1T"),
        (1_950_001_001_001, "2T"),
        (111_950_001_001_001, "112T"),
        (-0.951, "-1"),  # negative values keep their sign
        (-0.95, "-0.9"),
    ]
    for number, expected in cases:
        assert round_number_str(number) == expected
    # zero should return None
    assert round_number_str(0) is None
140 |
--------------------------------------------------------------------------------
/tests/utils.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import base64
4 | import sys
5 |
6 | import numpy as np
7 | import pandas as pd
8 |
9 | from plotly_resampler.aggregation import MedDiffGapHandler, MinMaxLTTB
10 | from plotly_resampler.aggregation.aggregation_interface import (
11 | DataAggregator,
12 | DataPointSelector,
13 | )
14 | from plotly_resampler.aggregation.gap_handler_interface import AbstractGapHandler
15 | from plotly_resampler.aggregation.plotly_aggregator_parser import PlotlyAggregatorParser
16 |
17 |
def not_on_linux():
    """Return True if the current platform is not Linux.

    Used to skip or alter test behavior on non-Linux platforms, where browser
    testing gets tricky.
    """
    running_on_linux = sys.platform.startswith("linux")
    return not running_on_linux
25 |
26 |
def construct_hf_data_dict(hf_x, hf_y, **kwargs):
    """Build a default hf-trace data dict for ``hf_x`` / ``hf_y``.

    Any entry can be overridden through ``kwargs`` (e.g. ``downsampler``,
    ``gap_handler``, ``max_n_samples``).
    """
    # Datetime-like x data gets a "date" axis, everything else "linear"
    x_is_datetime = isinstance(
        hf_x, pd.DatetimeIndex
    ) or pd.core.dtypes.common.is_datetime64_any_dtype(hf_x)
    defaults = {
        "x": hf_x,
        "y": hf_y,
        "axis_type": "date" if x_is_datetime else "linear",
        "downsampler": MinMaxLTTB(),
        "gap_handler": MedDiffGapHandler(),
        "max_n_samples": 1_000,
    }
    # kwargs win over the defaults
    return {**defaults, **kwargs}
43 |
44 |
def wrap_aggregate(
    hf_x: np.ndarray | None = None,
    hf_y: pd.Series | np.ndarray = None,
    downsampler: DataPointSelector | DataAggregator = None,
    gap_handler: AbstractGapHandler = None,
    n_out: int = None,
) -> tuple[np.ndarray, np.ndarray, np.ndarray]:
    """Aggregate the full (hf_x, hf_y) range with the given downsampler and
    gap handler, returning PlotlyAggregatorParser.aggregate's result."""
    hf_trace_data = construct_hf_data_dict(
        hf_x=hf_x,
        hf_y=hf_y,
        downsampler=downsampler,
        gap_handler=gap_handler,
        max_n_samples=n_out,
    )
    # Aggregate over the entire series: start index 0 through len(hf_y)
    return PlotlyAggregatorParser.aggregate(hf_trace_data, 0, len(hf_y))
62 |
63 |
def construct_index(series: pd.Series, index_type: str) -> pd.Index:
    """Construct an index of the given type for the given series.

    series: pd.Series
        The series to construct an index for
    index_type: str
        One of "range", "datetime", "timedelta", "float", or "int"
    """
    n = len(series)
    # Lazy factories so only the requested index gets constructed
    factories = {
        "range": lambda: pd.RangeIndex(n),
        "datetime": lambda: pd.date_range("1/1/2020", periods=n, freq="1ms"),
        "timedelta": lambda: pd.timedelta_range(start="0s", periods=n, freq="1ms"),
        "float": lambda: pd.Index(np.arange(n, dtype=np.float64)),
        "int": lambda: pd.Index(np.arange(n, dtype=np.int64)),
    }
    if index_type not in factories:
        raise ValueError(f"Unknown index type: {index_type}")
    return factories[index_type]()
83 |
84 |
85 | def decode_trace_bdata(data: dict | list):
86 | """As from plotly>6.0.0, traces can be encoded as binary strings, we need to decode
87 | them to get the actual data.
88 | """
89 | if isinstance(data, dict) and "bdata" in data:
90 | bdata = data["bdata"]
91 | dtype = data["dtype"]
92 |
93 | # Decode the base64 encoded binary data
94 | decoded_data = base64.b64decode(bdata)
95 | # Convert the decoded data to a numpy array
96 | np_array = np.frombuffer(decoded_data, dtype=np.dtype(dtype))
97 | return np_array # Return the numpy array for further use if needed
98 | else:
99 | return data
100 |
--------------------------------------------------------------------------------