├── benchmarks ├── __init__.py ├── sample.py ├── event.py └── blocked.py ├── doc ├── genindex.rst ├── api │ ├── io.rst │ ├── event.rst │ ├── profile.rst │ ├── regrid.rst │ ├── index.rst │ └── blocked.rst ├── _static │ └── asv │ │ ├── info.json │ │ ├── swallow.ico │ │ ├── swallow.png │ │ ├── graphs │ │ ├── summary │ │ │ ├── event.EventDistributedSuite.time_find_event.json │ │ │ ├── blocked.ResampleDistributedSuite.time_blocked.json │ │ │ ├── blocked.ResampleDistributedSuite.time_xarray.json │ │ │ ├── blocked.GroupbyDistributedSuite.time_xarray_dayofyear.json │ │ │ ├── blocked.GroupbyDistributedSuite.time_blocked_dayofyear.json │ │ │ ├── blocked.GroupbyDistributedSuite.time_blocked_monthday.json │ │ │ ├── blocked.ResampleSuite.time_xarray.json │ │ │ ├── event.EventSuite.time_find_event.json │ │ │ ├── blocked.GroupbySuite.time_xarray_dayofyear.json │ │ │ ├── blocked.ResampleSuite.time_blocked.json │ │ │ ├── blocked.GroupbySuite.time_blocked_dayofyear.json │ │ │ └── blocked.GroupbySuite.time_blocked_monthday.json │ │ └── arch-x86_64 │ │ │ └── branch-master │ │ │ └── cfunits │ │ │ ├── cpu-AMD Ryzen 5 3600X 6-Core Processor │ │ │ └── dask │ │ │ │ └── iris │ │ │ │ └── machine-Freya │ │ │ │ └── mule │ │ │ │ └── netcdf4 │ │ │ │ └── num_cpu-12 │ │ │ │ └── os-Linux 4.4.0-19041-Microsoft │ │ │ │ └── pandas │ │ │ │ └── python │ │ │ │ └── python-graphviz │ │ │ │ └── ram-16726988 │ │ │ │ └── scipy │ │ │ │ └── sparse │ │ │ │ └── tqdm │ │ │ │ └── typing_extensions │ │ │ │ └── xarray │ │ │ │ ├── blocked.ResampleDistributedSuite.time_blocked.json │ │ │ │ ├── blocked.ResampleDistributedSuite.time_xarray.json │ │ │ │ ├── event.EventDistributedSuite.time_find_event.json │ │ │ │ ├── blocked.GroupbyDistributedSuite.time_blocked_dayofyear.json │ │ │ │ ├── blocked.GroupbyDistributedSuite.time_blocked_monthday.json │ │ │ │ ├── blocked.GroupbyDistributedSuite.time_xarray_dayofyear.json │ │ │ │ ├── blocked.ResampleSuite.time_blocked.json │ │ │ │ ├── blocked.ResampleSuite.time_xarray.json │ │ │ 
│ ├── event.EventSuite.time_find_event.json │ │ │ │ ├── blocked.GroupbySuite.time_xarray_dayofyear.json │ │ │ │ ├── blocked.GroupbySuite.time_blocked_dayofyear.json │ │ │ │ ├── blocked.GroupbySuite.time_blocked_monthday.json │ │ │ │ └── summary.json │ │ │ └── cpu-Intel(R) Xeon(R) Platinum 8268 CPU @ 2.90GHz │ │ │ └── dask │ │ │ └── iris │ │ │ └── machine-gadi │ │ │ └── mule │ │ │ └── netcdf4 │ │ │ └── num_cpu-48 │ │ │ └── os-Linux 4.18.0-240.1.1.el8.nci.x86_64 │ │ │ └── pandas │ │ │ └── python │ │ │ └── python-graphviz │ │ │ └── ram-262432756 │ │ │ └── scipy │ │ │ └── sparse │ │ │ └── tqdm │ │ │ └── typing_extensions │ │ │ └── xarray │ │ │ ├── blocked.ResampleSuite.time_xarray.json │ │ │ ├── event.EventSuite.time_find_event.json │ │ │ ├── blocked.ResampleSuite.time_blocked.json │ │ │ ├── blocked.GroupbySuite.time_blocked_dayofyear.json │ │ │ ├── blocked.GroupbySuite.time_blocked_monthday.json │ │ │ ├── blocked.GroupbySuite.time_xarray_dayofyear.json │ │ │ ├── blocked.ResampleDistributedSuite.time_xarray.json │ │ │ ├── event.EventDistributedSuite.time_find_event.json │ │ │ ├── blocked.ResampleDistributedSuite.time_blocked.json │ │ │ ├── blocked.GroupbyDistributedSuite.time_blocked_monthday.json │ │ │ ├── blocked.GroupbyDistributedSuite.time_xarray_dayofyear.json │ │ │ ├── blocked.GroupbyDistributedSuite.time_blocked_dayofyear.json │ │ │ └── summary.json │ │ ├── error.html │ │ ├── regressions.css │ │ ├── summarylist.css │ │ ├── regressions.json │ │ ├── vendor │ │ ├── jquery.flot-0.8.3.categories.min.js │ │ ├── jquery.flot-0.8.3.selection.min.js │ │ ├── jquery.flot-0.8.3.time.min.js │ │ ├── stupidtable.js │ │ ├── jquery.flot.axislabels.js │ │ └── jquery.flot.orderBars.js │ │ ├── asv.css │ │ ├── summarygrid.js │ │ ├── regressions.xml │ │ ├── asv_ui.js │ │ └── index.html ├── nci.rst ├── Makefile ├── index.rst ├── make.bat └── conf.py ├── codecov.yml ├── MANIFEST.in ├── .vscode └── settings.json ├── .readthedocs.yml ├── test ├── test_climtas.py ├── conftest.py ├── 
test_daskutil.py ├── test_grid.py ├── test_dimension.py ├── test_io.py ├── test_helpers.py └── test_event.py ├── .asv └── results │ ├── Freya │ ├── machine.json │ ├── 3801e053-conda-py-cfunits-dask-iris-mule-netcdf4-pandas-python-graphviz-scipy-sparse-tqdm-typing_extensions-xarray.json │ ├── 0ab06d7a-conda-py-cfunits-dask-iris-mule-netcdf4-pandas-python-graphviz-scipy-sparse-tqdm-typing_extensions-xarray.json │ └── 9bb26ba6-conda-py-cfunits-dask-iris-mule-netcdf4-pandas-python-graphviz-scipy-sparse-tqdm-typing_extensions-xarray.json │ └── gadi │ ├── machine.json │ ├── dc44fc19-conda-py-cfunits-dask-iris-mule-netcdf4-pandas-python-graphviz-scipy-sparse-tqdm-typing_extensions-xarray.json │ ├── 9bb26ba6-conda-py-cfunits-dask-iris-mule-netcdf4-pandas-python-graphviz-scipy-sparse-tqdm-typing_extensions-xarray.json │ └── afde3022-conda-py-cfunits-dask-iris-mule-netcdf4-pandas-python-graphviz-scipy-sparse-tqdm-typing_extensions-xarray.json ├── src └── climtas │ ├── __init__.py │ ├── dimension.py │ ├── daskutil.py │ ├── nci │ └── __init__.py │ ├── grid.py │ └── io.py ├── environment.yml ├── setup.py ├── setup.cfg ├── meta.yaml ├── profile_event.py ├── .gitignore ├── README.rst ├── .circleci └── config.yml └── asv.conf.json /benchmarks/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /doc/genindex.rst: -------------------------------------------------------------------------------- 1 | Index 2 | ===== 3 | -------------------------------------------------------------------------------- /codecov.yml: -------------------------------------------------------------------------------- 1 | ignore: 2 | - "src/climtas/_version.py" 3 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include versioneer.py 2 | include 
src/climtas/_version.py 3 | -------------------------------------------------------------------------------- /doc/api/io.rst: -------------------------------------------------------------------------------- 1 | climtas.io 2 | ---------- 3 | .. automodule:: climtas.io 4 | :members: 5 | 6 | -------------------------------------------------------------------------------- /doc/_static/asv/info.json: -------------------------------------------------------------------------------- 1 | { 2 | "asv-version": "0.4.2", 3 | "timestamp": 1610597327300 4 | } -------------------------------------------------------------------------------- /doc/_static/asv/swallow.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ScottWales/climtas/HEAD/doc/_static/asv/swallow.ico -------------------------------------------------------------------------------- /doc/_static/asv/swallow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ScottWales/climtas/HEAD/doc/_static/asv/swallow.png -------------------------------------------------------------------------------- /doc/api/event.rst: -------------------------------------------------------------------------------- 1 | climtas.event 2 | ------------- 3 | .. automodule:: climtas.event 4 | :members: 5 | 6 | -------------------------------------------------------------------------------- /doc/api/profile.rst: -------------------------------------------------------------------------------- 1 | climtas.profile 2 | --------------- 3 | .. automodule:: climtas.profile 4 | :members: 5 | -------------------------------------------------------------------------------- /doc/api/regrid.rst: -------------------------------------------------------------------------------- 1 | climtas.regrid 2 | -------------- 3 | .. 
automodule:: climtas.regrid 4 | :members: 5 | 6 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "python.formatting.provider": "black", 3 | "python.testing.pytestEnabled": true, 4 | } 5 | -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | sphinx: 4 | configuration: doc/conf.py 5 | 6 | conda: 7 | environment: environment.yml 8 | -------------------------------------------------------------------------------- /doc/api/index.rst: -------------------------------------------------------------------------------- 1 | Python API 2 | ========== 3 | 4 | .. toctree:: 5 | io 6 | event 7 | blocked 8 | regrid 9 | profile 10 | -------------------------------------------------------------------------------- /test/test_climtas.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import print_function 3 | 4 | 5 | def test_import(): 6 | import climtas 7 | -------------------------------------------------------------------------------- /doc/_static/asv/graphs/summary/event.EventDistributedSuite.time_find_event.json: -------------------------------------------------------------------------------- 1 | [[162, 4.680436278680172], [231, 2.409879918799354], [235, 2.396913523209943]] -------------------------------------------------------------------------------- /doc/_static/asv/graphs/summary/blocked.ResampleDistributedSuite.time_blocked.json: -------------------------------------------------------------------------------- 1 | [[162, 0.7181764074538736], [231, 0.7055557898279452], [235, 0.7219422447346218]] -------------------------------------------------------------------------------- 
/doc/_static/asv/graphs/summary/blocked.ResampleDistributedSuite.time_xarray.json: -------------------------------------------------------------------------------- 1 | [[162, 3.6412415043770823], [231, 3.482931840883537], [235, 3.6142867972513573]] -------------------------------------------------------------------------------- /doc/_static/asv/graphs/summary/blocked.GroupbyDistributedSuite.time_xarray_dayofyear.json: -------------------------------------------------------------------------------- 1 | [[162, 6.655312080764815], [231, 6.567617754626099], [235, 6.68963967793949]] -------------------------------------------------------------------------------- /doc/_static/asv/graphs/summary/blocked.GroupbyDistributedSuite.time_blocked_dayofyear.json: -------------------------------------------------------------------------------- 1 | [[162, 1.1668247588085625], [231, 1.1504474958848818], [235, 1.1792496409698605]] -------------------------------------------------------------------------------- /doc/_static/asv/graphs/summary/blocked.GroupbyDistributedSuite.time_blocked_monthday.json: -------------------------------------------------------------------------------- 1 | [[162, 0.9599340958007904], [231, 0.9455693568270093], [235, 0.9531490336939297]] -------------------------------------------------------------------------------- /doc/_static/asv/graphs/summary/blocked.ResampleSuite.time_xarray.json: -------------------------------------------------------------------------------- 1 | [[162, 2.618022876919107], [204, 2.618022876919107], [208, 2.651673622957724], [231, 2.669954278193672], [235, 2.4761876716136255]] -------------------------------------------------------------------------------- /doc/_static/asv/graphs/summary/event.EventSuite.time_find_event.json: -------------------------------------------------------------------------------- 1 | [[162, 1.9132366991263452], [204, 1.9132366991263452], [208, 1.9537981077728621], [231, 2.52635425151849], [235, 
2.76469093595943]] -------------------------------------------------------------------------------- /doc/_static/asv/graphs/summary/blocked.GroupbySuite.time_xarray_dayofyear.json: -------------------------------------------------------------------------------- 1 | [[162, 4.153588189240214], [204, 4.153588189240214], [208, 4.217684941104278], [231, 4.377534913366834], [235, 3.8968954800335798]] -------------------------------------------------------------------------------- /doc/_static/asv/graphs/summary/blocked.ResampleSuite.time_blocked.json: -------------------------------------------------------------------------------- 1 | [[162, 0.12277808671446029], [204, 0.12277808671446029], [208, 0.12220112403259703], [231, 0.11865293918805024], [235, 0.11687777649532403]] -------------------------------------------------------------------------------- /doc/_static/asv/graphs/summary/blocked.GroupbySuite.time_blocked_dayofyear.json: -------------------------------------------------------------------------------- 1 | [[162, 0.4373719010973967], [204, 0.4373719010973967], [208, 0.4463710570757527], [231, 0.4816776344354351], [235, 0.43743053142081423]] -------------------------------------------------------------------------------- /doc/_static/asv/graphs/summary/blocked.GroupbySuite.time_blocked_monthday.json: -------------------------------------------------------------------------------- 1 | [[162, 0.30741983672152456], [204, 0.30741983672152456], [208, 0.3236941637695328], [231, 0.37463673058201297], [235, 0.3458564380391663]] -------------------------------------------------------------------------------- /.asv/results/Freya/machine.json: -------------------------------------------------------------------------------- 1 | { 2 | "arch": "x86_64", 3 | "cpu": "AMD Ryzen 5 3600X 6-Core Processor", 4 | "machine": "Freya", 5 | "num_cpu": "12", 6 | "os": "Linux 4.4.0-19041-Microsoft", 7 | "ram": "16726988", 8 | "version": 1 9 | } 
-------------------------------------------------------------------------------- /.asv/results/gadi/machine.json: -------------------------------------------------------------------------------- 1 | { 2 | "arch": "x86_64", 3 | "cpu": "Intel(R) Xeon(R) Platinum 8268 CPU @ 2.90GHz", 4 | "machine": "gadi", 5 | "num_cpu": "48", 6 | "os": "Linux 4.18.0-240.1.1.el8.nci.x86_64", 7 | "ram": "262432756", 8 | "version": 1 9 | } -------------------------------------------------------------------------------- /doc/nci.rst: -------------------------------------------------------------------------------- 1 | NCI Specific Functions 2 | ====================== 3 | 4 | These functions are specifically for working with the compute and data 5 | resources at NCI, and may not work at other sites. 6 | 7 | climtas.nci 8 | ----------- 9 | .. automodule:: climtas.nci 10 | :members: 11 | -------------------------------------------------------------------------------- /test/conftest.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import dask 3 | 4 | 5 | @pytest.fixture 6 | def distributed_client(tmpdir): 7 | c = dask.distributed.Client( 8 | n_workers=1, threads_per_worker=1, local_directory=tmpdir / "dask-worker-space" 9 | ) 10 | yield c 11 | c.close() 12 | -------------------------------------------------------------------------------- /doc/_static/asv/graphs/arch-x86_64/branch-master/cfunits/cpu-AMD Ryzen 5 3600X 6-Core Processor/dask/iris/machine-Freya/mule/netcdf4/num_cpu-12/os-Linux 4.4.0-19041-Microsoft/pandas/python/python-graphviz/ram-16726988/scipy/sparse/tqdm/typing_extensions/xarray/blocked.ResampleDistributedSuite.time_blocked.json: -------------------------------------------------------------------------------- 1 | [[235, 0.7067086999886669]] -------------------------------------------------------------------------------- /doc/_static/asv/graphs/arch-x86_64/branch-master/cfunits/cpu-AMD Ryzen 5 3600X 6-Core 
Processor/dask/iris/machine-Freya/mule/netcdf4/num_cpu-12/os-Linux 4.4.0-19041-Microsoft/pandas/python/python-graphviz/ram-16726988/scipy/sparse/tqdm/typing_extensions/xarray/blocked.ResampleDistributedSuite.time_xarray.json: -------------------------------------------------------------------------------- 1 | [[235, 3.3373256499762647]] -------------------------------------------------------------------------------- /doc/_static/asv/graphs/arch-x86_64/branch-master/cfunits/cpu-AMD Ryzen 5 3600X 6-Core Processor/dask/iris/machine-Freya/mule/netcdf4/num_cpu-12/os-Linux 4.4.0-19041-Microsoft/pandas/python/python-graphviz/ram-16726988/scipy/sparse/tqdm/typing_extensions/xarray/event.EventDistributedSuite.time_find_event.json: -------------------------------------------------------------------------------- 1 | [[235, 2.0268603999866173]] -------------------------------------------------------------------------------- /doc/_static/asv/graphs/arch-x86_64/branch-master/cfunits/cpu-AMD Ryzen 5 3600X 6-Core Processor/dask/iris/machine-Freya/mule/netcdf4/num_cpu-12/os-Linux 4.4.0-19041-Microsoft/pandas/python/python-graphviz/ram-16726988/scipy/sparse/tqdm/typing_extensions/xarray/blocked.GroupbyDistributedSuite.time_blocked_dayofyear.json: -------------------------------------------------------------------------------- 1 | [[235, 1.1829518500017002]] -------------------------------------------------------------------------------- /doc/_static/asv/graphs/arch-x86_64/branch-master/cfunits/cpu-AMD Ryzen 5 3600X 6-Core Processor/dask/iris/machine-Freya/mule/netcdf4/num_cpu-12/os-Linux 4.4.0-19041-Microsoft/pandas/python/python-graphviz/ram-16726988/scipy/sparse/tqdm/typing_extensions/xarray/blocked.GroupbyDistributedSuite.time_blocked_monthday.json: -------------------------------------------------------------------------------- 1 | [[235, 0.963999400002649]] -------------------------------------------------------------------------------- 
/doc/_static/asv/graphs/arch-x86_64/branch-master/cfunits/cpu-AMD Ryzen 5 3600X 6-Core Processor/dask/iris/machine-Freya/mule/netcdf4/num_cpu-12/os-Linux 4.4.0-19041-Microsoft/pandas/python/python-graphviz/ram-16726988/scipy/sparse/tqdm/typing_extensions/xarray/blocked.GroupbyDistributedSuite.time_xarray_dayofyear.json: -------------------------------------------------------------------------------- 1 | [[235, 6.495260349998716]] -------------------------------------------------------------------------------- /src/climtas/__init__.py: -------------------------------------------------------------------------------- 1 | from ._version import get_versions 2 | 3 | __version__ = get_versions()["version"] 4 | del get_versions 5 | 6 | from . import event 7 | from . import io 8 | from . import regrid 9 | from . import blocked 10 | from . import profile 11 | 12 | from .blocked import blocked_resample, blocked_groupby, approx_percentile 13 | -------------------------------------------------------------------------------- /doc/_static/asv/graphs/arch-x86_64/branch-master/cfunits/cpu-AMD Ryzen 5 3600X 6-Core Processor/dask/iris/machine-Freya/mule/netcdf4/num_cpu-12/os-Linux 4.4.0-19041-Microsoft/pandas/python/python-graphviz/ram-16726988/scipy/sparse/tqdm/typing_extensions/xarray/blocked.ResampleSuite.time_blocked.json: -------------------------------------------------------------------------------- 1 | [[204, 0.13036479999573203], [208, 0.1291424500013818], [235, 0.12802800003555603]] -------------------------------------------------------------------------------- /doc/_static/asv/graphs/arch-x86_64/branch-master/cfunits/cpu-AMD Ryzen 5 3600X 6-Core Processor/dask/iris/machine-Freya/mule/netcdf4/num_cpu-12/os-Linux 4.4.0-19041-Microsoft/pandas/python/python-graphviz/ram-16726988/scipy/sparse/tqdm/typing_extensions/xarray/blocked.ResampleSuite.time_xarray.json: -------------------------------------------------------------------------------- 1 | [[204, 1.8484123000016552], 
[208, 1.8962347999986378], [235, 1.7487288500124123]] -------------------------------------------------------------------------------- /doc/_static/asv/graphs/arch-x86_64/branch-master/cfunits/cpu-AMD Ryzen 5 3600X 6-Core Processor/dask/iris/machine-Freya/mule/netcdf4/num_cpu-12/os-Linux 4.4.0-19041-Microsoft/pandas/python/python-graphviz/ram-16726988/scipy/sparse/tqdm/typing_extensions/xarray/event.EventSuite.time_find_event.json: -------------------------------------------------------------------------------- 1 | [[204, 1.1528359999938402], [208, 1.2022353499996825], [235, 1.4560975000204053]] -------------------------------------------------------------------------------- /doc/_static/asv/graphs/arch-x86_64/branch-master/cfunits/cpu-AMD Ryzen 5 3600X 6-Core Processor/dask/iris/machine-Freya/mule/netcdf4/num_cpu-12/os-Linux 4.4.0-19041-Microsoft/pandas/python/python-graphviz/ram-16726988/scipy/sparse/tqdm/typing_extensions/xarray/blocked.GroupbySuite.time_xarray_dayofyear.json: -------------------------------------------------------------------------------- 1 | [[204, 2.9537656500033336], [208, 3.0456320500015863], [235, 2.716725950012915]] -------------------------------------------------------------------------------- /doc/_static/asv/graphs/arch-x86_64/branch-master/cfunits/cpu-AMD Ryzen 5 3600X 6-Core Processor/dask/iris/machine-Freya/mule/netcdf4/num_cpu-12/os-Linux 4.4.0-19041-Microsoft/pandas/python/python-graphviz/ram-16726988/scipy/sparse/tqdm/typing_extensions/xarray/blocked.GroupbySuite.time_blocked_dayofyear.json: -------------------------------------------------------------------------------- 1 | [[204, 0.30014864999975543], [208, 0.3126271499932045], [235, 0.2849782000412233]] -------------------------------------------------------------------------------- /doc/_static/asv/graphs/arch-x86_64/branch-master/cfunits/cpu-AMD Ryzen 5 3600X 6-Core Processor/dask/iris/machine-Freya/mule/netcdf4/num_cpu-12/os-Linux 
4.4.0-19041-Microsoft/pandas/python/python-graphviz/ram-16726988/scipy/sparse/tqdm/typing_extensions/xarray/blocked.GroupbySuite.time_blocked_monthday.json: -------------------------------------------------------------------------------- 1 | [[204, 0.21985315000347327], [208, 0.2437466499977745], [235, 0.2233245499955956]] -------------------------------------------------------------------------------- /doc/_static/asv/graphs/arch-x86_64/branch-master/cfunits/cpu-Intel(R) Xeon(R) Platinum 8268 CPU @ 2.90GHz/dask/iris/machine-gadi/mule/netcdf4/num_cpu-48/os-Linux 4.18.0-240.1.1.el8.nci.x86_64/pandas/python/python-graphviz/ram-262432756/scipy/sparse/tqdm/typing_extensions/xarray/blocked.ResampleSuite.time_xarray.json: -------------------------------------------------------------------------------- 1 | [[162, 3.708070858469], [231, 3.7593740224838257], [235, 3.5062642130069435]] -------------------------------------------------------------------------------- /doc/_static/asv/graphs/arch-x86_64/branch-master/cfunits/cpu-Intel(R) Xeon(R) Platinum 8268 CPU @ 2.90GHz/dask/iris/machine-gadi/mule/netcdf4/num_cpu-48/os-Linux 4.18.0-240.1.1.el8.nci.x86_64/pandas/python/python-graphviz/ram-262432756/scipy/sparse/tqdm/typing_extensions/xarray/event.EventSuite.time_find_event.json: -------------------------------------------------------------------------------- 1 | [[162, 3.1751911519968417], [231, 5.308832254988374], [235, 5.249316045985324]] -------------------------------------------------------------------------------- /doc/_static/asv/graphs/arch-x86_64/branch-master/cfunits/cpu-Intel(R) Xeon(R) Platinum 8268 CPU @ 2.90GHz/dask/iris/machine-gadi/mule/netcdf4/num_cpu-48/os-Linux 4.18.0-240.1.1.el8.nci.x86_64/pandas/python/python-graphviz/ram-262432756/scipy/sparse/tqdm/typing_extensions/xarray/blocked.ResampleSuite.time_blocked.json: -------------------------------------------------------------------------------- 1 | [[162, 0.11563288999604993], [231, 0.10901543201180175], 
[235, 0.10669864900410175]] -------------------------------------------------------------------------------- /environment.yml: -------------------------------------------------------------------------------- 1 | name: climtas 2 | channels: 3 | - conda-forge 4 | - coecms 5 | dependencies: 6 | - python >= 3.6 7 | - dask 8 | - netcdf4 9 | - pandas 10 | - scipy 11 | - tqdm 12 | - xarray 13 | - typing_extensions 14 | - cfunits 15 | - iris 16 | - mule 17 | - sparse 18 | - black 19 | - nbsphinx 20 | - sphinx-book-theme 21 | -------------------------------------------------------------------------------- /doc/_static/asv/graphs/arch-x86_64/branch-master/cfunits/cpu-Intel(R) Xeon(R) Platinum 8268 CPU @ 2.90GHz/dask/iris/machine-gadi/mule/netcdf4/num_cpu-48/os-Linux 4.18.0-240.1.1.el8.nci.x86_64/pandas/python/python-graphviz/ram-262432756/scipy/sparse/tqdm/typing_extensions/xarray/blocked.GroupbySuite.time_blocked_dayofyear.json: -------------------------------------------------------------------------------- 1 | [[162, 0.6373314684897196], [231, 0.7421407370420638], [235, 0.6714389724947978]] -------------------------------------------------------------------------------- /doc/_static/asv/graphs/arch-x86_64/branch-master/cfunits/cpu-Intel(R) Xeon(R) Platinum 8268 CPU @ 2.90GHz/dask/iris/machine-gadi/mule/netcdf4/num_cpu-48/os-Linux 4.18.0-240.1.1.el8.nci.x86_64/pandas/python/python-graphviz/ram-262432756/scipy/sparse/tqdm/typing_extensions/xarray/blocked.GroupbySuite.time_blocked_monthday.json: -------------------------------------------------------------------------------- 1 | [[162, 0.4298640069901012], [231, 0.5758137800148688], [235, 0.5356181205133907]] -------------------------------------------------------------------------------- /doc/_static/asv/graphs/arch-x86_64/branch-master/cfunits/cpu-Intel(R) Xeon(R) Platinum 8268 CPU @ 2.90GHz/dask/iris/machine-gadi/mule/netcdf4/num_cpu-48/os-Linux 
4.18.0-240.1.1.el8.nci.x86_64/pandas/python/python-graphviz/ram-262432756/scipy/sparse/tqdm/typing_extensions/xarray/blocked.GroupbySuite.time_xarray_dayofyear.json: -------------------------------------------------------------------------------- 1 | [[162, 5.840779834979912], [231, 6.291899875999661], [235, 5.589740982977673]] -------------------------------------------------------------------------------- /doc/_static/asv/graphs/arch-x86_64/branch-master/cfunits/cpu-Intel(R) Xeon(R) Platinum 8268 CPU @ 2.90GHz/dask/iris/machine-gadi/mule/netcdf4/num_cpu-48/os-Linux 4.18.0-240.1.1.el8.nci.x86_64/pandas/python/python-graphviz/ram-262432756/scipy/sparse/tqdm/typing_extensions/xarray/blocked.ResampleDistributedSuite.time_xarray.json: -------------------------------------------------------------------------------- 1 | [[162, 3.9728336649714038], [231, 3.634890771994833], [235, 3.9142326589790173]] -------------------------------------------------------------------------------- /doc/_static/asv/graphs/arch-x86_64/branch-master/cfunits/cpu-Intel(R) Xeon(R) Platinum 8268 CPU @ 2.90GHz/dask/iris/machine-gadi/mule/netcdf4/num_cpu-48/os-Linux 4.18.0-240.1.1.el8.nci.x86_64/pandas/python/python-graphviz/ram-262432756/scipy/sparse/tqdm/typing_extensions/xarray/event.EventDistributedSuite.time_find_event.json: -------------------------------------------------------------------------------- 1 | [[162, 10.808087107987376], [231, 2.8652793369838037], [235, 2.8345289284770843]] -------------------------------------------------------------------------------- /doc/_static/asv/graphs/arch-x86_64/branch-master/cfunits/cpu-Intel(R) Xeon(R) Platinum 8268 CPU @ 2.90GHz/dask/iris/machine-gadi/mule/netcdf4/num_cpu-48/os-Linux 4.18.0-240.1.1.el8.nci.x86_64/pandas/python/python-graphviz/ram-262432756/scipy/sparse/tqdm/typing_extensions/xarray/blocked.ResampleDistributedSuite.time_blocked.json: -------------------------------------------------------------------------------- 1 | [[162, 
0.7298302005219739], [231, 0.7044047605013475], [235, 0.7375041579944082]] -------------------------------------------------------------------------------- /doc/_static/asv/graphs/arch-x86_64/branch-master/cfunits/cpu-Intel(R) Xeon(R) Platinum 8268 CPU @ 2.90GHz/dask/iris/machine-gadi/mule/netcdf4/num_cpu-48/os-Linux 4.18.0-240.1.1.el8.nci.x86_64/pandas/python/python-graphviz/ram-262432756/scipy/sparse/tqdm/typing_extensions/xarray/blocked.GroupbyDistributedSuite.time_blocked_monthday.json: -------------------------------------------------------------------------------- 1 | [[162, 0.9558859354874585], [231, 0.9274916650028899], [235, 0.9424207944830414]] -------------------------------------------------------------------------------- /doc/_static/asv/graphs/arch-x86_64/branch-master/cfunits/cpu-Intel(R) Xeon(R) Platinum 8268 CPU @ 2.90GHz/dask/iris/machine-gadi/mule/netcdf4/num_cpu-48/os-Linux 4.18.0-240.1.1.el8.nci.x86_64/pandas/python/python-graphviz/ram-262432756/scipy/sparse/tqdm/typing_extensions/xarray/blocked.GroupbyDistributedSuite.time_xarray_dayofyear.json: -------------------------------------------------------------------------------- 1 | [[162, 6.81930769601604], [231, 6.640781223017257], [235, 6.889836066489806]] -------------------------------------------------------------------------------- /doc/_static/asv/graphs/arch-x86_64/branch-master/cfunits/cpu-Intel(R) Xeon(R) Platinum 8268 CPU @ 2.90GHz/dask/iris/machine-gadi/mule/netcdf4/num_cpu-48/os-Linux 4.18.0-240.1.1.el8.nci.x86_64/pandas/python/python-graphviz/ram-262432756/scipy/sparse/tqdm/typing_extensions/xarray/blocked.GroupbyDistributedSuite.time_blocked_dayofyear.json: -------------------------------------------------------------------------------- 1 | [[162, 1.1509175270039123], [231, 1.1188362745160703], [235, 1.1755590185057372]] -------------------------------------------------------------------------------- /setup.py: 
-------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import print_function 3 | from setuptools import setup, find_packages 4 | import versioneer 5 | 6 | setup( 7 | name="climtas", 8 | packages=find_packages("src"), 9 | package_dir={"": "src"}, 10 | version=versioneer.get_version(), 11 | cmdclass=versioneer.get_cmdclass(), 12 | install_requires=[], 13 | entry_points={"console_scripts": []}, 14 | ) 15 | -------------------------------------------------------------------------------- /doc/api/blocked.rst: -------------------------------------------------------------------------------- 1 | climtas.blocked 2 | --------------- 3 | .. automodule:: climtas.blocked 4 | 5 | groupby 6 | ^^^^^^^ 7 | 8 | .. autofunction:: climtas.blocked.blocked_groupby 9 | 10 | .. autoclass:: climtas.blocked.BlockedGroupby 11 | :members: 12 | 13 | percentile 14 | ^^^^^^^^^^ 15 | 16 | .. autofunction:: climtas.blocked.approx_percentile 17 | 18 | .. autofunction:: climtas.blocked.dask_approx_percentile 19 | 20 | resample 21 | ^^^^^^^^ 22 | 23 | .. autofunction:: climtas.blocked.blocked_resample 24 | 25 | .. 
autoclass:: climtas.blocked.BlockedResampler 26 | :members: 27 | -------------------------------------------------------------------------------- /benchmarks/sample.py: -------------------------------------------------------------------------------- 1 | import pandas 2 | import dask 3 | import xarray 4 | 5 | 6 | def sample_data(years, freq): 7 | x = range(100) 8 | y = range(100) 9 | t = pandas.date_range("2001", str(2001 + years), freq=freq, closed="left") 10 | 11 | t_chunks = pandas.Series(0, index=t).resample("M").count().values 12 | 13 | data = dask.array.concatenate( 14 | [ 15 | dask.array.random.random((c, len(y), len(x)), chunks=(-1, 50, 50)) 16 | for c in t_chunks 17 | ] 18 | ) 19 | 20 | da = xarray.DataArray(data, coords=[("time", t), ("y", y), ("x", x)]) 21 | 22 | return da 23 | -------------------------------------------------------------------------------- /benchmarks/event.py: -------------------------------------------------------------------------------- 1 | import climtas 2 | import dask 3 | import tempfile 4 | from .sample import sample_data 5 | 6 | 7 | class EventSuite: 8 | def setup(self): 9 | self.data = sample_data(years=1, freq="D") 10 | 11 | def time_find_event(self): 12 | events = climtas.event.find_events(self.data > 0.9, min_duration=4) 13 | dask.compute(events) 14 | 15 | 16 | class EventDistributedSuite(EventSuite): 17 | def setup(self): 18 | self.tmpdir = tempfile.TemporaryDirectory() 19 | self.client = dask.distributed.Client(local_directory=self.tmpdir.name) 20 | super().setup() 21 | 22 | def teardown(self): 23 | self.client.close() 24 | -------------------------------------------------------------------------------- /doc/_static/asv/error.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | airspeed velocity error 5 | 6 | 7 | 8 | 9 |

10 | swallow 11 | Can not determine continental origin of swallow. 12 |

13 | 14 |

15 | One or more external (JavaScript) dependencies of airspeed velocity failed to load. 16 |

17 | 18 |

19 | Make sure you have an active internet connection and enable 3rd-party scripts 20 | in your browser the first time you load airspeed velocity. 21 |

22 | 23 | 24 | -------------------------------------------------------------------------------- /doc/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | SPHINXPROJ = climtas 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | serve: html 18 | python -m http.server --bind localhost --directory _build/html/ 19 | 20 | # Catch-all target: route all unknown targets to Sphinx using the new 21 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 22 | %: Makefile 23 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 24 | -------------------------------------------------------------------------------- /test/test_daskutil.py: -------------------------------------------------------------------------------- 1 | from climtas.daskutil import * 2 | 3 | 4 | def compare_compute(s): 5 | a = throttled_compute(s, n=10) 6 | (b,) = dask.compute(s) 7 | numpy.testing.assert_array_equal(a, b) 8 | 9 | 10 | def test_throttled_compute(): 11 | s = numpy.random.random((10, 10)) 12 | compare_compute(s) 13 | 14 | s = dask.array.from_array(s, chunks=(5, 5)) 15 | compare_compute(s) 16 | 17 | s = dask.array.random.random((10, 10), chunks=(5, 5)) 18 | compare_compute(s) 19 | 20 | t = dask.array.random.random((10, 10), chunks=(2, 2)) 21 | s = s @ t 22 | compare_compute(s) 23 | 24 | 25 | def test_visualize_block(): 26 | import dask.dot 27 | 28 | s = dask.array.random.random((10, 10), chunks=(5, 5)) 29 | s = s + 1 30 | v = visualize_block(s) 31 | 32 | assert "label=add" in v.source 33 | -------------------------------------------------------------------------------- 
/doc/index.rst: -------------------------------------------------------------------------------- 1 | .. climtas documentation master file, created by 2 | sphinx-quickstart on Tue Mar 13 15:48:49 2018. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | climtas: Climate Timeseries Analysis 7 | ==================================== 8 | 9 | Climtas is a package for working with large climate analyses. It focuses on the 10 | time domain with custom functions for `Xarray `_ and 11 | `Dask `_ data. 12 | 13 | Contents 14 | -------- 15 | .. toctree:: 16 | :maxdepth: 2 17 | :caption: Contents: 18 | 19 | event 20 | groupby 21 | percentile 22 | regrid 23 | resample 24 | 25 | 26 | Reference 27 | --------- 28 | .. toctree:: 29 | :caption: Reference: 30 | 31 | api/index 32 | nci 33 | 34 | genindex 35 | 36 | `Benchmark graphs <_static/asv/index.html>`_ 37 | 38 | 39 | -------------------------------------------------------------------------------- /doc/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=. 11 | set BUILDDIR=_build 12 | set SPHINXPROJ=climtas 13 | 14 | if "%1" == "" goto help 15 | 16 | %SPHINXBUILD% >NUL 2>NUL 17 | if errorlevel 9009 ( 18 | echo. 19 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 20 | echo.installed, then set the SPHINXBUILD environment variable to point 21 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 22 | echo.may add the Sphinx directory to PATH. 23 | echo. 
24 | echo.If you don't have Sphinx installed, grab it from 25 | echo.http://sphinx-doc.org/ 26 | exit /b 1 27 | ) 28 | 29 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 30 | goto end 31 | 32 | :help 33 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 34 | 35 | :end 36 | popd 37 | -------------------------------------------------------------------------------- /doc/_static/asv/regressions.css: -------------------------------------------------------------------------------- 1 | #regressions-body { 2 | margin-left: 2em; 3 | margin-right: 2em; 4 | margin-top: 1em; 5 | margin-bottom: 2em; 6 | } 7 | 8 | #regressions-body table thead th { 9 | cursor: pointer; 10 | white-space: nowrap; 11 | } 12 | 13 | #regressions-body table thead th.desc:after { 14 | content: ' \2191'; 15 | } 16 | 17 | #regressions-body table thead th.asc:after { 18 | content: ' \2193'; 19 | } 20 | 21 | #regressions-body table.ignored { 22 | padding-top: 1em; 23 | color: #ccc; 24 | background-color: #eee; 25 | } 26 | 27 | #regressions-body table.ignored a { 28 | color: #82abda; 29 | } 30 | 31 | #regressions-body .feed-div { 32 | float: right; 33 | } 34 | 35 | #regressions-body table tbody td.date { 36 | white-space: nowrap; 37 | } 38 | 39 | #regressions-body table button { 40 | margin-top: -2px; 41 | padding-top: 2px; 42 | padding-bottom: 0px; 43 | white-space: nowrap; 44 | } 45 | -------------------------------------------------------------------------------- /doc/_static/asv/summarylist.css: -------------------------------------------------------------------------------- 1 | #summarylist-body { 2 | padding-left: 2em; 3 | padding-right: 2em; 4 | padding-top: 1em; 5 | padding-bottom: 2em; 6 | } 7 | 8 | #summarylist-body table thead th { 9 | cursor: pointer; 10 | white-space: nowrap; 11 | } 12 | 13 | #summarylist-body table thead th.desc:after { 14 | content: ' \2191'; 15 | } 16 | 17 | #summarylist-body table thead th.asc:after { 18 | content: ' \2193'; 19 | } 20 | 21 | 
#summarylist-body table.ignored { 22 | padding-top: 1em; 23 | color: #ccc; 24 | background-color: #eee; 25 | } 26 | 27 | #summarylist-body table.ignored a { 28 | color: #82abda; 29 | } 30 | 31 | #summarylist-body table tbody td.positive-change { 32 | background-color: #fdd; 33 | } 34 | 35 | #summarylist-body table tbody td.negative-change { 36 | background-color: #dfd; 37 | } 38 | 39 | #summarylist-body table tbody td.value { 40 | white-space: nowrap; 41 | } 42 | 43 | #summarylist-body table tbody td.change a { 44 | color: black; 45 | white-space: nowrap; 46 | } 47 | 48 | #summarylist-body table tbody td.change-date { 49 | white-space: nowrap; 50 | } 51 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [versioneer] 2 | VCS = git 3 | style = pep440 4 | versionfile_source = src/climtas/_version.py 5 | versionfile_build = climtas/_version.py 6 | tag_prefix = 7 | parentdir_prefix = climtas- 8 | 9 | [coverage:paths] 10 | source = 11 | src 12 | */site-packages 13 | 14 | [tool:pytest] 15 | addopts = --doctest-modules --doctest-glob="*.rst" 16 | doctest_optionflags=ELLIPSIS 17 | norecursedirs = benchmarks notebooks .asv 18 | 19 | [mypy] 20 | files = src/climtas,test 21 | #plugins = numpy.typing.mypy_plugin 22 | 23 | [mypy-climtas._version] 24 | ignore_errors = True 25 | 26 | [mypy-dask.*] 27 | ignore_missing_imports = True 28 | 29 | [mypy-pandas.*] 30 | ignore_missing_imports = True 31 | 32 | [mypy-tqdm.*] 33 | ignore_missing_imports = True 34 | 35 | [mypy-pytest.*] 36 | ignore_missing_imports = True 37 | 38 | [mypy-scipy.*] 39 | ignore_missing_imports = True 40 | 41 | [mypy-iris.*] 42 | ignore_missing_imports = True 43 | 44 | [mypy-mule.*] 45 | ignore_missing_imports = True 46 | 47 | [mypy-sparse.*] 48 | ignore_missing_imports = True 49 | 50 | [mypy-cfunits.*] 51 | ignore_missing_imports = True 52 | 53 | [mypy-graphviz.*] 54 | 
import climtas
import dask
import dask.distributed  # Client is not available via the bare "import dask"
import tempfile
from .sample import sample_data


class GroupbySuite:
    """ASV benchmark: day-of-year climatology, xarray vs climtas.blocked."""

    def setup(self):
        self.data = sample_data(years=5, freq="D")

    def time_xarray_dayofyear(self):
        self.data.groupby("time.dayofyear").mean().load()

    def time_blocked_dayofyear(self):
        climtas.blocked.blocked_groupby(self.data, time="dayofyear").mean().load()

    def time_blocked_monthday(self):
        climtas.blocked.blocked_groupby(self.data, time="monthday").mean().load()


class ResampleSuite:
    """ASV benchmark: 6-hourly to daily resample, xarray vs climtas.blocked."""

    def setup(self):
        self.data = sample_data(years=2, freq="6H")

    def time_xarray(self):
        self.data.resample(time="D").mean().load()

    def time_blocked(self):
        # 4 six-hourly samples per day
        climtas.blocked.blocked_resample(self.data, time=4).mean().load()


class GroupbyDistributedSuite(GroupbySuite):
    """GroupbySuite run against a dask.distributed local cluster."""

    def setup(self):
        self.tmpdir = tempfile.TemporaryDirectory()
        self.client = dask.distributed.Client(local_directory=self.tmpdir.name)
        super().setup()

    def teardown(self):
        self.client.close()
        # Remove the temporary worker directory explicitly instead of
        # relying on garbage collection to run the finalizer
        self.tmpdir.cleanup()


class ResampleDistributedSuite(ResampleSuite):
    """ResampleSuite run against a dask.distributed local cluster."""

    def setup(self):
        self.tmpdir = tempfile.TemporaryDirectory()
        self.client = dask.distributed.Client(local_directory=self.tmpdir.name)
        super().setup()

    def teardown(self):
        self.client.close()
        self.tmpdir.cleanup()
class Timer:
    """Collect named wall-clock intervals.

    The first call to mark(name) opens the interval for that name, a later
    call closes it; results() reports the elapsed seconds for every closed
    interval.
    """

    def __init__(self):
        self.starts = {}
        self.ends = {}

    def mark(self, name):
        # First sighting of a name records its start, any later sighting
        # records (or overwrites) its end
        bucket = self.ends if name in self.starts else self.starts
        bucket[name] = time.perf_counter()

    def results(self):
        # Only intervals that have been closed are reported
        return {name: stop - self.starts[name] for name, stop in self.ends.items()}
"/scratch/w35/saw562/tmp/oisst_clim.nc" 40 | 41 | t.mark("clim") 42 | # climatology = climtas.blocked_groupby( 43 | # sst.sel(time=slice("1985", "1987")), time="monthday" 44 | # ).percentile(90) 45 | # climatology.name = "sst_thresh" 46 | 47 | # climtas.io.to_netcdf_throttled(climatology, clim_file) 48 | 49 | climatology = xarray.open_dataarray(clim_file, chunks={"monthday": 1}) 50 | t.mark("clim") 51 | 52 | t.mark("find") 53 | delta = climtas.blocked_groupby(sst.sel(time="1985"), time="monthday") - climatology 54 | delta = delta.chunk({"time": 30, "lat": 100, "lon": 100}) 55 | print(delta) 56 | events = climtas.event.find_events_block( 57 | delta > 0, min_duration=10, offset=(0, 0, 0) 58 | ) 59 | t.mark("find") 60 | 61 | t.mark("full") 62 | 63 | print("workers ", workers, " threads ", threads) 64 | print(t.results()) 65 | -------------------------------------------------------------------------------- /test/test_dimension.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # Copyright 2018 ARC Centre of Excellence for Climate Extremes 3 | # author: Scott Wales 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 
16 | from __future__ import print_function 17 | 18 | from climtas.dimension import * 19 | 20 | import pytest 21 | import xarray 22 | import numpy 23 | 24 | 25 | def test_remove_degenerate_axes(): 26 | a = xarray.DataArray([1, 2], dims=["i"]) 27 | o = remove_degenerate_axes(a) 28 | 29 | numpy.testing.assert_array_equal(a.data, o.data) 30 | 31 | b = xarray.DataArray([[1, 2], [1, 2]], dims=["i", "j"]) 32 | o = remove_degenerate_axes(b) 33 | 34 | numpy.testing.assert_array_equal([1, 2], o.data) 35 | 36 | 37 | def test_identify_lat_lon(): 38 | da = xarray.DataArray([[0, 0], [0, 0]], coords=[("lat", [0, 1]), ("lon", [0, 1])]) 39 | 40 | # Missing CF metadata is an error 41 | with pytest.raises(Exception): 42 | lat, lon = identify_lat_lon(da) 43 | 44 | # Should find units, axis or standard_name attributes 45 | da.lat.attrs["units"] = "degrees_north" 46 | da.lon.attrs["axis"] = "X" 47 | lat, lon = identify_lat_lon(da) 48 | assert lat.equals(da.lat) 49 | assert lon.equals(da.lon) 50 | 51 | 52 | def test_identify_time(): 53 | da = xarray.DataArray([0, 0], coords=[("time", [0, 1])]) 54 | 55 | # Missing CF metadata is an error 56 | with pytest.raises(Exception): 57 | time = identify_time(da) 58 | 59 | # Units should be identified 60 | da.time.attrs["units"] = "days since 2006-01-09" 61 | time = identify_time(da) 62 | assert time.equals(da.time) 63 | 64 | # Units should work with CF decoding 65 | da = xarray.decode_cf(xarray.Dataset({"da": da})).da 66 | time = identify_time(da) 67 | assert time.equals(da.time) 68 | -------------------------------------------------------------------------------- /doc/_static/asv/regressions.json: -------------------------------------------------------------------------------- 1 | {"regressions": [["event.EventSuite.time_find_event", "graphs/arch-x86_64/branch-master/cfunits/cpu-AMD Ryzen 5 3600X 6-Core Processor/dask/iris/machine-Freya/mule/netcdf4/num_cpu-12/os-Linux 
4.4.0-19041-Microsoft/pandas/python/python-graphviz/ram-16726988/scipy/sparse/tqdm/typing_extensions/xarray/event.EventSuite.time_find_event.json", {"cpu": "AMD Ryzen 5 3600X 6-Core Processor", "machine": "Freya", "num_cpu": "12", "os": "Linux 4.4.0-19041-Microsoft", "ram": "16726988"}, null, 1.4560975000204053, 1.1528359999938402, [[208, 235, 1.2022353499996825, 1.4560975000204053]]], ["event.EventSuite.time_find_event", "graphs/arch-x86_64/branch-master/cfunits/cpu-Intel(R) Xeon(R) Platinum 8268 CPU @ 2.90GHz/dask/iris/machine-gadi/mule/netcdf4/num_cpu-48/os-Linux 4.18.0-240.1.1.el8.nci.x86_64/pandas/python/python-graphviz/ram-262432756/scipy/sparse/tqdm/typing_extensions/xarray/event.EventSuite.time_find_event.json", {"cpu": "Intel(R) Xeon(R) Platinum 8268 CPU @ 2.90GHz", "machine": "gadi", "num_cpu": "48", "os": "Linux 4.18.0-240.1.1.el8.nci.x86_64", "ram": "262432756"}, null, 5.249316045985324, 3.1751911519968417, [[162, 231, 3.1751911519968417, 5.249316045985324]]], ["blocked.GroupbySuite.time_blocked_monthday", "graphs/arch-x86_64/branch-master/cfunits/cpu-Intel(R) Xeon(R) Platinum 8268 CPU @ 2.90GHz/dask/iris/machine-gadi/mule/netcdf4/num_cpu-48/os-Linux 4.18.0-240.1.1.el8.nci.x86_64/pandas/python/python-graphviz/ram-262432756/scipy/sparse/tqdm/typing_extensions/xarray/blocked.GroupbySuite.time_blocked_monthday.json", {"cpu": "Intel(R) Xeon(R) Platinum 8268 CPU @ 2.90GHz", "machine": "gadi", "num_cpu": "48", "os": "Linux 4.18.0-240.1.1.el8.nci.x86_64", "ram": "262432756"}, null, 0.5356181205133907, 0.4298640069901012, [[162, 231, 0.4298640069901012, 0.5356181205133907]]], ["blocked.ResampleDistributedSuite.time_xarray", "graphs/arch-x86_64/branch-master/cfunits/cpu-Intel(R) Xeon(R) Platinum 8268 CPU @ 2.90GHz/dask/iris/machine-gadi/mule/netcdf4/num_cpu-48/os-Linux 4.18.0-240.1.1.el8.nci.x86_64/pandas/python/python-graphviz/ram-262432756/scipy/sparse/tqdm/typing_extensions/xarray/blocked.ResampleDistributedSuite.time_xarray.json", {"cpu": "Intel(R) Xeon(R) 
Platinum 8268 CPU @ 2.90GHz", "machine": "gadi", "num_cpu": "48", "os": "Linux 4.18.0-240.1.1.el8.nci.x86_64", "ram": "262432756"}, null, 3.9142326589790173, 3.634890771994833, [[null, 235, 3.634890771994833, 3.9142326589790173]]]]} -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | .python-version 86 | 87 | # pipenv 88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 
89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 91 | # install all needed dependencies. 92 | #Pipfile.lock 93 | 94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 95 | __pypackages__/ 96 | 97 | # Celery stuff 98 | celerybeat-schedule 99 | celerybeat.pid 100 | 101 | # SageMath parsed files 102 | *.sage.py 103 | 104 | # Environments 105 | .env 106 | .venv 107 | env/ 108 | venv/ 109 | ENV/ 110 | env.bak/ 111 | venv.bak/ 112 | 113 | # Spyder project settings 114 | .spyderproject 115 | .spyproject 116 | 117 | # Rope project settings 118 | .ropeproject 119 | 120 | # mkdocs documentation 121 | /site 122 | 123 | # mypy 124 | .mypy_cache/ 125 | .dmypy.json 126 | dmypy.json 127 | 128 | # Pyre type checker 129 | .pyre/ 130 | *.swp 131 | 132 | doc/_build 133 | PET*.RegridWeightGen.Log 134 | benchmark/*/log 135 | .asv/env 136 | -------------------------------------------------------------------------------- /doc/_static/asv/vendor/jquery.flot-0.8.3.categories.min.js: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright (c) 2007-2014 IOLA and Ole Laursen 3 | 4 | Permission is hereby granted, free of charge, to any person 5 | obtaining a copy of this software and associated documentation 6 | files (the "Software"), to deal in the Software without 7 | restriction, including without limitation the rights to use, 8 | copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the 10 | Software is furnished to do so, subject to the following 11 | conditions: 12 | 13 | The above copyright notice and this permission notice shall be 14 | included in all copies or substantial portions of the Software. 
15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 17 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES 18 | OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 19 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 20 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, 21 | WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 22 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 23 | OTHER DEALINGS IN THE SOFTWARE. 24 | */ 25 | 26 | !function(r){function o(r,o,e,i){var s="categories"==o.xaxis.options.mode,n="categories"==o.yaxis.options.mode;if(s||n){var a=i.format;if(!a){var t=o;if(a=[],a.push({x:!0,number:!0,required:!0}),a.push({y:!0,number:!0,required:!0}),t.bars.show||t.lines.show&&t.lines.fill){var u=!!(t.bars.show&&t.bars.zero||t.lines.show&&t.lines.zero);a.push({y:!0,number:!0,required:!1,defaultValue:0,autoscale:u}),t.bars.horizontal&&(delete a[a.length-1].y,a[a.length-1].x=!0)}i.format=a}for(var f=0;fo&&(o=r[e]);return o+1}function i(r){var o=[];for(var e in r.categories){var i=r.categories[e];i>=r.min&&i<=r.max&&o.push([i,e])}return o.sort(function(r,o){return r[0]-o[0]}),o}function s(o,e,s){if("categories"==o[e].options.mode){if(!o[e].categories){var a={},t=o[e].options.categories||{};if(r.isArray(t))for(var u=0;uc;++c){var l=s[f+c];null!=l&&a[c][t]&&(l in i||(i[l]=u,++u),s[f+c]=i[l])}}function a(r,o,e){s(o,"xaxis",e),s(o,"yaxis",e)}function t(r){r.hooks.processRawData.push(o),r.hooks.processDatapoints.push(a)}var u={xaxis:{categories:null},yaxis:{categories:null}};r.plot.plugins.push({init:t,options:u,name:"categories",version:"1.0"})}(jQuery); -------------------------------------------------------------------------------- /doc/_static/asv/graphs/arch-x86_64/branch-master/cfunits/cpu-AMD Ryzen 5 3600X 6-Core Processor/dask/iris/machine-Freya/mule/netcdf4/num_cpu-12/os-Linux 
4.4.0-19041-Microsoft/pandas/python/python-graphviz/ram-16726988/scipy/sparse/tqdm/typing_extensions/xarray/summary.json: -------------------------------------------------------------------------------- 1 | [{"name": "blocked.GroupbyDistributedSuite.time_blocked_dayofyear", "idx": null, "pretty_name": "blocked.GroupbyDistributedSuite.time_blocked_dayofyear", "last_rev": 235, "last_value": 1.1829518500017002, "last_err": 0.0, "prev_value": null, "change_rev": null}, {"name": "blocked.GroupbyDistributedSuite.time_blocked_monthday", "idx": null, "pretty_name": "blocked.GroupbyDistributedSuite.time_blocked_monthday", "last_rev": 235, "last_value": 0.963999400002649, "last_err": 0.0, "prev_value": null, "change_rev": null}, {"name": "blocked.GroupbyDistributedSuite.time_xarray_dayofyear", "idx": null, "pretty_name": "blocked.GroupbyDistributedSuite.time_xarray_dayofyear", "last_rev": 235, "last_value": 6.495260349998716, "last_err": 0.0, "prev_value": null, "change_rev": null}, {"name": "blocked.GroupbySuite.time_blocked_dayofyear", "idx": null, "pretty_name": "blocked.GroupbySuite.time_blocked_dayofyear", "last_rev": 235, "last_value": 0.30014864999975543, "last_err": 0.008690700654255389, "prev_value": null, "change_rev": null}, {"name": "blocked.GroupbySuite.time_blocked_monthday", "idx": null, "pretty_name": "blocked.GroupbySuite.time_blocked_monthday", "last_rev": 235, "last_value": 0.2233245499955956, "last_err": 0.007246654976722034, "prev_value": null, "change_rev": null}, {"name": "blocked.GroupbySuite.time_xarray_dayofyear", "idx": null, "pretty_name": "blocked.GroupbySuite.time_xarray_dayofyear", "last_rev": 235, "last_value": 2.9537656500033336, "last_err": 0.12581787973661263, "prev_value": null, "change_rev": null}, {"name": "blocked.ResampleDistributedSuite.time_blocked", "idx": null, "pretty_name": "blocked.ResampleDistributedSuite.time_blocked", "last_rev": 235, "last_value": 0.7067086999886669, "last_err": 0.0, "prev_value": null, "change_rev": null}, 
{"name": "blocked.ResampleDistributedSuite.time_xarray", "idx": null, "pretty_name": "blocked.ResampleDistributedSuite.time_xarray", "last_rev": 235, "last_value": 3.3373256499762647, "last_err": 0.0, "prev_value": null, "change_rev": null}, {"name": "blocked.ResampleSuite.time_blocked", "idx": null, "pretty_name": "blocked.ResampleSuite.time_blocked", "last_rev": 235, "last_value": 0.12802800003555603, "last_err": 0.0, "prev_value": 0.1291424500013818, "change_rev": [208, 235]}, {"name": "blocked.ResampleSuite.time_xarray", "idx": null, "pretty_name": "blocked.ResampleSuite.time_xarray", "last_rev": 235, "last_value": 1.8484123000016552, "last_err": 0.060204885292630156, "prev_value": null, "change_rev": null}, {"name": "event.EventDistributedSuite.time_find_event", "idx": null, "pretty_name": "event.EventDistributedSuite.time_find_event", "last_rev": 235, "last_value": 2.0268603999866173, "last_err": 0.0, "prev_value": null, "change_rev": null}, {"name": "event.EventSuite.time_find_event", "idx": null, "pretty_name": "event.EventSuite.time_find_event", "last_rev": 235, "last_value": 1.4560975000204053, "last_err": 0.0, "prev_value": 1.2022353499996825, "change_rev": [208, 235]}] -------------------------------------------------------------------------------- /doc/_static/asv/graphs/arch-x86_64/branch-master/cfunits/cpu-Intel(R) Xeon(R) Platinum 8268 CPU @ 2.90GHz/dask/iris/machine-gadi/mule/netcdf4/num_cpu-48/os-Linux 4.18.0-240.1.1.el8.nci.x86_64/pandas/python/python-graphviz/ram-262432756/scipy/sparse/tqdm/typing_extensions/xarray/summary.json: -------------------------------------------------------------------------------- 1 | [{"name": "blocked.GroupbyDistributedSuite.time_blocked_dayofyear", "idx": null, "pretty_name": "blocked.GroupbyDistributedSuite.time_blocked_dayofyear", "last_rev": 235, "last_value": 1.1509175270039123, "last_err": 0.015101201900593305, "prev_value": null, "change_rev": null}, {"name": 
"blocked.GroupbyDistributedSuite.time_blocked_monthday", "idx": null, "pretty_name": "blocked.GroupbyDistributedSuite.time_blocked_monthday", "last_rev": 235, "last_value": 0.9424207944830414, "last_err": 0.0, "prev_value": 0.9274916650028899, "change_rev": [231, 235]}, {"name": "blocked.GroupbyDistributedSuite.time_xarray_dayofyear", "idx": null, "pretty_name": "blocked.GroupbyDistributedSuite.time_xarray_dayofyear", "last_rev": 235, "last_value": 6.81930769601604, "last_err": 0.05000486624965498, "prev_value": null, "change_rev": null}, {"name": "blocked.GroupbySuite.time_blocked_dayofyear", "idx": null, "pretty_name": "blocked.GroupbySuite.time_blocked_dayofyear", "last_rev": 235, "last_value": 0.6714389724947978, "last_err": 0.03386290753635429, "prev_value": null, "change_rev": null}, {"name": "blocked.GroupbySuite.time_blocked_monthday", "idx": null, "pretty_name": "blocked.GroupbySuite.time_blocked_monthday", "last_rev": 235, "last_value": 0.5356181205133907, "last_err": 0.0, "prev_value": 0.5758137800148688, "change_rev": [231, 235]}, {"name": "blocked.GroupbySuite.time_xarray_dayofyear", "idx": null, "pretty_name": "blocked.GroupbySuite.time_xarray_dayofyear", "last_rev": 235, "last_value": 5.589740982977673, "last_err": 0.0, "prev_value": 6.291899875999661, "change_rev": [231, 235]}, {"name": "blocked.ResampleDistributedSuite.time_blocked", "idx": null, "pretty_name": "blocked.ResampleDistributedSuite.time_blocked", "last_rev": 235, "last_value": 0.7298302005219739, "last_err": 0.012202703436815713, "prev_value": null, "change_rev": null}, {"name": "blocked.ResampleDistributedSuite.time_xarray", "idx": null, "pretty_name": "blocked.ResampleDistributedSuite.time_xarray", "last_rev": 235, "last_value": 3.9142326589790173, "last_err": 0.0, "prev_value": 3.634890771994833, "change_rev": [231, 235]}, {"name": "blocked.ResampleSuite.time_blocked", "idx": null, "pretty_name": "blocked.ResampleSuite.time_blocked", "last_rev": 235, "last_value": 
0.10669864900410175, "last_err": 0.0, "prev_value": 0.10901543201180175, "change_rev": [231, 235]}, {"name": "blocked.ResampleSuite.time_xarray", "idx": null, "pretty_name": "blocked.ResampleSuite.time_xarray", "last_rev": 235, "last_value": 3.708070858469, "last_err": 0.08677950767956295, "prev_value": null, "change_rev": null}, {"name": "event.EventDistributedSuite.time_find_event", "idx": null, "pretty_name": "event.EventDistributedSuite.time_find_event", "last_rev": 235, "last_value": 2.8345289284770843, "last_err": 0.0, "prev_value": 2.8652793369838037, "change_rev": [231, 235]}, {"name": "event.EventSuite.time_find_event", "idx": null, "pretty_name": "event.EventSuite.time_find_event", "last_rev": 235, "last_value": 5.249316045985324, "last_err": 0.0, "prev_value": 5.308832254988374, "change_rev": [231, 235]}] -------------------------------------------------------------------------------- /src/climtas/dimension.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # Copyright 2018 ARC Centre of Excellence for Climate Extremes 3 | # author: Scott Wales 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 
16 | from __future__ import print_function 17 | 18 | from cfunits import Units 19 | import numpy 20 | 21 | 22 | def remove_degenerate_axes(coord): 23 | """ 24 | Remove any degenerate axes from the coordinate, where all the values along a dimension are identical 25 | 26 | Args: 27 | coord (xarray.DataArray): Co-ordinate to operate on 28 | 29 | Returns: 30 | xarray.DataArray with degenerate axes removed 31 | """ 32 | 33 | for d in coord.dims: 34 | if numpy.allclose(coord.max(dim=d) - coord.min(dim=d), 0): 35 | coord = coord.mean(dim=d) 36 | 37 | return coord 38 | 39 | 40 | def identify_lat_lon(dataarray): 41 | """ 42 | Identify the latitude and longitude dimensions of a dataarray using CF 43 | attributes 44 | 45 | Args: 46 | dataarray: Source dataarray 47 | 48 | Returns: 49 | (lat, lon): Tuple of `xarray.Dataarray` for the latitude and longitude 50 | dimensions 51 | 52 | Todo: 53 | * Assumes latitude and longitude are unique 54 | """ 55 | 56 | lat = None 57 | lon = None 58 | 59 | for c in dataarray.coords.values(): 60 | if ( 61 | c.attrs.get("standard_name", "") == "latitude" 62 | or Units(c.attrs.get("units", "")).islatitude 63 | or c.attrs.get("axis", "") == "Y" 64 | ): 65 | lat = c 66 | 67 | if ( 68 | c.attrs.get("standard_name", "") == "longitude" 69 | or Units(c.attrs.get("units", "")).islongitude 70 | or c.attrs.get("axis", "") == "X" 71 | ): 72 | lon = c 73 | 74 | if lat is None or lon is None: 75 | raise Exception("Couldn't identify horizontal coordinates") 76 | 77 | return (lat, lon) 78 | 79 | 80 | def identify_time(dataarray): 81 | """ 82 | Identify the time dimension of a dataarray using CF attributes 83 | 84 | Args: 85 | dataarray: Source dataarray 86 | 87 | Returns: 88 | :obj:`xarray.Dataarray` for the time dimension 89 | 90 | Todo: 91 | * Assumes time dimension is unique 92 | """ 93 | 94 | for c in dataarray.coords.values(): 95 | if ( 96 | c.attrs.get("standard_name", "") == "time" 97 | or Units(c.attrs.get("units", "")).isreftime 98 | or 
Units(c.encoding.get("units", "")).isreftime 99 | or c.attrs.get("axis", "") == "T" 100 | ): 101 | return c 102 | 103 | raise Exception("No time axis found") 104 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | climtas 2 | ========== 3 | 4 | .. image:: https://img.shields.io/circleci/build/github/ScottWales/climtas/master 5 | :target: https://circleci.com/gh/ScottWales/climtas 6 | :alt: CircleCI 7 | 8 | .. image:: https://img.shields.io/codecov/c/github/ScottWales/climtas/master 9 | :target: https://codecov.io/gh/ScottWales/climtas 10 | :alt: Codecov 11 | 12 | .. image:: https://img.shields.io/readthedocs/climtas/latest 13 | :target: https://climtas.readthedocs.io/en/latest/ 14 | :alt: Read the Docs (latest) 15 | 16 | .. image:: https://img.shields.io/conda/v/coecms/climtas 17 | :target: https://anaconda.org/coecms/climtas 18 | :alt: Conda 19 | 20 | Functions for working with large (> 10 GB) datasets using Xarray and Dask, 21 | especially for working in the time domain 22 | 23 | Topics 24 | ------ 25 | 26 | `Apply a function grouping by day of year, without massive numbers of dask chunks `_ 27 | ~~~~ 28 | 29 | Climtas' blocked resample and groupby operations use array reshaping, rather than Xarray's default slicing methods. This results in a much simpler and efficient Dask graph, at the cost of some restrictions to the data (the data must be regularly spaced and start/end on a resampling boundary) 30 | 31 | Example notebook: `ERA-5 90th percentile climatology `_ 32 | 33 | .. image:: benchmark/climatology/climatology_walltime.png 34 | :alt: Walltime of Climtas climatology vs xarray 35 | 36 | .. 
code-block:: python 37 | 38 | >>> import numpy; import pandas; import xarray 39 | >>> time = pandas.date_range("20010101", "20030101", closed="left") 40 | >>> data = numpy.random.rand(len(time)) 41 | >>> da = xarray.DataArray(data, coords=[("time", time)]) 42 | >>> da = da.chunk({"time": 365}) 43 | 44 | >>> from climtas import blocked_groupby 45 | >>> blocked_groupby(da, time='dayofyear').mean() 46 | 47 | dask.array 48 | Coordinates: 49 | * dayofyear (dayofyear) int64 1 2 3 4 5 6 7 8 ... 360 361 362 363 364 365 366 50 | 51 | 52 | 53 | `Find and apply a function to events `_ 54 | ~~~~ 55 | 56 | Climtas includes a number of parallelised building blocks for heatwave detection 57 | 58 | .. code-block:: python 59 | 60 | >>> from climtas.event import find_events, map_events 61 | >>> temp = xarray.DataArray([28,31,34,32,30,35,39], dims=['time']) 62 | >>> events = find_events(temp > 30) 63 | >>> sums = map_events(temp, events, lambda x: {'sum': x.sum().item()}) 64 | >>> events.join(sums) 65 | time event_duration sum 66 | 0 1 3 97 67 | 1 5 2 74 68 | 69 | `Memory-saving write to NetCDF `_ 70 | ~~~~ 71 | 72 | Climtas' throttled saver reduces memory usage, by limiting the number of Dask output chunks that get processed at one time 73 | 74 | Examples 75 | -------- 76 | 77 | See the examples in the `notebooks `_ directory for mores ideas on how to 78 | use these functions to analyse large datasets 79 | -------------------------------------------------------------------------------- /doc/_static/asv/asv.css: -------------------------------------------------------------------------------- 1 | /* Basic navigation */ 2 | 3 | .asv-navigation { 4 | padding: 2px; 5 | } 6 | 7 | nav ul li.active a { 8 | height: 52px; 9 | } 10 | 11 | nav li.active span.navbar-brand { 12 | background-color: #e7e7e7; 13 | height: 52px; 14 | } 15 | 16 | nav li.active span.navbar-brand:hover { 17 | background-color: #e7e7e7; 18 | } 19 | 20 | .navbar-default .navbar-link { 21 | color: #2458D9; 22 | } 23 | 24 | 
.panel-body { 25 | padding: 0; 26 | } 27 | 28 | .panel { 29 | margin-bottom: 4px; 30 | -webkit-box-shadow: none; 31 | box-shadow: none; 32 | border-radius: 0; 33 | border-top-left-radius: 3px; 34 | border-top-right-radius: 3px; 35 | } 36 | 37 | .panel-default>.panel-heading, 38 | .panel-heading { 39 | font-size: 12px; 40 | font-weight:bold; 41 | padding: 2px; 42 | text-align: center; 43 | border-top-left-radius: 3px; 44 | border-top-right-radius: 3px; 45 | background-color: #eee; 46 | } 47 | 48 | .btn, 49 | .btn-group, 50 | .btn-group-vertical>.btn:first-child, 51 | .btn-group-vertical>.btn:last-child:not(:first-child), 52 | .btn-group-vertical>.btn:last-child { 53 | border: none; 54 | border-radius: 0px; 55 | overflow: hidden; 56 | } 57 | 58 | .btn-default:focus, .btn-default:active, .btn-default.active { 59 | border: none; 60 | color: #fff; 61 | background-color: #99bfcd; 62 | } 63 | 64 | #range { 65 | font-family: monospace; 66 | text-align: center; 67 | background: #ffffff; 68 | } 69 | 70 | .form-control { 71 | border: none; 72 | border-radius: 0px; 73 | font-size: 12px; 74 | padding: 0px; 75 | } 76 | 77 | .tooltip-inner { 78 | min-width: 100px; 79 | max-width: 800px; 80 | text-align: left; 81 | white-space: pre; 82 | font-family: monospace; 83 | } 84 | 85 | /* Benchmark tree */ 86 | 87 | .nav-list { 88 | font-size: 12px; 89 | padding: 0; 90 | padding-left: 15px; 91 | } 92 | 93 | .nav-list>li { 94 | overflow-x: hidden; 95 | } 96 | 97 | .nav-list>li>a { 98 | padding: 0; 99 | padding-left: 5px; 100 | color: #000; 101 | } 102 | 103 | .nav-list>li>a:focus { 104 | color: #fff; 105 | background-color: #99bfcd; 106 | box-shadow: inset 0 3px 5px rgba(0,0,0,.125); 107 | } 108 | 109 | .nav-list>li>.nav-header { 110 | white-space: nowrap; 111 | font-weight: 500; 112 | margin-bottom: 2px; 113 | } 114 | 115 | .caret-right { 116 | display: inline-block; 117 | width: 0; 118 | height: 0; 119 | margin-left: 2px; 120 | vertical-align: middle; 121 | border-left: 4px solid; 122 | 
border-bottom: 4px solid transparent; 123 | border-top: 4px solid transparent; 124 | } 125 | 126 | /* Summary page */ 127 | 128 | .benchmark-group > h1 { 129 | text-align: center; 130 | } 131 | 132 | .benchmark-container { 133 | width: 300px; 134 | height: 116px; 135 | padding: 4px; 136 | border-radius: 3px; 137 | } 138 | 139 | .benchmark-container:hover { 140 | background-color: #eee; 141 | } 142 | 143 | .benchmark-plot { 144 | width: 292px; 145 | height: 88px; 146 | } 147 | 148 | .benchmark-text { 149 | font-size: 12px; 150 | color: #000; 151 | width: 292px; 152 | overflow: hidden; 153 | } 154 | 155 | #extra-buttons { 156 | margin: 1em; 157 | } 158 | 159 | #extra-buttons a { 160 | border: solid 1px #ccc; 161 | } 162 | -------------------------------------------------------------------------------- /test/test_io.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # Copyright 2019 Scott Wales 3 | # author: Scott Wales 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 
16 | 17 | import xarray 18 | import numpy 19 | import dask 20 | import pandas 21 | 22 | from climtas import io 23 | 24 | 25 | def test_to_netcdf_throttled(tmpdir, distributed_client): 26 | def helper(path, data): 27 | da = xarray.DataArray(data, dims=["t", "x", "y"], name="test") 28 | io.to_netcdf_throttled(da, path) 29 | out = xarray.open_dataset(str(path)).test 30 | xarray.testing.assert_identical(da, out) 31 | 32 | path = tmpdir / "numpy.nc" 33 | data = numpy.zeros([10, 10, 10]) 34 | helper(path, data) 35 | 36 | path = tmpdir / "dask.nc" 37 | data = dask.array.zeros([10, 10, 10]) 38 | helper(path, data) 39 | 40 | data = dask.array.random.random([10, 10, 10]) + numpy.random.random([10, 10, 10]) 41 | helper(path, data) 42 | 43 | 44 | def test_to_netcdf_throttled_serial(tmpdir): 45 | def helper(path, data): 46 | da = xarray.DataArray(data, dims=["t", "x", "y"], name="test") 47 | io.to_netcdf_throttled(da, path) 48 | out = xarray.open_dataset(str(path)).test 49 | xarray.testing.assert_identical(da, out) 50 | 51 | path = tmpdir / "numpy.nc" 52 | data = numpy.zeros([10, 10, 10]) 53 | helper(path, data) 54 | 55 | path = tmpdir / "dask.nc" 56 | data = dask.array.zeros([10, 10, 10]) 57 | helper(path, data) 58 | 59 | 60 | def test_to_netcdf_series(tmpdir): 61 | path = tmpdir / "data_{start.year}.nc" 62 | data = xarray.DataArray( 63 | numpy.zeros([20]), 64 | coords=[("time", pandas.date_range("20010101", freq="MS", periods=20))], 65 | name="test", 66 | ) 67 | 68 | io.to_netcdf_series(data, path, groupby="time.year") 69 | 70 | assert (tmpdir / "data_2001.nc").exists() 71 | assert (tmpdir / "data_2002.nc").exists() 72 | 73 | data.coords["group"] = ( 74 | "time", 75 | [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], 76 | ) 77 | path = tmpdir / "data_{group}.nc" 78 | io.to_netcdf_series(data, path, groupby="group") 79 | assert (tmpdir / "data_0.nc").exists() 80 | assert (tmpdir / "data_1.nc").exists() 81 | 82 | 83 | def test_to_netcdf_throttled_fillvalue(tmpdir, 
distributed_client): 84 | def helper(path, data): 85 | da = xarray.DataArray(data, dims=["t", "x", "y"], name="test") 86 | da.encoding["_FillValue"] = 1 87 | io.to_netcdf_throttled(da, path) 88 | out = xarray.open_dataset(str(path)).test 89 | xarray.testing.assert_identical(da, out) 90 | assert out.encoding["_FillValue"] == 1 91 | 92 | path = tmpdir / "numpy.nc" 93 | data = numpy.zeros([10, 10, 10]) 94 | helper(path, data) 95 | 96 | path = tmpdir / "dask.nc" 97 | data = dask.array.zeros([10, 10, 10]) 98 | helper(path, data) 99 | 100 | data = dask.array.random.random([10, 10, 10]) + numpy.random.random([10, 10, 10]) 101 | helper(path, data) 102 | -------------------------------------------------------------------------------- /test/test_helpers.py: -------------------------------------------------------------------------------- 1 | import dask 2 | import pandas 3 | import numpy 4 | from climtas.helpers import * 5 | 6 | 7 | def test_blockwise(): 8 | da = dask.array.zeros((10, 10), chunks=(5, 5)) 9 | 10 | def func(da): 11 | return pandas.DataFrame({"mean": da.mean()}, index=[1]) 12 | 13 | meta = pandas.DataFrame({"mean": pandas.Series([], dtype=da.dtype)}) 14 | 15 | df = map_blocks_array_to_dataframe(func, da, meta=meta) 16 | df = df.compute() 17 | 18 | numpy.testing.assert_array_equal(df.to_numpy(), [[0], [0], [0], [0]]) 19 | 20 | def func(da, block_info=None): 21 | return pandas.DataFrame.from_records([block_info[0]], index=[1]) 22 | 23 | df = map_blocks_array_to_dataframe(func, da, meta=meta) 24 | df = df.compute() 25 | 26 | numpy.testing.assert_array_equal( 27 | df["chunk-location"].sort_values().apply(lambda x: x[0]), 28 | numpy.array( 29 | [ 30 | 0, 31 | 0, 32 | 1, 33 | 1, 34 | ] 35 | ), 36 | ) 37 | 38 | numpy.testing.assert_array_equal( 39 | df["chunk-location"].sort_values().apply(lambda x: x[1]), 40 | numpy.array( 41 | [ 42 | 0, 43 | 1, 44 | 0, 45 | 1, 46 | ] 47 | ), 48 | ) 49 | 50 | 51 | def test_blockwise_xarray(): 52 | da = dask.array.zeros((10, 10), 
chunks=(5, 5)) 53 | xda = xarray.DataArray(da, dims=["t", "x"]) 54 | 55 | def func(da, block_info=None): 56 | meta = locate_block_in_dataarray( 57 | da, xda.name, xda.dims, xda.coords, block_info[0] 58 | ) 59 | return pandas.DataFrame({"mean": meta.mean().values}, index=[1]) 60 | 61 | meta = pandas.DataFrame({"mean": pandas.Series([], dtype=da.dtype)}) 62 | 63 | df = map_blocks_array_to_dataframe(func, xda.data, meta=meta) 64 | df = df.compute() 65 | 66 | numpy.testing.assert_array_equal(df.to_numpy(), [[0], [0], [0], [0]]) 67 | 68 | 69 | def test_throttled_compute(): 70 | 71 | # Numpy array 72 | s = numpy.random.random((10, 10)) 73 | 74 | tc = throttled_compute(s, n=1) 75 | (dc,) = dask.compute(s) 76 | numpy.testing.assert_array_equal(tc, dc) 77 | 78 | # Numpy array converted to dask 79 | s = dask.array.from_array(s, chunks=(5, 5)) 80 | 81 | tc = throttled_compute(s, n=1) 82 | (dc,) = dask.compute(s) 83 | numpy.testing.assert_array_equal(tc, dc) 84 | 85 | # Pure dask array 86 | s = dask.array.random.random((10, 10), chunks=(5, 5)) 87 | 88 | tc = throttled_compute(s, n=1) 89 | (dc,) = dask.compute(s) 90 | numpy.testing.assert_array_equal(tc, dc) 91 | 92 | # Xarray + Numpy 93 | s = numpy.random.random((10, 10)) 94 | s = xarray.DataArray(s, name="foo") 95 | 96 | tc = throttled_compute(s, n=1) 97 | (dc,) = dask.compute(s) 98 | numpy.testing.assert_array_equal(tc, dc) 99 | assert tc.name == "foo" 100 | 101 | # Xarray + Dask 102 | s = dask.array.random.random((10, 10), chunks=(5, 5)) 103 | s = xarray.DataArray(s, name="foo") 104 | 105 | tc = throttled_compute(s, n=1) 106 | (dc,) = dask.compute(s) 107 | numpy.testing.assert_array_equal(tc, dc) 108 | assert tc.name == "foo" 109 | 110 | 111 | def test_array_blocks_to_dataframe(): 112 | meta = pandas.DataFrame({"a": [0, 1, 2], "b": [2, 3, 4]}) 113 | 114 | array = numpy.array([0, 1, 2, 3]) 115 | a = dask.array.from_array(array, chunks=(2,)) 116 | 117 | def mapper(x): 118 | if x[0] == 0: 119 | return meta.iloc[0:1] 120 | 
else: 121 | return meta.iloc[1:] 122 | 123 | b = dask.array.map_blocks(mapper, a, dtype="object") 124 | 125 | result = array_blocks_to_dataframe(b, meta).compute() 126 | 127 | assert meta.equals(result) 128 | -------------------------------------------------------------------------------- /.asv/results/Freya/3801e053-conda-py-cfunits-dask-iris-mule-netcdf4-pandas-python-graphviz-scipy-sparse-tqdm-typing_extensions-xarray.json: -------------------------------------------------------------------------------- 1 | {"results": {"blocked.GroupbySuite.time_blocked_dayofyear": {"result": [0.3126271499932045], "stats": [{"ci_99": [0.2977016999939224, 0.34394289999909233], "q_25": 0.3003146499977447, "q_75": 0.3215577500013751, "min": 0.2977016999939224, "max": 0.34394289999909233, "mean": 0.3137292899962631, "std": 0.014682803495529122, "repeat": 10, "number": 1}]}, "blocked.GroupbySuite.time_blocked_monthday": {"result": [0.2437466499977745], "stats": [{"ci_99": [0.22851909999735653, 0.27990589999535587], "q_25": 0.23839082500126096, "q_75": 0.2593821000009484, "min": 0.22851909999735653, "max": 0.27990589999535587, "mean": 0.2484980499968515, "std": 0.014908833143747205, "repeat": 10, "number": 1}]}, "blocked.GroupbySuite.time_xarray_dayofyear": {"result": [3.0456320500015863], "stats": [{"ci_99": [3.0129942999919876, 3.064236629433123], "q_25": 3.031911849997414, "q_75": 3.05106107499887, "min": 3.0129942999919876, "max": 3.05717949999962, "mean": 3.0402745166647946, "std": 0.01522875723199255, "repeat": 6, "number": 1}]}, "blocked.ResampleSuite.time_blocked": {"result": [0.1291424500013818], "stats": [{"ci_99": [0.11620689999836031, 0.14901609999651555], "q_25": 0.12433564999810187, "q_75": 0.13542332499855547, "min": 0.11620689999836031, "max": 0.14901609999651555, "mean": 0.13057890999916708, "std": 0.009049610063400704, "repeat": 10, "number": 1}]}, "blocked.ResampleSuite.time_xarray": {"result": [1.8962347999986378], "stats": [{"ci_99": [1.8759468000062043, 
1.9226966000132961], "q_25": 1.8873265749934944, "q_75": 1.906550950006931, "min": 1.8759468000062043, "max": 1.9226966000132961, "mean": 1.8969048800005113, "std": 0.013334025574830217, "repeat": 10, "number": 1}]}, "event.EventSuite.time_find_event": {"result": [1.2022353499996825], "stats": [{"ci_99": [1.185307500010822, 1.2574620000086725], "q_25": 1.1946725249981682, "q_75": 1.21394304999194, "min": 1.185307500010822, "max": 1.2574620000086725, "mean": 1.2073511100010363, "std": 0.019356085737859734, "repeat": 10, "number": 1}]}}, "params": {"arch": "x86_64", "cpu": "AMD Ryzen 5 3600X 6-Core Processor", "machine": "Freya", "num_cpu": "12", "os": "Linux 4.4.0-19041-Microsoft", "ram": "16726988", "python": "", "cfunits": "", "dask": "", "iris": "", "mule": "", "netcdf4": "", "pandas": "", "python-graphviz": "", "scipy": "", "sparse": "", "tqdm": "", "typing_extensions": "", "xarray": ""}, "requirements": {"cfunits": "", "dask": "", "iris": "", "mule": "", "netcdf4": "", "pandas": "", "python-graphviz": "", "scipy": "", "sparse": "", "tqdm": "", "typing_extensions": "", "xarray": ""}, "commit_hash": "3801e053df60b21ffab4534be4c2642bf0a17e01", "date": 1609824900000, "env_name": "conda-py-cfunits-dask-iris-mule-netcdf4-pandas-python-graphviz-scipy-sparse-tqdm-typing_extensions-xarray", "python": "", "profiles": {}, "started_at": {"blocked.GroupbySuite.time_blocked_dayofyear": 1609825020542, "blocked.GroupbySuite.time_blocked_monthday": 1609825022725, "blocked.GroupbySuite.time_xarray_dayofyear": 1609825024565, "blocked.ResampleSuite.time_blocked": 1609825037028, "blocked.ResampleSuite.time_xarray": 1609825038009, "event.EventSuite.time_find_event": 1609825049789}, "ended_at": {"blocked.GroupbySuite.time_blocked_dayofyear": 1609825022725, "blocked.GroupbySuite.time_blocked_monthday": 1609825024565, "blocked.GroupbySuite.time_xarray_dayofyear": 1609825037027, "blocked.ResampleSuite.time_blocked": 1609825038009, "blocked.ResampleSuite.time_xarray": 1609825049789, 
"event.EventSuite.time_find_event": 1609825057304}, "benchmark_version": {"blocked.GroupbySuite.time_blocked_dayofyear": "01d3740b4f5916532dc07a5d1aec3619fd38128f350845bcc1d9d15cd4d3b1dd", "blocked.GroupbySuite.time_blocked_monthday": "ea1ecf7949f8513fba397204403a348b9efc7ccd3753e255e48e9e64689684d2", "blocked.GroupbySuite.time_xarray_dayofyear": "c5d3ef9404ad60cb7e3b28de3c565c02f7a75c1041aab3ccdeba1cc6c7618e7b", "blocked.ResampleSuite.time_blocked": "62b1f41ff720dc40ea68f172077bed6172fe97cfcf57eace07809d4845c85a52", "blocked.ResampleSuite.time_xarray": "364ac7c62ba036563517fdb59f62d4ce79f49fe8d6476f286494b9671c236610", "event.EventSuite.time_find_event": "a231d487f53daed34d1f991be54d797f3e1f15a79e7d0a811686326b16fc028e"}, "version": 1} -------------------------------------------------------------------------------- /src/climtas/daskutil.py: -------------------------------------------------------------------------------- 1 | """ 2 | Utilities for working with Dask 3 | """ 4 | 5 | import xarray 6 | import dask 7 | import numpy 8 | from itertools import zip_longest 9 | import graphviz 10 | import typing as T 11 | 12 | 13 | # An array-like value for typing 14 | ArrayVar = T.TypeVar("ArrayVar", xarray.DataArray, dask.array.Array, numpy.ndarray) 15 | 16 | 17 | def _grouper(iterable, n, fillvalue=None): 18 | "Collect data into fixed-length chunks or blocks" 19 | # grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx" 20 | args = [iter(iterable)] * n 21 | return zip_longest(*args, fillvalue=fillvalue) 22 | 23 | 24 | def throttled_compute(arr: ArrayVar, *, n: int, name: T.Hashable = None) -> ArrayVar: 25 | """ 26 | Compute a Dask object N chunks at a time 27 | 28 | Args: 29 | obj: Object to compute 30 | n: Number of chunks to process at once 31 | name: Dask layer name to compute (default obj.name) 32 | 33 | Returns: 34 | 'obj', with each chunk computed 35 | """ 36 | 37 | # Copy the input in case it's a xarray object 38 | obj = arr 39 | 40 | if isinstance(arr, xarray.DataArray): 41 | 
# Work on the data 42 | obj = arr.data 43 | 44 | if not hasattr(obj, "dask") or isinstance(obj, numpy.ndarray): 45 | # Short-circuit non-dask arrays 46 | return arr 47 | 48 | # Current dask scheduler 49 | schedule = dask.base.get_scheduler(collections=[obj]) 50 | 51 | # Get the layer to work on 52 | if name is None: 53 | name = obj.name 54 | top_layer = obj.dask.layers[name] 55 | 56 | result = {} 57 | 58 | # Compute chunks N at a time 59 | for x in _grouper(top_layer, n): 60 | x = [xx for xx in x if xx is not None] 61 | 62 | graph = obj.dask.cull(set(x)) 63 | values = schedule(graph, list(x)) 64 | result.update(dict(zip(x, values))) 65 | 66 | # Build a new dask graph 67 | layer = dask.highlevelgraph.MaterializedLayer(result) 68 | graph = dask.highlevelgraph.HighLevelGraph.from_collections(name, layer) 69 | 70 | obj.dask = graph 71 | 72 | if isinstance(arr, xarray.DataArray): 73 | # Add back metadata 74 | obj = xarray.DataArray( 75 | obj, name=arr.name, dims=arr.dims, coords=arr.coords, attrs=arr.attrs 76 | ) 77 | 78 | return obj 79 | 80 | 81 | def visualize_block(arr: dask.array.Array, sizes=True) -> graphviz.Digraph: 82 | """ 83 | Visualise the graph of a single chunk from 'arr' 84 | 85 | In a Jupyter notebook the graph will automatically display, otherwise use 86 | :meth:`graphviz.Digraph.render` to create an image. 
87 | 88 | Args: 89 | arr: Array to visualise 90 | sizes: Calculate the sizes of each node and display as the node label 91 | if True 92 | """ 93 | import dask.dot 94 | 95 | name = arr.name 96 | graph = arr.dask 97 | layer = graph.layers[name] 98 | block = next(iter(layer.keys())) 99 | culled = graph.cull(set([block])) 100 | 101 | attrs = {} 102 | if sizes: 103 | attrs = graph_sizes(arr) 104 | 105 | graph = dask.dot.to_graphviz(culled, data_attributes=attrs) 106 | 107 | return graph 108 | 109 | 110 | def graph_sizes(arr: dask.array.Array) -> T.Dict[T.Hashable, T.Dict]: 111 | """ 112 | Get the node sizes for each node in arr's Dask graph, to be used in 113 | visualisation functions 114 | 115 | Sizes are returned using the 'label' graphviz attribute 116 | 117 | >>> import dask.dot 118 | >>> a = dask.array.zeros((10,10), chunks=(5,5)) 119 | >>> sizes = graph_sizes(a) 120 | >>> dask.dot.to_graphviz(a.dask, data_attributes=sizes) # doctest: +ELLIPSIS 121 | 122 | 123 | Note: All nodes will be computed to calculate the size 124 | """ 125 | 126 | keys = list(arr.dask.keys()) 127 | sizes = dict( 128 | zip( 129 | keys, 130 | [ 131 | {"label": dask.utils.format_bytes(x.nbytes)} 132 | if isinstance(x, numpy.ndarray) 133 | else {} 134 | for x in dask.get(arr.dask, keys) 135 | ], 136 | ) 137 | ) 138 | 139 | return sizes 140 | -------------------------------------------------------------------------------- /doc/_static/asv/vendor/jquery.flot-0.8.3.selection.min.js: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright (c) 2007-2014 IOLA and Ole Laursen 3 | 4 | Permission is hereby granted, free of charge, to any person 5 | obtaining a copy of this software and associated documentation 6 | files (the "Software"), to deal in the Software without 7 | restriction, including without limitation the rights to use, 8 | copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom 
the 10 | Software is furnished to do so, subject to the following 11 | conditions: 12 | 13 | The above copyright notice and this permission notice shall be 14 | included in all copies or substantial portions of the Software. 15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 17 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES 18 | OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 19 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 20 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, 21 | WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 22 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 23 | OTHER DEALINGS IN THE SOFTWARE. 24 | */ 25 | 26 | !function(e){function t(t){function o(e){h.active&&(a(e),t.getPlaceholder().trigger("plotselecting",[s()]))}function n(t){1==t.which&&(document.body.focus(),void 0!==document.onselectstart&&null==m.onselectstart&&(m.onselectstart=document.onselectstart,document.onselectstart=function(){return!1}),void 0!==document.ondrag&&null==m.ondrag&&(m.ondrag=document.ondrag,document.ondrag=function(){return!1}),l(h.first,t),h.active=!0,x=function(e){i(e)},e(document).one("mouseup",x))}function i(e){return x=null,void 0!==document.onselectstart&&(document.onselectstart=m.onselectstart),void 0!==document.ondrag&&(document.ondrag=m.ondrag),h.active=!1,a(e),g()?r():(t.getPlaceholder().trigger("plotunselected",[]),t.getPlaceholder().trigger("plotselecting",[null])),!1}function s(){if(!g())return null;if(!h.show)return null;var o={},n=h.first,i=h.second;return e.each(t.getAxes(),function(e,t){if(t.used){var s=t.c2p(n[t.direction]),r=t.c2p(i[t.direction]);o[e]={from:Math.min(s,r),to:Math.max(s,r)}}}),o}function r(){var e=s();t.getPlaceholder().trigger("plotselected",[e]),e.xaxis&&e.yaxis&&t.getPlaceholder().trigger("selected",[{x1:e.xaxis.from,y1:e.yaxis.from,x2:e.xaxis.to,y2:e.yaxis.to}])}function c(e,t,o){return e>t?e:t>o?o:t}function l(e,o){var 
n=t.getOptions(),i=t.getPlaceholder().offset(),s=t.getPlotOffset();e.x=c(0,o.pageX-i.left-s.left,t.width()),e.y=c(0,o.pageY-i.top-s.top,t.height()),"y"==n.selection.mode&&(e.x=e==h.first?0:t.width()),"x"==n.selection.mode&&(e.y=e==h.first?0:t.height())}function a(e){null!=e.pageX&&(l(h.second,e),g()?(h.show=!0,t.triggerRedrawOverlay()):u(!0))}function u(e){h.show&&(h.show=!1,t.triggerRedrawOverlay(),e||t.getPlaceholder().trigger("plotunselected",[]))}function d(e,o){var n,i,s,r,c=t.getAxes();for(var l in c)if(n=c[l],n.direction==o&&(r=o+n.n+"axis",e[r]||1!=n.n||(r=o+"axis"),e[r])){i=e[r].from,s=e[r].to;break}if(e[r]||(n="x"==o?t.getXAxes()[0]:t.getYAxes()[0],i=e[o+"1"],s=e[o+"2"]),null!=i&&null!=s&&i>s){var a=i;i=s,s=a}return{from:i,to:s,axis:n}}function f(e,o){var n,i=t.getOptions();"y"==i.selection.mode?(h.first.x=0,h.second.x=t.width()):(n=d(e,"x"),h.first.x=n.axis.p2c(n.from),h.second.x=n.axis.p2c(n.to)),"x"==i.selection.mode?(h.first.y=0,h.second.y=t.height()):(n=d(e,"y"),h.first.y=n.axis.p2c(n.from),h.second.y=n.axis.p2c(n.to)),h.show=!0,t.triggerRedrawOverlay(),!o&&g()&&r()}function g(){var e=t.getOptions().selection.minSize;return Math.abs(h.second.x-h.first.x)>=e&&Math.abs(h.second.y-h.first.y)>=e}var h={first:{x:-1,y:-1},second:{x:-1,y:-1},show:!1,active:!1},m={},x=null;t.clearSelection=u,t.setSelection=f,t.getSelection=s,t.hooks.bindEvents.push(function(e,t){var i=e.getOptions();null!=i.selection.mode&&(t.mousemove(o),t.mousedown(n))}),t.hooks.drawOverlay.push(function(t,o){if(h.show&&g()){var n=t.getPlotOffset(),i=t.getOptions();o.save(),o.translate(n.left,n.top);var s=e.color.parse(i.selection.color);o.strokeStyle=s.scale("a",.8).toString(),o.lineWidth=1,o.lineJoin=i.selection.shape,o.fillStyle=s.scale("a",.4).toString();var 
r=Math.min(h.first.x,h.second.x)+.5,c=Math.min(h.first.y,h.second.y)+.5,l=Math.abs(h.second.x-h.first.x)-1,a=Math.abs(h.second.y-h.first.y)-1;o.fillRect(r,c,l,a),o.strokeRect(r,c,l,a),o.restore()}}),t.hooks.shutdown.push(function(t,i){i.unbind("mousemove",o),i.unbind("mousedown",n),x&&e(document).unbind("mouseup",x)})}e.plot.plugins.push({init:t,options:{selection:{mode:null,color:"#e8cfac",shape:"round",minSize:5}},name:"selection",version:"1.1"})}(jQuery); -------------------------------------------------------------------------------- /doc/_static/asv/summarygrid.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | $(document).ready(function() { 4 | var summary_loaded = false; 5 | 6 | /* Callback a function when an element comes in view */ 7 | function callback_in_view(element, func) { 8 | function handler(evt) { 9 | var visible = ( 10 | $('#summarygrid-display').css('display') != 'none' && 11 | (element.offset().top <= $(window).height() + $(window).scrollTop()) && 12 | (element.offset().top + element.height() >= $(window).scrollTop())); 13 | if (visible) { 14 | func(); 15 | $(window).off('scroll', handler); 16 | } 17 | } 18 | $(window).on('scroll', handler); 19 | } 20 | 21 | function get_benchmarks_by_groups() { 22 | var master_json = $.asv.master_json; 23 | var groups = {}; 24 | $.each(master_json.benchmarks, function(bm_name, bm) { 25 | var i = bm_name.indexOf('.'); 26 | var group = bm_name.slice(0, i); 27 | var name = bm_name.slice(i + 1); 28 | if (groups[group] === undefined) { 29 | groups[group] = []; 30 | } 31 | groups[group].push(bm_name); 32 | }); 33 | return groups; 34 | } 35 | 36 | function benchmark_container(bm) { 37 | var container = $( 38 | ''); 40 | var plot_div = $( 41 | '
'); 42 | var display_name = bm.pretty_name || bm.name.slice(bm.name.indexOf('.') + 1); 43 | var name = $('
' + display_name + '
'); 44 | name.tooltip({ 45 | title: bm.name, 46 | html: true, 47 | placement: 'top', 48 | container: 'body', 49 | animation: false 50 | }); 51 | 52 | plot_div.tooltip({ 53 | title: bm.code, 54 | html: true, 55 | placement: 'bottom', 56 | container: 'body', 57 | animation: false 58 | }); 59 | 60 | container.append(name); 61 | container.append(plot_div); 62 | 63 | callback_in_view(plot_div, function() { 64 | $.asv.load_graph_data( 65 | 'graphs/summary/' + bm.name + '.json' 66 | ).done(function(data) { 67 | var options = { 68 | colors: $.asv.colors, 69 | series: { 70 | lines: { 71 | show: true, 72 | lineWidth: 2 73 | }, 74 | shadowSize: 0 75 | }, 76 | grid: { 77 | borderWidth: 1, 78 | margin: 0, 79 | labelMargin: 0, 80 | axisMargin: 0, 81 | minBorderMargin: 0 82 | }, 83 | xaxis: { 84 | ticks: [], 85 | }, 86 | yaxis: { 87 | ticks: [], 88 | min: 0 89 | }, 90 | legend: { 91 | show: false 92 | } 93 | }; 94 | 95 | var plot = $.plot( 96 | plot_div, [{data: data}], options); 97 | }).fail(function() { 98 | // TODO: Handle failure 99 | }); 100 | }); 101 | return container; 102 | } 103 | 104 | function make_summary() { 105 | var summary_display = $('#summarygrid-display'); 106 | var master_json = $.asv.master_json; 107 | var summary_container = $('
'); 108 | 109 | if (summary_loaded) { 110 | return; 111 | } 112 | 113 | $.each(get_benchmarks_by_groups(), function(group, benchmarks) { 114 | var group_container = $('
') 115 | group_container.append($('

' + group + '

')); 116 | summary_display.append(group_container); 117 | $.each(benchmarks, function(i, bm_name) { 118 | var bm = $.asv.master_json.benchmarks[bm_name]; 119 | group_container.append(benchmark_container(bm)); 120 | }); 121 | }); 122 | 123 | summary_display.append(summary_container); 124 | $(window).trigger('scroll'); 125 | 126 | summary_loaded = true; 127 | } 128 | 129 | $.asv.register_page('', function(params) { 130 | $('#summarygrid-display').show(); 131 | $("#title").text("All benchmarks"); 132 | $('.tooltip').remove(); 133 | make_summary(); 134 | }); 135 | }); 136 | -------------------------------------------------------------------------------- /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | jobs: 3 | build: 4 | docker: 5 | - image: cimg/base:2020.01 6 | environment: 7 | TEST_OUTPUT: /tmp/artifacts 8 | steps: 9 | - checkout 10 | - run: | 11 | cd ~ 12 | mkdir -p bin micromamba 13 | wget -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba 14 | eval "$(~/bin/micromamba shell hook -s bash -p ~/micromamba)" 15 | micromamba activate 16 | micromamba install conda-build conda-verify codecov -c conda-forge 17 | - run: | 18 | eval "$(~/bin/micromamba shell hook -s bash -p ~/micromamba)" 19 | micromamba activate 20 | mkdir -p ${TEST_OUTPUT} 21 | conda build . 
-c conda-forge -c coecms 22 | - run: 23 | name: Upload codecov 24 | command: | 25 | eval "$(~/bin/micromamba shell hook -s bash -p ~/micromamba)" 26 | micromamba activate 27 | coverage combine ${TEST_OUTPUT}/coverage 28 | coverage xml -o ${TEST_OUTPUT}/coverage.xml 29 | codecov -f ${TEST_OUTPUT}/coverage.xml 30 | when: always 31 | - store_artifacts: 32 | path: /tmp/artifacts 33 | - store_test_results: 34 | path: /tmp/artifacts/pytest 35 | - persist_to_workspace: 36 | root: /home/circleci/micromamba/conda-bld 37 | paths: 38 | - '*/*.tar.bz2' 39 | black-check: 40 | docker: 41 | - image: cimg/base:2020.01 42 | steps: 43 | - checkout 44 | - run: | 45 | cd ~ 46 | mkdir -p bin micromamba 47 | wget -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba 48 | eval "$(~/bin/micromamba shell hook -s bash -p ~/micromamba)" 49 | micromamba activate 50 | micromamba install black -c conda-forge 51 | - run: 52 | name: Checking if 'black' needs to be run 53 | command: | 54 | eval "$(~/bin/micromamba shell hook -s bash -p ~/micromamba)" 55 | micromamba activate 56 | black --check --diff . 
57 | when: always 58 | mypy-check: 59 | docker: 60 | - image: cimg/base:2020.01 61 | steps: 62 | - checkout 63 | - run: | 64 | cd ~ 65 | mkdir -p bin micromamba 66 | wget -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba 67 | eval "$(~/bin/micromamba shell hook -s bash -p ~/micromamba)" 68 | micromamba activate 69 | micromamba install mypy xarray -c conda-forge 70 | - run: 71 | name: Type checking with mypy 72 | command: | 73 | eval "$(~/bin/micromamba shell hook -s bash -p ~/micromamba)" 74 | micromamba activate 75 | mypy 76 | when: always 77 | 78 | publish: 79 | docker: 80 | - image: cimg/base:2020.01 81 | steps: 82 | - attach_workspace: 83 | at: /tmp/artifacts 84 | - run: | 85 | cd ~ 86 | mkdir -p bin micromamba 87 | wget -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba 88 | eval "$(~/bin/micromamba shell hook -s bash -p ~/micromamba)" 89 | micromamba activate 90 | micromamba install anaconda-client -c conda-forge 91 | - run: | 92 | eval "$(~/bin/micromamba shell hook -s bash -p ~/micromamba)" 93 | micromamba activate 94 | anaconda --token "${ANACONDA_TOKEN}" upload --user "${ANACONDA_USER}" /tmp/artifacts/*/*.tar.bz2 95 | 96 | workflows: 97 | version: 2 98 | ci-check: 99 | jobs: 100 | - black-check 101 | - mypy-check 102 | - build: 103 | filters: 104 | tags: 105 | only: /.*/ 106 | - publish: 107 | context: conda-publish 108 | requires: 109 | - build 110 | filters: 111 | tags: 112 | only: /.*/ 113 | branches: 114 | ignore: /.*/ 115 | monthly-check: 116 | jobs: 117 | - build 118 | triggers: 119 | - schedule: 120 | cron: "0 0 1 * *" 121 | filters: 122 | branches: 123 | only: 124 | - master 125 | 126 | -------------------------------------------------------------------------------- /doc/_static/asv/regressions.xml: -------------------------------------------------------------------------------- 1 | 2 | 
tag:climtas.asv,1970-01-01:/b0bfb31ef8af4bbb6700582c119133116b6dc187c64a5045c2b2532d1bf91de5Airspeed Velocityclimtas performance regressions2021-01-14T15:07:53Ztag:climtas.asv,2021-01-14:/52e14a2064607645dbc4df43478e4bb1133d994e864acf4b12fc7291bf8fc7f221.12% event.EventSuite.time_find_event2021-01-14T15:07:53Z<a href="index.html#event.EventSuite.time_find_event?cpu=AMD+Ryzen+5+3600X+6-Core+Processor&machine=Freya&num_cpu=12&os=Linux+4.4.0-19041-Microsoft&ram=16726988&commits=3801e053df60b21ffab4534be4c2642bf0a17e01-9bb26ba608bd54335632cc953f0b232a8c94b269">21.12% regression</a> on 2021-01-14 13:18:23 in commits <a href="http://github.com/coecms/climtas/commit/../compare/3801e053df60b21ffab4534be4c2642bf0a17e01...9bb26ba608bd54335632cc953f0b232a8c94b269">3801e053...9bb26ba6</a>.<br> 3 | New value: 1.46s, old value: 1.20s.<br> 4 | Latest value: 1.46s (26.31% worse than best value 1.15s).tag:climtas.asv,2021-01-14:/40665b6df38fe3b61447e77369897a4fcc0c5b40f99f16d4369e444ad19c01257.69% blocked.ResampleDistributedSuite.time_xarray2021-01-14T15:07:12Z<a href="index.html#blocked.ResampleDistributedSuite.time_xarray?cpu=Intel%28R%29+Xeon%28R%29+Platinum+8268+CPU+%40+2.90GHz&machine=gadi&num_cpu=48&os=Linux+4.18.0-240.1.1.el8.nci.x86_64&ram=262432756&commits=9bb26ba608bd54335632cc953f0b232a8c94b269">7.69% regression</a> on 2021-01-14 13:18:23 in commit <a href="http://github.com/coecms/climtas/commit/9bb26ba608bd54335632cc953f0b232a8c94b269">9bb26ba6</a>.<br> 5 | New value: 3.91s, old value: 3.63s.<br> 6 | Latest value: 3.91s (7.69% worse than best value 3.63s).tag:climtas.asv,2021-01-14:/2555fd4e1fc418d9ba05faa8218169c437784fac560fd38f8d48fb717ac1906e65.32% event.EventSuite.time_find_event2021-01-14T11:10:42Z<a 
href="index.html#event.EventSuite.time_find_event?cpu=Intel%28R%29+Xeon%28R%29+Platinum+8268+CPU+%40+2.90GHz&machine=gadi&num_cpu=48&os=Linux+4.18.0-240.1.1.el8.nci.x86_64&ram=262432756&commits=afde30224b467f2b24f12dbbd580194a4155f334-dc44fc19d72a7969a09b912c1a6fae1f35056458">65.32% regression</a> on 2021-01-14 10:50:29 in commits <a href="http://github.com/coecms/climtas/commit/../compare/afde30224b467f2b24f12dbbd580194a4155f334...dc44fc19d72a7969a09b912c1a6fae1f35056458">afde3022...dc44fc19</a>.<br> 7 | New value: 5.25s, old value: 3.18s.<br> 8 | Latest value: 5.25s (65.32% worse than best value 3.18s).tag:climtas.asv,2021-01-14:/3e8d95dacd2509b3beb8e9187fa0a03f692b5f5f4b0726bc0ea3b2749bd428f024.60% blocked.GroupbySuite.time_blocked_monthday2021-01-14T11:09:20Z<a href="index.html#blocked.GroupbySuite.time_blocked_monthday?cpu=Intel%28R%29+Xeon%28R%29+Platinum+8268+CPU+%40+2.90GHz&machine=gadi&num_cpu=48&os=Linux+4.18.0-240.1.1.el8.nci.x86_64&ram=262432756&commits=afde30224b467f2b24f12dbbd580194a4155f334-dc44fc19d72a7969a09b912c1a6fae1f35056458">24.60% regression</a> on 2021-01-14 10:50:29 in commits <a href="http://github.com/coecms/climtas/commit/../compare/afde30224b467f2b24f12dbbd580194a4155f334...dc44fc19d72a7969a09b912c1a6fae1f35056458">afde3022...dc44fc19</a>.<br> 9 | New value: 536ms, old value: 430ms.<br> 10 | Latest value: 536ms (24.60% worse than best value 430ms). -------------------------------------------------------------------------------- /src/climtas/nci/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # Copyright 2020 Scott Wales 3 | # author: Scott Wales 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 
7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | """NCI Specific functions and utilities 18 | """ 19 | 20 | import dask.distributed 21 | import os 22 | import tempfile 23 | 24 | _dask_client = None 25 | _tmpdir = None 26 | 27 | 28 | def Client(threads=1, malloc_trim_threshold=None): 29 | """Start a Dask client at NCI 30 | 31 | An appropriate client will be started for the current system 32 | 33 | Args: 34 | threads: Number of threads per worker process. The total number of 35 | workers will be ncpus/threads, so that each thread gets its own 36 | CPU 37 | malloc_trim_threshold: Threshold for automatic memory trimming. Can be 38 | either a string e.g. '64kib' or a number of bytes e.g. 65536. 39 | Smaller values may reduce out of memory errors at the cost of 40 | running slower 41 | 42 | https://distributed.dask.org/en/latest/worker.html?highlight=worker#automatically-trim-memory 43 | """ 44 | 45 | if os.environ["HOSTNAME"].startswith("ood"): 46 | return OODClient(threads, malloc_trim_threshold) 47 | else: 48 | return GadiClient(threads, malloc_trim_threshold) 49 | 50 | 51 | def OODClient(threads=1, malloc_trim_threshold=None): 52 | """Start a Dask client on OOD 53 | 54 | This function is mostly to be consistent with the Gadi version 55 | 56 | Args: 57 | threads: Number of threads per worker process. The total number of 58 | workers will be ncpus/threads, so that each thread gets its own 59 | CPU 60 | malloc_trim_threshold: Threshold for automatic memory trimming. Can be 61 | either a string e.g. '64kib' or a number of bytes e.g. 65536. 
62 | Smaller values may reduce out of memory errors at the cost of 63 | running slower 64 | 65 | https://distributed.dask.org/en/latest/worker.html?highlight=worker#automatically-trim-memory 66 | """ 67 | global _dask_client, _tmpdir 68 | 69 | env = {} 70 | 71 | if malloc_trim_threshold is not None: 72 | env["MALLOC_TRIM_THRESHOLD_"] = str( 73 | dask.utils.parse_bytes(malloc_trim_threshold) 74 | ) 75 | 76 | if _dask_client is None: 77 | try: 78 | # Works in sidebar and can follow the link 79 | dask.config.set( 80 | { 81 | "distributed.dashboard.link": f'/node/{os.environ["host"]}/{os.environ["port"]}/proxy/{{port}}/status' 82 | } 83 | ) 84 | except KeyError: 85 | # Works in sidebar, but can't follow the link 86 | dask.config.set({"distributed.dashboard.link": "/proxy/{port}/status"}) 87 | 88 | _dask_client = dask.distributed.Client(threads_per_worker=threads, env=env) 89 | 90 | return _dask_client 91 | 92 | 93 | def GadiClient(threads=1, malloc_trim_threshold=None): 94 | """Start a Dask client on Gadi 95 | 96 | If run on a compute node it will check the PBS resources to know how many 97 | CPUs and the amount of memory that is available. 98 | 99 | If run on a login node it will ask for 2 workers each with a 1GB memory 100 | limit 101 | 102 | Args: 103 | threads: Number of threads per worker process. The total number of 104 | workers will be $PBS_NCPUS/threads, so that each thread gets its own 105 | CPU 106 | malloc_trim_threshold: Threshold for automatic memory trimming. Can be 107 | either a string e.g. '64kib' or a number of bytes e.g. 65536. 
108 | Smaller values may reduce out of memory errors at the cost of 109 | running slower 110 | 111 | https://distributed.dask.org/en/latest/worker.html?highlight=worker#automatically-trim-memory 112 | """ 113 | global _dask_client, _tmpdir 114 | 115 | env = {} 116 | 117 | if malloc_trim_threshold is not None: 118 | env["MALLOC_TRIM_THRESHOLD_"] = str( 119 | dask.utils.parse_bytes(malloc_trim_threshold) 120 | ) 121 | 122 | if _dask_client is None: 123 | _tmpdir = tempfile.TemporaryDirectory("dask-worker-space") 124 | 125 | if os.environ["HOSTNAME"].startswith("gadi-login"): 126 | _dask_client = dask.distributed.Client( 127 | n_workers=2, 128 | threads_per_worker=threads, 129 | memory_limit="1000mb", 130 | local_directory=_tmpdir.name, 131 | env=env, 132 | ) 133 | else: 134 | workers = int(os.environ["PBS_NCPUS"]) // threads 135 | _dask_client = dask.distributed.Client( 136 | n_workers=workers, 137 | threads_per_worker=threads, 138 | memory_limit=int(os.environ["PBS_VMEM"]) / workers, 139 | local_directory=_tmpdir.name, 140 | env=env, 141 | ) 142 | return _dask_client 143 | -------------------------------------------------------------------------------- /doc/_static/asv/vendor/jquery.flot-0.8.3.time.min.js: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright (c) 2007-2014 IOLA and Ole Laursen 3 | 4 | Permission is hereby granted, free of charge, to any person 5 | obtaining a copy of this software and associated documentation 6 | files (the "Software"), to deal in the Software without 7 | restriction, including without limitation the rights to use, 8 | copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the 10 | Software is furnished to do so, subject to the following 11 | conditions: 12 | 13 | The above copyright notice and this permission notice shall be 14 | included in all copies or substantial portions of the Software. 
15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 17 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES 18 | OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 19 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 20 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, 21 | WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 22 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 23 | OTHER DEALINGS IN THE SOFTWARE. 24 | */ 25 | 26 | !function(e){function t(e,t){return t*Math.floor(e/t)}function n(e,t,n,r){if("function"==typeof e.strftime)return e.strftime(t);var a=function(e,t){return e=""+e,t=""+(null==t?"0":t),1==e.length?t+e:e},i=[],o=!1,s=e.getHours(),u=12>s;null==n&&(n=["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"]),null==r&&(r=["Sun","Mon","Tue","Wed","Thu","Fri","Sat"]);var m;m=s>12?s-12:0==s?12:s;for(var c=0;c=o);++l);var h=u[l][0],f=u[l][1];if("year"==f){if(null!=i.minTickSize&&"year"==i.minTickSize[1])h=Math.floor(i.minTickSize[0]);else{var k=Math.pow(10,Math.floor(Math.log(e.delta/s.year)/Math.LN10)),d=e.delta/s.year/k;h=1.5>d?1:3>d?2:7.5>d?5:10,h*=k}1>h&&(h=1)}e.tickSize=i.tickSize||[h,f];var g=e.tickSize[0];f=e.tickSize[1];var M=g*s[f];"second"==f?r.setSeconds(t(r.getSeconds(),g)):"minute"==f?r.setMinutes(t(r.getMinutes(),g)):"hour"==f?r.setHours(t(r.getHours(),g)):"month"==f?r.setMonth(t(r.getMonth(),g)):"quarter"==f?r.setMonth(3*t(r.getMonth()/3,g)):"year"==f&&r.setFullYear(t(r.getFullYear(),g)),r.setMilliseconds(0),M>=s.minute&&r.setSeconds(0),M>=s.hour&&r.setMinutes(0),M>=s.day&&r.setHours(0),M>=4*s.day&&r.setDate(1),M>=2*s.month&&r.setMonth(t(r.getMonth(),3)),M>=2*s.quarter&&r.setMonth(t(r.getMonth(),6)),M>=s.year&&r.setMonth(0);var y,S=0,z=Number.NaN;do if(y=z,z=r.getTime(),n.push(z),"month"==f||"quarter"==f)if(1>g){r.setDate(1);var p=r.getTime();r.setMonth(r.getMonth()+("quarter"==f?3:1));var 
v=r.getTime();r.setTime(z+S*s.hour+(v-p)*g),S=r.getHours(),r.setHours(0)}else r.setMonth(r.getMonth()+g*("quarter"==f?3:1));else"year"==f?r.setFullYear(r.getFullYear()+g):r.setTime(z+M);while(z' + opts.axisLabel + '
'); 65 | plot.getPlaceholder().append(elem); 66 | w = elem.outerWidth(true); 67 | h = elem.outerHeight(true); 68 | elem.remove(); 69 | } 70 | 71 | if (axisName.charAt(0) == 'x') 72 | axis.labelHeight += h; 73 | else 74 | axis.labelWidth += w; 75 | opts.labelHeight = axis.labelHeight; 76 | opts.labelWidth = axis.labelWidth; 77 | }); 78 | // re-draw with new label widths and heights 79 | secondPass = true; 80 | plot.setupGrid(); 81 | plot.draw(); 82 | 83 | 84 | } else { 85 | // DRAW 86 | $.each(plot.getAxes(), function(axisName, axis) { 87 | var opts = axis.options // Flot 0.7 88 | || plot.getOptions()[axisName]; // Flot 0.6 89 | if (!opts || !opts.axisLabel) 90 | return; 91 | 92 | if (opts.axisLabelUseCanvas) { 93 | // canvas text 94 | var ctx = plot.getCanvas().getContext('2d'); 95 | ctx.save(); 96 | ctx.font = opts.axisLabelFontSizePixels + 'px ' + 97 | opts.axisLabelFontFamily; 98 | var width = ctx.measureText(opts.axisLabel).width; 99 | var height = opts.axisLabelFontSizePixels; 100 | var x, y; 101 | if (axisName.charAt(0) == 'x') { 102 | x = plot.getPlotOffset().left + plot.width()/2 - width/2; 103 | y = plot.getCanvas().height; 104 | } else { 105 | x = height * 0.72; 106 | y = plot.getPlotOffset().top + plot.height()/2 - width/2; 107 | } 108 | ctx.translate(x, y); 109 | ctx.rotate((axisName.charAt(0) == 'x') ? 0 : -Math.PI/2); 110 | ctx.fillText(opts.axisLabel, 0, 0); 111 | ctx.restore(); 112 | 113 | } else { 114 | // HTML text 115 | plot.getPlaceholder().find('#' + axisName + 'Label').remove(); 116 | var elem = $('
' + opts.axisLabel + '
'); 117 | if (axisName.charAt(0) == 'x') { 118 | elem.css('left', plot.getPlotOffset().left + plot.width()/2 - elem.outerWidth()/2 + 'px'); 119 | elem.css('bottom', '0px'); 120 | } else { 121 | elem.css('top', plot.getPlotOffset().top + plot.height()/2 - elem.outerHeight()/2 + 'px'); 122 | elem.css('left', '0px'); 123 | } 124 | plot.getPlaceholder().append(elem); 125 | } 126 | }); 127 | secondPass = false; 128 | } 129 | }); 130 | } 131 | 132 | 133 | 134 | $.plot.plugins.push({ 135 | init: init, 136 | options: options, 137 | name: 'axisLabels', 138 | version: '1.0' 139 | }); 140 | })(jQuery); 141 | -------------------------------------------------------------------------------- /doc/_static/asv/vendor/jquery.flot.orderBars.js: -------------------------------------------------------------------------------- 1 | /* 2 | * Flot plugin to order bars side by side. 3 | * 4 | * Released under the MIT license by Benjamin BUFFET, 20-Sep-2010. 5 | * 6 | * This plugin is an alpha version. 7 | * 8 | * To activate the plugin you must specify the parameter "order" for the specific serie : 9 | * 10 | * $.plot($("#placeholder"), [{ data: [ ... 
], bars :{ order = null or integer }]) 11 | * 12 | * If 2 series have the same order param, they are ordered by the position in the array; 13 | * 14 | * The plugin adjust the point by adding a value depanding of the barwidth 15 | * Exemple for 3 series (barwidth : 0.1) : 16 | * 17 | * first bar décalage : -0.15 18 | * second bar décalage : -0.05 19 | * third bar décalage : 0.05 20 | * 21 | */ 22 | 23 | (function($){ 24 | function init(plot){ 25 | var orderedBarSeries; 26 | var nbOfBarsToOrder; 27 | var borderWidth; 28 | var borderWidthInXabsWidth; 29 | var pixelInXWidthEquivalent = 1; 30 | var isHorizontal = false; 31 | 32 | /* 33 | * This method add shift to x values 34 | */ 35 | function reOrderBars(plot, serie, datapoints){ 36 | var shiftedPoints = null; 37 | 38 | if(serieNeedToBeReordered(serie)){ 39 | checkIfGraphIsHorizontal(serie); 40 | calculPixel2XWidthConvert(plot); 41 | retrieveBarSeries(plot); 42 | calculBorderAndBarWidth(serie); 43 | 44 | if(nbOfBarsToOrder >= 2){ 45 | var position = findPosition(serie); 46 | var decallage = 0; 47 | 48 | var centerBarShift = calculCenterBarShift(); 49 | 50 | if (isBarAtLeftOfCenter(position)){ 51 | decallage = -1*(sumWidth(orderedBarSeries,position-1,Math.floor(nbOfBarsToOrder / 2)-1)) - centerBarShift; 52 | }else{ 53 | decallage = sumWidth(orderedBarSeries,Math.ceil(nbOfBarsToOrder / 2),position-2) + centerBarShift + borderWidthInXabsWidth*2; 54 | } 55 | 56 | shiftedPoints = shiftPoints(datapoints,serie,decallage); 57 | datapoints.points = shiftedPoints; 58 | } 59 | } 60 | return shiftedPoints; 61 | } 62 | 63 | function serieNeedToBeReordered(serie){ 64 | return serie.bars != null 65 | && serie.bars.show 66 | && serie.bars.order != null; 67 | } 68 | 69 | function calculPixel2XWidthConvert(plot){ 70 | var gridDimSize = isHorizontal ? plot.getPlaceholder().innerHeight() : plot.getPlaceholder().innerWidth(); 71 | var minMaxValues = isHorizontal ? 
getAxeMinMaxValues(plot.getData(),1) : getAxeMinMaxValues(plot.getData(),0); 72 | var AxeSize = minMaxValues[1] - minMaxValues[0]; 73 | pixelInXWidthEquivalent = AxeSize / gridDimSize; 74 | } 75 | 76 | function getAxeMinMaxValues(series,AxeIdx){ 77 | var minMaxValues = new Array(); 78 | for(var i = 0; i < series.length; i++){ 79 | if (typeof series[i].data[0] != "number") { 80 | minMaxValues[0] = 0; 81 | minMaxValues[1] = series.length; 82 | return minMaxValues; 83 | } 84 | minMaxValues[0] = series[i].data[0][AxeIdx]; 85 | minMaxValues[1] = series[i].data[series[i].data.length - 1][AxeIdx]; 86 | } 87 | return minMaxValues; 88 | } 89 | 90 | function retrieveBarSeries(plot){ 91 | orderedBarSeries = findOthersBarsToReOrders(plot.getData()); 92 | nbOfBarsToOrder = orderedBarSeries.length; 93 | } 94 | 95 | function findOthersBarsToReOrders(series){ 96 | var retSeries = new Array(); 97 | 98 | for(var i = 0; i < series.length; i++){ 99 | if(series[i].bars.order != null && series[i].bars.show){ 100 | retSeries.push(series[i]); 101 | } 102 | } 103 | 104 | return retSeries.sort(sortByOrder); 105 | } 106 | 107 | function sortByOrder(serie1,serie2){ 108 | var x = serie1.bars.order; 109 | var y = serie2.bars.order; 110 | return ((x < y) ? -1 : ((x > y) ? 1 : 0)); 111 | } 112 | 113 | function calculBorderAndBarWidth(serie){ 114 | borderWidth = serie.bars.lineWidth ? 
serie.bars.lineWidth : 2; 115 | borderWidthInXabsWidth = borderWidth * pixelInXWidthEquivalent; 116 | } 117 | 118 | function checkIfGraphIsHorizontal(serie){ 119 | if(serie.bars.horizontal){ 120 | isHorizontal = true; 121 | } 122 | } 123 | 124 | function findPosition(serie){ 125 | var pos = 0 126 | for (var i = 0; i < orderedBarSeries.length; ++i) { 127 | if (serie == orderedBarSeries[i]){ 128 | pos = i; 129 | break; 130 | } 131 | } 132 | 133 | return pos+1; 134 | } 135 | 136 | function calculCenterBarShift(){ 137 | var width = 0; 138 | 139 | if(nbOfBarsToOrder%2 != 0) 140 | width = (orderedBarSeries[Math.ceil(nbOfBarsToOrder / 2)].bars.barWidth)/2; 141 | 142 | return width; 143 | } 144 | 145 | function isBarAtLeftOfCenter(position){ 146 | return position <= Math.ceil(nbOfBarsToOrder / 2); 147 | } 148 | 149 | function sumWidth(series,start,end){ 150 | var totalWidth = 0; 151 | 152 | for(var i = start; i <= end; i++){ 153 | totalWidth += series[i].bars.barWidth+borderWidthInXabsWidth*2; 154 | } 155 | 156 | return totalWidth; 157 | } 158 | 159 | function shiftPoints(datapoints,serie,dx){ 160 | var ps = datapoints.pointsize; 161 | var points = datapoints.points; 162 | var j = 0; 163 | for(var i = isHorizontal ? 1 : 0;i < points.length; i += ps){ 164 | points[i] += dx; 165 | //Adding the new x value in the serie to be abble to display the right tooltip value, 166 | //using the index 3 to not overide the third index. 
167 | serie.data[j][3] = points[i]; 168 | j++; 169 | } 170 | 171 | return points; 172 | } 173 | 174 | plot.hooks.processDatapoints.push(reOrderBars); 175 | 176 | } 177 | 178 | var options = { 179 | series : { 180 | bars: {order: null} // or number/string 181 | } 182 | }; 183 | 184 | $.plot.plugins.push({ 185 | init: init, 186 | options: options, 187 | name: "orderBars", 188 | version: "0.2" 189 | }); 190 | 191 | })(jQuery); 192 | 193 | -------------------------------------------------------------------------------- /.asv/results/gadi/dc44fc19-conda-py-cfunits-dask-iris-mule-netcdf4-pandas-python-graphviz-scipy-sparse-tqdm-typing_extensions-xarray.json: -------------------------------------------------------------------------------- 1 | {"results": {"blocked.GroupbyDistributedSuite.time_blocked_dayofyear": {"result": [1.1188362745160703], "stats": [{"ci_99": [1.0693158345040825, 1.2003626819932833], "q_25": 1.1029717827768764, "q_75": 1.146128972992301, "min": 1.083916526986286, "max": 1.2003626819932833, "mean": 1.1291436526710943, "std": 0.03837425573996615, "repeat": 6, "number": 1}]}, "blocked.GroupbyDistributedSuite.time_blocked_monthday": {"result": [0.9274916650028899], "stats": [{"ci_99": [0.8938917012426452, 0.9903108390280977], "q_25": 0.9137546134879813, "q_75": 0.9515080179844517, "min": 0.902632643992547, "max": 0.9903108390280977, "mean": 0.9358514872812, "std": 0.03093758572475247, "repeat": 7, "number": 1}]}, "blocked.GroupbyDistributedSuite.time_xarray_dayofyear": {"result": [6.640781223017257], "stats": [{"ci_99": [-17.440353625512216, 30.72191607154671], "q_25": 6.399969874531962, "q_75": 6.881592571502551, "min": 6.159158526046667, "max": 7.122403919987846, "mean": 6.640781223017257, "std": 0.48162269697058946, "repeat": 2, "number": 1}]}, "blocked.GroupbySuite.time_blocked_dayofyear": {"result": [0.7421407370420638], "stats": [{"ci_99": [0.5572840130189434, 0.7768937540240586], "q_25": 0.7272538732649991, "q_75": 0.75673515649396, "min": 
0.5572840130189434, "max": 0.7768937540240586, "mean": 0.7260637401137501, "std": 0.058812484322861965, "repeat": 10, "number": 1}]}, "blocked.GroupbySuite.time_blocked_monthday": {"result": [0.5758137800148688], "stats": [{"ci_99": [0.5394097630050965, 0.5906442609848455], "q_25": 0.5606387275183806, "q_75": 0.579933779037674, "min": 0.5394097630050965, "max": 0.5906442609848455, "mean": 0.57134903370752, "std": 0.01547337643651879, "repeat": 10, "number": 1}]}, "blocked.GroupbySuite.time_xarray_dayofyear": {"result": [6.291899875999661], "stats": [{"ci_99": [-6.849528173130238, 19.43332792512955], "q_25": 6.160485595508362, "q_75": 6.42331415649096, "min": 6.029071315017063, "max": 6.554728436982259, "mean": 6.291899875999661, "std": 0.262828560982598, "repeat": 2, "number": 1}]}, "blocked.ResampleDistributedSuite.time_blocked": {"result": [0.7044047605013475], "stats": [{"ci_99": [0.6884356090449728, 0.7457743940176442], "q_25": 0.6998785037430935, "q_75": 0.7347686157590942, "min": 0.6884356090449728, "max": 0.7457743940176442, "mean": 0.7144233772560256, "std": 0.021336766929078724, "repeat": 8, "number": 1}]}, "blocked.ResampleDistributedSuite.time_xarray": {"result": [3.634890771994833], "stats": [{"ci_99": [3.48270241822215, 3.841286970776447], "q_25": 3.610613158743945, "q_75": 3.686272307750187, "min": 3.60215855500428, "max": 3.77603867900325, "mean": 3.661994694499299, "std": 0.06887788460130795, "repeat": 4, "number": 1}]}, "blocked.ResampleSuite.time_blocked": {"result": [0.10901543201180175], "stats": [{"ci_99": [0.09912305697798729, 0.11456213699420914], "q_25": 0.10553523772978224, "q_75": 0.11231253478035796, "min": 0.09912305697798729, "max": 0.11456213699420914, "mean": 0.10834767769556493, "std": 0.0050942960850221255, "repeat": 10, "number": 1}]}, "blocked.ResampleSuite.time_xarray": {"result": [3.7593740224838257], "stats": [{"ci_99": [3.3364849564429093, 4.1398736900840225], "q_25": 3.678487672485062, "q_75": 3.81906567326223, "min": 
3.5081883300445043, "max": 3.92578091804171, "mean": 3.7381793232634664, "std": 0.15012676749190942, "repeat": 4, "number": 1}]}, "event.EventDistributedSuite.time_find_event": {"result": [2.8652793369838037], "stats": [{"ci_99": [2.621194567958132, 3.077187618046759], "q_25": 2.828818547510309, "q_75": 2.8776077604852617, "min": 2.7923577580368146, "max": 2.8899361839867197, "mean": 2.849191093002446, "std": 0.04142874202384978, "repeat": 3, "number": 1}]}, "event.EventSuite.time_find_event": {"result": [5.308832254988374], "stats": [{"ci_99": [3.9566171056067105, 6.661047404370037], "q_25": 5.2953101034945576, "q_75": 5.322354406482191, "min": 5.281787952000741, "max": 5.3358765579760075, "mean": 5.308832254988374, "std": 0.027044302987633273, "repeat": 2, "number": 1}]}}, "params": {"arch": "x86_64", "cpu": "Intel(R) Xeon(R) Platinum 8268 CPU @ 2.90GHz", "machine": "gadi", "num_cpu": "48", "os": "Linux 4.18.0-240.1.1.el8.nci.x86_64", "ram": "262432756", "python": "", "cfunits": "", "dask": "", "iris": "", "mule": "", "netcdf4": "", "pandas": "", "python-graphviz": "", "scipy": "", "sparse": "", "tqdm": "", "typing_extensions": "", "xarray": ""}, "requirements": {"cfunits": "", "dask": "", "iris": "", "mule": "", "netcdf4": "", "pandas": "", "python-graphviz": "", "scipy": "", "sparse": "", "tqdm": "", "typing_extensions": "", "xarray": ""}, "commit_hash": "dc44fc19d72a7969a09b912c1a6fae1f35056458", "date": 1610581829000, "env_name": "conda-py-cfunits-dask-iris-mule-netcdf4-pandas-python-graphviz-scipy-sparse-tqdm-typing_extensions-xarray", "python": "", "profiles": {}, "started_at": {"blocked.GroupbyDistributedSuite.time_blocked_dayofyear": 1610582902242, "blocked.GroupbyDistributedSuite.time_blocked_monthday": 1610582916091, "blocked.GroupbyDistributedSuite.time_xarray_dayofyear": 1610582932098, "blocked.GroupbySuite.time_blocked_dayofyear": 1610582952012, "blocked.GroupbySuite.time_blocked_monthday": 1610582956598, "blocked.GroupbySuite.time_xarray_dayofyear": 
1610582960282, "blocked.ResampleDistributedSuite.time_blocked": 1610582972526, "blocked.ResampleDistributedSuite.time_xarray": 1610582987339, "blocked.ResampleSuite.time_blocked": 1610583005546, "blocked.ResampleSuite.time_xarray": 1610583006424, "event.EventDistributedSuite.time_find_event": 1610583018018, "event.EventSuite.time_find_event": 1610583031166}, "ended_at": {"blocked.GroupbyDistributedSuite.time_blocked_dayofyear": 1610582916091, "blocked.GroupbyDistributedSuite.time_blocked_monthday": 1610582932097, "blocked.GroupbyDistributedSuite.time_xarray_dayofyear": 1610582952011, "blocked.GroupbySuite.time_blocked_dayofyear": 1610582956597, "blocked.GroupbySuite.time_blocked_monthday": 1610582960281, "blocked.GroupbySuite.time_xarray_dayofyear": 1610582972526, "blocked.ResampleDistributedSuite.time_blocked": 1610582987339, "blocked.ResampleDistributedSuite.time_xarray": 1610583005545, "blocked.ResampleSuite.time_blocked": 1610583006424, "blocked.ResampleSuite.time_xarray": 1610583018017, "event.EventDistributedSuite.time_find_event": 1610583031165, "event.EventSuite.time_find_event": 1610583042008}, "benchmark_version": {"blocked.GroupbyDistributedSuite.time_blocked_dayofyear": "b9f0bb55bbc47ee06d3b18d1a262ecb508482a72c15090ec33ce28fc8b66d564", "blocked.GroupbyDistributedSuite.time_blocked_monthday": "c8273bf5b34f68f8d0748f20122fbeedba81d415aa9d87794aa2b6869e6408d0", "blocked.GroupbyDistributedSuite.time_xarray_dayofyear": "d92aafd6c72af2cba3a3ab99a9e2ef6a439baf27470e8cf68b450951356cb45b", "blocked.GroupbySuite.time_blocked_dayofyear": "01d3740b4f5916532dc07a5d1aec3619fd38128f350845bcc1d9d15cd4d3b1dd", "blocked.GroupbySuite.time_blocked_monthday": "ea1ecf7949f8513fba397204403a348b9efc7ccd3753e255e48e9e64689684d2", "blocked.GroupbySuite.time_xarray_dayofyear": "c5d3ef9404ad60cb7e3b28de3c565c02f7a75c1041aab3ccdeba1cc6c7618e7b", "blocked.ResampleDistributedSuite.time_blocked": "d29f9c2d6cde111261f22e51c557062c5068402c190cf9dcec965b378369d37f", 
"blocked.ResampleDistributedSuite.time_xarray": "d8d10101f16199a6d72ef005b883a26202f2f8a1851c0042e59041d01aca66eb", "blocked.ResampleSuite.time_blocked": "62b1f41ff720dc40ea68f172077bed6172fe97cfcf57eace07809d4845c85a52", "blocked.ResampleSuite.time_xarray": "364ac7c62ba036563517fdb59f62d4ce79f49fe8d6476f286494b9671c236610", "event.EventDistributedSuite.time_find_event": "c5f98ef36a9e8d6fdefb8f5a70d489bad9a34b78345c40b8a9bb628a0bb0d04e", "event.EventSuite.time_find_event": "a231d487f53daed34d1f991be54d797f3e1f15a79e7d0a811686326b16fc028e"}, "version": 1} -------------------------------------------------------------------------------- /.asv/results/gadi/9bb26ba6-conda-py-cfunits-dask-iris-mule-netcdf4-pandas-python-graphviz-scipy-sparse-tqdm-typing_extensions-xarray.json: -------------------------------------------------------------------------------- 1 | {"results": {"blocked.GroupbyDistributedSuite.time_blocked_dayofyear": {"result": [1.1755590185057372], "stats": [{"ci_99": [1.120809971995186, 1.2014542734069489], "q_25": 1.1672759004723048, "q_75": 1.1848113794840174, "min": 1.120809971995186, "max": 1.1904686199850403, "mean": 1.1691344983216065, "std": 0.02318811144440752, "repeat": 6, "number": 1}]}, "blocked.GroupbyDistributedSuite.time_blocked_monthday": {"result": [0.9424207944830414], "stats": [{"ci_99": [0.9126509808807601, 1.0170762080233544], "q_25": 0.9369514437421458, "q_75": 0.9483564157853834, "min": 0.9232574930065311, "max": 1.0170762080233544, "mean": 0.9517730955073299, "std": 0.03036401909917046, "repeat": 6, "number": 1}]}, "blocked.GroupbyDistributedSuite.time_xarray_dayofyear": {"result": [6.889836066489806], "stats": [{"ci_99": [-3.8319272078515496, 17.611599340831155], "q_25": 6.782618433746393, "q_75": 6.99705369923322, "min": 6.675400801002979, "max": 7.1042713319766335, "mean": 6.889836066489806, "std": 0.21443526548682712, "repeat": 2, "number": 1}]}, "blocked.GroupbySuite.time_blocked_dayofyear": {"result": [0.6714389724947978], 
"stats": [{"ci_99": [0.5508643620414659, 0.7089323020190932], "q_25": 0.6661584460089216, "q_75": 0.6814665054989746, "min": 0.5508643620414659, "max": 0.7089323020190932, "mean": 0.6568439665075857, "std": 0.049964256472600214, "repeat": 10, "number": 1}]}, "blocked.GroupbySuite.time_blocked_monthday": {"result": [0.5356181205133907], "stats": [{"ci_99": [0.37975451396778226, 0.5627500279806554], "q_25": 0.45521607776754536, "q_75": 0.5489953659998719, "min": 0.37975451396778226, "max": 0.5627500279806554, "mean": 0.5010153308045119, "std": 0.065623899100981, "repeat": 10, "number": 1}]}, "blocked.GroupbySuite.time_xarray_dayofyear": {"result": [5.589740982977673], "stats": [{"ci_99": [0.5103741836501285, 10.669107782305215], "q_25": 5.538947314984398, "q_75": 5.640534650970949, "min": 5.4881536469911225, "max": 5.691328318964224, "mean": 5.589740982977673, "std": 0.1015873359865509, "repeat": 2, "number": 1}]}, "blocked.ResampleDistributedSuite.time_blocked": {"result": [0.7375041579944082], "stats": [{"ci_99": [0.7164551750174724, 0.7817067170399241], "q_25": 0.7302471015136689, "q_75": 0.7701529359910637, "min": 0.7164551750174724, "max": 0.7817067170399241, "mean": 0.7466523497569142, "std": 0.022985157899448, "repeat": 8, "number": 1}]}, "blocked.ResampleDistributedSuite.time_xarray": {"result": [3.9142326589790173], "stats": [{"ci_99": [3.2256148661860538, 4.618205581148958], "q_25": 3.844144533999497, "q_75": 3.9958371309912764, "min": 3.774056409019977, "max": 4.077441603003535, "mean": 3.92191022366751, "std": 0.12397540813072529, "repeat": 3, "number": 1}]}, "blocked.ResampleSuite.time_blocked": {"result": [0.10669864900410175], "stats": [{"ci_99": [0.0995364929549396, 0.11599053599638864], "q_25": 0.10300001520954538, "q_75": 0.11077374576416332, "min": 0.0995364929549396, "max": 0.11599053599638864, "mean": 0.10732407619361765, "std": 0.005377392899632661, "repeat": 10, "number": 1}]}, "blocked.ResampleSuite.time_xarray": {"result": 
[3.5062642130069435], "stats": [{"ci_99": [3.107193192500196, 4.023780929473656], "q_25": 3.384721558948513, "q_75": 3.797274918993935, "min": 3.290524979995098, "max": 3.848649633990135, "mean": 3.565487060986925, "std": 0.2216728726311324, "repeat": 5, "number": 1}]}, "event.EventDistributedSuite.time_find_event": {"result": [2.8345289284770843], "stats": [{"ci_99": [2.7040406080952906, 3.0305031479044757], "q_25": 2.82742747523298, "q_75": 2.874373331243987, "min": 2.8163702620076947, "max": 2.9836593930376694, "mean": 2.867271877999883, "std": 0.0676471800575939, "repeat": 4, "number": 1}]}, "event.EventSuite.time_find_event": {"result": [5.249316045985324], "stats": [{"ci_99": [-0.6839793029648717, 11.182611394935515], "q_25": 5.189983092495822, "q_75": 5.308648999474826, "min": 5.13065013900632, "max": 5.367981952964328, "mean": 5.249316045985324, "std": 0.11866590697900392, "repeat": 2, "number": 1}]}}, "params": {"arch": "x86_64", "cpu": "Intel(R) Xeon(R) Platinum 8268 CPU @ 2.90GHz", "machine": "gadi", "num_cpu": "48", "os": "Linux 4.18.0-240.1.1.el8.nci.x86_64", "ram": "262432756", "python": "", "cfunits": "", "dask": "", "iris": "", "mule": "", "netcdf4": "", "pandas": "", "python-graphviz": "", "scipy": "", "sparse": "", "tqdm": "", "typing_extensions": "", "xarray": ""}, "requirements": {"cfunits": "", "dask": "", "iris": "", "mule": "", "netcdf4": "", "pandas": "", "python-graphviz": "", "scipy": "", "sparse": "", "tqdm": "", "typing_extensions": "", "xarray": ""}, "commit_hash": "9bb26ba608bd54335632cc953f0b232a8c94b269", "date": 1610590703000, "env_name": "conda-py-cfunits-dask-iris-mule-netcdf4-pandas-python-graphviz-scipy-sparse-tqdm-typing_extensions-xarray", "python": "", "profiles": {}, "started_at": {"blocked.GroupbyDistributedSuite.time_blocked_dayofyear": 1610597133857, "blocked.GroupbyDistributedSuite.time_blocked_monthday": 1610597148017, "blocked.GroupbyDistributedSuite.time_xarray_dayofyear": 1610597160909, 
"blocked.GroupbySuite.time_blocked_dayofyear": 1610597179413, "blocked.GroupbySuite.time_blocked_monthday": 1610597183838, "blocked.GroupbySuite.time_xarray_dayofyear": 1610597186933, "blocked.ResampleDistributedSuite.time_blocked": 1610597198225, "blocked.ResampleDistributedSuite.time_xarray": 1610597213684, "blocked.ResampleSuite.time_blocked": 1610597232340, "blocked.ResampleSuite.time_xarray": 1610597233172, "event.EventDistributedSuite.time_find_event": 1610597246864, "event.EventSuite.time_find_event": 1610597263062}, "ended_at": {"blocked.GroupbyDistributedSuite.time_blocked_dayofyear": 1610597148017, "blocked.GroupbyDistributedSuite.time_blocked_monthday": 1610597160909, "blocked.GroupbyDistributedSuite.time_xarray_dayofyear": 1610597179413, "blocked.GroupbySuite.time_blocked_dayofyear": 1610597183837, "blocked.GroupbySuite.time_blocked_monthday": 1610597186932, "blocked.GroupbySuite.time_xarray_dayofyear": 1610597198224, "blocked.ResampleDistributedSuite.time_blocked": 1610597213684, "blocked.ResampleDistributedSuite.time_xarray": 1610597232340, "blocked.ResampleSuite.time_blocked": 1610597233171, "blocked.ResampleSuite.time_xarray": 1610597246864, "event.EventDistributedSuite.time_find_event": 1610597263061, "event.EventSuite.time_find_event": 1610597273601}, "benchmark_version": {"blocked.GroupbyDistributedSuite.time_blocked_dayofyear": "b9f0bb55bbc47ee06d3b18d1a262ecb508482a72c15090ec33ce28fc8b66d564", "blocked.GroupbyDistributedSuite.time_blocked_monthday": "c8273bf5b34f68f8d0748f20122fbeedba81d415aa9d87794aa2b6869e6408d0", "blocked.GroupbyDistributedSuite.time_xarray_dayofyear": "d92aafd6c72af2cba3a3ab99a9e2ef6a439baf27470e8cf68b450951356cb45b", "blocked.GroupbySuite.time_blocked_dayofyear": "01d3740b4f5916532dc07a5d1aec3619fd38128f350845bcc1d9d15cd4d3b1dd", "blocked.GroupbySuite.time_blocked_monthday": "ea1ecf7949f8513fba397204403a348b9efc7ccd3753e255e48e9e64689684d2", "blocked.GroupbySuite.time_xarray_dayofyear": 
"c5d3ef9404ad60cb7e3b28de3c565c02f7a75c1041aab3ccdeba1cc6c7618e7b", "blocked.ResampleDistributedSuite.time_blocked": "d29f9c2d6cde111261f22e51c557062c5068402c190cf9dcec965b378369d37f", "blocked.ResampleDistributedSuite.time_xarray": "d8d10101f16199a6d72ef005b883a26202f2f8a1851c0042e59041d01aca66eb", "blocked.ResampleSuite.time_blocked": "62b1f41ff720dc40ea68f172077bed6172fe97cfcf57eace07809d4845c85a52", "blocked.ResampleSuite.time_xarray": "364ac7c62ba036563517fdb59f62d4ce79f49fe8d6476f286494b9671c236610", "event.EventDistributedSuite.time_find_event": "c5f98ef36a9e8d6fdefb8f5a70d489bad9a34b78345c40b8a9bb628a0bb0d04e", "event.EventSuite.time_find_event": "a231d487f53daed34d1f991be54d797f3e1f15a79e7d0a811686326b16fc028e"}, "version": 1} -------------------------------------------------------------------------------- /.asv/results/gadi/afde3022-conda-py-cfunits-dask-iris-mule-netcdf4-pandas-python-graphviz-scipy-sparse-tqdm-typing_extensions-xarray.json: -------------------------------------------------------------------------------- 1 | {"results": {"blocked.GroupbyDistributedSuite.time_blocked_dayofyear": {"result": [1.1509175270039123], "stats": [{"ci_99": [1.1091802892239278, 1.1914958144573984], "q_25": 1.1316034759947797, "q_75": 1.1717174780205823, "min": 1.116110639006365, "max": 1.1802663640119135, "mean": 1.150338051840663, "std": 0.023944795595527067, "repeat": 6, "number": 1}]}, "blocked.GroupbyDistributedSuite.time_blocked_monthday": {"result": [0.9558859354874585], "stats": [{"ci_99": [0.9307885231942469, 0.9816420751480788], "q_25": 0.9457703082443913, "q_75": 0.9688418202567846, "min": 0.9330412150011398, "max": 0.976919828040991, "mean": 0.9562152991711628, "std": 0.015389625800533236, "repeat": 6, "number": 1}]}, "blocked.GroupbyDistributedSuite.time_xarray_dayofyear": {"result": [6.81930769601604], "stats": [{"ci_99": [-3.7552282538381405, 17.393843645870213], "q_25": 6.713562336517498, "q_75": 6.9250530555145815, "min": 6.607816977018956, "max": 
7.030798415013123, "mean": 6.81930769601604, "std": 0.2114907189970836, "repeat": 2, "number": 1}]}, "blocked.GroupbySuite.time_blocked_dayofyear": {"result": [0.6373314684897196], "stats": [{"ci_99": [0.5136100249947049, 0.7232162920408882], "q_25": 0.5712519564985996, "q_75": 0.6889380442589754, "min": 0.5136100249947049, "max": 0.7232162920408882, "mean": 0.6278369132021908, "std": 0.07181448997153272, "repeat": 10, "number": 1}]}, "blocked.GroupbySuite.time_blocked_monthday": {"result": [0.4298640069901012], "stats": [{"ci_99": [0.3498805650160648, 0.5387999369995669], "q_25": 0.4056735927442787, "q_75": 0.47653796352096833, "min": 0.3498805650160648, "max": 0.5387999369995669, "mean": 0.44111910730134696, "std": 0.053943474141526275, "repeat": 10, "number": 1}]}, "blocked.GroupbySuite.time_xarray_dayofyear": {"result": [5.840779834979912], "stats": [{"ci_99": [-2.639772313850699, 14.321331983810516], "q_25": 5.755974313491606, "q_75": 5.925585356468218, "min": 5.6711687920033, "max": 6.0103908779565245, "mean": 5.840779834979912, "std": 0.16961104297661223, "repeat": 2, "number": 1}]}, "blocked.ResampleDistributedSuite.time_blocked": {"result": [0.7298302005219739], "stats": [{"ci_99": [0.7019174009910785, 0.7760752750327811], "q_25": 0.7142286002635956, "q_75": 0.7427171197341522, "min": 0.7019174009910785, "max": 0.7760752750327811, "mean": 0.7310069683808251, "std": 0.02280930852873368, "repeat": 8, "number": 1}]}, "blocked.ResampleDistributedSuite.time_xarray": {"result": [3.9728336649714038], "stats": [{"ci_99": [3.581292479711498, 4.333950042312124], "q_25": 3.920644091500435, "q_75": 4.0022046325029805, "min": 3.8684545180294663, "max": 4.031575600034557, "mean": 3.957621261011809, "std": 0.06745707165449903, "repeat": 3, "number": 1}]}, "blocked.ResampleSuite.time_blocked": {"result": [0.11563288999604993], "stats": [{"ci_99": [0.11120907100848854, 0.12500936700962484], "q_25": 0.11303503178351093, "q_75": 0.11879549350123852, "min": 
0.11120907100848854, "max": 0.12500936700962484, "mean": 0.1162677597021684, "std": 0.0042017680860626065, "repeat": 10, "number": 1}]}, "blocked.ResampleSuite.time_xarray": {"result": [3.708070858469], "stats": [{"ci_99": [3.163845458853917, 4.24287367111522], "q_25": 3.5343664529791567, "q_75": 3.87706397047441, "min": 3.4917387299938127, "max": 3.905557813006453, "mean": 3.7033595649845665, "std": 0.1847899597328669, "repeat": 4, "number": 1}]}, "event.EventDistributedSuite.time_find_event": {"result": [10.808087107987376], "stats": [{"ci_99": [7.763569157075835, 13.852605058898913], "q_25": 10.77764192847826, "q_75": 10.838532287496491, "min": 10.747196748969145, "max": 10.868977467005607, "mean": 10.808087107987376, "std": 0.06089035901823081, "repeat": 2, "number": 1}]}, "event.EventSuite.time_find_event": {"result": [3.1751911519968417], "stats": [{"ci_99": [3.1228488392959584, 3.28433184500318], "q_25": 3.1708559979888378, "q_75": 3.1984935044893064, "min": 3.1311651630094275, "max": 3.28433184500318, "mean": 3.1902585356632094, "std": 0.04733671580896524, "repeat": 6, "number": 1}]}}, "params": {"arch": "x86_64", "cpu": "Intel(R) Xeon(R) Platinum 8268 CPU @ 2.90GHz", "machine": "gadi", "num_cpu": "48", "os": "Linux 4.18.0-240.1.1.el8.nci.x86_64", "ram": "262432756", "python": "", "cfunits": "", "dask": "", "iris": "", "mule": "", "netcdf4": "", "pandas": "", "python-graphviz": "", "scipy": "", "sparse": "", "tqdm": "", "typing_extensions": "", "xarray": ""}, "requirements": {"cfunits": "", "dask": "", "iris": "", "mule": "", "netcdf4": "", "pandas": "", "python-graphviz": "", "scipy": "", "sparse": "", "tqdm": "", "typing_extensions": "", "xarray": ""}, "commit_hash": "afde30224b467f2b24f12dbbd580194a4155f334", "date": 1604277864000, "env_name": "conda-py-cfunits-dask-iris-mule-netcdf4-pandas-python-graphviz-scipy-sparse-tqdm-typing_extensions-xarray", "python": "", "profiles": {}, "started_at": {"blocked.GroupbyDistributedSuite.time_blocked_dayofyear": 
1610579011781, "blocked.GroupbyDistributedSuite.time_blocked_monthday": 1610579026043, "blocked.GroupbyDistributedSuite.time_xarray_dayofyear": 1610579039195, "blocked.GroupbySuite.time_blocked_dayofyear": 1610579057350, "blocked.GroupbySuite.time_blocked_monthday": 1610579061775, "blocked.GroupbySuite.time_xarray_dayofyear": 1610579065019, "blocked.ResampleDistributedSuite.time_blocked": 1610579077277, "blocked.ResampleDistributedSuite.time_xarray": 1610579092090, "blocked.ResampleSuite.time_blocked": 1610579110904, "blocked.ResampleSuite.time_xarray": 1610579111855, "event.EventDistributedSuite.time_find_event": 1610579123647, "event.EventSuite.time_find_event": 1610579150932}, "ended_at": {"blocked.GroupbyDistributedSuite.time_blocked_dayofyear": 1610579026042, "blocked.GroupbyDistributedSuite.time_blocked_monthday": 1610579039195, "blocked.GroupbyDistributedSuite.time_xarray_dayofyear": 1610579057350, "blocked.GroupbySuite.time_blocked_dayofyear": 1610579061775, "blocked.GroupbySuite.time_blocked_monthday": 1610579065018, "blocked.GroupbySuite.time_xarray_dayofyear": 1610579077276, "blocked.ResampleDistributedSuite.time_blocked": 1610579092090, "blocked.ResampleDistributedSuite.time_xarray": 1610579110903, "blocked.ResampleSuite.time_blocked": 1610579111854, "blocked.ResampleSuite.time_xarray": 1610579123647, "event.EventDistributedSuite.time_find_event": 1610579150932, "event.EventSuite.time_find_event": 1610579163777}, "benchmark_version": {"blocked.GroupbyDistributedSuite.time_blocked_dayofyear": "b9f0bb55bbc47ee06d3b18d1a262ecb508482a72c15090ec33ce28fc8b66d564", "blocked.GroupbyDistributedSuite.time_blocked_monthday": "c8273bf5b34f68f8d0748f20122fbeedba81d415aa9d87794aa2b6869e6408d0", "blocked.GroupbyDistributedSuite.time_xarray_dayofyear": "d92aafd6c72af2cba3a3ab99a9e2ef6a439baf27470e8cf68b450951356cb45b", "blocked.GroupbySuite.time_blocked_dayofyear": "01d3740b4f5916532dc07a5d1aec3619fd38128f350845bcc1d9d15cd4d3b1dd", 
"blocked.GroupbySuite.time_blocked_monthday": "ea1ecf7949f8513fba397204403a348b9efc7ccd3753e255e48e9e64689684d2", "blocked.GroupbySuite.time_xarray_dayofyear": "c5d3ef9404ad60cb7e3b28de3c565c02f7a75c1041aab3ccdeba1cc6c7618e7b", "blocked.ResampleDistributedSuite.time_blocked": "d29f9c2d6cde111261f22e51c557062c5068402c190cf9dcec965b378369d37f", "blocked.ResampleDistributedSuite.time_xarray": "d8d10101f16199a6d72ef005b883a26202f2f8a1851c0042e59041d01aca66eb", "blocked.ResampleSuite.time_blocked": "62b1f41ff720dc40ea68f172077bed6172fe97cfcf57eace07809d4845c85a52", "blocked.ResampleSuite.time_xarray": "364ac7c62ba036563517fdb59f62d4ce79f49fe8d6476f286494b9671c236610", "event.EventDistributedSuite.time_find_event": "c5f98ef36a9e8d6fdefb8f5a70d489bad9a34b78345c40b8a9bb628a0bb0d04e", "event.EventSuite.time_find_event": "a231d487f53daed34d1f991be54d797f3e1f15a79e7d0a811686326b16fc028e"}, "version": 1} -------------------------------------------------------------------------------- /.asv/results/Freya/9bb26ba6-conda-py-cfunits-dask-iris-mule-netcdf4-pandas-python-graphviz-scipy-sparse-tqdm-typing_extensions-xarray.json: -------------------------------------------------------------------------------- 1 | {"results": {"blocked.GroupbyDistributedSuite.time_blocked_dayofyear": {"result": [1.1829518500017002], "stats": [{"ci_99": [1.10608699418263, 1.2656521058225372], "q_25": 1.1449280499946326, "q_75": 1.2242101000156254, "min": 1.1247740999679081, "max": 1.2543232000316493, "mean": 1.1858695500025835, "std": 0.04756067990181208, "repeat": 6, "number": 1}]}, "blocked.GroupbyDistributedSuite.time_blocked_monthday": {"result": [0.963999400002649], "stats": [{"ci_99": [0.9384688841368306, 0.9830672491967444], "q_25": 0.9501163249951787, "q_75": 0.9725717999972403, "min": 0.9404900000081398, "max": 0.9752016999991611, "mean": 0.9607680666667875, "std": 0.013211613632431533, "repeat": 6, "number": 1}]}, "blocked.GroupbyDistributedSuite.time_xarray_dayofyear": {"result": 
[6.495260349998716], "stats": [{"ci_99": [0.5166628502192907, 12.473857849778136], "q_25": 6.435474375000922, "q_75": 6.5550463249965105, "min": 6.375688400003128, "max": 6.614832299994305, "mean": 6.495260349998716, "std": 0.11957194999558851, "repeat": 2, "number": 1}]}, "blocked.GroupbySuite.time_blocked_dayofyear": {"result": [0.2849782000412233], "stats": [{"ci_99": [0.28187160001834854, 0.31035079999128357], "q_25": 0.28309060000174213, "q_75": 0.29322445002617314, "min": 0.28187160001834854, "max": 0.31035079999128357, "mean": 0.2900812000152655, "std": 0.009879281206378925, "repeat": 10, "number": 1}]}, "blocked.GroupbySuite.time_blocked_monthday": {"result": [0.2233245499955956], "stats": [{"ci_99": [0.22030839999206364, 0.2458561999956146], "q_25": 0.2222478749899892, "q_75": 0.23233845001959708, "min": 0.22030839999206364, "max": 0.2458561999956146, "mean": 0.22815064000315033, "std": 0.008424178789458558, "repeat": 10, "number": 1}]}, "blocked.GroupbySuite.time_xarray_dayofyear": {"result": [2.716725950012915], "stats": [{"ci_99": [2.682379848717006, 2.760701599996537], "q_25": 2.6998895999859087, "q_75": 2.72974457500095, "min": 2.6933827999746427, "max": 2.760701599996537, "mean": 2.719316316661813, "std": 0.022994609507452243, "repeat": 6, "number": 1}]}, "blocked.ResampleDistributedSuite.time_blocked": {"result": [0.7067086999886669], "stats": [{"ci_99": [0.667921899992507, 0.7400982000399381], "q_25": 0.692340024994337, "q_75": 0.7253980750101618, "min": 0.667921899992507, "max": 0.7400982000399381, "mean": 0.7076173624955118, "std": 0.02341736232081865, "repeat": 8, "number": 1}]}, "blocked.ResampleDistributedSuite.time_xarray": {"result": [3.3373256499762647], "stats": [{"ci_99": [3.258693150768522, 3.415958149184007], "q_25": 3.3365393249841873, "q_75": 3.338111974968342, "min": 3.33575299999211, "max": 3.3388982999604195, "mean": 3.3373256499762647, "std": 0.0015726499841548502, "repeat": 2, "number": 1}]}, "blocked.ResampleSuite.time_blocked": 
{"result": [0.12802800003555603], "stats": [{"ci_99": [0.10802400001557544, 0.14807400002609938], "q_25": 0.1261363999947207, "q_75": 0.12980227499792818, "min": 0.10802400001557544, "max": 0.14807400002609938, "mean": 0.12658126000897027, "std": 0.010497339836524715, "repeat": 10, "number": 1}]}, "blocked.ResampleSuite.time_xarray": {"result": [1.7487288500124123], "stats": [{"ci_99": [1.7398299000342377, 1.7749236999661662], "q_25": 1.7425019499933114, "q_75": 1.760283950032317, "min": 1.7398299000342377, "max": 1.7749236999661662, "mean": 1.7519610800081864, "std": 0.01140514786112084, "repeat": 10, "number": 1}]}, "event.EventDistributedSuite.time_find_event": {"result": [2.0268603999866173], "stats": [{"ci_99": [1.9576754376103636, 2.099146862376016], "q_25": 2.009281424994697, "q_75": 2.04599012498511, "min": 1.997750399983488, "max": 2.062173400016036, "mean": 2.0284111499931896, "std": 0.024809764713007606, "repeat": 4, "number": 1}]}, "event.EventSuite.time_find_event": {"result": [1.4560975000204053], "stats": [{"ci_99": [1.451076199999079, 1.4654352000216022], "q_25": 1.4514518250362016, "q_75": 1.4594796749879606, "min": 1.451076199999079, "max": 1.4654352000216022, "mean": 1.4562070100102573, "std": 0.0046578459102627045, "repeat": 10, "number": 1}]}}, "params": {"arch": "x86_64", "cpu": "AMD Ryzen 5 3600X 6-Core Processor", "machine": "Freya", "num_cpu": "12", "os": "Linux 4.4.0-19041-Microsoft", "ram": "16726988", "python": "", "cfunits": "", "dask": "", "iris": "", "mule": "", "netcdf4": "", "pandas": "", "python-graphviz": "", "scipy": "", "sparse": "", "tqdm": "", "typing_extensions": "", "xarray": ""}, "requirements": {"cfunits": "", "dask": "", "iris": "", "mule": "", "netcdf4": "", "pandas": "", "python-graphviz": "", "scipy": "", "sparse": "", "tqdm": "", "typing_extensions": "", "xarray": ""}, "commit_hash": "9bb26ba608bd54335632cc953f0b232a8c94b269", "date": 1610590703000, "env_name": 
"conda-py-cfunits-dask-iris-mule-netcdf4-pandas-python-graphviz-scipy-sparse-tqdm-typing_extensions-xarray", "python": "", "profiles": {}, "started_at": {"blocked.GroupbyDistributedSuite.time_blocked_dayofyear": 1610596736892, "blocked.GroupbyDistributedSuite.time_blocked_monthday": 1610596750853, "blocked.GroupbyDistributedSuite.time_xarray_dayofyear": 1610596763662, "blocked.GroupbySuite.time_blocked_dayofyear": 1610596782581, "blocked.GroupbySuite.time_blocked_monthday": 1610596784665, "blocked.GroupbySuite.time_xarray_dayofyear": 1610596786410, "blocked.ResampleDistributedSuite.time_blocked": 1610596797552, "blocked.ResampleDistributedSuite.time_xarray": 1610596811259, "blocked.ResampleSuite.time_blocked": 1610596824621, "blocked.ResampleSuite.time_xarray": 1610596825548, "event.EventDistributedSuite.time_find_event": 1610596836391, "event.EventSuite.time_find_event": 1610596849506}, "ended_at": {"blocked.GroupbyDistributedSuite.time_blocked_dayofyear": 1610596750852, "blocked.GroupbyDistributedSuite.time_blocked_monthday": 1610596763662, "blocked.GroupbyDistributedSuite.time_xarray_dayofyear": 1610596782581, "blocked.GroupbySuite.time_blocked_dayofyear": 1610596784664, "blocked.GroupbySuite.time_blocked_monthday": 1610596786409, "blocked.GroupbySuite.time_xarray_dayofyear": 1610596797551, "blocked.ResampleDistributedSuite.time_blocked": 1610596811259, "blocked.ResampleDistributedSuite.time_xarray": 1610596824621, "blocked.ResampleSuite.time_blocked": 1610596825548, "blocked.ResampleSuite.time_xarray": 1610596836391, "event.EventDistributedSuite.time_find_event": 1610596849505, "event.EventSuite.time_find_event": 1610596858443}, "benchmark_version": {"blocked.GroupbyDistributedSuite.time_blocked_dayofyear": "b9f0bb55bbc47ee06d3b18d1a262ecb508482a72c15090ec33ce28fc8b66d564", "blocked.GroupbyDistributedSuite.time_blocked_monthday": "c8273bf5b34f68f8d0748f20122fbeedba81d415aa9d87794aa2b6869e6408d0", "blocked.GroupbyDistributedSuite.time_xarray_dayofyear": 
"d92aafd6c72af2cba3a3ab99a9e2ef6a439baf27470e8cf68b450951356cb45b", "blocked.GroupbySuite.time_blocked_dayofyear": "01d3740b4f5916532dc07a5d1aec3619fd38128f350845bcc1d9d15cd4d3b1dd", "blocked.GroupbySuite.time_blocked_monthday": "ea1ecf7949f8513fba397204403a348b9efc7ccd3753e255e48e9e64689684d2", "blocked.GroupbySuite.time_xarray_dayofyear": "c5d3ef9404ad60cb7e3b28de3c565c02f7a75c1041aab3ccdeba1cc6c7618e7b", "blocked.ResampleDistributedSuite.time_blocked": "d29f9c2d6cde111261f22e51c557062c5068402c190cf9dcec965b378369d37f", "blocked.ResampleDistributedSuite.time_xarray": "d8d10101f16199a6d72ef005b883a26202f2f8a1851c0042e59041d01aca66eb", "blocked.ResampleSuite.time_blocked": "62b1f41ff720dc40ea68f172077bed6172fe97cfcf57eace07809d4845c85a52", "blocked.ResampleSuite.time_xarray": "364ac7c62ba036563517fdb59f62d4ce79f49fe8d6476f286494b9671c236610", "event.EventDistributedSuite.time_find_event": "c5f98ef36a9e8d6fdefb8f5a70d489bad9a34b78345c40b8a9bb628a0bb0d04e", "event.EventSuite.time_find_event": "a231d487f53daed34d1f991be54d797f3e1f15a79e7d0a811686326b16fc028e"}, "version": 1} -------------------------------------------------------------------------------- /src/climtas/grid.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # Copyright 2018 ARC Centre of Excellence for Climate Extremes 3 | # author: Scott Wales 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 
16 | from __future__ import print_function 17 | from abc import ABCMeta, abstractmethod 18 | 19 | import xarray 20 | import numpy 21 | import iris 22 | import mule 23 | import os 24 | 25 | 26 | """ 27 | Different grid types 28 | """ 29 | 30 | 31 | def identify_grid(dataset): 32 | """ 33 | Identify the grid used by a Dataset. Returns the appropriate :class:`Grid` 34 | object 35 | 36 | Args: 37 | dataset (xarray.DataArray): Input dataset 38 | 39 | Returns: 40 | Grid: Grid for that dataset 41 | """ 42 | 43 | if isinstance(dataset, Grid): 44 | return dataset 45 | 46 | try: 47 | if dataset.attrs["conventions"] == "SCRIP": 48 | return ScripGrid(dataset) 49 | except KeyError: 50 | pass 51 | except AttributeError: 52 | pass 53 | 54 | try: 55 | if dataset.lon.ndim == 1 and dataset.lat.ndim == 1: 56 | return LonLatGrid(lons=dataset.lon, lats=dataset.lat) 57 | except AttributeError: 58 | pass 59 | 60 | raise NotImplementedError 61 | 62 | 63 | class Grid(object, metaclass=ABCMeta): 64 | """Abstract base class for grids""" 65 | 66 | @abstractmethod 67 | def to_cdo_grid(self, outfile): 68 | """ 69 | Write the grid to a format readable by CDO's regridder (either text or 70 | SCRIP format) 71 | 72 | Args: 73 | outfile: File-like object to write to 74 | """ 75 | 76 | @abstractmethod 77 | def to_netcdf(self, outfile): 78 | """ 79 | Create a netCDF file using the grid 80 | 81 | Args: 82 | outfile: Path or File-like object to write to 83 | 84 | Note that if `outfile` is a file object it will be closed 85 | automatically. 
86 | """ 87 | 88 | def to_scrip(self): 89 | """ 90 | Create a SCRIP file using the grid 91 | 92 | Returns: 93 | xarray.Dataset containing SCRIP grid description 94 | """ 95 | raise NotImplementedError 96 | 97 | 98 | class LonLatGrid(Grid): 99 | """ 100 | A cartesian grid, with lats and lons one dimensional arrays 101 | """ 102 | 103 | def __init__(self, lats, lons, mask=None): 104 | """ 105 | Args: 106 | lats (numpy.array): Grid latitudes 107 | lons (numpy.array): Grid longitude 108 | """ 109 | 110 | self.lats = lats 111 | self.lons = lons 112 | self.mask = mask 113 | 114 | if self.mask is None: 115 | self.mask = numpy.ones((self.lons.size, self.lats.size)) 116 | 117 | if self.lats.ndim != 1 or self.lons.ndim != 1: 118 | raise Exception("Lons and Lats must be 1D") 119 | 120 | def to_cdo_grid(self, outfile): 121 | outfile.write("gridtype = lonlat\n".encode()) 122 | 123 | outfile.write(("xsize = %d\n" % len(self.lons)).encode()) 124 | outfile.write( 125 | ("xvals = %s\n" % (",".join(["%f" % x for x in self.lons]))).encode() 126 | ) 127 | 128 | outfile.write(("ysize = %d\n" % len(self.lats)).encode()) 129 | outfile.write( 130 | ("yvals = %s\n" % (",".join(["%f" % x for x in self.lats]))).encode() 131 | ) 132 | 133 | outfile.flush() 134 | 135 | def to_netcdf(self, outfile): 136 | ds = xarray.DataArray( 137 | data=numpy.zeros((len(self.lats), len(self.lons))), 138 | coords=[("lat", self.lats.data), ("lon", self.lons.data)], 139 | ) 140 | ds.lat.attrs["units"] = "degrees_north" 141 | ds.lon.attrs["units"] = "degrees_east" 142 | ds.to_netcdf(outfile) 143 | 144 | def to_scrip(self): 145 | lat = self.lats 146 | lon = self.lons % 360 147 | 148 | top = (lat.shift(lat=-1) + lat) / 2.0 149 | top[-1] = 90 150 | 151 | bot = (lat.shift(lat=1) + lat) / 2.0 152 | bot[0] = -90 153 | 154 | left = (lon - (lon - lon.roll(lon=1).values) % 360) / 2.0 155 | right = lon + ((lon.roll(lon=-1).values - lon) % 360) / 2.0 156 | 157 | center_lon, center_lat = numpy.meshgrid(lon, lat) 158 | 159 | 
corner_lon0, corner_lat0 = numpy.meshgrid(left, bot) 160 | corner_lon1, corner_lat1 = numpy.meshgrid(right, bot) 161 | corner_lon2, corner_lat2 = numpy.meshgrid(right, top) 162 | corner_lon3, corner_lat3 = numpy.meshgrid(left, top) 163 | 164 | corner_lat = numpy.array( 165 | [ 166 | x.reshape(-1) 167 | for x in [corner_lat0, corner_lat1, corner_lat2, corner_lat3] 168 | ] 169 | ) 170 | corner_lon = numpy.array( 171 | [ 172 | x.reshape(-1) 173 | for x in [corner_lon0, corner_lon1, corner_lon2, corner_lon3] 174 | ] 175 | ) 176 | 177 | scrip = xarray.Dataset( 178 | coords={ 179 | "grid_dims": ( 180 | ["grid_rank"], 181 | numpy.array([lon.size, lat.size], dtype="i4"), 182 | ), 183 | "grid_center_lat": (["grid_size"], center_lat.reshape(-1)), 184 | "grid_center_lon": (["grid_size"], center_lon.reshape(-1)), 185 | "grid_imask": (["grid_size"], self.mask.reshape(-1).astype("i4")), 186 | "grid_corner_lat": (["grid_size", "grid_corners"], corner_lat.T), 187 | "grid_corner_lon": (["grid_size", "grid_corners"], corner_lon.T), 188 | } 189 | ) 190 | 191 | scrip.grid_center_lat.attrs["units"] = "degrees" 192 | scrip.grid_center_lon.attrs["units"] = "degrees" 193 | scrip.grid_corner_lat.attrs["units"] = "degrees" 194 | scrip.grid_corner_lon.attrs["units"] = "degrees" 195 | 196 | return scrip 197 | 198 | 199 | class UMGrid(LonLatGrid): 200 | @classmethod 201 | def from_mask(cls, mask_path): 202 | umfile = mule.load_umfile(mask_path) 203 | mask_field = None 204 | for f in umfile.fields: 205 | if f.lbuser4 == 30: 206 | mask_field = f 207 | break 208 | 209 | mask = xarray.DataArray( 210 | mask_field.get_data(), dims=["lat", "lon"], name=os.path.basename(mask_path) 211 | ) 212 | mask.coords["lon"] = ( 213 | mask_field.bzx + (1 + numpy.arange(mask.shape[1])) * mask_field.bdx 214 | ) 215 | mask.coords["lat"] = ( 216 | mask_field.bzy + (1 + numpy.arange(mask.shape[0])) * mask_field.bdy 217 | ) 218 | 219 | mask = mask.where(mask == 0) 220 | 221 | mask.lon.attrs["standard_name"] = 
"longitude" 222 | mask.lat.attrs["standard_name"] = "latitude" 223 | mask.lon.attrs["units"] = "degrees_east" 224 | mask.lat.attrs["units"] = "degrees_north" 225 | 226 | return mask 227 | 228 | 229 | class ScripGrid(Grid): 230 | def __init__(self, grid): 231 | self._grid = grid 232 | 233 | def to_cdo_grid(self, outfile): 234 | self._grid.to_netcdf(outfile) 235 | 236 | def to_netcdf(self, outfile): 237 | self._grid.to_netcdf(outfile) 238 | 239 | def to_scrip(self): 240 | return self._grid 241 | -------------------------------------------------------------------------------- /src/climtas/io.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # Copyright 2019 Scott Wales 3 | # author: Scott Wales 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 
16 | 17 | """Functions for reading and saving data 18 | 19 | These functions try to use sensible chunking both for dask objects read and 20 | netcdf files written 21 | """ 22 | 23 | import xarray 24 | import dask 25 | import pandas 26 | import typing as T 27 | import pathlib 28 | import logging 29 | 30 | from .helpers import optimized_dask_get, throttle_futures 31 | 32 | 33 | def _ds_encoding(ds, complevel): 34 | # Setup compression and chunking 35 | encoding = {} 36 | logging.basicConfig(level=logging.DEBUG) 37 | for k, v in ds.data_vars.items(): 38 | 39 | # Get original encoding 40 | encoding[k] = v.encoding 41 | 42 | # Update encoding to enable compression 43 | encoding[k].update( 44 | { 45 | "zlib": True, 46 | "shuffle": True, 47 | "complevel": complevel, 48 | "chunksizes": getattr(v.data, "chunksize", None), 49 | } 50 | ) 51 | 52 | # Clean up encoding 53 | encoding[k] = { 54 | kk: vv 55 | for kk, vv in encoding[k].items() 56 | if kk 57 | in [ 58 | "fletcher32", 59 | "chunksizes", 60 | "complevel", 61 | "least_significant_digit", 62 | "shuffle", 63 | "contiguous", 64 | "zlib", 65 | "_FillValue", 66 | "dtype", 67 | ] 68 | } 69 | 70 | # Log removed keys 71 | removed_keys = [kk for kk in v.encoding.keys() if not kk in encoding[k].keys()] 72 | if len(removed_keys) > 0: 73 | logging.debug(f"removed encoding keys for {k}: {removed_keys}") 74 | return encoding 75 | 76 | 77 | def to_netcdf_throttled( 78 | ds: T.Union[xarray.DataArray, xarray.Dataset], 79 | path: T.Union[str, pathlib.Path], 80 | complevel: int = 4, 81 | max_tasks: int = None, 82 | show_progress: bool = True, 83 | ): 84 | """ 85 | Save a DataArray to file by calculating each chunk separately (rather than 86 | submitting the whole Dask graph at once). This may be helpful when chunks 87 | are large, e.g. doing an operation on dayofyear grouping for a long timeseries. 
88 | 89 | Chunks are calculated with at most 'max_tasks' chunks running in parallel - 90 | this defaults to the number of workers in your dask.distributed.Client, or 91 | is 1 if distributed is not being used. 92 | 93 | This is a very basic way to handle backpressure, where data is coming in 94 | faster than it can be processed and so fills up memory. Ideally this will 95 | be fixed in Dask itself, see e.g. 96 | https://github.com/dask/distributed/issues/2602 97 | 98 | In particular, it will only work well if the chunks in the dataset are 99 | independent (e.g. if doing operations over a timeseries for a single 100 | horizontal chunk so the horizontal chunks are isolated). 101 | 102 | Args: 103 | da (:class:`xarray.Dataset` or :class:`xarray.DataArray`): Data to save 104 | path (:class:`str` or :class:`pathlib.Path`): Path to save to 105 | complevel (:class:`int`): NetCDF compression level 106 | max_tasks (:class:`int`): Maximum tasks to run at once (default number of distributed 107 | workers) 108 | show_progress (:class:`bool`): Show a progress bar with estimated completion time 109 | """ 110 | 111 | if isinstance(ds, xarray.DataArray): 112 | ds = ds.to_dataset() 113 | 114 | # Setup compression and chunking 115 | encoding = _ds_encoding(ds, complevel) 116 | 117 | # Prepare storing the data to netcdf, but don't evaluate 118 | f = ds.to_netcdf(str(path), encoding=encoding, compute=False) 119 | 120 | # This is some very low-level dask operations. behind the scenes dask 121 | # stores its objects as a graph of operations and their dependencies. 122 | # We're going to grab a specific operation, 'dask.array.core.store_chunk', 123 | # and run each instance of that operation in a throttled manner, so they 124 | # don't all just get submitted at once and overwhelm memory, at the expense 125 | # of having to do stuff like reading input multiple times rather than just 126 | # once. 
127 | 128 | # We also need to make a new graph, where the tasks that have 'store_chunk' 129 | # as a dependency know that their pre-requisite has been completed. To do 130 | # this we just need to fix up the 'store_chunk' tasks, other tasks that 131 | # 'store_chunk' depends on will be automatically cleaned up when dask 132 | # optimises the graph 133 | 134 | old_graph = f.__dask_graph__() # type: ignore 135 | new_graph = {} # type: ignore 136 | store_keys = [] 137 | 138 | # Pull out the 'store_chunk' operations from the graph and put them in a 139 | # list 140 | for k, v in old_graph.items(): 141 | try: 142 | if v[0] == dask.array.core.store_chunk: 143 | store_keys.append(k) 144 | new_graph[k] = None # Mark the task done in new_graph 145 | continue 146 | except ValueError: 147 | # Found a numpy array or similar, so comparison fails 148 | pass 149 | except IndexError: 150 | pass 151 | new_graph[k] = v 152 | 153 | if show_progress: 154 | from tqdm.auto import tqdm 155 | 156 | store_keys = tqdm(store_keys) 157 | 158 | # Run the 'store_chunk' tasks with 'old_graph' 159 | throttle_futures(old_graph, store_keys, max_tasks=max_tasks) 160 | 161 | # Finalise any remaining operations with 'new_graph' 162 | optimized_dask_get(new_graph, list(f.__dask_layers__())) # type: ignore 163 | 164 | 165 | def to_netcdf_series( 166 | ds: T.Union[xarray.DataArray, xarray.Dataset], 167 | path: T.Union[str, pathlib.Path], 168 | groupby: str, 169 | complevel: int = 4, 170 | ): 171 | """ 172 | Split a dataset into multiple parts, and save each part into its own file 173 | 174 | path should be a :meth:`str.format()`-compatible string. It is formatted 175 | with three arguments: `start` and `end`, which are 176 | :obj:`pandas.Timestamp`, and `group` which is the name of the current 177 | group being output (e.g. the year when using `groupby='time.year'`). 
These 178 | can be used to name the file, e.g.:: 179 | 180 | path_a = 'data_{group}.nc' 181 | path_b = 'data_{start.month}_{end.month}.nc' 182 | path_c = 'data_{start.year:04d}{start.month:02d}{start.day:02d}.nc' 183 | 184 | Note that `start` and `end` are the first and last timestamps of the 185 | group's data, which may not match the boundary start and end dates 186 | 187 | Args: 188 | da (:class:`xarray.Dataset` or :class:`xarray.DataArray`): Data to save 189 | path (:class:`str` or :class:`pathlib.Path`): Path template to save to 190 | groupby (:class:`str`): Grouping, as used by :meth:`xarray.DataArray.groupby` 191 | complevel (:class:`int`): NetCDF compression level 192 | """ 193 | 194 | if isinstance(ds, xarray.DataArray): 195 | ds = ds.to_dataset() 196 | 197 | dim = groupby.split(".")[0] 198 | 199 | encoding = _ds_encoding(ds, complevel) 200 | 201 | for key, part in ds.groupby(groupby): 202 | start = pandas.Timestamp(part[dim].values[0]) 203 | end = pandas.Timestamp(part[dim].values[-1]) 204 | 205 | fpath = str(path).format(start=start, end=end, group=key) 206 | part.to_netcdf(fpath, encoding=encoding) 207 | -------------------------------------------------------------------------------- /asv.conf.json: -------------------------------------------------------------------------------- 1 | { 2 | // The version of the config file format. Do not change, unless 3 | // you know what you are doing. 4 | "version": 1, 5 | // The name of the project being benchmarked 6 | "project": "climtas", 7 | // The project's homepage 8 | "project_url": "http://github.com/coecms/climtas/", 9 | // The URL or local path of the source code repository for the 10 | // project being benchmarked 11 | "repo": ".", 12 | // The Python project's subdirectory in your repo. If missing or 13 | // the empty string, the project is assumed to be located at the root 14 | // of the repository. 
15 | "repo_subdir": "", 16 | // Customizable commands for building, installing, and 17 | // uninstalling the project. See asv.conf.json documentation. 18 | // 19 | // "install_command": ["in-dir={env_dir} python -mpip install {wheel_file}"], 20 | // "uninstall_command": ["return-code=any python -mpip uninstall -y {project}"], 21 | // "build_command": [ 22 | // "python setup.py build", 23 | // "PIP_NO_BUILD_ISOLATION=false python -mpip wheel --no-deps --no-index -w {build_cache_dir} {build_dir}" 24 | // ], 25 | // List of branches to benchmark. If not provided, defaults to "master" 26 | // (for git) or "default" (for mercurial). 27 | "branches": [ 28 | "master" 29 | ], // for git 30 | // "branches": ["default"], // for mercurial 31 | // The DVCS being used. If not set, it will be automatically 32 | // determined from "repo" by looking at the protocol in the URL 33 | // (if remote), or by looking for special directories, such as 34 | // ".git" (if local). 35 | "dvcs": "git", 36 | // The tool to use to create environments. May be "conda", 37 | // "virtualenv" or other value depending on the plugins in use. 38 | // If missing or the empty string, the tool will be automatically 39 | // determined by looking for tools on the PATH environment 40 | // variable. 41 | "environment_type": "conda", 42 | // timeout in seconds for installing any dependencies in environment 43 | // defaults to 10 min 44 | "install_timeout": 600, 45 | // the base URL to show a commit for the project. 46 | "show_commit_url": "http://github.com/coecms/climtas/commit/", 47 | // The Pythons you'd like to test against. If not provided, defaults 48 | // to the current version of Python used to run `asv`. 49 | // "pythons": ["2.7", "3.6"], 50 | // The list of conda channel names to be searched for benchmark 51 | // dependency packages in the specified order 52 | "conda_channels": [ 53 | "conda-forge", 54 | "coecms", 55 | "defaults" 56 | ], 57 | // The matrix of dependencies to test. 
Each key is the name of a 58 | // package (in PyPI) and the values are version numbers. An empty 59 | // list or empty string indicates to just test against the default 60 | // (latest) version. null indicates that the package is to not be 61 | // installed. If the package to be tested is only available from 62 | // PyPi, and the 'environment_type' is conda, then you can preface 63 | // the package name by 'pip+', and the package will be installed via 64 | // pip (with all the conda available packages installed first, 65 | // followed by the pip installed packages). 66 | // 67 | // "matrix": { 68 | // "numpy": ["1.6", "1.7"], 69 | // "six": ["", null], // test with and without six installed 70 | // "pip+emcee": [""], // emcee is only available for install with pip. 71 | // }, 72 | "matrix": { 73 | "python": [ 74 | "" 75 | ], 76 | "dask": [ 77 | "" 78 | ], 79 | "netcdf4": [ 80 | "" 81 | ], 82 | "pandas": [ 83 | "" 84 | ], 85 | "scipy": [ 86 | "" 87 | ], 88 | "tqdm": [ 89 | "" 90 | ], 91 | "xarray": [ 92 | "" 93 | ], 94 | "typing_extensions": [ 95 | "" 96 | ], 97 | "iris": [ 98 | "" 99 | ], 100 | "cfunits": [ 101 | "" 102 | ], 103 | "mule": [ 104 | "" 105 | ], 106 | "sparse": [ 107 | "" 108 | ], 109 | "python-graphviz": [ 110 | "" 111 | ], 112 | }, 113 | // Combinations of libraries/python versions can be excluded/included 114 | // from the set to test. Each entry is a dictionary containing additional 115 | // key-value pairs to include/exclude. 116 | // 117 | // An exclude entry excludes entries where all values match. The 118 | // values are regexps that should match the whole string. 119 | // 120 | // An include entry adds an environment. Only the packages listed 121 | // are installed. The 'python' key is required. The exclude rules 122 | // do not apply to includes. 123 | // 124 | // In addition to package names, the following keys are available: 125 | // 126 | // - python 127 | // Python version, as in the *pythons* variable above. 
128 | // - environment_type 129 | // Environment type, as above. 130 | // - sys_platform 131 | // Platform, as in sys.platform. Possible values for the common 132 | // cases: 'linux2', 'win32', 'cygwin', 'darwin'. 133 | // 134 | // "exclude": [ 135 | // {"python": "3.2", "sys_platform": "win32"}, // skip py3.2 on windows 136 | // {"environment_type": "conda", "six": null}, // don't run without six on conda 137 | // ], 138 | // 139 | // "include": [ 140 | // // additional env for python2.7 141 | // {"python": "2.7", "numpy": "1.8"}, 142 | // // additional env if run on windows+conda 143 | // {"platform": "win32", "environment_type": "conda", "python": "2.7", "libpython": ""}, 144 | // ], 145 | // The directory (relative to the current directory) that benchmarks are 146 | // stored in. If not provided, defaults to "benchmarks" 147 | "benchmark_dir": "benchmarks", 148 | // The directory (relative to the current directory) to cache the Python 149 | // environments in. If not provided, defaults to "env" 150 | "env_dir": ".asv/env", 151 | // The directory (relative to the current directory) that raw benchmark 152 | // results are stored in. If not provided, defaults to "results". 153 | "results_dir": ".asv/results", 154 | // The directory (relative to the current directory) that the html tree 155 | // should be written to. If not provided, defaults to "html". 156 | "html_dir": "doc/_static/asv", 157 | // The number of characters to retain in the commit hashes. 158 | // "hash_length": 8, 159 | // `asv` will cache results of the recent builds in each 160 | // environment, making them faster to install next time. This is 161 | // the number of builds to keep, per environment. 162 | // "build_cache_size": 2, 163 | // The commits after which the regression search in `asv publish` 164 | // should start looking for regressions. 
Dictionary whose keys are 165 | // regexps matching to benchmark names, and values corresponding to 166 | // the commit (exclusive) after which to start looking for 167 | // regressions. The default is to start from the first commit 168 | // with results. If the commit is `null`, regression detection is 169 | // skipped for the matching benchmark. 170 | // 171 | // "regressions_first_commits": { 172 | // "some_benchmark": "352cdf", // Consider regressions only after this commit 173 | // "another_benchmark": null, // Skip regression detection altogether 174 | // }, 175 | // The thresholds for relative change in results, after which `asv 176 | // publish` starts reporting regressions. Dictionary of the same 177 | // form as in ``regressions_first_commits``, with values 178 | // indicating the thresholds. If multiple entries match, the 179 | // maximum is taken. If no entry matches, the default is 5%. 180 | // 181 | // "regressions_thresholds": { 182 | // "some_benchmark": 0.01, // Threshold of 1% 183 | // "another_benchmark": 0.5, // Threshold of 50% 184 | // }, 185 | } 186 | -------------------------------------------------------------------------------- /doc/_static/asv/asv_ui.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | $(document).ready(function() { 4 | function make_panel(nav, heading) { 5 | var panel = $('
'); 6 | nav.append(panel); 7 | var panel_header = $( 8 | '
' + heading + '
'); 9 | panel.append(panel_header); 10 | var panel_body = $('
'); 11 | panel.append(panel_body); 12 | return panel_body; 13 | } 14 | 15 | function make_value_selector_panel(nav, heading, values, setup_callback) { 16 | var panel_body = make_panel(nav, heading); 17 | var vertical = false; 18 | var buttons = $('
'); 20 | 21 | panel_body.append(buttons); 22 | 23 | $.each(values, function (idx, value) { 24 | var button = $( 25 | ''); 26 | setup_callback(idx, value, button); 27 | buttons.append(button); 28 | }); 29 | 30 | return panel_body; 31 | } 32 | 33 | function reflow_value_selector_panels(no_timeout) { 34 | $('.panel').each(function (i, panel_obj) { 35 | var panel = $(panel_obj); 36 | panel.find('.btn-group').each(function (i, buttons_obj) { 37 | var buttons = $(buttons_obj); 38 | var width = 0; 39 | 40 | if (buttons.hasClass('reflow-done')) { 41 | /* already processed */ 42 | return; 43 | } 44 | 45 | $.each(buttons.children(), function(idx, value) { 46 | width += value.scrollWidth; 47 | }); 48 | 49 | var max_width = panel_obj.clientWidth; 50 | 51 | if (width >= max_width) { 52 | buttons.addClass("btn-group-vertical"); 53 | buttons.css("width", "100%"); 54 | buttons.css("max-height", "20ex"); 55 | buttons.css("overflow-y", "auto"); 56 | } 57 | else { 58 | buttons.addClass("btn-group-justified"); 59 | } 60 | 61 | /* The widths can be zero if the UI is not fully layouted yet, 62 | so mark the adjustment complete only if this is not the case */ 63 | if (width > 0 && max_width > 0) { 64 | buttons.addClass("reflow-done"); 65 | } 66 | }); 67 | }); 68 | 69 | if (!no_timeout) { 70 | /* Call again asynchronously, in case the UI was not fully layouted yet */ 71 | setTimeout(function() { $.asv.ui.reflow_value_selector_panels(true); }, 0); 72 | } 73 | } 74 | 75 | function network_error(ajax, status, error) { 76 | $("#error-message").text( 77 | "Error fetching content. " + 78 | "Perhaps web server has gone down."); 79 | $("#error").modal('show'); 80 | } 81 | 82 | function hover_graph(element, graph_url, benchmark_basename, parameter_idx, revisions) { 83 | /* Show the summary graph as a popup */ 84 | var plot_div = $('
'); 85 | plot_div.css('width', '11.8em'); 86 | plot_div.css('height', '7em'); 87 | plot_div.css('border', '2px solid black'); 88 | plot_div.css('background-color', 'white'); 89 | 90 | function update_plot() { 91 | var markings = []; 92 | 93 | if (revisions) { 94 | $.each(revisions, function(i, revs) { 95 | var rev_a = revs[0]; 96 | var rev_b = revs[1]; 97 | 98 | if (rev_a !== null) { 99 | markings.push({ color: '#d00', lineWidth: 2, xaxis: { from: rev_a, to: rev_a }}); 100 | markings.push({ color: "rgba(255,0,0,0.1)", xaxis: { from: rev_a, to: rev_b }}); 101 | } 102 | markings.push({ color: '#d00', lineWidth: 2, xaxis: { from: rev_b, to: rev_b }}); 103 | }); 104 | } 105 | 106 | $.asv.load_graph_data( 107 | graph_url 108 | ).done(function (data) { 109 | var params = $.asv.master_json.benchmarks[benchmark_basename].params; 110 | data = $.asv.filter_graph_data_idx(data, 0, parameter_idx, params); 111 | var options = { 112 | colors: ['#000'], 113 | series: { 114 | lines: { 115 | show: true, 116 | lineWidth: 2 117 | }, 118 | shadowSize: 0 119 | }, 120 | grid: { 121 | borderWidth: 1, 122 | margin: 0, 123 | labelMargin: 0, 124 | axisMargin: 0, 125 | minBorderMargin: 0, 126 | markings: markings, 127 | }, 128 | xaxis: { 129 | ticks: [], 130 | }, 131 | yaxis: { 132 | ticks: [], 133 | min: 0 134 | }, 135 | legend: { 136 | show: false 137 | } 138 | }; 139 | var plot = $.plot(plot_div, [{data: data}], options); 140 | }).fail(function () { 141 | // TODO: Handle failure 142 | }); 143 | 144 | return plot_div; 145 | } 146 | 147 | element.popover({ 148 | placement: 'left auto', 149 | trigger: 'hover', 150 | html: true, 151 | delay: 50, 152 | content: $('
').append(plot_div) 153 | }); 154 | 155 | element.on('show.bs.popover', update_plot); 156 | } 157 | 158 | function hover_summary_graph(element, benchmark_basename) { 159 | /* Show the summary graph as a popup */ 160 | var plot_div = $('
'); 161 | plot_div.css('width', '11.8em'); 162 | plot_div.css('height', '7em'); 163 | plot_div.css('border', '2px solid black'); 164 | plot_div.css('background-color', 'white'); 165 | 166 | function update_plot() { 167 | var markings = []; 168 | 169 | $.asv.load_graph_data( 170 | 'graphs/summary/' + benchmark_basename + '.json' 171 | ).done(function (data) { 172 | var options = { 173 | colors: $.asv.colors, 174 | series: { 175 | lines: { 176 | show: true, 177 | lineWidth: 2 178 | }, 179 | shadowSize: 0 180 | }, 181 | grid: { 182 | borderWidth: 1, 183 | margin: 0, 184 | labelMargin: 0, 185 | axisMargin: 0, 186 | minBorderMargin: 0, 187 | markings: markings, 188 | }, 189 | xaxis: { 190 | ticks: [], 191 | }, 192 | yaxis: { 193 | ticks: [], 194 | min: 0 195 | }, 196 | legend: { 197 | show: false 198 | } 199 | }; 200 | var plot = $.plot(plot_div, [{data: data}], options); 201 | }).fail(function () { 202 | // TODO: Handle failure 203 | }); 204 | 205 | return plot_div; 206 | } 207 | 208 | element.popover({ 209 | placement: 'left auto', 210 | trigger: 'hover', 211 | html: true, 212 | delay: 50, 213 | content: $('
').append(plot_div) 214 | }); 215 | 216 | element.on('show.bs.popover', update_plot); 217 | } 218 | 219 | /* 220 | Set up $.asv.ui 221 | */ 222 | 223 | this.network_error = network_error; 224 | this.make_panel = make_panel; 225 | this.make_value_selector_panel = make_value_selector_panel; 226 | this.reflow_value_selector_panels = reflow_value_selector_panels; 227 | this.hover_graph = hover_graph; 228 | this.hover_summary_graph = hover_summary_graph; 229 | 230 | $.asv.ui = this; 231 | }); 232 | -------------------------------------------------------------------------------- /doc/_static/asv/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | airspeed velocity 6 | 11 | 15 | 19 | 23 | 27 | 31 | 34 | 37 | 40 | 43 | 47 | 50 | 53 | 56 | 59 | 62 | 65 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
106 | commits 107 |
108 |
109 | 110 |
111 |
112 | 113 | 152 |
153 |
154 |
155 |
156 |
157 |
158 |
159 |
160 |
161 |
162 |
163 |
164 | 165 |
166 |
167 |
168 |
169 | 170 | 171 | 185 | 186 | 187 | 188 | -------------------------------------------------------------------------------- /test/test_event.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # Copyright 2020 ARC Centre of Excellence for Climate Extremes 3 | # author: Scott Wales 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | from climtas.event import * 18 | 19 | import xarray 20 | import numpy 21 | from unittest.mock import patch 22 | 23 | 24 | def test_find_events(): 25 | da = xarray.DataArray([[0, 1, 1, 1, 0]], dims=["x", "time"]) 26 | events = find_events(da > 0) 27 | 28 | events = events.set_index(["time", "x"]) 29 | assert events.index[0][0] == 1 30 | assert events.index[0][1] == 0 31 | assert events["event_duration"].iloc[0] == 3 32 | assert events["event_duration"].loc[1, 0] == 3 33 | assert len(events) == 1 34 | 35 | da = xarray.DataArray([[0, 1, 1, 1, 0], [1, 1, 0, 1, 1]], dims=["x", "time"]) 36 | events = find_events(da > 0) 37 | 38 | events = events.set_index(["time", "x"]) 39 | assert events["event_duration"].loc[1, 0] == 3 40 | assert events["event_duration"].loc[0, 1] == 2 41 | assert events["event_duration"].loc[3, 1] == 2 42 | assert len(events) == 3 43 | 44 | da = xarray.DataArray([[0, 1, 1, 1, 0], [1, 1, 1, 1, 0]], dims=["x", "time"]) 45 | events = find_events(da > 0) 46 | 47 | events = events.set_index(["time", "x"]) 48 | assert 
events["event_duration"].loc[1, 0] == 3 49 | assert events["event_duration"].loc[0, 1] == 4 50 | assert len(events) == 2 51 | 52 | da = xarray.DataArray([[0, 1, 1, 1, 0], [1, 1, 0, 1, 1]], dims=["x", "time"]) 53 | events = find_events(da > 0, min_duration=3) 54 | 55 | events = events.set_index(["time", "x"]) 56 | assert events["event_duration"].loc[1, 0] == 3 57 | assert len(events) == 1 58 | 59 | da = da.chunk({"x": 1, "time": 2}) 60 | events = find_events(da > 0, min_duration=3) 61 | 62 | print(events) 63 | 64 | events = events.set_index(["time", "x"]) 65 | assert events["event_duration"].loc[1, 0] == 3 66 | assert len(events) == 1 67 | 68 | 69 | def test_find_events_1d(): 70 | da = xarray.DataArray([0, 1, 1, 1, 0], dims=["time"]) 71 | events = find_events(da > 0) 72 | 73 | events = events.set_index(["time"]) 74 | assert events["event_duration"].loc[1] == 3 75 | assert len(events) == 1 76 | 77 | 78 | def test_map_events(): 79 | da = xarray.DataArray([0, 1, 1, 1, 0], dims=["time"]) 80 | events = find_events(da > 0) 81 | 82 | sums = map_events(da, events, lambda x: x.sum()) 83 | assert sums.iloc[0] == 3 84 | 85 | 86 | def test_atleastn(): 87 | sample = [[0, 1, 1, 1, 0, 1, 1, 1, 1], [1, 0, 1, 1, 0, 0, 0, 0, 1]] 88 | 89 | expect = [[0, 1, 1, 1, 0, 1, 1, 1, 1], [0, 0, 0, 0, 0, 0, 0, 0, 0]] 90 | expect = numpy.array(expect) 91 | expect = numpy.where(expect > 0, expect, numpy.nan) 92 | 93 | da = xarray.DataArray(sample, dims=["x", "time"]) 94 | filtered = atleastn(da.where(da > 0), 3) 95 | numpy.testing.assert_array_equal(filtered, expect) 96 | 97 | da = da.chunk({"x": 2}) 98 | filtered = atleastn(da.where(da > 0), 3) 99 | numpy.testing.assert_array_equal(filtered, expect) 100 | 101 | 102 | def test_find_events_dask(): 103 | da = xarray.DataArray( 104 | [[0, 1, 1, 1, 0], [1, 1, 1, 1, 0], [0, 0, 0, 1, 1]], dims=["x", "time"] 105 | ) 106 | 107 | da_dask = da.chunk({"x": 1, "time": 3}) 108 | events = find_events(da_dask > 0, min_duration=3, use_dask=True) 109 | 110 | # 
Results are t, x, length 111 | numpy.testing.assert_array_equal(events.to_numpy(), [[1, 0, 3], [0, 1, 4]]) 112 | 113 | 114 | def test_find_events_block(): 115 | # find_events_block works like find_events, but adds a offset to the coordinates 116 | da = xarray.DataArray( 117 | [[0, 1, 1, 1, 0], [1, 1, 1, 1, 0], [0, 0, 0, 0, 0]], dims=["x", "time"] 118 | ) 119 | 120 | da_block = da[1:, 2:] 121 | 122 | events = find_events_block(da_block > 0, min_duration=3, offset=(1, 2)) 123 | 124 | # Results are t, x, length 125 | numpy.testing.assert_array_equal(events.to_numpy(), [[2, 1, 2]]) 126 | 127 | da_block = da[0, :3] 128 | events = find_events_block(da_block > 0, min_duration=3, offset=(0,)) 129 | 130 | numpy.testing.assert_array_equal(events.to_numpy(), [[1, 2]]) 131 | 132 | da_block = da[2, :] 133 | events = find_events_block(da_block > 0, min_duration=3, offset=(0,)) 134 | 135 | numpy.testing.assert_array_equal(events.shape, (0, 2)) 136 | 137 | 138 | def test_join_events(): 139 | events = [ 140 | pandas.DataFrame([[1, 2]], columns=["time", "event_duration"]), 141 | pandas.DataFrame([[3, 3]], columns=["time", "event_duration"]), 142 | pandas.DataFrame([[6, 1]], columns=["time", "event_duration"]), 143 | ] 144 | 145 | joined = join_events(events) 146 | 147 | numpy.testing.assert_array_equal(joined.to_numpy(), [[1, 6]]) 148 | 149 | events = [ 150 | pandas.DataFrame([[1, 0, 2]], columns=["time", "x", "event_duration"]), 151 | pandas.DataFrame([[3, 0, 1]], columns=["time", "x", "event_duration"]), 152 | pandas.DataFrame([[5, 0, 1]], columns=["time", "x", "event_duration"]), 153 | ] 154 | 155 | joined = join_events(events) 156 | 157 | numpy.testing.assert_array_equal(joined.to_numpy(), [[1, 0, 3], [5, 0, 1]]) 158 | 159 | events = [ 160 | pandas.DataFrame([[1, 0, 2]], columns=["time", "x", "event_duration"]), 161 | pandas.DataFrame([[3, 0, 1]], columns=["time", "x", "event_duration"]), 162 | pandas.DataFrame([[1, 1, 2]], columns=["time", "x", "event_duration"]), 163 | 
pandas.DataFrame([[3, 1, 1]], columns=["time", "x", "event_duration"]), 164 | ] 165 | offsets = [[0, 0], [3, 0], [0, 1], [3, 1]] 166 | 167 | joined = join_events(events, offsets=offsets, dims=["time", "x"]) 168 | 169 | numpy.testing.assert_array_equal(joined.to_numpy(), [[1, 0, 3], [1, 1, 3]]) 170 | 171 | 172 | def test_event_values(): 173 | da = xarray.DataArray([[0, 1, 3, 2, 0]], dims=["x", "time"]) 174 | events = find_events(da > 0) 175 | 176 | # Basic call, no dask 177 | values = event_values(da, events).sort_values(["time", "event_id"]) 178 | numpy.testing.assert_array_equal( 179 | values.to_numpy(), [[1, 0, 1], [2, 0, 3], [3, 0, 2]] 180 | ) 181 | 182 | # Call with dask 183 | da_dask = da.chunk({"time": 3}) 184 | values = event_values(da_dask, events).compute().sort_values(["time", "event_id"]) 185 | numpy.testing.assert_array_equal( 186 | values.to_numpy(), [[1, 0, 1], [2, 0, 3], [3, 0, 2]] 187 | ) 188 | 189 | # Call with dask in 2d 190 | da = xarray.DataArray([[0, 0, 3, 2, 0], [9, 8, 0, 0, 7]], dims=["x", "time"]) 191 | da_dask = da.chunk({"time": 3, "x": 1}) 192 | events = find_events(da > 0) 193 | values = event_values(da_dask, events).compute().sort_values(["time", "event_id"]) 194 | numpy.testing.assert_array_equal( 195 | values.to_numpy(), 196 | [[0, 0, 9], [1, 0, 8], [2, 1, 3], [3, 1, 2], [4, 2, 7]], 197 | ) 198 | 199 | # Make sure the values aren't evaluated when using dask 200 | with patch("climtas.event.event_values_block") as mock: 201 | values = event_values(da_dask, events) 202 | assert not mock.called 203 | 204 | 205 | def test_event_values_dask_nd(): 206 | da = xarray.DataArray([[0, 0, 3, 2, 0], [9, 8, 0, 0, 7]], dims=["x", "time"]) 207 | da_dask = da.chunk({"time": 3, "x": 1}) 208 | 209 | events = find_events(da > 0) 210 | 211 | values = event_values_block(da_dask[1:, 3:], events, offset=(1, 3)) 212 | 213 | values = event_values(da_dask, events) 214 | 215 | values = values.compute().sort_values(["time", "event_id"]) 216 | 217 | 
numpy.testing.assert_array_equal( 218 | values.to_numpy(), 219 | [[0, 0, 9], [1, 0, 8], [2, 1, 3], [3, 1, 2], [4, 2, 7]], 220 | ) 221 | 222 | 223 | def test_event_values_dask_start(): 224 | # da = xarray.DataArray(numpy.random.random((10, 5)), dims=["time", "x"]) 225 | 226 | da = xarray.DataArray( 227 | [ 228 | [0.52649977, 0.17030355, 0.4356391, 0.45381215, 0.10209542], 229 | [0.53071507, 0.52002751, 0.29184523, 0.73629857, 0.80460948], 230 | [0.69110762, 0.44041389, 0.72885754, 0.20011198, 0.4447704], 231 | [0.73218493, 0.23160268, 0.81427243, 0.2787576, 0.09275809], 232 | [0.62564712, 0.48733569, 0.86497213, 0.13677226, 0.3936849], 233 | [0.41741731, 0.72461981, 0.20865482, 0.7148711, 0.69312872], 234 | ], 235 | dims=["time", "x"], 236 | ) 237 | 238 | da_dask = da.chunk({"time": 5, "x": 2}) 239 | 240 | events = find_events(da_dask > 0.4, use_dask=True) 241 | values = event_values(da_dask, events) 242 | 243 | 244 | def test_event_values_reduce(): 245 | da = xarray.DataArray([[0, 0, 3, 2, 0], [9, 8, 0, 0, 7]], dims=["x", "time"]) 246 | da_dask = da.chunk({"time": 3, "x": 1}) 247 | 248 | events = find_events(da > 0) 249 | 250 | values = event_values(da_dask, events) 251 | 252 | stats = values.groupby("event_id")["value"].min().compute() 253 | 254 | numpy.testing.assert_array_equal( 255 | stats.to_numpy(), 256 | [2, 8, 7], 257 | ) 258 | --------------------------------------------------------------------------------