10 |
11 | Can not determine continental origin of swallow.
12 |
13 |
14 |
15 | One or more external (JavaScript) dependencies of airspeed velocity failed to load.
16 |
17 |
18 |
19 | Make sure you have an active internet connection and enable 3rd-party scripts
20 | in your browser the first time you load airspeed velocity.
21 |
22 |
23 |
24 |
--------------------------------------------------------------------------------
/doc/Makefile:
--------------------------------------------------------------------------------
# Minimal makefile for Sphinx documentation
#

# You can set these variables from the command line.
SPHINXOPTS =
SPHINXBUILD = sphinx-build
SPHINXPROJ = climtas
SOURCEDIR = .
BUILDDIR = _build

# Put it first so that "make" without argument is like "make help".
help:
	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

# "help" and "serve" create no file of their own name; declaring them phony
# stops a stray file called "serve" or "help" from shadowing the target.
.PHONY: help serve Makefile

# Build the HTML docs, then serve them locally for previewing.
serve: html
	python -m http.server --bind localhost --directory _build/html/

# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
24 |
--------------------------------------------------------------------------------
/test/test_daskutil.py:
--------------------------------------------------------------------------------
1 | from climtas.daskutil import *
2 |
3 |
def compare_compute(s):
    """Assert that throttled_compute(s) matches a plain dask.compute(s)."""
    (expected,) = dask.compute(s)
    actual = throttled_compute(s, n=10)
    numpy.testing.assert_array_equal(actual, expected)
8 |
9 |
def test_throttled_compute():
    """Exercise throttled_compute on numpy input and several dask graphs."""
    # A plain numpy array should pass straight through
    data = numpy.random.random((10, 10))
    compare_compute(data)

    # A chunked view of the same values
    chunked = dask.array.from_array(data, chunks=(5, 5))
    compare_compute(chunked)

    # A natively-random dask array
    rand = dask.array.random.random((10, 10), chunks=(5, 5))
    compare_compute(rand)

    # A graph with inter-chunk dependencies (matrix product, mismatched chunks)
    other = dask.array.random.random((10, 10), chunks=(2, 2))
    compare_compute(rand @ other)
23 |
24 |
def test_visualize_block():
    """visualize_block should produce graphviz source naming the tasks."""
    import dask.dot

    data = dask.array.random.random((10, 10), chunks=(5, 5)) + 1
    graph = visualize_block(data)

    # The "+ 1" operation should show up as an add node label
    assert "label=add" in graph.source
33 |
--------------------------------------------------------------------------------
/doc/index.rst:
--------------------------------------------------------------------------------
1 | .. climtas documentation master file, created by
2 | sphinx-quickstart on Tue Mar 13 15:48:49 2018.
3 | You can adapt this file completely to your liking, but it should at least
4 | contain the root `toctree` directive.
5 |
6 | climtas: Climate Timeseries Analysis
7 | ====================================
8 |
9 | Climtas is a package for working with large climate analyses. It focuses on the
time domain with custom functions for `Xarray <https://xarray.dev>`_ and
`Dask <https://dask.org>`_ data.
12 |
13 | Contents
14 | --------
15 | .. toctree::
16 | :maxdepth: 2
17 | :caption: Contents:
18 |
19 | event
20 | groupby
21 | percentile
22 | regrid
23 | resample
24 |
25 |
26 | Reference
27 | ---------
28 | .. toctree::
29 | :caption: Reference:
30 |
31 | api/index
32 | nci
33 |
34 | genindex
35 |
36 | `Benchmark graphs <_static/asv/index.html>`_
37 |
38 |
39 |
--------------------------------------------------------------------------------
/doc/make.bat:
--------------------------------------------------------------------------------
@ECHO OFF

REM Change into the directory containing this script
pushd %~dp0

REM Command file for Sphinx documentation

REM Allow the caller to override the sphinx-build executable
if "%SPHINXBUILD%" == "" (
set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=.
set BUILDDIR=_build
set SPHINXPROJ=climtas

REM No target given: fall through to the Sphinx help text
if "%1" == "" goto help

REM Probe that sphinx-build runs; errorlevel 9009 means "command not found"
%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
echo.
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
echo.installed, then set the SPHINXBUILD environment variable to point
echo.to the full path of the 'sphinx-build' executable. Alternatively you
echo.may add the Sphinx directory to PATH.
echo.
echo.If you don't have Sphinx installed, grab it from
echo.http://sphinx-doc.org/
exit /b 1
)

REM Forward the requested target to Sphinx "make mode"
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
goto end

:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%

:end
popd
37 |
--------------------------------------------------------------------------------
/doc/_static/asv/regressions.css:
--------------------------------------------------------------------------------
1 | #regressions-body {
2 | margin-left: 2em;
3 | margin-right: 2em;
4 | margin-top: 1em;
5 | margin-bottom: 2em;
6 | }
7 |
8 | #regressions-body table thead th {
9 | cursor: pointer;
10 | white-space: nowrap;
11 | }
12 |
13 | #regressions-body table thead th.desc:after {
14 | content: ' \2191';
15 | }
16 |
17 | #regressions-body table thead th.asc:after {
18 | content: ' \2193';
19 | }
20 |
21 | #regressions-body table.ignored {
22 | padding-top: 1em;
23 | color: #ccc;
24 | background-color: #eee;
25 | }
26 |
27 | #regressions-body table.ignored a {
28 | color: #82abda;
29 | }
30 |
31 | #regressions-body .feed-div {
32 | float: right;
33 | }
34 |
35 | #regressions-body table tbody td.date {
36 | white-space: nowrap;
37 | }
38 |
39 | #regressions-body table button {
40 | margin-top: -2px;
41 | padding-top: 2px;
42 | padding-bottom: 0px;
43 | white-space: nowrap;
44 | }
45 |
--------------------------------------------------------------------------------
/doc/_static/asv/summarylist.css:
--------------------------------------------------------------------------------
1 | #summarylist-body {
2 | padding-left: 2em;
3 | padding-right: 2em;
4 | padding-top: 1em;
5 | padding-bottom: 2em;
6 | }
7 |
8 | #summarylist-body table thead th {
9 | cursor: pointer;
10 | white-space: nowrap;
11 | }
12 |
13 | #summarylist-body table thead th.desc:after {
14 | content: ' \2191';
15 | }
16 |
17 | #summarylist-body table thead th.asc:after {
18 | content: ' \2193';
19 | }
20 |
21 | #summarylist-body table.ignored {
22 | padding-top: 1em;
23 | color: #ccc;
24 | background-color: #eee;
25 | }
26 |
27 | #summarylist-body table.ignored a {
28 | color: #82abda;
29 | }
30 |
31 | #summarylist-body table tbody td.positive-change {
32 | background-color: #fdd;
33 | }
34 |
35 | #summarylist-body table tbody td.negative-change {
36 | background-color: #dfd;
37 | }
38 |
39 | #summarylist-body table tbody td.value {
40 | white-space: nowrap;
41 | }
42 |
43 | #summarylist-body table tbody td.change a {
44 | color: black;
45 | white-space: nowrap;
46 | }
47 |
48 | #summarylist-body table tbody td.change-date {
49 | white-space: nowrap;
50 | }
51 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [versioneer]
2 | VCS = git
3 | style = pep440
4 | versionfile_source = src/climtas/_version.py
5 | versionfile_build = climtas/_version.py
6 | tag_prefix =
7 | parentdir_prefix = climtas-
8 |
9 | [coverage:paths]
10 | source =
11 | src
12 | */site-packages
13 |
14 | [tool:pytest]
15 | addopts = --doctest-modules --doctest-glob="*.rst"
16 | doctest_optionflags=ELLIPSIS
17 | norecursedirs = benchmarks notebooks .asv
18 |
19 | [mypy]
20 | files = src/climtas,test
21 | #plugins = numpy.typing.mypy_plugin
22 |
23 | [mypy-climtas._version]
24 | ignore_errors = True
25 |
26 | [mypy-dask.*]
27 | ignore_missing_imports = True
28 |
29 | [mypy-pandas.*]
30 | ignore_missing_imports = True
31 |
32 | [mypy-tqdm.*]
33 | ignore_missing_imports = True
34 |
35 | [mypy-pytest.*]
36 | ignore_missing_imports = True
37 |
38 | [mypy-scipy.*]
39 | ignore_missing_imports = True
40 |
41 | [mypy-iris.*]
42 | ignore_missing_imports = True
43 |
44 | [mypy-mule.*]
45 | ignore_missing_imports = True
46 |
47 | [mypy-sparse.*]
48 | ignore_missing_imports = True
49 |
50 | [mypy-cfunits.*]
51 | ignore_missing_imports = True
52 |
53 | [mypy-graphviz.*]
54 | ignore_missing_imports = True
55 |
--------------------------------------------------------------------------------
/meta.yaml:
--------------------------------------------------------------------------------
1 | {% set data = load_setup_py_data() %}
2 |
3 | package:
4 | name: climtas
5 | version: {{ data.get('version') }}
6 |
7 | source:
8 | path: .
9 |
10 |
11 | build:
12 | script: "{{ PYTHON }} -m pip install . --no-deps"
13 | noarch: python
14 | number: {{ GIT_DESCRIBE_NUMBER }}
15 |
16 |
17 | requirements:
18 | host:
19 | - python >=3.8
20 | - pip
21 | run:
22 | - python >=3.8
23 | - dask >=2015.5
24 | - netcdf4
25 | - pandas
26 | - scipy
27 | - tqdm
28 | - xarray
29 | - typing_extensions
30 | - iris
31 | - cfunits
32 | - mule
33 | - sparse
34 | - python-graphviz
35 |
36 | test:
37 | imports:
38 | - climtas
39 | requires:
40 | - pytest
41 | - coverage
42 | - cdo
43 | - esmf
44 | - hdf5
45 | - nco
46 | files:
47 | - setup.cfg
48 | - test
49 | - README.rst
50 | - doc
51 | script_env:
52 | - TEST_OUTPUT
53 | commands:
54 | - COVERAGE_FILE=${TEST_OUTPUT:-.}/coverage coverage run --source climtas -m pytest ./test --pyargs climtas --junit-xml=${TEST_OUTPUT:-.}/pytest/junit.xml
55 |
56 |
--------------------------------------------------------------------------------
/benchmarks/blocked.py:
--------------------------------------------------------------------------------
import tempfile

import climtas
import dask
import dask.distributed  # Client is not reachable through the bare "dask" import

from .sample import sample_data
5 |
6 |
class GroupbySuite:
    """Benchmark xarray groupby against climtas blocked groupby."""

    def setup(self):
        # Five years of daily samples
        self.data = sample_data(years=5, freq="D")

    def time_xarray_dayofyear(self):
        grouped = self.data.groupby("time.dayofyear")
        grouped.mean().load()

    def time_blocked_dayofyear(self):
        grouped = climtas.blocked.blocked_groupby(self.data, time="dayofyear")
        grouped.mean().load()

    def time_blocked_monthday(self):
        grouped = climtas.blocked.blocked_groupby(self.data, time="monthday")
        grouped.mean().load()
19 |
20 |
class ResampleSuite:
    """Benchmark xarray resampling against climtas blocked resampling."""

    def setup(self):
        # Two years of 6-hourly samples
        self.data = sample_data(years=2, freq="6H")

    def time_xarray(self):
        resampled = self.data.resample(time="D")
        resampled.mean().load()

    def time_blocked(self):
        # Four 6-hourly steps make up one day
        resampled = climtas.blocked.blocked_resample(self.data, time=4)
        resampled.mean().load()
30 |
31 |
class GroupbyDistributedSuite(GroupbySuite):
    """GroupbySuite benchmarks run against a dask.distributed local cluster."""

    def setup(self):
        # Scratch space for worker spill files, removed again in teardown()
        self.tmpdir = tempfile.TemporaryDirectory()
        self.client = dask.distributed.Client(local_directory=self.tmpdir.name)
        super().setup()

    def teardown(self):
        self.client.close()
        # Fix: the temporary directory was previously never removed
        self.tmpdir.cleanup()
40 |
41 |
class ResampleDistributedSuite(ResampleSuite):
    """ResampleSuite benchmarks run against a dask.distributed local cluster."""

    def setup(self):
        # Scratch space for worker spill files, removed again in teardown()
        self.tmpdir = tempfile.TemporaryDirectory()
        self.client = dask.distributed.Client(local_directory=self.tmpdir.name)
        super().setup()

    def teardown(self):
        self.client.close()
        # Fix: the temporary directory was previously never removed
        self.tmpdir.cleanup()
50 |
--------------------------------------------------------------------------------
/test/test_grid.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # Copyright 2018 ARC Centre of Excellence for Climate Extremes
3 | # author: Scott Wales
4 | #
5 | # Licensed under the Apache License, Version 2.0 (the "License");
6 | # you may not use this file except in compliance with the License.
7 | # You may obtain a copy of the License at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # Unless required by applicable law or agreed to in writing, software
12 | # distributed under the License is distributed on an "AS IS" BASIS,
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | # See the License for the specific language governing permissions and
15 | # limitations under the License.
16 | from __future__ import print_function
17 |
18 | from climtas.grid import *
19 | import xarray
20 | import numpy
21 | import tempfile
22 |
23 |
def test_latlon_grid_to_scrip():
    """A 2x4 lat/lon grid should convert to the expected SCRIP description."""
    lats = [-45, 45]
    lons = [0, 90, 180, 270]
    d = xarray.DataArray(
        data=numpy.ones((2, 4)), coords=[("lat", lats), ("lon", lons)]
    )
    d.lat.attrs["units"] = "degrees_north"
    d.lon.attrs["units"] = "degrees_east"

    center_lon, center_lat = numpy.meshgrid(d.lon, d.lat)
    d[:, :] = center_lon

    s = identify_grid(d).to_scrip()

    # grid_dims is ordered (lon, lat)
    assert s.grid_dims[0] == 4
    assert s.grid_dims[1] == 2

    # Bottom left corner of bottom left cell
    assert s.grid_corner_lat[0, 0] == -90
    assert s.grid_corner_lon[0, 0] == -45

    # Top left corner of bottom left cell
    assert s.grid_corner_lat[0, 3] == 0
    assert s.grid_corner_lon[0, 3] == -45
--------------------------------------------------------------------------------
/profile_event.py:
--------------------------------------------------------------------------------
1 | import xarray
2 | import tempfile
3 | import climtas
4 | import climtas.nci
5 | import time
6 |
7 |
class Timer:
    """Records named wall-clock intervals via paired calls to mark()."""

    def __init__(self):
        # name -> perf_counter value at the first / second mark
        self.starts = {}
        self.ends = {}

    def mark(self, name):
        """First call with a name records its start, the next call its end."""
        target = self.ends if name in self.starts else self.starts
        target[name] = time.perf_counter()

    def results(self):
        """Return elapsed seconds for every completed (start, end) pair."""
        return {name: end - self.starts[name] for name, end in self.ends.items()}
21 |
22 |
if __name__ == "__main__":
    # Profile the event-detection pipeline on NOAA OISST sea surface
    # temperature data. NOTE(review): hard-coded /g/data and /scratch paths
    # mean this only runs on NCI systems — confirm before running elsewhere.
    t = Timer()
    t.mark("full")

    # Start a dask client; worker/thread counts are reported at the end
    client = climtas.nci.GadiClient()
    workers = len(client.cluster.workers)
    threads = sum([w.nthreads for w in client.cluster.workers.values()])

    # Time opening the multi-file dataset (lazy, chunked one timestep at a time)
    t.mark("load")
    oisst = xarray.open_mfdataset(
        "/g/data/ua8/NOAA_OISST/AVHRR/v2-0_modified/oisst_avhrr_v2_*.nc",
        chunks={"time": 1},
    )
    sst = oisst.sst
    t.mark("load")

    clim_file = "/scratch/w35/saw562/tmp/oisst_clim.nc"

    # Time loading the climatology. The commented-out code below was used to
    # precompute it once and cache it to clim_file.
    t.mark("clim")
    # climatology = climtas.blocked_groupby(
    # sst.sel(time=slice("1985", "1987")), time="monthday"
    # ).percentile(90)
    # climatology.name = "sst_thresh"

    # climtas.io.to_netcdf_throttled(climatology, clim_file)

    climatology = xarray.open_dataarray(clim_file, chunks={"monthday": 1})
    t.mark("clim")

    # Time the anomaly computation and event search for a single year
    t.mark("find")
    delta = climtas.blocked_groupby(sst.sel(time="1985"), time="monthday") - climatology
    delta = delta.chunk({"time": 30, "lat": 100, "lon": 100})
    print(delta)
    # Events are runs of at least 10 timesteps where the anomaly is positive
    events = climtas.event.find_events_block(
        delta > 0, min_duration=10, offset=(0, 0, 0)
    )
    t.mark("find")

    t.mark("full")

    print("workers ", workers, " threads ", threads)
    print(t.results())
65 |
--------------------------------------------------------------------------------
/test/test_dimension.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # Copyright 2018 ARC Centre of Excellence for Climate Extremes
3 | # author: Scott Wales
4 | #
5 | # Licensed under the Apache License, Version 2.0 (the "License");
6 | # you may not use this file except in compliance with the License.
7 | # You may obtain a copy of the License at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # Unless required by applicable law or agreed to in writing, software
12 | # distributed under the License is distributed on an "AS IS" BASIS,
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | # See the License for the specific language governing permissions and
15 | # limitations under the License.
16 | from __future__ import print_function
17 |
18 | from climtas.dimension import *
19 |
20 | import pytest
21 | import xarray
22 | import numpy
23 |
24 |
def test_remove_degenerate_axes():
    """1d input is untouched; a 2d array of repeated rows collapses to 1d."""
    one_d = xarray.DataArray([1, 2], dims=["i"])
    out = remove_degenerate_axes(one_d)
    numpy.testing.assert_array_equal(one_d.data, out.data)

    # Both rows identical, so the "i" dimension is degenerate
    two_d = xarray.DataArray([[1, 2], [1, 2]], dims=["i", "j"])
    out = remove_degenerate_axes(two_d)
    numpy.testing.assert_array_equal([1, 2], out.data)
35 |
36 |
def test_identify_lat_lon():
    """identify_lat_lon needs CF metadata and returns the matching coords."""
    da = xarray.DataArray(
        [[0, 0], [0, 0]], coords=[("lat", [0, 1]), ("lon", [0, 1])]
    )

    # Without CF attributes the coordinates cannot be identified
    with pytest.raises(Exception):
        identify_lat_lon(da)

    # Any of the units, axis or standard_name attributes should be recognised
    da.lat.attrs["units"] = "degrees_north"
    da.lon.attrs["axis"] = "X"
    lat, lon = identify_lat_lon(da)
    assert lat.equals(da.lat)
    assert lon.equals(da.lon)
50 |
51 |
def test_identify_time():
    """identify_time needs CF metadata, with or without CF decoding."""
    da = xarray.DataArray([0, 0], coords=[("time", [0, 1])])

    # Without CF attributes the time axis cannot be identified
    with pytest.raises(Exception):
        identify_time(da)

    # A CF units attribute is enough to find the time axis
    da.time.attrs["units"] = "days since 2006-01-09"
    assert identify_time(da).equals(da.time)

    # Decoding the units into datetimes should not break identification
    da = xarray.decode_cf(xarray.Dataset({"da": da})).da
    assert identify_time(da).equals(da.time)
68 |
--------------------------------------------------------------------------------
/doc/_static/asv/regressions.json:
--------------------------------------------------------------------------------
1 | {"regressions": [["event.EventSuite.time_find_event", "graphs/arch-x86_64/branch-master/cfunits/cpu-AMD Ryzen 5 3600X 6-Core Processor/dask/iris/machine-Freya/mule/netcdf4/num_cpu-12/os-Linux 4.4.0-19041-Microsoft/pandas/python/python-graphviz/ram-16726988/scipy/sparse/tqdm/typing_extensions/xarray/event.EventSuite.time_find_event.json", {"cpu": "AMD Ryzen 5 3600X 6-Core Processor", "machine": "Freya", "num_cpu": "12", "os": "Linux 4.4.0-19041-Microsoft", "ram": "16726988"}, null, 1.4560975000204053, 1.1528359999938402, [[208, 235, 1.2022353499996825, 1.4560975000204053]]], ["event.EventSuite.time_find_event", "graphs/arch-x86_64/branch-master/cfunits/cpu-Intel(R) Xeon(R) Platinum 8268 CPU @ 2.90GHz/dask/iris/machine-gadi/mule/netcdf4/num_cpu-48/os-Linux 4.18.0-240.1.1.el8.nci.x86_64/pandas/python/python-graphviz/ram-262432756/scipy/sparse/tqdm/typing_extensions/xarray/event.EventSuite.time_find_event.json", {"cpu": "Intel(R) Xeon(R) Platinum 8268 CPU @ 2.90GHz", "machine": "gadi", "num_cpu": "48", "os": "Linux 4.18.0-240.1.1.el8.nci.x86_64", "ram": "262432756"}, null, 5.249316045985324, 3.1751911519968417, [[162, 231, 3.1751911519968417, 5.249316045985324]]], ["blocked.GroupbySuite.time_blocked_monthday", "graphs/arch-x86_64/branch-master/cfunits/cpu-Intel(R) Xeon(R) Platinum 8268 CPU @ 2.90GHz/dask/iris/machine-gadi/mule/netcdf4/num_cpu-48/os-Linux 4.18.0-240.1.1.el8.nci.x86_64/pandas/python/python-graphviz/ram-262432756/scipy/sparse/tqdm/typing_extensions/xarray/blocked.GroupbySuite.time_blocked_monthday.json", {"cpu": "Intel(R) Xeon(R) Platinum 8268 CPU @ 2.90GHz", "machine": "gadi", "num_cpu": "48", "os": "Linux 4.18.0-240.1.1.el8.nci.x86_64", "ram": "262432756"}, null, 0.5356181205133907, 0.4298640069901012, [[162, 231, 0.4298640069901012, 0.5356181205133907]]], ["blocked.ResampleDistributedSuite.time_xarray", "graphs/arch-x86_64/branch-master/cfunits/cpu-Intel(R) Xeon(R) Platinum 8268 CPU @ 2.90GHz/dask/iris/machine-gadi/mule/netcdf4/num_cpu-48/os-Linux 
4.18.0-240.1.1.el8.nci.x86_64/pandas/python/python-graphviz/ram-262432756/scipy/sparse/tqdm/typing_extensions/xarray/blocked.ResampleDistributedSuite.time_xarray.json", {"cpu": "Intel(R) Xeon(R) Platinum 8268 CPU @ 2.90GHz", "machine": "gadi", "num_cpu": "48", "os": "Linux 4.18.0-240.1.1.el8.nci.x86_64", "ram": "262432756"}, null, 3.9142326589790173, 3.634890771994833, [[null, 235, 3.634890771994833, 3.9142326589790173]]]]}
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | pip-wheel-metadata/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 |
30 | # PyInstaller
31 | # Usually these files are written by a python script from a template
32 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
33 | *.manifest
34 | *.spec
35 |
36 | # Installer logs
37 | pip-log.txt
38 | pip-delete-this-directory.txt
39 |
40 | # Unit test / coverage reports
41 | htmlcov/
42 | .tox/
43 | .nox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *.cover
50 | *.py,cover
51 | .hypothesis/
52 | .pytest_cache/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | target/
76 |
77 | # Jupyter Notebook
78 | .ipynb_checkpoints
79 |
80 | # IPython
81 | profile_default/
82 | ipython_config.py
83 |
84 | # pyenv
85 | .python-version
86 |
87 | # pipenv
88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
91 | # install all needed dependencies.
92 | #Pipfile.lock
93 |
94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
95 | __pypackages__/
96 |
97 | # Celery stuff
98 | celerybeat-schedule
99 | celerybeat.pid
100 |
101 | # SageMath parsed files
102 | *.sage.py
103 |
104 | # Environments
105 | .env
106 | .venv
107 | env/
108 | venv/
109 | ENV/
110 | env.bak/
111 | venv.bak/
112 |
113 | # Spyder project settings
114 | .spyderproject
115 | .spyproject
116 |
117 | # Rope project settings
118 | .ropeproject
119 |
120 | # mkdocs documentation
121 | /site
122 |
123 | # mypy
124 | .mypy_cache/
125 | .dmypy.json
126 | dmypy.json
127 |
128 | # Pyre type checker
129 | .pyre/
130 | *.swp
131 |
132 | doc/_build
133 | PET*.RegridWeightGen.Log
134 | benchmark/*/log
135 | .asv/env
136 |
--------------------------------------------------------------------------------
/doc/_static/asv/vendor/jquery.flot-0.8.3.categories.min.js:
--------------------------------------------------------------------------------
1 | /*
2 | Copyright (c) 2007-2014 IOLA and Ole Laursen
3 |
4 | Permission is hereby granted, free of charge, to any person
5 | obtaining a copy of this software and associated documentation
6 | files (the "Software"), to deal in the Software without
7 | restriction, including without limitation the rights to use,
8 | copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the
10 | Software is furnished to do so, subject to the following
11 | conditions:
12 |
13 | The above copyright notice and this permission notice shall be
14 | included in all copies or substantial portions of the Software.
15 |
16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
17 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
18 | OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
19 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
20 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
21 | WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
22 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
23 | OTHER DEALINGS IN THE SOFTWARE.
24 | */
25 |
26 | !function(r){function o(r,o,e,i){var s="categories"==o.xaxis.options.mode,n="categories"==o.yaxis.options.mode;if(s||n){var a=i.format;if(!a){var t=o;if(a=[],a.push({x:!0,number:!0,required:!0}),a.push({y:!0,number:!0,required:!0}),t.bars.show||t.lines.show&&t.lines.fill){var u=!!(t.bars.show&&t.bars.zero||t.lines.show&&t.lines.zero);a.push({y:!0,number:!0,required:!1,defaultValue:0,autoscale:u}),t.bars.horizontal&&(delete a[a.length-1].y,a[a.length-1].x=!0)}i.format=a}for(var f=0;fo&&(o=r[e]);return o+1}function i(r){var o=[];for(var e in r.categories){var i=r.categories[e];i>=r.min&&i<=r.max&&o.push([i,e])}return o.sort(function(r,o){return r[0]-o[0]}),o}function s(o,e,s){if("categories"==o[e].options.mode){if(!o[e].categories){var a={},t=o[e].options.categories||{};if(r.isArray(t))for(var u=0;uc;++c){var l=s[f+c];null!=l&&a[c][t]&&(l in i||(i[l]=u,++u),s[f+c]=i[l])}}function a(r,o,e){s(o,"xaxis",e),s(o,"yaxis",e)}function t(r){r.hooks.processRawData.push(o),r.hooks.processDatapoints.push(a)}var u={xaxis:{categories:null},yaxis:{categories:null}};r.plot.plugins.push({init:t,options:u,name:"categories",version:"1.0"})}(jQuery);
--------------------------------------------------------------------------------
/doc/_static/asv/graphs/arch-x86_64/branch-master/cfunits/cpu-AMD Ryzen 5 3600X 6-Core Processor/dask/iris/machine-Freya/mule/netcdf4/num_cpu-12/os-Linux 4.4.0-19041-Microsoft/pandas/python/python-graphviz/ram-16726988/scipy/sparse/tqdm/typing_extensions/xarray/summary.json:
--------------------------------------------------------------------------------
1 | [{"name": "blocked.GroupbyDistributedSuite.time_blocked_dayofyear", "idx": null, "pretty_name": "blocked.GroupbyDistributedSuite.time_blocked_dayofyear", "last_rev": 235, "last_value": 1.1829518500017002, "last_err": 0.0, "prev_value": null, "change_rev": null}, {"name": "blocked.GroupbyDistributedSuite.time_blocked_monthday", "idx": null, "pretty_name": "blocked.GroupbyDistributedSuite.time_blocked_monthday", "last_rev": 235, "last_value": 0.963999400002649, "last_err": 0.0, "prev_value": null, "change_rev": null}, {"name": "blocked.GroupbyDistributedSuite.time_xarray_dayofyear", "idx": null, "pretty_name": "blocked.GroupbyDistributedSuite.time_xarray_dayofyear", "last_rev": 235, "last_value": 6.495260349998716, "last_err": 0.0, "prev_value": null, "change_rev": null}, {"name": "blocked.GroupbySuite.time_blocked_dayofyear", "idx": null, "pretty_name": "blocked.GroupbySuite.time_blocked_dayofyear", "last_rev": 235, "last_value": 0.30014864999975543, "last_err": 0.008690700654255389, "prev_value": null, "change_rev": null}, {"name": "blocked.GroupbySuite.time_blocked_monthday", "idx": null, "pretty_name": "blocked.GroupbySuite.time_blocked_monthday", "last_rev": 235, "last_value": 0.2233245499955956, "last_err": 0.007246654976722034, "prev_value": null, "change_rev": null}, {"name": "blocked.GroupbySuite.time_xarray_dayofyear", "idx": null, "pretty_name": "blocked.GroupbySuite.time_xarray_dayofyear", "last_rev": 235, "last_value": 2.9537656500033336, "last_err": 0.12581787973661263, "prev_value": null, "change_rev": null}, {"name": "blocked.ResampleDistributedSuite.time_blocked", "idx": null, "pretty_name": "blocked.ResampleDistributedSuite.time_blocked", "last_rev": 235, "last_value": 0.7067086999886669, "last_err": 0.0, "prev_value": null, "change_rev": null}, {"name": "blocked.ResampleDistributedSuite.time_xarray", "idx": null, "pretty_name": "blocked.ResampleDistributedSuite.time_xarray", "last_rev": 235, "last_value": 3.3373256499762647, "last_err": 0.0, 
"prev_value": null, "change_rev": null}, {"name": "blocked.ResampleSuite.time_blocked", "idx": null, "pretty_name": "blocked.ResampleSuite.time_blocked", "last_rev": 235, "last_value": 0.12802800003555603, "last_err": 0.0, "prev_value": 0.1291424500013818, "change_rev": [208, 235]}, {"name": "blocked.ResampleSuite.time_xarray", "idx": null, "pretty_name": "blocked.ResampleSuite.time_xarray", "last_rev": 235, "last_value": 1.8484123000016552, "last_err": 0.060204885292630156, "prev_value": null, "change_rev": null}, {"name": "event.EventDistributedSuite.time_find_event", "idx": null, "pretty_name": "event.EventDistributedSuite.time_find_event", "last_rev": 235, "last_value": 2.0268603999866173, "last_err": 0.0, "prev_value": null, "change_rev": null}, {"name": "event.EventSuite.time_find_event", "idx": null, "pretty_name": "event.EventSuite.time_find_event", "last_rev": 235, "last_value": 1.4560975000204053, "last_err": 0.0, "prev_value": 1.2022353499996825, "change_rev": [208, 235]}]
--------------------------------------------------------------------------------
/doc/_static/asv/graphs/arch-x86_64/branch-master/cfunits/cpu-Intel(R) Xeon(R) Platinum 8268 CPU @ 2.90GHz/dask/iris/machine-gadi/mule/netcdf4/num_cpu-48/os-Linux 4.18.0-240.1.1.el8.nci.x86_64/pandas/python/python-graphviz/ram-262432756/scipy/sparse/tqdm/typing_extensions/xarray/summary.json:
--------------------------------------------------------------------------------
1 | [{"name": "blocked.GroupbyDistributedSuite.time_blocked_dayofyear", "idx": null, "pretty_name": "blocked.GroupbyDistributedSuite.time_blocked_dayofyear", "last_rev": 235, "last_value": 1.1509175270039123, "last_err": 0.015101201900593305, "prev_value": null, "change_rev": null}, {"name": "blocked.GroupbyDistributedSuite.time_blocked_monthday", "idx": null, "pretty_name": "blocked.GroupbyDistributedSuite.time_blocked_monthday", "last_rev": 235, "last_value": 0.9424207944830414, "last_err": 0.0, "prev_value": 0.9274916650028899, "change_rev": [231, 235]}, {"name": "blocked.GroupbyDistributedSuite.time_xarray_dayofyear", "idx": null, "pretty_name": "blocked.GroupbyDistributedSuite.time_xarray_dayofyear", "last_rev": 235, "last_value": 6.81930769601604, "last_err": 0.05000486624965498, "prev_value": null, "change_rev": null}, {"name": "blocked.GroupbySuite.time_blocked_dayofyear", "idx": null, "pretty_name": "blocked.GroupbySuite.time_blocked_dayofyear", "last_rev": 235, "last_value": 0.6714389724947978, "last_err": 0.03386290753635429, "prev_value": null, "change_rev": null}, {"name": "blocked.GroupbySuite.time_blocked_monthday", "idx": null, "pretty_name": "blocked.GroupbySuite.time_blocked_monthday", "last_rev": 235, "last_value": 0.5356181205133907, "last_err": 0.0, "prev_value": 0.5758137800148688, "change_rev": [231, 235]}, {"name": "blocked.GroupbySuite.time_xarray_dayofyear", "idx": null, "pretty_name": "blocked.GroupbySuite.time_xarray_dayofyear", "last_rev": 235, "last_value": 5.589740982977673, "last_err": 0.0, "prev_value": 6.291899875999661, "change_rev": [231, 235]}, {"name": "blocked.ResampleDistributedSuite.time_blocked", "idx": null, "pretty_name": "blocked.ResampleDistributedSuite.time_blocked", "last_rev": 235, "last_value": 0.7298302005219739, "last_err": 0.012202703436815713, "prev_value": null, "change_rev": null}, {"name": "blocked.ResampleDistributedSuite.time_xarray", "idx": null, "pretty_name": 
"blocked.ResampleDistributedSuite.time_xarray", "last_rev": 235, "last_value": 3.9142326589790173, "last_err": 0.0, "prev_value": 3.634890771994833, "change_rev": [231, 235]}, {"name": "blocked.ResampleSuite.time_blocked", "idx": null, "pretty_name": "blocked.ResampleSuite.time_blocked", "last_rev": 235, "last_value": 0.10669864900410175, "last_err": 0.0, "prev_value": 0.10901543201180175, "change_rev": [231, 235]}, {"name": "blocked.ResampleSuite.time_xarray", "idx": null, "pretty_name": "blocked.ResampleSuite.time_xarray", "last_rev": 235, "last_value": 3.708070858469, "last_err": 0.08677950767956295, "prev_value": null, "change_rev": null}, {"name": "event.EventDistributedSuite.time_find_event", "idx": null, "pretty_name": "event.EventDistributedSuite.time_find_event", "last_rev": 235, "last_value": 2.8345289284770843, "last_err": 0.0, "prev_value": 2.8652793369838037, "change_rev": [231, 235]}, {"name": "event.EventSuite.time_find_event", "idx": null, "pretty_name": "event.EventSuite.time_find_event", "last_rev": 235, "last_value": 5.249316045985324, "last_err": 0.0, "prev_value": 5.308832254988374, "change_rev": [231, 235]}]
--------------------------------------------------------------------------------
/src/climtas/dimension.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # Copyright 2018 ARC Centre of Excellence for Climate Extremes
3 | # author: Scott Wales
4 | #
5 | # Licensed under the Apache License, Version 2.0 (the "License");
6 | # you may not use this file except in compliance with the License.
7 | # You may obtain a copy of the License at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # Unless required by applicable law or agreed to in writing, software
12 | # distributed under the License is distributed on an "AS IS" BASIS,
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | # See the License for the specific language governing permissions and
15 | # limitations under the License.
16 | from __future__ import print_function
17 |
18 | from cfunits import Units
19 | import numpy
20 |
21 |
def remove_degenerate_axes(coord):
    """
    Remove any degenerate axes from the coordinate, where all the values along
    a dimension are identical

    Args:
        coord (xarray.DataArray): Co-ordinate to operate on

    Returns:
        xarray.DataArray with degenerate axes removed
    """

    result = coord
    for dim in coord.dims:
        # A dimension is degenerate when max == min everywhere along it
        spread = result.max(dim=dim) - result.min(dim=dim)
        if numpy.allclose(spread, 0):
            # Averaging over identical values collapses the axis while
            # keeping the common value
            result = result.mean(dim=dim)

    return result
38 |
39 |
def identify_lat_lon(dataarray):
    """
    Identify the latitude and longitude dimensions of a dataarray using CF
    attributes

    Args:
        dataarray: Source dataarray

    Returns:
        (lat, lon): Tuple of `xarray.Dataarray` for the latitude and longitude
            dimensions

    Todo:
        * Assumes latitude and longitude are unique
    """

    def _matches(coord, standard_name, units_attr, axis):
        # A coordinate matches when any one CF marker identifies it:
        # standard_name, CF units, or the axis attribute
        if coord.attrs.get("standard_name", "") == standard_name:
            return True
        if getattr(Units(coord.attrs.get("units", "")), units_attr):
            return True
        return coord.attrs.get("axis", "") == axis

    lat = None
    lon = None

    for coord in dataarray.coords.values():
        if _matches(coord, "latitude", "islatitude", "Y"):
            lat = coord

        if _matches(coord, "longitude", "islongitude", "X"):
            lon = coord

    if lat is None or lon is None:
        raise Exception("Couldn't identify horizontal coordinates")

    return (lat, lon)
78 |
79 |
def identify_time(dataarray):
    """
    Identify the time dimension of a dataarray using CF attributes

    Args:
        dataarray: Source dataarray

    Returns:
        :obj:`xarray.Dataarray` for the time dimension

    Todo:
        * Assumes time dimension is unique
    """

    # Check each CF marker in turn, returning the first coordinate that
    # matches any of them (same short-circuit order as a chained 'or')
    for coord in dataarray.coords.values():
        if coord.attrs.get("standard_name", "") == "time":
            return coord
        if Units(coord.attrs.get("units", "")).isreftime:
            return coord
        # Units may live in the encoding rather than attrs after decoding
        if Units(coord.encoding.get("units", "")).isreftime:
            return coord
        if coord.attrs.get("axis", "") == "T":
            return coord

    raise Exception("No time axis found")
104 |
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | climtas
2 | ==========
3 |
4 | .. image:: https://img.shields.io/circleci/build/github/ScottWales/climtas/master
5 | :target: https://circleci.com/gh/ScottWales/climtas
6 | :alt: CircleCI
7 |
8 | .. image:: https://img.shields.io/codecov/c/github/ScottWales/climtas/master
9 | :target: https://codecov.io/gh/ScottWales/climtas
10 | :alt: Codecov
11 |
12 | .. image:: https://img.shields.io/readthedocs/climtas/latest
13 | :target: https://climtas.readthedocs.io/en/latest/
14 | :alt: Read the Docs (latest)
15 |
16 | .. image:: https://img.shields.io/conda/v/coecms/climtas
17 | :target: https://anaconda.org/coecms/climtas
18 | :alt: Conda
19 |
20 | Functions for working with large (> 10 GB) datasets using Xarray and Dask,
21 | especially for working in the time domain
22 |
23 | Topics
24 | ------
25 |
26 | `Apply a function grouping by day of year, without massive numbers of dask chunks `_
27 | ~~~~
28 |
29 | Climtas' blocked resample and groupby operations use array reshaping, rather than Xarray's default slicing methods. This results in a much simpler and efficient Dask graph, at the cost of some restrictions to the data (the data must be regularly spaced and start/end on a resampling boundary)
30 |
31 | Example notebook: `ERA-5 90th percentile climatology `_
32 |
33 | .. image:: benchmark/climatology/climatology_walltime.png
34 | :alt: Walltime of Climtas climatology vs xarray
35 |
36 | .. code-block:: python
37 |
38 | >>> import numpy; import pandas; import xarray
39 | >>> time = pandas.date_range("20010101", "20030101", closed="left")
40 | >>> data = numpy.random.rand(len(time))
41 | >>> da = xarray.DataArray(data, coords=[("time", time)])
42 | >>> da = da.chunk({"time": 365})
43 |
44 | >>> from climtas import blocked_groupby
45 | >>> blocked_groupby(da, time='dayofyear').mean()
46 |
47 | dask.array
48 | Coordinates:
49 | * dayofyear (dayofyear) int64 1 2 3 4 5 6 7 8 ... 360 361 362 363 364 365 366
50 |
51 |
52 |
53 | `Find and apply a function to events `_
54 | ~~~~
55 |
56 | Climtas includes a number of parallelised building blocks for heatwave detection
57 |
58 | .. code-block:: python
59 |
60 | >>> from climtas.event import find_events, map_events
61 | >>> temp = xarray.DataArray([28,31,34,32,30,35,39], dims=['time'])
62 | >>> events = find_events(temp > 30)
63 | >>> sums = map_events(temp, events, lambda x: {'sum': x.sum().item()})
64 | >>> events.join(sums)
65 | time event_duration sum
66 | 0 1 3 97
67 | 1 5 2 74
68 |
69 | `Memory-saving write to NetCDF `_
70 | ~~~~
71 |
72 | Climtas' throttled saver reduces memory usage, by limiting the number of Dask output chunks that get processed at one time
73 |
74 | Examples
75 | --------
76 |
 77 | See the examples in the `notebooks `_ directory for more ideas on how to
78 | use these functions to analyse large datasets
79 |
--------------------------------------------------------------------------------
/doc/_static/asv/asv.css:
--------------------------------------------------------------------------------
1 | /* Basic navigation */
2 |
3 | .asv-navigation {
4 | padding: 2px;
5 | }
6 |
7 | nav ul li.active a {
8 | height: 52px;
9 | }
10 |
11 | nav li.active span.navbar-brand {
12 | background-color: #e7e7e7;
13 | height: 52px;
14 | }
15 |
16 | nav li.active span.navbar-brand:hover {
17 | background-color: #e7e7e7;
18 | }
19 |
20 | .navbar-default .navbar-link {
21 | color: #2458D9;
22 | }
23 |
24 | .panel-body {
25 | padding: 0;
26 | }
27 |
28 | .panel {
29 | margin-bottom: 4px;
30 | -webkit-box-shadow: none;
31 | box-shadow: none;
32 | border-radius: 0;
33 | border-top-left-radius: 3px;
34 | border-top-right-radius: 3px;
35 | }
36 |
37 | .panel-default>.panel-heading,
38 | .panel-heading {
39 | font-size: 12px;
40 | font-weight:bold;
41 | padding: 2px;
42 | text-align: center;
43 | border-top-left-radius: 3px;
44 | border-top-right-radius: 3px;
45 | background-color: #eee;
46 | }
47 |
48 | .btn,
49 | .btn-group,
50 | .btn-group-vertical>.btn:first-child,
51 | .btn-group-vertical>.btn:last-child:not(:first-child),
52 | .btn-group-vertical>.btn:last-child {
53 | border: none;
54 | border-radius: 0px;
55 | overflow: hidden;
56 | }
57 |
58 | .btn-default:focus, .btn-default:active, .btn-default.active {
59 | border: none;
60 | color: #fff;
61 | background-color: #99bfcd;
62 | }
63 |
64 | #range {
65 | font-family: monospace;
66 | text-align: center;
67 | background: #ffffff;
68 | }
69 |
70 | .form-control {
71 | border: none;
72 | border-radius: 0px;
73 | font-size: 12px;
74 | padding: 0px;
75 | }
76 |
77 | .tooltip-inner {
78 | min-width: 100px;
79 | max-width: 800px;
80 | text-align: left;
81 | white-space: pre;
82 | font-family: monospace;
83 | }
84 |
85 | /* Benchmark tree */
86 |
87 | .nav-list {
88 | font-size: 12px;
89 | padding: 0;
90 | padding-left: 15px;
91 | }
92 |
93 | .nav-list>li {
94 | overflow-x: hidden;
95 | }
96 |
97 | .nav-list>li>a {
98 | padding: 0;
99 | padding-left: 5px;
100 | color: #000;
101 | }
102 |
103 | .nav-list>li>a:focus {
104 | color: #fff;
105 | background-color: #99bfcd;
106 | box-shadow: inset 0 3px 5px rgba(0,0,0,.125);
107 | }
108 |
109 | .nav-list>li>.nav-header {
110 | white-space: nowrap;
111 | font-weight: 500;
112 | margin-bottom: 2px;
113 | }
114 |
115 | .caret-right {
116 | display: inline-block;
117 | width: 0;
118 | height: 0;
119 | margin-left: 2px;
120 | vertical-align: middle;
121 | border-left: 4px solid;
122 | border-bottom: 4px solid transparent;
123 | border-top: 4px solid transparent;
124 | }
125 |
126 | /* Summary page */
127 |
128 | .benchmark-group > h1 {
129 | text-align: center;
130 | }
131 |
132 | .benchmark-container {
133 | width: 300px;
134 | height: 116px;
135 | padding: 4px;
136 | border-radius: 3px;
137 | }
138 |
139 | .benchmark-container:hover {
140 | background-color: #eee;
141 | }
142 |
143 | .benchmark-plot {
144 | width: 292px;
145 | height: 88px;
146 | }
147 |
148 | .benchmark-text {
149 | font-size: 12px;
150 | color: #000;
151 | width: 292px;
152 | overflow: hidden;
153 | }
154 |
155 | #extra-buttons {
156 | margin: 1em;
157 | }
158 |
159 | #extra-buttons a {
160 | border: solid 1px #ccc;
161 | }
162 |
--------------------------------------------------------------------------------
/test/test_io.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # Copyright 2019 Scott Wales
3 | # author: Scott Wales
4 | #
5 | # Licensed under the Apache License, Version 2.0 (the "License");
6 | # you may not use this file except in compliance with the License.
7 | # You may obtain a copy of the License at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # Unless required by applicable law or agreed to in writing, software
12 | # distributed under the License is distributed on an "AS IS" BASIS,
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | # See the License for the specific language governing permissions and
15 | # limitations under the License.
16 |
17 | import xarray
18 | import numpy
19 | import dask
20 | import pandas
21 |
22 | from climtas import io
23 |
24 |
def test_to_netcdf_throttled(tmpdir, distributed_client):
    """to_netcdf_throttled round-trips data when a distributed client is active"""

    def roundtrip(path, data):
        # Save via the throttled writer, then read back and compare
        source = xarray.DataArray(data, dims=["t", "x", "y"], name="test")
        io.to_netcdf_throttled(source, path)
        result = xarray.open_dataset(str(path)).test
        xarray.testing.assert_identical(source, result)

    roundtrip(tmpdir / "numpy.nc", numpy.zeros([10, 10, 10]))
    roundtrip(tmpdir / "dask.nc", dask.array.zeros([10, 10, 10]))
    # A mixed dask + numpy expression exercises the non-trivial graph path
    roundtrip(
        tmpdir / "dask.nc",
        dask.array.random.random([10, 10, 10]) + numpy.random.random([10, 10, 10]),
    )
42 |
43 |
def test_to_netcdf_throttled_serial(tmpdir):
    """to_netcdf_throttled also round-trips data without a distributed client"""

    def roundtrip(path, data):
        # Save via the throttled writer, then read back and compare
        source = xarray.DataArray(data, dims=["t", "x", "y"], name="test")
        io.to_netcdf_throttled(source, path)
        xarray.testing.assert_identical(source, xarray.open_dataset(str(path)).test)

    roundtrip(tmpdir / "numpy.nc", numpy.zeros([10, 10, 10]))
    roundtrip(tmpdir / "dask.nc", dask.array.zeros([10, 10, 10]))
58 |
59 |
def test_to_netcdf_series(tmpdir):
    """to_netcdf_series writes one file per group, named from the path template"""

    time = pandas.date_range("20010101", freq="MS", periods=20)
    data = xarray.DataArray(
        numpy.zeros([20]), coords=[("time", time)], name="test"
    )

    # Group by year: 20 monthly samples span 2001 and 2002
    io.to_netcdf_series(data, tmpdir / "data_{start.year}.nc", groupby="time.year")
    assert (tmpdir / "data_2001.nc").exists()
    assert (tmpdir / "data_2002.nc").exists()

    # Group by an arbitrary auxiliary coordinate instead of a date component
    data.coords["group"] = ("time", [0] * 10 + [1] * 10)
    io.to_netcdf_series(data, tmpdir / "data_{group}.nc", groupby="group")
    assert (tmpdir / "data_0.nc").exists()
    assert (tmpdir / "data_1.nc").exists()
81 |
82 |
def test_to_netcdf_throttled_fillvalue(tmpdir, distributed_client):
    """A _FillValue encoding survives a throttled save/load round trip"""

    def roundtrip(path, data):
        source = xarray.DataArray(data, dims=["t", "x", "y"], name="test")
        source.encoding["_FillValue"] = 1
        io.to_netcdf_throttled(source, path)
        result = xarray.open_dataset(str(path)).test
        xarray.testing.assert_identical(source, result)
        # The fill value must be preserved in the on-disk encoding
        assert result.encoding["_FillValue"] == 1

    roundtrip(tmpdir / "numpy.nc", numpy.zeros([10, 10, 10]))
    roundtrip(tmpdir / "dask.nc", dask.array.zeros([10, 10, 10]))
    # Mixed dask + numpy expression
    roundtrip(
        tmpdir / "dask.nc",
        dask.array.random.random([10, 10, 10]) + numpy.random.random([10, 10, 10]),
    )
102 |
--------------------------------------------------------------------------------
/test/test_helpers.py:
--------------------------------------------------------------------------------
1 | import dask
2 | import pandas
3 | import numpy
4 | from climtas.helpers import *
5 |
6 |
def test_blockwise():
    """map_blocks_array_to_dataframe applies a frame-producing function per chunk"""
    da = dask.array.zeros((10, 10), chunks=(5, 5))

    meta = pandas.DataFrame({"mean": pandas.Series([], dtype=da.dtype)})

    def mean_frame(block):
        # One single-row frame per chunk
        return pandas.DataFrame({"mean": block.mean()}, index=[1])

    result = map_blocks_array_to_dataframe(mean_frame, da, meta=meta).compute()

    # Four 5x5 chunks of zeros -> four zero means
    numpy.testing.assert_array_equal(result.to_numpy(), [[0], [0], [0], [0]])

    def info_frame(block, block_info=None):
        # Record the block metadata dask passes to the mapped function
        return pandas.DataFrame.from_records([block_info[0]], index=[1])

    result = map_blocks_array_to_dataframe(info_frame, da, meta=meta).compute()

    # Chunk locations cover the 2x2 chunk grid exactly once each
    locations = result["chunk-location"].sort_values()
    numpy.testing.assert_array_equal(
        locations.apply(lambda loc: loc[0]),
        numpy.array([0, 0, 1, 1]),
    )
    numpy.testing.assert_array_equal(
        locations.apply(lambda loc: loc[1]),
        numpy.array([0, 1, 0, 1]),
    )
49 |
50 |
def test_blockwise_xarray():
    """locate_block_in_dataarray recovers xarray metadata inside a block function"""
    xda = xarray.DataArray(
        dask.array.zeros((10, 10), chunks=(5, 5)), dims=["t", "x"]
    )

    def block_mean(block, block_info=None):
        # Rebuild the DataArray view of this chunk, then reduce it
        located = locate_block_in_dataarray(
            block, xda.name, xda.dims, xda.coords, block_info[0]
        )
        return pandas.DataFrame({"mean": located.mean().values}, index=[1])

    empty_meta = pandas.DataFrame({"mean": pandas.Series([], dtype=xda.dtype)})

    result = map_blocks_array_to_dataframe(block_mean, xda.data, meta=empty_meta)

    # Four zero chunks -> four zero means
    numpy.testing.assert_array_equal(result.compute().to_numpy(), [[0], [0], [0], [0]])
67 |
68 |
def test_throttled_compute():
    """throttled_compute matches dask.compute across input container types"""

    def check(value):
        # The throttled result must equal a plain dask.compute of the same value
        throttled = throttled_compute(value, n=1)
        (expected,) = dask.compute(value)
        numpy.testing.assert_array_equal(throttled, expected)
        return throttled

    base = numpy.random.random((10, 10))

    # Plain numpy array passes straight through
    check(base)

    # Numpy array wrapped as a dask array
    check(dask.array.from_array(base, chunks=(5, 5)))

    # Pure dask array
    check(dask.array.random.random((10, 10), chunks=(5, 5)))

    # Xarray over numpy keeps its metadata
    result = check(xarray.DataArray(numpy.random.random((10, 10)), name="foo"))
    assert result.name == "foo"

    # Xarray over dask keeps its metadata
    result = check(
        xarray.DataArray(
            dask.array.random.random((10, 10), chunks=(5, 5)), name="foo"
        )
    )
    assert result.name == "foo"
109 |
110 |
def test_array_blocks_to_dataframe():
    """array_blocks_to_dataframe concatenates per-block DataFrames in chunk order"""
    meta = pandas.DataFrame({"a": [0, 1, 2], "b": [2, 3, 4]})

    source = dask.array.from_array(numpy.array([0, 1, 2, 3]), chunks=(2,))

    def mapper(block):
        # First chunk contributes the first row of meta, second chunk the rest
        return meta.iloc[0:1] if block[0] == 0 else meta.iloc[1:]

    blocks = dask.array.map_blocks(mapper, source, dtype="object")

    result = array_blocks_to_dataframe(blocks, meta).compute()

    assert meta.equals(result)
128 |
--------------------------------------------------------------------------------
/.asv/results/Freya/3801e053-conda-py-cfunits-dask-iris-mule-netcdf4-pandas-python-graphviz-scipy-sparse-tqdm-typing_extensions-xarray.json:
--------------------------------------------------------------------------------
1 | {"results": {"blocked.GroupbySuite.time_blocked_dayofyear": {"result": [0.3126271499932045], "stats": [{"ci_99": [0.2977016999939224, 0.34394289999909233], "q_25": 0.3003146499977447, "q_75": 0.3215577500013751, "min": 0.2977016999939224, "max": 0.34394289999909233, "mean": 0.3137292899962631, "std": 0.014682803495529122, "repeat": 10, "number": 1}]}, "blocked.GroupbySuite.time_blocked_monthday": {"result": [0.2437466499977745], "stats": [{"ci_99": [0.22851909999735653, 0.27990589999535587], "q_25": 0.23839082500126096, "q_75": 0.2593821000009484, "min": 0.22851909999735653, "max": 0.27990589999535587, "mean": 0.2484980499968515, "std": 0.014908833143747205, "repeat": 10, "number": 1}]}, "blocked.GroupbySuite.time_xarray_dayofyear": {"result": [3.0456320500015863], "stats": [{"ci_99": [3.0129942999919876, 3.064236629433123], "q_25": 3.031911849997414, "q_75": 3.05106107499887, "min": 3.0129942999919876, "max": 3.05717949999962, "mean": 3.0402745166647946, "std": 0.01522875723199255, "repeat": 6, "number": 1}]}, "blocked.ResampleSuite.time_blocked": {"result": [0.1291424500013818], "stats": [{"ci_99": [0.11620689999836031, 0.14901609999651555], "q_25": 0.12433564999810187, "q_75": 0.13542332499855547, "min": 0.11620689999836031, "max": 0.14901609999651555, "mean": 0.13057890999916708, "std": 0.009049610063400704, "repeat": 10, "number": 1}]}, "blocked.ResampleSuite.time_xarray": {"result": [1.8962347999986378], "stats": [{"ci_99": [1.8759468000062043, 1.9226966000132961], "q_25": 1.8873265749934944, "q_75": 1.906550950006931, "min": 1.8759468000062043, "max": 1.9226966000132961, "mean": 1.8969048800005113, "std": 0.013334025574830217, "repeat": 10, "number": 1}]}, "event.EventSuite.time_find_event": {"result": [1.2022353499996825], "stats": [{"ci_99": [1.185307500010822, 1.2574620000086725], "q_25": 1.1946725249981682, "q_75": 1.21394304999194, "min": 1.185307500010822, "max": 1.2574620000086725, "mean": 1.2073511100010363, "std": 0.019356085737859734, "repeat": 
10, "number": 1}]}}, "params": {"arch": "x86_64", "cpu": "AMD Ryzen 5 3600X 6-Core Processor", "machine": "Freya", "num_cpu": "12", "os": "Linux 4.4.0-19041-Microsoft", "ram": "16726988", "python": "", "cfunits": "", "dask": "", "iris": "", "mule": "", "netcdf4": "", "pandas": "", "python-graphviz": "", "scipy": "", "sparse": "", "tqdm": "", "typing_extensions": "", "xarray": ""}, "requirements": {"cfunits": "", "dask": "", "iris": "", "mule": "", "netcdf4": "", "pandas": "", "python-graphviz": "", "scipy": "", "sparse": "", "tqdm": "", "typing_extensions": "", "xarray": ""}, "commit_hash": "3801e053df60b21ffab4534be4c2642bf0a17e01", "date": 1609824900000, "env_name": "conda-py-cfunits-dask-iris-mule-netcdf4-pandas-python-graphviz-scipy-sparse-tqdm-typing_extensions-xarray", "python": "", "profiles": {}, "started_at": {"blocked.GroupbySuite.time_blocked_dayofyear": 1609825020542, "blocked.GroupbySuite.time_blocked_monthday": 1609825022725, "blocked.GroupbySuite.time_xarray_dayofyear": 1609825024565, "blocked.ResampleSuite.time_blocked": 1609825037028, "blocked.ResampleSuite.time_xarray": 1609825038009, "event.EventSuite.time_find_event": 1609825049789}, "ended_at": {"blocked.GroupbySuite.time_blocked_dayofyear": 1609825022725, "blocked.GroupbySuite.time_blocked_monthday": 1609825024565, "blocked.GroupbySuite.time_xarray_dayofyear": 1609825037027, "blocked.ResampleSuite.time_blocked": 1609825038009, "blocked.ResampleSuite.time_xarray": 1609825049789, "event.EventSuite.time_find_event": 1609825057304}, "benchmark_version": {"blocked.GroupbySuite.time_blocked_dayofyear": "01d3740b4f5916532dc07a5d1aec3619fd38128f350845bcc1d9d15cd4d3b1dd", "blocked.GroupbySuite.time_blocked_monthday": "ea1ecf7949f8513fba397204403a348b9efc7ccd3753e255e48e9e64689684d2", "blocked.GroupbySuite.time_xarray_dayofyear": "c5d3ef9404ad60cb7e3b28de3c565c02f7a75c1041aab3ccdeba1cc6c7618e7b", "blocked.ResampleSuite.time_blocked": "62b1f41ff720dc40ea68f172077bed6172fe97cfcf57eace07809d4845c85a52", 
"blocked.ResampleSuite.time_xarray": "364ac7c62ba036563517fdb59f62d4ce79f49fe8d6476f286494b9671c236610", "event.EventSuite.time_find_event": "a231d487f53daed34d1f991be54d797f3e1f15a79e7d0a811686326b16fc028e"}, "version": 1}
--------------------------------------------------------------------------------
/src/climtas/daskutil.py:
--------------------------------------------------------------------------------
1 | """
2 | Utilities for working with Dask
3 | """
4 |
5 | import xarray
6 | import dask
7 | import numpy
8 | from itertools import zip_longest
9 | import graphviz
10 | import typing as T
11 |
12 |
13 | # An array-like value for typing
14 | ArrayVar = T.TypeVar("ArrayVar", xarray.DataArray, dask.array.Array, numpy.ndarray)
15 |
16 |
17 | def _grouper(iterable, n, fillvalue=None):
18 | "Collect data into fixed-length chunks or blocks"
19 | # grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx"
20 | args = [iter(iterable)] * n
21 | return zip_longest(*args, fillvalue=fillvalue)
22 |
23 |
def throttled_compute(arr: ArrayVar, *, n: int, name: T.Hashable = None) -> ArrayVar:
    """
    Compute a Dask object N chunks at a time

    Evaluates the top graph layer in groups of ``n`` keys, then replaces the
    object's graph with the materialized results, limiting how much of the
    graph is resident at once.

    Args:
        arr: Object to compute (numpy array, dask array or xarray.DataArray)
        n: Number of chunks to process at once
        name: Dask layer name to compute (default the object's dask name)

    Returns:
        'arr', with each chunk computed
    """

    # Copy the input in case it's a xarray object
    obj = arr

    if isinstance(arr, xarray.DataArray):
        # Work on the data
        obj = arr.data

    if not hasattr(obj, "dask") or isinstance(obj, numpy.ndarray):
        # Short-circuit non-dask arrays
        return arr

    # Current dask scheduler
    schedule = dask.base.get_scheduler(collections=[obj])

    # Get the layer to work on
    if name is None:
        name = obj.name
    top_layer = obj.dask.layers[name]

    # Computed chunk values, keyed by graph key
    result = {}

    # Compute chunks N at a time
    for x in _grouper(top_layer, n):
        # _grouper pads the final group with None; drop the padding
        x = [xx for xx in x if xx is not None]

        # Restrict the graph to just the dependencies of this group of keys
        graph = obj.dask.cull(set(x))
        values = schedule(graph, list(x))
        result.update(dict(zip(x, values)))

    # Build a new dask graph
    layer = dask.highlevelgraph.MaterializedLayer(result)
    graph = dask.highlevelgraph.HighLevelGraph.from_collections(name, layer)

    # Replace the object's graph in place with the computed chunks, so any
    # later compute() just fetches the stored values
    obj.dask = graph

    if isinstance(arr, xarray.DataArray):
        # Add back metadata
        obj = xarray.DataArray(
            obj, name=arr.name, dims=arr.dims, coords=arr.coords, attrs=arr.attrs
        )

    return obj
79 |
80 |
def visualize_block(arr: dask.array.Array, sizes=True) -> graphviz.Digraph:
    """
    Visualise the graph of a single chunk from 'arr'

    In a Jupyter notebook the graph will automatically display, otherwise use
    :meth:`graphviz.Digraph.render` to create an image.

    Args:
        arr: Array to visualise
        sizes: Calculate the sizes of each node and display as the node label
            if True
    """
    import dask.dot

    # Restrict the graph to the dependencies of the first chunk only
    top_layer = arr.dask.layers[arr.name]
    first_block = next(iter(top_layer.keys()))
    culled = arr.dask.cull({first_block})

    # Optionally label every node with the bytes it holds
    attrs = graph_sizes(arr) if sizes else {}

    return dask.dot.to_graphviz(culled, data_attributes=attrs)
108 |
109 |
def graph_sizes(arr: dask.array.Array) -> T.Dict[T.Hashable, T.Dict]:
    """
    Get the node sizes for each node in arr's Dask graph, to be used in
    visualisation functions

    Sizes are returned using the 'label' graphviz attribute

    >>> import dask.dot
    >>> a = dask.array.zeros((10,10), chunks=(5,5))
    >>> sizes = graph_sizes(a)
    >>> dask.dot.to_graphviz(a.dask, data_attributes=sizes) # doctest: +ELLIPSIS


    Note: All nodes will be computed to calculate the size
    """

    keys = list(arr.dask.keys())

    # Evaluate every node; concrete ndarray results get a human-readable
    # byte-size label, anything else gets no attributes
    sizes = {}
    for key, value in zip(keys, dask.get(arr.dask, keys)):
        if isinstance(value, numpy.ndarray):
            sizes[key] = {"label": dask.utils.format_bytes(value.nbytes)}
        else:
            sizes[key] = {}

    return sizes
140 |
--------------------------------------------------------------------------------
/doc/_static/asv/vendor/jquery.flot-0.8.3.selection.min.js:
--------------------------------------------------------------------------------
1 | /*
2 | Copyright (c) 2007-2014 IOLA and Ole Laursen
3 |
4 | Permission is hereby granted, free of charge, to any person
5 | obtaining a copy of this software and associated documentation
6 | files (the "Software"), to deal in the Software without
7 | restriction, including without limitation the rights to use,
8 | copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the
10 | Software is furnished to do so, subject to the following
11 | conditions:
12 |
13 | The above copyright notice and this permission notice shall be
14 | included in all copies or substantial portions of the Software.
15 |
16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
17 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
18 | OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
19 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
20 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
21 | WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
22 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
23 | OTHER DEALINGS IN THE SOFTWARE.
24 | */
25 |
26 | !function(e){function t(t){function o(e){h.active&&(a(e),t.getPlaceholder().trigger("plotselecting",[s()]))}function n(t){1==t.which&&(document.body.focus(),void 0!==document.onselectstart&&null==m.onselectstart&&(m.onselectstart=document.onselectstart,document.onselectstart=function(){return!1}),void 0!==document.ondrag&&null==m.ondrag&&(m.ondrag=document.ondrag,document.ondrag=function(){return!1}),l(h.first,t),h.active=!0,x=function(e){i(e)},e(document).one("mouseup",x))}function i(e){return x=null,void 0!==document.onselectstart&&(document.onselectstart=m.onselectstart),void 0!==document.ondrag&&(document.ondrag=m.ondrag),h.active=!1,a(e),g()?r():(t.getPlaceholder().trigger("plotunselected",[]),t.getPlaceholder().trigger("plotselecting",[null])),!1}function s(){if(!g())return null;if(!h.show)return null;var o={},n=h.first,i=h.second;return e.each(t.getAxes(),function(e,t){if(t.used){var s=t.c2p(n[t.direction]),r=t.c2p(i[t.direction]);o[e]={from:Math.min(s,r),to:Math.max(s,r)}}}),o}function r(){var e=s();t.getPlaceholder().trigger("plotselected",[e]),e.xaxis&&e.yaxis&&t.getPlaceholder().trigger("selected",[{x1:e.xaxis.from,y1:e.yaxis.from,x2:e.xaxis.to,y2:e.yaxis.to}])}function c(e,t,o){return e>t?e:t>o?o:t}function l(e,o){var n=t.getOptions(),i=t.getPlaceholder().offset(),s=t.getPlotOffset();e.x=c(0,o.pageX-i.left-s.left,t.width()),e.y=c(0,o.pageY-i.top-s.top,t.height()),"y"==n.selection.mode&&(e.x=e==h.first?0:t.width()),"x"==n.selection.mode&&(e.y=e==h.first?0:t.height())}function a(e){null!=e.pageX&&(l(h.second,e),g()?(h.show=!0,t.triggerRedrawOverlay()):u(!0))}function u(e){h.show&&(h.show=!1,t.triggerRedrawOverlay(),e||t.getPlaceholder().trigger("plotunselected",[]))}function d(e,o){var n,i,s,r,c=t.getAxes();for(var l in c)if(n=c[l],n.direction==o&&(r=o+n.n+"axis",e[r]||1!=n.n||(r=o+"axis"),e[r])){i=e[r].from,s=e[r].to;break}if(e[r]||(n="x"==o?t.getXAxes()[0]:t.getYAxes()[0],i=e[o+"1"],s=e[o+"2"]),null!=i&&null!=s&&i>s){var 
a=i;i=s,s=a}return{from:i,to:s,axis:n}}function f(e,o){var n,i=t.getOptions();"y"==i.selection.mode?(h.first.x=0,h.second.x=t.width()):(n=d(e,"x"),h.first.x=n.axis.p2c(n.from),h.second.x=n.axis.p2c(n.to)),"x"==i.selection.mode?(h.first.y=0,h.second.y=t.height()):(n=d(e,"y"),h.first.y=n.axis.p2c(n.from),h.second.y=n.axis.p2c(n.to)),h.show=!0,t.triggerRedrawOverlay(),!o&&g()&&r()}function g(){var e=t.getOptions().selection.minSize;return Math.abs(h.second.x-h.first.x)>=e&&Math.abs(h.second.y-h.first.y)>=e}var h={first:{x:-1,y:-1},second:{x:-1,y:-1},show:!1,active:!1},m={},x=null;t.clearSelection=u,t.setSelection=f,t.getSelection=s,t.hooks.bindEvents.push(function(e,t){var i=e.getOptions();null!=i.selection.mode&&(t.mousemove(o),t.mousedown(n))}),t.hooks.drawOverlay.push(function(t,o){if(h.show&&g()){var n=t.getPlotOffset(),i=t.getOptions();o.save(),o.translate(n.left,n.top);var s=e.color.parse(i.selection.color);o.strokeStyle=s.scale("a",.8).toString(),o.lineWidth=1,o.lineJoin=i.selection.shape,o.fillStyle=s.scale("a",.4).toString();var r=Math.min(h.first.x,h.second.x)+.5,c=Math.min(h.first.y,h.second.y)+.5,l=Math.abs(h.second.x-h.first.x)-1,a=Math.abs(h.second.y-h.first.y)-1;o.fillRect(r,c,l,a),o.strokeRect(r,c,l,a),o.restore()}}),t.hooks.shutdown.push(function(t,i){i.unbind("mousemove",o),i.unbind("mousedown",n),x&&e(document).unbind("mouseup",x)})}e.plot.plugins.push({init:t,options:{selection:{mode:null,color:"#e8cfac",shape:"round",minSize:5}},name:"selection",version:"1.1"})}(jQuery);
--------------------------------------------------------------------------------
/doc/_static/asv/summarygrid.js:
--------------------------------------------------------------------------------
1 | 'use strict';
2 |
3 | $(document).ready(function() {
4 | var summary_loaded = false;
5 |
6 | /* Callback a function when an element comes in view */
    function callback_in_view(element, func) {
        /* Invoke `func` once, the first time `element` scrolls into the
           viewport while the summary grid is displayed. */
        function handler(evt) {
            // In view when the grid is shown and the element's vertical
            // extent overlaps the window's current scroll region
            var visible = (
                $('#summarygrid-display').css('display') != 'none' &&
                (element.offset().top <= $(window).height() + $(window).scrollTop()) &&
                (element.offset().top + element.height() >= $(window).scrollTop()));
            if (visible) {
                func();
                // One-shot: stop listening after the first trigger
                $(window).off('scroll', handler);
            }
        }
        $(window).on('scroll', handler);
    }
20 |
21 | function get_benchmarks_by_groups() {
22 | var master_json = $.asv.master_json;
23 | var groups = {};
24 | $.each(master_json.benchmarks, function(bm_name, bm) {
25 | var i = bm_name.indexOf('.');
26 | var group = bm_name.slice(0, i);
27 | var name = bm_name.slice(i + 1);
28 | if (groups[group] === undefined) {
29 | groups[group] = [];
30 | }
31 | groups[group].push(bm_name);
32 | });
33 | return groups;
34 | }
35 |
36 | function benchmark_container(bm) {
37 | var container = $(
38 | '');
40 | var plot_div = $(
41 | '');
42 | var display_name = bm.pretty_name || bm.name.slice(bm.name.indexOf('.') + 1);
43 | var name = $('