├── .github ├── ISSUE_TEMPLATE │ └── issue-report.md └── move.yml ├── .gitignore ├── .readthedocs.yml ├── LICENSE ├── Makefile ├── README.md ├── SuPy.yml ├── azure-pipelines.yml ├── docs ├── Makefile ├── requirements.txt └── source │ ├── api.rst │ ├── api │ ├── supy.cmd │ │ ├── suews-convert.rst │ │ └── suews-run.rst │ ├── supy.util │ │ ├── supy.util.cal_gs_mod.rst │ │ ├── supy.util.cal_gs_obs.rst │ │ ├── supy.util.cal_neutral.rst │ │ ├── supy.util.calib_g.rst │ │ ├── supy.util.derive_ohm_coef.rst │ │ ├── supy.util.download_era5.rst │ │ ├── supy.util.extract_reclassification.rst │ │ ├── supy.util.fill_gap_all.rst │ │ ├── supy.util.fill_gap_one.rst │ │ ├── supy.util.gen_epw.rst │ │ ├── supy.util.gen_forcing_era5.rst │ │ ├── supy.util.loc_gap.rst │ │ ├── supy.util.optimize_MO.rst │ │ ├── supy.util.plot_comp.rst │ │ ├── supy.util.plot_day_clm.rst │ │ ├── supy.util.plot_reclassification.rst │ │ ├── supy.util.read_epw.rst │ │ └── supy.util.sim_ohm.rst │ └── supy │ │ ├── supy.init_supy.rst │ │ ├── supy.load_SampleData.rst │ │ ├── supy.load_forcing_grid.rst │ │ ├── supy.run_supy.rst │ │ ├── supy.save_supy.rst │ │ └── supy.show_version.rst │ ├── conf.py │ ├── data-structure │ ├── df_forcing.rst │ ├── df_output.rst │ ├── df_state.rst │ ├── supy-io.ipynb │ └── supy-io.py │ ├── faq.rst │ ├── index.rst │ ├── proc_var_info │ ├── df_forcing.csv │ ├── df_output.csv │ ├── df_state.csv │ ├── gen_df_forcing_output_csv.py │ ├── gen_df_state_csv.py │ ├── gen_rst.py │ └── nml_rst_proc.py │ ├── sample_run │ ├── tutorial │ ├── AMF-sim.ipynb │ ├── data │ │ └── US-AR1_2010_data_60.txt │ ├── external-interaction.ipynb │ ├── impact-studies.ipynb │ ├── quick-start.ipynb │ └── tutorial.rst │ └── version-history │ ├── 20181215.rst │ ├── 20190101.rst │ ├── 20190208.rst │ ├── 20190219.rst │ ├── 20190224.rst │ ├── 20190225.rst │ ├── 20190314.rst │ ├── 20190321.rst │ ├── 20190415.rst │ ├── 20190417.rst │ ├── 20190429.rst │ ├── 20190528.rst │ ├── 20190608.rst │ ├── 20190717.rst │ ├── 20190829.rst │ ├── 20200202.rst │ ├── 20200529.rst │ ├── 20201103.rst │ └── version-history.rst ├── sample_plot.png └── src ├── Makefile ├── data_test ├── multi-grid │ ├── 51.5N0.125W-201310-ml.nc │ ├── 51.5N0.125W-201310-sfc.nc │ ├── 51.5N0.125W-201311-ml.nc │ ├── 51.5N0.125W-201311-sfc.nc │ ├── 51.5N0.125W-201312-ml.nc │ └── 51.5N0.125W-201312-sfc.nc └── single-grid │ ├── 57.75N12.0E-200301-ml.nc │ ├── 57.75N12.0E-200301-sfc.nc │ ├── 57.75N12.0E-200302-ml.nc │ ├── 57.75N12.0E-200302-sfc.nc │ ├── 57.75N12.0E-200303-ml.nc │ ├── 57.75N12.0E-200303-sfc.nc │ ├── 57.75N12.0E-200304-ml.nc │ ├── 57.75N12.0E-200304-sfc.nc │ ├── 57.75N12.0E-200305-ml.nc │ ├── 57.75N12.0E-200305-sfc.nc │ ├── 57.75N12.0E-200306-ml.nc │ ├── 57.75N12.0E-200306-sfc.nc │ ├── 57.75N12.0E-200307-ml.nc │ ├── 57.75N12.0E-200307-sfc.nc │ ├── 57.75N12.0E-200308-ml.nc │ ├── 57.75N12.0E-200308-sfc.nc │ ├── 57.75N12.0E-200309-ml.nc │ ├── 57.75N12.0E-200309-sfc.nc │ ├── 57.75N12.0E-200310-ml.nc │ ├── 57.75N12.0E-200310-sfc.nc │ ├── 57.75N12.0E-200311-ml.nc │ ├── 57.75N12.0E-200311-sfc.nc │ ├── 57.75N12.0E-200312-ml.nc │ └── 57.75N12.0E-200312-sfc.nc ├── setup.py └── supy ├── __init__.py ├── _check.py ├── _env.py ├── _load.py ├── _misc.py ├── _post.py ├── _run.py ├── _save.py ├── _supy_module.py ├── _version.py ├── checker_rules_indiv.json ├── checker_rules_joint.json ├── cmd ├── SUEWS.py ├── __init__.py └── table_converter.py ├── code2file.json ├── sample_run ├── Input │ ├── ESTMinput.nml │ ├── InitialConditionsKc_2011.nml │ ├── Kc_2011_data_60.txt │ ├── Kc_2012_data_60.txt │ ├── 
SUEWS_AnthropogenicEmission.txt │ ├── SUEWS_BiogenCO2.txt │ ├── SUEWS_Conductance.txt │ ├── SUEWS_ESTMCoefficients.txt │ ├── SUEWS_Irrigation.txt │ ├── SUEWS_NonVeg.txt │ ├── SUEWS_OHMCoefficients.txt │ ├── SUEWS_Profiles.txt │ ├── SUEWS_SiteSelect.txt │ ├── SUEWS_Snow.txt │ ├── SUEWS_Soil.txt │ ├── SUEWS_Veg.txt │ ├── SUEWS_Water.txt │ └── SUEWS_WithinGridWaterDist.txt └── RunControl.nml ├── supy_version.json ├── test └── test_SuPy.py ├── util ├── __init__.py ├── _atm.py ├── _converter.py ├── _debug.py ├── _era5.py ├── _gap_filler.py ├── _io.py ├── _ohm.py ├── _plot.py ├── _roughness.py ├── _tmy.py ├── _wrf.py └── rules.csv └── var2siteselect.json /.github/ISSUE_TEMPLATE/issue-report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Issue report 3 | about: Create a report to help us improve 4 | 5 | --- 6 | 7 | **Describe the Issue** 8 | 9 | 10 | **Location** 11 | 12 | 13 | **Screenshots** 14 | 15 | 16 | 17 | **Additional context** 18 | 19 | -------------------------------------------------------------------------------- /.github/move.yml: -------------------------------------------------------------------------------- 1 | # Configuration for Move Issues - https://github.com/dessant/move-issues 2 | 3 | # Delete the command comment when it contains no other content 4 | deleteCommand: true 5 | 6 | # Close the source issue after moving 7 | closeSourceIssue: true 8 | 9 | # Lock the source issue after moving 10 | lockSourceIssue: false 11 | 12 | # Mention issue and comment authors 13 | mentionAuthors: true 14 | 15 | # Preserve mentions in the issue content 16 | keepContentMentions: false 17 | 18 | # Move labels that also exist on the target repository 19 | moveLabels: true 20 | 21 | # Set custom aliases for targets 22 | # aliases: 23 | # r: repo 24 | # or: owner/repo 25 | 26 | # Repository to extend settings from 27 | # _extends: repo 28 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | MANIFEST 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | .pytest_cache/ 49 | 50 | # Translations 51 | *.mo 52 | *.pot 53 | 54 | # Django stuff: 55 | *.log 56 | local_settings.py 57 | db.sqlite3 58 | 59 | # Flask stuff: 60 | instance/ 61 | .webassets-cache 62 | 63 | # Scrapy stuff: 64 | .scrapy 65 | 66 | # Sphinx documentation 67 | docs/_build/ 68 | 69 | # PyBuilder 70 | target/ 71 | 72 | # Jupyter Notebook 73 | .ipynb_checkpoints 74 | 75 | # pyenv 76 | .python-version 77 | 78 | # celery beat schedule file 79 | celerybeat-schedule 80 | 81 | # SageMath parsed files 82 | *.sage.py 83 | 84 | # Environments 85 | .env 86 | .venv 87 | env/ 88 | venv/ 89 | ENV/ 90 | env.bak/ 91 | venv.bak/ 92 | 93 | # Spyder project settings 94 | .spyderproject 95 | .spyproject 96 | 97 | # Rope project settings 98 | .ropeproject 99 | 100 | # mkdocs documentation 101 | /site 102 | 103 | # mypy 104 | .mypy_cache/ 105 | warnings.txt 106 | suews_sample.txt 107 | suews_1h.txt 108 | *.lock 109 | *.dirlock 110 | 111 | # dask 112 | dask-worker-space 113 | 114 | # dev space 115 | dev-test/ 116 | dev-test 117 | .vscode/launch.json 118 | .vscode/settings.json 119 | _build/ 120 | 121 | pipfile.lock 122 | .vscode/spellright.dict 123 | 124 | # supy test logs 125 | SuPy.log.* 126 | SuPy.log 127 | 128 | # pycharm configuration 129 | .idea/ 130 | 131 | .DS_Store 132 | src/data_test/*/ERA5_*.txt 133 | 134 | Output/ -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | # .readthedocs.yml 2 | 3 | # Required 4 | version: 2 5 | 6 | build: 7 | image: latest 8 | 9 | python: 10 | version: 3.7 11 | install: 12 | - requirements: docs/requirements.txt 13 | - method: pip 14 | path: src 15 | # extra_requirements: 16 | # - docs 17 | # - method: setuptools 18 | # path: src 19 | system_packages: true 20 | 21 | formats: 22 | - epub 23 | - pdf 24 | 25 | sphinx: 26 | builder: html 27 | configuration: docs/source/conf.py 28 | fail_on_warning: false -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | # -*- makefile -*- 2 | .PHONY: main clean test pip supy docs 3 | 4 | # OS-specific configurations 5 | ifeq ($(OS),Windows_NT) 6 | PYTHON_exe = python.exe 7 | 8 | else 9 | UNAME_S := $(shell uname -s) 10 | 11 | 12 | ifeq ($(UNAME_S),Linux) # Linux 13 | PYTHON_exe=python 14 | 15 | endif 16 | 17 | ifeq ($(UNAME_S),Darwin) # macOS 18 | PYTHON_exe=python 19 | 20 | endif 21 | 22 | endif 23 | 24 | src_dir = src 25 | docs_dir = docs 26 | 27 | 28 | PYTHON := $(if $(PYTHON_exe),$(PYTHON_exe),python) 29 | # All the files which include modules used by other modules (these therefore 30 | # need to be compiled first) 31 | 32 | MODULE = supy 33 | 34 | # default make options 35 | main: 36 | $(MAKE) -C $(src_dir) main 37 | $(MAKE) -C $(docs_dir) html 38 | 39 | # house cleaning 40 | clean: 41 | $(MAKE) -C $(src_dir) clean 42 | $(MAKE) -C $(docs_dir) clean 43 | 44 | # make supy and run test cases 45 | test: 46 | $(MAKE) -C $(src_dir) test 47 | 48 | # make docs and open index 49 | docs: 50 | $(MAKE) -C $(docs_dir) html 51 | open $(docs_dir)/build/html/index.html 52 | 53 | # upload wheels to pypi 
using twine
54 | upload:
55 | 	$(MAKE) -C $(src_dir) upload
56 | 
57 | # serve docs locally with live reload (sphinx-autobuild)
58 | livehtml:
59 | 	$(MAKE) -C $(docs_dir) livehtml
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # SuPy
2 | 
3 | **THIS PROJECT IS ARCHIVED AND NOW MAINTAINED [HERE](https://github.com/UMEP-dev/SuPy) BY [@UMEP-dev](https://github.com/UMEP-dev).**
4 | 
5 | [![Python Version Support Status](https://img.shields.io/pypi/pyversions/supy.svg)](https://pypi.org/project/supy)
6 | [![Latest Version Status](https://img.shields.io/pypi/v/supy.svg)](https://pypi.org/project/supy)
7 | [![Downloads](https://pepy.tech/badge/supy)](https://pepy.tech/project/supy)
8 | [![Launch Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/sunt05/SuPy/master)
9 | 
10 | [![Build Status](https://dev.azure.com/sunt05/SuPy/_apis/build/status/sunt05.SuPy?branchName=master)](https://dev.azure.com/sunt05/SuPy/_build/latest?definitionId=11?branchName=master)
11 | [![Documentation Status](https://readthedocs.org/projects/supy/badge/?version=latest)](https://supy.readthedocs.io/en/latest/?badge=latest)
12 | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.2574404.svg)](https://doi.org/10.5281/zenodo.2574404)
13 | 
14 | 
15 | 
16 | [**SU**EWS](https://suews-docs.readthedocs.io) that speaks **Py**thon
17 | 
18 | ## Installation
19 | 
20 | SuPy requires 64-bit `python` 3.6+ and can be installed with `pip` from the command line:
21 | 
22 | ```shell
23 | python3 -m pip install supy --upgrade
24 | ```
25 | 
26 | ## Quick Demo
27 | 
28 | Once installed, `supy` is ready to run [SUEWS](https://suews-docs.readthedocs.io) simulations:
29 | 
30 | ```python {cmd}
31 | import supy as sp
32 | import matplotlib.pyplot as plt
33 | 
34 | # load sample data
35 | df_state_init, df_forcing = sp.load_SampleData()
36 | grid = df_state_init.index[0]
37 | 
38 | # run supy/SUEWS simulation
39 | df_output, df_state_end = sp.run_supy(df_forcing, df_state_init)
40 | 
41 | # plot results and save figure
42 | res_plot = df_output.SUEWS.loc[grid, ['QN', 'QF', 'QS', 'QE', 'QH']]
43 | ax = res_plot.loc['2012 6 4':'2012 6 6'].resample('30T').mean().plot()
44 | plt.show()
45 | ax.figure.savefig('sample_plot.png')
46 | ```
47 | 
48 | The above code will produce a plot of surface energy balance components as follows:
49 | 
50 | ![sample plot](https://github.com/sunt05/SuPy/raw/master/sample_plot.png)
51 | 
52 | ## Tutorial
53 | 
54 | Please check out [more SuPy tutorials here!](https://supy.readthedocs.io/en/latest/tutorial/tutorial.html)
55 | 
--------------------------------------------------------------------------------
/SuPy.yml:
--------------------------------------------------------------------------------
1 | name: SuPy
2 | channels:
3 | - conda-forge
4 | - defaults
5 | dependencies:
6 | - numpy
7 | - pandas
8 | - python==3.8
9 | - xarray
10 | - matplotlib
11 | - seaborn
12 | - scipy
13 | - scikit-learn
14 | - pip:
15 |   - click
16 |   - atmosp
17 |   - f90nml
18 |   - dask[complete]
19 |   - lmfit
20 |   - floweaver
21 |   - tables
22 |   - jupyter
23 |   - pytest
24 | 
25 | 
--------------------------------------------------------------------------------
/azure-pipelines.yml:
--------------------------------------------------------------------------------
1 | # Python package
2 | # Create and test a Python package on multiple Python versions.
3 | # Add steps that analyze code, save the dist with the build record, publish to a PyPI-compatible index, and more:
4 | # https://docs.microsoft.com/azure/devops/pipelines/languages/python
5 | 
6 | jobs:
7 | 
8 | - job: 'Build_Test_Ubuntu_1604'
9 |   pool:
10 |     vmImage: 'ubuntu-16.04'
11 |   strategy:
12 |     maxParallel: 4
13 |     matrix:
14 |       # Python35:
15 |       #   python.version: '3.5'
16 |       Python36:
17 |         python.version: '3.6'
18 |       Python37:
19 |         python.version: '3.7'
20 |       Python38:
21 |         python.version: '3.8'
22 | 
23 | 
24 |   steps:
25 |   - task: UsePythonVersion@0
26 |     inputs:
27 |       versionSpec: '$(python.version)'
28 |       architecture: 'x64'
29 | 
30 |   - script: |
31 |       python -m pip install --upgrade pip
32 |       pip install pipreqs pytest
33 |       pipreqs src
34 |       cat src/requirements.txt
35 |       pip install -r src/requirements.txt
36 |       pip install dask[complete] --upgrade
37 |     displayName: 'Install dependencies'
38 | 
39 |   - script: |
40 |       cd src
41 |       make test
42 |     displayName: 'pytest'
43 | 
44 | - job: 'Build_Test_Ubuntu_1804'
45 |   pool:
46 |     vmImage: 'ubuntu-18.04'
47 |   strategy:
48 |     maxParallel: 4
49 |     matrix:
50 |       # Python35:
51 |       #   python.version: '3.5'
52 |       Python36:
53 |         python.version: '3.6'
54 |       Python37:
55 |         python.version: '3.7'
56 |       Python38:
57 |         python.version: '3.8'
58 | 
59 | 
60 |   steps:
61 |   - task: UsePythonVersion@0
62 |     inputs:
63 |       versionSpec: '$(python.version)'
64 |       architecture: 'x64'
65 | 
66 |   - script: |
67 |       python -m pip install --upgrade pip
68 |       pip install pipreqs pytest
69 |       pipreqs src
70 |       cat src/requirements.txt
71 |       pip install -r src/requirements.txt
72 |       pip install dask[complete] --upgrade
73 |     displayName: 'Install dependencies'
74 | 
75 |   - script: |
76 |       cd src
77 |       make test
78 |     displayName: 'pytest'
79 | 
80 | - job: 'Build_Test_Upload_macOS'
81 |   pool:
82 |     vmImage: 'macOS-10.14'
83 |   strategy:
84 |     matrix:
85 |       # Python35:
86 |       #   python.version: '3.5'
87 |       Python36:
88 |         python.version: '3.6'
89 |       Python37:
90 |         python.version: '3.7'
91 |       Python38:
92 |         python.version: '3.8'
93 |     maxParallel: 4
94 | 
95 |   steps:
96 |   - task: UsePythonVersion@0
97 |     inputs:
98 |       versionSpec: '$(python.version)'
99 |       architecture: 'x64'
100 | 
101 |   - script: |
102 |       mkdir -p ~/.matplotlib
103 |       touch ~/.matplotlib/matplotlibrc
104 |       echo "backend: TkAgg" >> ~/.matplotlib/matplotlibrc
105 |       python -m pip install --upgrade pip
106 |       pip install pipreqs pytest
107 |       pipreqs src
108 |       cat src/requirements.txt
109 |       pip install -r src/requirements.txt
110 |       pip install dask[complete] --upgrade
111 |     displayName: 'Install dependencies'
112 | 
113 |   - script: |
114 |       cd src
115 |       make test
116 |     displayName: 'pytest'
117 | 
118 |   - task: TwineAuthenticate@0
119 |     inputs:
120 |       # artifactFeeds: 'feed_name1, feed_name2'
121 |       externalFeeds: 'PyPI'
122 | 
123 |   - script: |
124 |       pip3 install twine wheel
125 |       cd src
126 |       make
127 |       twine upload -r SuPy --config-file $(PYPIRC_PATH) --skip-existing dist/*whl
128 |     displayName: 'upload supy'
129 |     continueOnError: true
130 | 
131 | 
132 | - job: 'Build_Test_Windows'
133 |   pool:
134 |     vmImage: 'vs2017-win2016'
135 |   strategy:
136 |     matrix:
137 |       # Python35:
138 |       #   python.version: '3.5'
139 |       Python36:
140 |         python.version: '3.6'
141 |       Python37:
142 |         python.version: '3.7'
143 |       Python38:
144 |         python.version: '3.8'
145 |     maxParallel: 4
146 | 
147 |   steps:
148 |   - task: UsePythonVersion@0
149 |     inputs:
150 |       versionSpec: '$(python.version)'
151 |       architecture: 'x64'
152 | 
153 |   - bash: |
154 |       python -m pip install --upgrade pip
155 |       pip install pipreqs pytest
156 |       pipreqs src
157 |       pip install -r src/requirements.txt
158 |       pip install dask[complete] --upgrade
159 |     displayName: 'Install dependencies'
160 | 
161 |   - bash: |
162 |       cd src
163 |       make test
164 |     displayName: 'pytest'
165 | #
166 | #  - task: PublishTestResults@2
167 | #    inputs:
168 | #      testResultsFiles: '**/test-results.xml'
169 | #      testRunTitle: 'Python $(python.version)'
170 | #    condition: succeededOrFailed()
171 | 
172 | - job: 'Build_Test_Docs'
173 |   dependsOn: 'Build_Test_Ubuntu_1804'
174 |   pool:
175 |     vmImage: 'ubuntu-latest'
176 |   strategy:
177 |     matrix:
178 |       # Python35:
179 |       #   python.version: '3.5'
180 |       # Python36:
181 |       #   python.version: '3.6'
182 |       Python37:
183 |         python.version: '3.7'
184 |     maxParallel: 4
185 | 
186 |   steps:
187 |   - task: UsePythonVersion@0
188 |     inputs:
189 |       versionSpec: '$(python.version)'
190 |       architecture: 'x64'
191 | 
192 |   - script: |
193 |       python -m pip install --upgrade pip
194 |       pip install pipreqs
195 |       pip install dask[complete] --upgrade
196 |       pip install -e src
197 |       cd docs
198 |       sudo apt-get install pandoc
199 |       make html
200 |     displayName: 'test docs'
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 | 
4 | # You can set these variables from the command line.
5 | SPHINXOPTS    =
6 | SPHINXBUILD   = sphinx-build
7 | SPHINXPROJ    = supy
8 | SOURCEDIR     = source
9 | BUILDDIR      = build
10 | 
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | 	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 | 
15 | .PHONY: help Makefile
16 | 
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option.  $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | 	pip install -r requirements.txt
21 | 	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
22 | 
23 | livehtml:
24 | 	sphinx-autobuild -b html "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS)
--------------------------------------------------------------------------------
/docs/requirements.txt:
--------------------------------------------------------------------------------
1 | numpy>=1.15.4
2 | nbsphinx>=0.3.5
3 | nbconvert>=5.4.1
4 | nbformat>=4.4.0
5 | docutils>=0.14
6 | Sphinx>=1.8.1
7 | sphinx-autobuild>=0.7.1
8 | sphinx-rtd-theme>=0.4.2
9 | sphinxcontrib-websupport>=1.1.0
10 | sphinx-click
11 | ipykernel>=4.8.2
12 | sympy
13 | jinja2>=2.10
14 | traitlets>=4.3.2
15 | pandas>=0.23.4
16 | supy>=2019.7.26
17 | seaborn
18 | matplotlib
19 | dask[complete]
20 | toolz
21 | urlpath
22 | atmosp>=0.2.8
23 | cdsapi
24 | xarray
25 | scikit-learn
26 | lmfit
27 | floweaver
--------------------------------------------------------------------------------
/docs/source/api.rst:
--------------------------------------------------------------------------------
1 | .. _api_ref:
2 | 
3 | 
4 | 
5 | API reference
6 | =============
7 | 
8 | 
9 | Top-level Functions
10 | -------------------
11 | .. currentmodule:: supy
12 | 
13 | .. autosummary::
14 |     :toctree: api/supy
15 | 
16 |     init_supy
17 |     load_forcing_grid
18 |     run_supy
19 |     save_supy
20 |     load_SampleData
21 |     show_version
22 | 
23 | 
24 | Utility Functions
25 | -------------------
26 | .. currentmodule:: supy.util
27 | 
28 | ERA-5 Data Downloader
29 | ~~~~~~~~~~~~~~~~~~~~~
30 | ..
autosummary:: 31 | :toctree: api/supy.util 32 | 33 | download_era5 34 | gen_forcing_era5 35 | 36 | Typical Meteorological Year 37 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~ 38 | .. autosummary:: 39 | :toctree: api/supy.util 40 | 41 | gen_epw 42 | read_epw 43 | 44 | Gap Filling 45 | ~~~~~~~~~~~ 46 | 47 | .. autosummary:: 48 | :toctree: api/supy.util 49 | 50 | fill_gap_all 51 | 52 | OHM 53 | ~~~~~~~~~~~ 54 | 55 | .. autosummary:: 56 | :toctree: api/supy.util 57 | 58 | derive_ohm_coef 59 | sim_ohm 60 | 61 | Surface Conductance 62 | ~~~~~~~~~~~~~~~~~~~ 63 | 64 | .. autosummary:: 65 | :toctree: api/supy.util 66 | 67 | cal_gs_mod 68 | cal_gs_obs 69 | calib_g 70 | 71 | WRF-SUEWS 72 | ~~~~~~~~~ 73 | 74 | .. autosummary:: 75 | :toctree: api/supy.util 76 | 77 | extract_reclassification 78 | plot_reclassification 79 | 80 | Plotting 81 | ~~~~~~~~ 82 | 83 | .. autosummary:: 84 | :toctree: api/supy.util 85 | 86 | plot_comp 87 | plot_day_clm 88 | plot_rsl 89 | 90 | Roughness Calculation 91 | ~~~~~~~~~~~~~~~~~~~~~ 92 | 93 | .. autosummary:: 94 | :toctree: api/supy.util 95 | 96 | optimize_MO 97 | cal_neutral 98 | 99 | 100 | Command-Line Tools 101 | ------------------- 102 | .. toctree:: 103 | :maxdepth: 1 104 | 105 | api/supy.cmd/suews-run 106 | api/supy.cmd/suews-convert 107 | 108 | 109 | 110 | Key Data Structures 111 | ------------------- 112 | 113 | .. toctree:: 114 | :maxdepth: 1 115 | 116 | data-structure/df_state 117 | data-structure/df_forcing 118 | data-structure/df_output 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 | -------------------------------------------------------------------------------- /docs/source/api/supy.cmd/suews-convert.rst: -------------------------------------------------------------------------------- 1 | 2 | 3 | .. click:: supy.cmd.table_converter:convert_table_cmd 4 | :prog: suews-convert -------------------------------------------------------------------------------- /docs/source/api/supy.cmd/suews-run.rst: -------------------------------------------------------------------------------- 1 | 2 | 3 | .. click:: supy.cmd.SUEWS:SUEWS 4 | :prog: suews-run -------------------------------------------------------------------------------- /docs/source/api/supy.util/supy.util.cal_gs_mod.rst: -------------------------------------------------------------------------------- 1 | supy.util.cal\_gs\_mod 2 | ====================== 3 | 4 | .. currentmodule:: supy.util 5 | 6 | .. autofunction:: cal_gs_mod -------------------------------------------------------------------------------- /docs/source/api/supy.util/supy.util.cal_gs_obs.rst: -------------------------------------------------------------------------------- 1 | supy.util.cal\_gs\_obs 2 | ====================== 3 | 4 | .. currentmodule:: supy.util 5 | 6 | .. autofunction:: cal_gs_obs -------------------------------------------------------------------------------- /docs/source/api/supy.util/supy.util.cal_neutral.rst: -------------------------------------------------------------------------------- 1 | supy.util.cal\_neutral 2 | ====================== 3 | 4 | .. currentmodule:: supy.util 5 | 6 | .. autofunction:: cal_neutral -------------------------------------------------------------------------------- /docs/source/api/supy.util/supy.util.calib_g.rst: -------------------------------------------------------------------------------- 1 | supy.util.calib\_g 2 | ================== 3 | 4 | .. currentmodule:: supy.util 5 | 6 | .. 
autofunction:: calib_g -------------------------------------------------------------------------------- /docs/source/api/supy.util/supy.util.derive_ohm_coef.rst: -------------------------------------------------------------------------------- 1 | supy.util.derive\_ohm\_coef 2 | =========================== 3 | 4 | .. currentmodule:: supy.util 5 | 6 | .. autofunction:: derive_ohm_coef -------------------------------------------------------------------------------- /docs/source/api/supy.util/supy.util.download_era5.rst: -------------------------------------------------------------------------------- 1 | supy.util.download\_era5 2 | ======================== 3 | 4 | .. currentmodule:: supy.util 5 | 6 | .. autofunction:: download_era5 -------------------------------------------------------------------------------- /docs/source/api/supy.util/supy.util.extract_reclassification.rst: -------------------------------------------------------------------------------- 1 | supy.util.extract\_reclassification 2 | =================================== 3 | 4 | .. currentmodule:: supy.util 5 | 6 | .. autofunction:: extract_reclassification -------------------------------------------------------------------------------- /docs/source/api/supy.util/supy.util.fill_gap_all.rst: -------------------------------------------------------------------------------- 1 | supy.util.fill\_gap\_all 2 | ======================== 3 | 4 | .. currentmodule:: supy.util 5 | 6 | .. autofunction:: fill_gap_all -------------------------------------------------------------------------------- /docs/source/api/supy.util/supy.util.fill_gap_one.rst: -------------------------------------------------------------------------------- 1 | supy.util.fill\_gap\_one 2 | ======================== 3 | 4 | .. currentmodule:: supy.util 5 | 6 | .. autofunction:: fill_gap_one -------------------------------------------------------------------------------- /docs/source/api/supy.util/supy.util.gen_epw.rst: -------------------------------------------------------------------------------- 1 | supy.util.gen\_epw 2 | ================== 3 | 4 | .. currentmodule:: supy.util 5 | 6 | .. autofunction:: gen_epw -------------------------------------------------------------------------------- /docs/source/api/supy.util/supy.util.gen_forcing_era5.rst: -------------------------------------------------------------------------------- 1 | supy.util.gen\_forcing\_era5 2 | ============================ 3 | 4 | .. currentmodule:: supy.util 5 | 6 | .. autofunction:: gen_forcing_era5 -------------------------------------------------------------------------------- /docs/source/api/supy.util/supy.util.loc_gap.rst: -------------------------------------------------------------------------------- 1 | supy.util.loc\_gap 2 | ================== 3 | 4 | .. currentmodule:: supy.util 5 | 6 | .. autofunction:: loc_gap -------------------------------------------------------------------------------- /docs/source/api/supy.util/supy.util.optimize_MO.rst: -------------------------------------------------------------------------------- 1 | supy.util.optimize\_MO 2 | ====================== 3 | 4 | .. currentmodule:: supy.util 5 | 6 | .. autofunction:: optimize_MO -------------------------------------------------------------------------------- /docs/source/api/supy.util/supy.util.plot_comp.rst: -------------------------------------------------------------------------------- 1 | supy.util.plot\_comp 2 | ==================== 3 | 4 | .. currentmodule:: supy.util 5 | 6 | .. 
autofunction:: plot_comp -------------------------------------------------------------------------------- /docs/source/api/supy.util/supy.util.plot_day_clm.rst: -------------------------------------------------------------------------------- 1 | supy.util.plot\_day\_clm 2 | ======================== 3 | 4 | .. currentmodule:: supy.util 5 | 6 | .. autofunction:: plot_day_clm -------------------------------------------------------------------------------- /docs/source/api/supy.util/supy.util.plot_reclassification.rst: -------------------------------------------------------------------------------- 1 | supy.util.plot\_reclassification 2 | ================================ 3 | 4 | .. currentmodule:: supy.util 5 | 6 | .. autofunction:: plot_reclassification -------------------------------------------------------------------------------- /docs/source/api/supy.util/supy.util.read_epw.rst: -------------------------------------------------------------------------------- 1 | supy.util.read\_epw 2 | =================== 3 | 4 | .. currentmodule:: supy.util 5 | 6 | .. autofunction:: read_epw -------------------------------------------------------------------------------- /docs/source/api/supy.util/supy.util.sim_ohm.rst: -------------------------------------------------------------------------------- 1 | supy.util.sim\_ohm 2 | ================== 3 | 4 | .. currentmodule:: supy.util 5 | 6 | .. autofunction:: sim_ohm -------------------------------------------------------------------------------- /docs/source/api/supy/supy.init_supy.rst: -------------------------------------------------------------------------------- 1 | supy.init\_supy 2 | =============== 3 | 4 | .. currentmodule:: supy 5 | 6 | .. autofunction:: init_supy -------------------------------------------------------------------------------- /docs/source/api/supy/supy.load_SampleData.rst: -------------------------------------------------------------------------------- 1 | supy.load\_SampleData 2 | ===================== 3 | 4 | .. currentmodule:: supy 5 | 6 | .. autofunction:: load_SampleData -------------------------------------------------------------------------------- /docs/source/api/supy/supy.load_forcing_grid.rst: -------------------------------------------------------------------------------- 1 | supy.load\_forcing\_grid 2 | ======================== 3 | 4 | .. currentmodule:: supy 5 | 6 | .. autofunction:: load_forcing_grid -------------------------------------------------------------------------------- /docs/source/api/supy/supy.run_supy.rst: -------------------------------------------------------------------------------- 1 | supy.run\_supy 2 | ============== 3 | 4 | .. currentmodule:: supy 5 | 6 | .. autofunction:: run_supy -------------------------------------------------------------------------------- /docs/source/api/supy/supy.save_supy.rst: -------------------------------------------------------------------------------- 1 | supy.save\_supy 2 | =============== 3 | 4 | .. currentmodule:: supy 5 | 6 | .. autofunction:: save_supy -------------------------------------------------------------------------------- /docs/source/api/supy/supy.show_version.rst: -------------------------------------------------------------------------------- 1 | supy.show\_version 2 | ================== 3 | 4 | .. currentmodule:: supy 5 | 6 | .. 
autofunction:: show_version -------------------------------------------------------------------------------- /docs/source/data-structure/df_forcing.rst: -------------------------------------------------------------------------------- 1 | 2 | .. _df_forcing_var: 3 | 4 | ``df_forcing`` variables 5 | ============================ 6 | 7 | 8 | 9 | .. note:: Data structure of ``df_forcing`` is explained :ref:`here `. 10 | 11 | .. option:: RH 12 | 13 | :Description: 14 | Relative Humidity [%] 15 | 16 | 17 | .. option:: Tair 18 | 19 | :Description: 20 | Air temperature [°C] 21 | 22 | 23 | .. option:: U 24 | 25 | :Description: 26 | Wind speed [m s-1] Height of the wind speed measurement (z) is needed in `SUEWS_SiteSelect.txt`. 27 | 28 | 29 | .. option:: Wuh 30 | 31 | :Description: 32 | External water use [|m^3|] 33 | 34 | 35 | .. option:: fcld 36 | 37 | :Description: 38 | Cloud fraction [tenths] 39 | 40 | 41 | .. option:: id 42 | 43 | :Description: 44 | Day of year [DOY] 45 | 46 | 47 | .. option:: imin 48 | 49 | :Description: 50 | Minute [M] 51 | 52 | 53 | .. option:: isec 54 | 55 | :Description: 56 | Second [S] 57 | 58 | 59 | .. option:: it 60 | 61 | :Description: 62 | Hour [H] 63 | 64 | 65 | .. option:: iy 66 | 67 | :Description: 68 | Year [YYYY] 69 | 70 | 71 | .. option:: kdiff 72 | 73 | :Description: 74 | Diffuse radiation [W |m^-2|] |Recmd| if `SOLWEIGUse` = 1 75 | 76 | 77 | .. option:: kdir 78 | 79 | :Description: 80 | Direct radiation [W |m^-2|] |Recmd| if `SOLWEIGUse` = 1 81 | 82 | 83 | .. option:: kdown 84 | 85 | :Description: 86 | Incoming shortwave radiation [W |m^-2|] Must be > 0 W |m^-2|. 87 | 88 | 89 | .. option:: lai 90 | 91 | :Description: 92 | Observed leaf area index [|m^-2| |m^-2|] 93 | 94 | 95 | .. option:: ldown 96 | 97 | :Description: 98 | Incoming longwave radiation [W |m^-2|] 99 | 100 | 101 | .. option:: pres 102 | 103 | :Description: 104 | Barometric pressure [kPa] 105 | 106 | 107 | .. option:: qe 108 | 109 | :Description: 110 | Latent heat flux [W |m^-2|] 111 | 112 | 113 | .. option:: qf 114 | 115 | :Description: 116 | Anthropogenic heat flux [W |m^-2|] 117 | 118 | 119 | .. option:: qh 120 | 121 | :Description: 122 | Sensible heat flux [W |m^-2|] 123 | 124 | 125 | .. option:: qn 126 | 127 | :Description: 128 | Net all-wave radiation [W |m^-2|] Required if `NetRadiationMethod` = 0. 129 | 130 | 131 | .. option:: qs 132 | 133 | :Description: 134 | Storage heat flux [W |m^-2|] 135 | 136 | 137 | .. option:: rain 138 | 139 | :Description: 140 | Rainfall [mm] 141 | 142 | 143 | .. option:: snow 144 | 145 | :Description: 146 | Snow cover fraction (0 – 1) [-] Required if `SnowUse` = 1 147 | 148 | 149 | .. option:: wdir 150 | 151 | :Description: 152 | Wind direction [°] |NotAvail| 153 | 154 | 155 | .. option:: xsmd 156 | 157 | :Description: 158 | Observed soil moisture [|m^3| |m^-3|] or [kg |kg^-1|] 159 | 160 | -------------------------------------------------------------------------------- /docs/source/data-structure/supy-io.py: -------------------------------------------------------------------------------- 1 | #%% Change working directory from the workspace root to the ipynb file location. 
Turn this addition off with the DataScience.changeDirOnImportExport setting
2 | import os
3 | try:
4 | 	os.chdir(os.path.join(os.getcwd(), 'docs/source/data-structure'))
5 | 	print(os.getcwd())
6 | except:
7 | 	pass
8 | #%% [markdown]
9 | # # Key IO Data Structures in SuPy
10 | #%% [markdown]
11 | # ## Introduction
12 | #%% [markdown]
13 | # The cell below demonstrates a minimal case of SuPy simulation with all key IO data structures included:
14 | 
15 | #%%
16 | import supy as sp
17 | df_state_init, df_forcing = sp.load_SampleData()
18 | df_output, df_state_final = sp.run_supy(df_forcing.iloc[:288], df_state_init)
19 | 
20 | #%% [markdown]
21 | # * Input:
22 | #     * `df_state_init`: model initial states
23 | #     * `df_forcing`: forcing data
24 | # * Output:
25 | #     * `df_state_final`: model final states
26 | #     * `df_output`: model output results
27 | 
28 | 
29 | #%% [markdown]
30 | # ## Input
31 | #%% [markdown]
32 | # ### `df_state_init`: model initial states
33 | 
34 | #%%
35 | df_state_init.head()
36 | 
37 | #%% [markdown]
38 | # ### `df_forcing`: forcing data
39 | 
40 | #%%
41 | df_forcing.head()
42 | 
43 | #%% [markdown]
44 | # ## Output
45 | #%% [markdown]
46 | # ### `df_state_final`: model final states
47 | 
48 | #%%
49 | df_state_final.head()
50 | 
51 | #%% [markdown]
52 | # ### `df_output`: model output results
53 | 
54 | #%%
55 | df_output.head()
56 | 
57 | #%% [markdown]
58 | # [test-link-object: ah_slope_cooling](df_state.rst#cmdoption-arg-ah-slope-cooling)
59 | 
60 | 
--------------------------------------------------------------------------------
/docs/source/faq.rst:
--------------------------------------------------------------------------------
1 | .. _faq:
2 | 
3 | 
4 | FAQ
5 | ===
6 | 
7 | .. contents:: Contents
8 |    :local:
9 |    :backlinks: none
10 | 
11 | I cannot install SuPy following the docs. What is going wrong?
12 | ----------------------------------------------------------------
13 | 
14 | Please check if your environment meets the following requirements:
15 | 
16 | 1. Operating system (OS):
17 | 
18 |    a. Is it 64-bit? Only 64-bit systems are supported.
19 | 
20 |    b. Is your OS up to date? Only recent desktop systems are supported:
21 | 
22 |       - Windows 10 and above
23 |       - macOS 10.13 and above
24 |       - Linux: no restriction;
25 |         if SuPy cannot run on your specific Linux distribution,
26 |         please report it to us.
27 | 
28 |    You can get the OS information with the following code:
29 | 
30 |    .. code-block:: python
31 | 
32 |       import platform
33 |       platform.platform()
34 | 
35 | 2. Python interpreter:
36 | 
37 |    a. Is your Python interpreter 64-bit?
38 | 
39 |       Check the running mode with the following code:
40 | 
41 |       .. code-block:: python
42 | 
43 |          import struct
44 |          struct.calcsize('P')*8
45 | 
46 |    b. Is your Python version above 3.5?
47 | 
48 |       Check the version info with the following code:
49 | 
50 |       .. code-block:: python
51 | 
52 |          import sys
53 |          sys.version
54 | 
55 | If your environment doesn't meet these requirements,
56 | please switch to one that does;
57 | otherwise (i.e., your environment is fine but SuPy still fails), `please report your issue`__.
58 | 
59 | __ new_issue_
60 | 
61 | How do I know which version of SuPy I am using?
62 | -----------------------------------------------
63 | 
64 | Use the following code:
65 | 
66 | .. code-block:: python
67 | 
68 |    import supy
69 |    supy.show_version()
70 | 
71 | .. note:: `show_version` is only available since v2019.5.28.
72 | 
73 | 
74 | 
75 | A `kernel may have died` exception happened. Where did I go wrong?
76 | ------------------------------------------------------------------
77 | 
78 | This issue is most likely caused by invalid input to SuPy and the SUEWS kernel.
79 | We are trying to prevent such exceptions,
80 | but unfortunately they might still happen in some edge cases.
81 | 
82 | Please `report such issues to us`__ with your input files for debugging.
83 | Thanks!
84 | 
85 | __ new_issue_
86 | 
87 | 
88 | How can I upgrade SuPy to an up-to-date version?
89 | ------------------------------------------------
90 | Run the following command in your terminal:
91 | 
92 | .. code-block:: shell
93 | 
94 |    python3 -m pip install supy --upgrade
95 | 
96 | 
97 | 
98 | 
--------------------------------------------------------------------------------
/docs/source/index.rst:
--------------------------------------------------------------------------------
1 | .. _index_page:
2 | 
3 | SuPy: SUEWS that speaks Python
4 | ------------------------------
5 | 
6 | .. image:: https://img.shields.io/pypi/pyversions/supy.svg
7 |     :target: https://pypi.org/project/supy
8 |     :alt: Python Version Support Status
9 | 
10 | .. image:: https://img.shields.io/pypi/v/supy.svg
11 |     :target: https://pypi.org/project/supy
12 |     :alt: Latest Version Status
13 | 
14 | .. image:: https://pepy.tech/badge/supy
15 |     :target: https://pepy.tech/project/supy
16 |     :alt: Downloads
17 | 
18 | .. image:: https://mybinder.org/badge_logo.svg
19 |     :target: https://mybinder.org/v2/gh/sunt05/SuPy/master
20 |     :alt: Binder Status
21 | 
22 | .. image:: https://readthedocs.org/projects/supy/badge/?version=latest
23 |     :target: https://supy.readthedocs.io/en/latest/?badge=latest
24 |     :alt: Documentation Status
25 | 
26 | .. image:: https://dev.azure.com/sunt05/SuPy/_apis/build/status/sunt05.SuPy?branchName=master
27 |     :target: https://dev.azure.com/sunt05/SuPy/_build/latest?definitionId=11?branchName=master
28 |     :alt: Build Status
29 | 
30 | .. image:: https://zenodo.org/badge/DOI/10.5281/zenodo.2574404.svg
31 |     :target: https://doi.org/10.5281/zenodo.2574404
32 |     :alt: DOI
33 | 
34 | 
35 | 
36 | - **What is SuPy?**
37 | 
38 |   SuPy is a Python-enhanced urban climate model
39 |   with `SUEWS`_ as its computation core.
40 | 
41 |   The scientific rigour of SuPy results is thus guaranteed by SUEWS
42 |   (see :ref:`SUEWS publications ` and
43 |   :ref:`Parameterisations and sub-models within SUEWS`).
44 | 
45 |   Meanwhile, the data analysis ability of SuPy is greatly enhanced
46 |   by `the Python-based SciPy Stack `_, notably `numpy`_ and `pandas`_.
47 |   More details are described in `our SuPy paper `_.
48 | 
49 | 
50 | .. _SUEWS: https://suews-docs.readthedocs.io/en/latest/
51 | .. _numpy: https://www.numpy.org
52 | .. _pandas: http://pandas.pydata.org/
53 | 
54 | 
55 | - **How to get SuPy?**
56 | 
57 |   SuPy is available on all major platforms (macOS, Windows, Linux) for Python 3.6+ (64-bit only)
58 |   via `PyPI `_:
59 | 
60 |   .. code-block:: shell
61 | 
62 |     python3 -m pip install supy --upgrade
63 | 
64 | - **How to use SuPy?**
65 | 
66 |   * Please follow :ref:`Quickstart of SuPy` and :ref:`other tutorials `.
67 | 
68 |   * Please see `api` for details.
69 | 
70 |   * Please see `faq` if you run into any issues.
71 | 
72 | - **How to contribute to SuPy?**
73 | 
74 |   * Add your development via a `Pull Request `_.
75 |   * Report issues via the `GitHub page `_.
76 |   * Cite `our SuPy paper `_.
77 |   * Provide suggestions and feedback.
78 | 
79 | ..
toctree:: 80 | :hidden: 81 | :maxdepth: 2 82 | 83 | tutorial/tutorial 84 | data-structure/supy-io 85 | api 86 | faq 87 | version-history/version-history 88 | 89 | -------------------------------------------------------------------------------- /docs/source/proc_var_info/df_forcing.csv: -------------------------------------------------------------------------------- 1 | variable,Description 2 | iy,Year [YYYY] 3 | id,Day of year [DOY] 4 | it,Hour [H] 5 | imin,Minute [M] 6 | qn,Net all-wave radiation [W |m^-2|] Required if `NetRadiationMethod` = 0. 7 | qh,Sensible heat flux [W |m^-2|] 8 | qe,Latent heat flux [W |m^-2|] 9 | qs,Storage heat flux [W |m^-2|] 10 | qf,Anthropogenic heat flux [W |m^-2|] 11 | U,Wind speed [m s-1] Height of the wind speed measurement (z) is needed in `SUEWS_SiteSelect.txt`. 12 | RH,Relative Humidity [%] 13 | Tair,Air temperature [°C] 14 | pres,Barometric pressure [kPa] 15 | rain,Rainfall [mm] 16 | kdown,Incoming shortwave radiation [W |m^-2|] Must be > 0 W |m^-2|. 17 | snow,Snow cover fraction (0 – 1) [-] Required if `SnowUse` = 1 18 | ldown,Incoming longwave radiation [W |m^-2|] 19 | fcld,Cloud fraction [tenths] 20 | Wuh,External water use [|m^3|] 21 | xsmd,Observed soil moisture [|m^3| |m^-3|] or [kg |kg^-1|] 22 | lai,Observed leaf area index [|m^-2| |m^-2|] 23 | kdiff,Diffuse radiation [W |m^-2|] |Recmd| if `SOLWEIGUse` = 1 24 | kdir,Direct radiation [W |m^-2|] |Recmd| if `SOLWEIGUse` = 1 25 | wdir,Wind direction [°] |NotAvail| 26 | isec,Second [S] 27 | -------------------------------------------------------------------------------- /docs/source/proc_var_info/gen_df_forcing_output_csv.py: -------------------------------------------------------------------------------- 1 | # %% Change working directory from the workspace root to the ipynb file location. 
Turn this addition off with the DataSciece.changeDirOnImportExport setting 2 | 3 | import os 4 | 5 | try: 6 | os.chdir(os.path.join(os.getcwd(), "docs/proc_var_info")) 7 | print(os.getcwd()) 8 | except: 9 | pass 10 | 11 | # %% 12 | from urlpath import URL 13 | from pathlib import Path 14 | import numpy as np 15 | import pandas as pd 16 | import supy as sp 17 | import os 18 | 19 | 20 | os.getcwd() 21 | # %% sample run 22 | print("loading in", "gen_df_forcing", "...") 23 | df_state_init_sample, df_forcing_sample = sp.load_SampleData() 24 | df_output_sample, df_state_end_sample = sp.run_supy( 25 | df_forcing_sample.iloc[:10], df_state_init_sample 26 | ) 27 | print("loading in", "gen_df_forcing", "...") 28 | 29 | # %% [markdown] 30 | # ## generate forcing related dataframe 31 | # %% 32 | # ### load `SUEWS_***.txt` related tables 33 | from nml_rst_proc import url_repo_base, url_repo_input 34 | 35 | url_repo_output = URL(url_repo_base) / "output_files" 36 | 37 | 38 | def gen_df_forcing( 39 | path_csv_in="SSss_YYYY_data_tt.csv", url_base=url_repo_input, 40 | ) -> pd.DataFrame: 41 | """Generate description info of supy forcing data into a dataframe 42 | 43 | Parameters 44 | ---------- 45 | path_csv_in : str, optional 46 | path to the input csv file relative to url_base (the default is '/input_files/SSss_YYYY_data_tt.csv']) 47 | url_base : urlpath.URL, optional 48 | URL to the input files of repo base (the default is url_repo_input, which is defined at the top of this file) 49 | 50 | Returns 51 | ------- 52 | pd.DataFrame 53 | Description info of supy forcing data 54 | """ 55 | 56 | try: 57 | # load info from SUEWS docs repo 58 | # this is regarded as the official source 59 | urlpath_table = url_base / path_csv_in 60 | df_var_info = pd.read_csv(urlpath_table) 61 | except: 62 | print(f"{urlpath_table} not existing!") 63 | else: 64 | # clean info dataframe 65 | df_var_forcing = df_var_info.drop(["No.", "Use"], axis=1) 66 | 67 | # set index with `Column name` 68 | df_var_forcing = df_var_forcing.set_index("Column Name") 69 | df_var_forcing.index = df_var_forcing.index.map( 70 | lambda x: x.replace("`", "") 71 | ).rename("variable") 72 | 73 | # add `Second` info 74 | df_var_forcing.loc["isec"] = "Second [S]" 75 | 76 | return df_var_forcing 77 | 78 | 79 | # %% [markdown] 80 | # ## generate output related dataframe 81 | 82 | 83 | # %% 84 | def gen_df_output( 85 | list_csv_in=[ 86 | "SSss_YYYY_SUEWS_TT.csv", 87 | "SSss_DailyState.csv", 88 | "SSss_YYYY_snow_TT.csv", 89 | "SSss_YYYY_RSL_TT.csv", 90 | "SSss_YYYY_SOLWEIG_TT.csv", 91 | ], 92 | url_base=url_repo_output, 93 | ) -> Path: 94 | """Generate description info of supy output results into dataframe 95 | 96 | Parameters 97 | ---------- 98 | list_csv_in : list, optional 99 | list of file names for csv files with meta info (the default is ['SSss_YYYY_SUEWS_TT.csv','SSss_DailyState.csv','SSss_YYYY_snow_TT.csv',], which [default_description]) 100 | url_base : [type], optional 101 | URL to the output dir of repo base (the default is url_repo_output, which is defined at the top of this file) 102 | 103 | Returns 104 | ------- 105 | pd.DataFrame 106 | Description info of supy output results 107 | """ 108 | 109 | # list of URLs 110 | list_url_table = [url_base / table for table in list_csv_in] 111 | try: 112 | df_var_info = pd.concat([pd.read_csv(f) for f in list_url_table], sort=False) 113 | except: 114 | for url in list_url_table: 115 | if not url.get().ok: 116 | print(f"{url} not existing!") 117 | else: 118 | # clean meta info 119 | df_var_info_x = ( 120 | 
df_var_info.set_index("Name").loc[:, ["Description"]].drop_duplicates() 121 | ) 122 | 123 | df_var_output = ( 124 | df_var_info_x.copy() 125 | .assign(lower=df_var_info_x.index.str.lower()) 126 | .reset_index() 127 | .set_index("lower") 128 | ) 129 | 130 | df_var_group = df_output_sample.columns.to_frame() 131 | df_var_group.index = df_var_group.index.droplevel(0).rename("Name") 132 | 133 | # wrap into a dataframe 134 | df_var_output = ( 135 | df_var_group.merge( 136 | df_var_output.set_index("Name"), left_on="Name", right_on="Name" 137 | ) 138 | .rename(columns={"var": "variable", "group": "Group",}) 139 | .set_index("variable") 140 | .drop_duplicates() 141 | ) 142 | 143 | return df_var_output 144 | 145 | 146 | # %% [markdown] 147 | # ## generate csv files for meta info 148 | # %% 149 | # df_forcing=gen_df_forcing('SSss_YYYY_data_tt.csv') 150 | 151 | # df_output=gen_df_output( 152 | # [ 153 | # 'SSss_YYYY_SUEWS_TT.csv', 154 | # 'SSss_DailyState.csv', 155 | # 'SSss_YYYY_snow_TT.csv', 156 | # ], 157 | # ) 158 | 159 | 160 | # # %% 161 | # df_forcing.head() 162 | 163 | 164 | # #%% 165 | # df_output.head() 166 | 167 | 168 | # #%% 169 | -------------------------------------------------------------------------------- /docs/source/proc_var_info/gen_rst.py: -------------------------------------------------------------------------------- 1 | # %% Change working directory from the workspace root to the ipynb file location. Turn this addition off with the DataSciece.changeDirOnImportExport setting 2 | import os 3 | try: 4 | os.chdir(os.path.join(os.getcwd(), 'docs/proc_var_info')) 5 | print(os.getcwd()) 6 | except: 7 | pass 8 | 9 | # %% 10 | from pathlib import Path 11 | import pandas as pd 12 | import supy as sp 13 | import os 14 | try: 15 | os.chdir(os.path.join(os.getcwd(), 'docs/proc_var_info')) 16 | print(os.getcwd()) 17 | except: 18 | pass 19 | # get_ipython().run_line_magic('load_ext', 'autoreload') 20 | # get_ipython().run_line_magic('autoreload', '2') 21 | 22 | 23 | # %% 24 | from gen_df_state_csv import (gen_df_state, list_table, set_initcond, 25 | set_runcontrol, set_input_runcontrol, gen_df_dim) 26 | from gen_df_forcing_output_csv import gen_df_forcing, gen_df_output 27 | 28 | # %% [markdown] 29 | # # generate option rst files 30 | # %% [markdown] 31 | # ## generate dataframes for variable groups 32 | 33 | # %% 34 | print('generating df_state.csv ...') 35 | df_state = gen_df_state( 36 | list_table, set_initcond, set_runcontrol, set_input_runcontrol) 37 | df_state.to_csv('df_state.csv') 38 | print('df_state.csv done!') 39 | 40 | 41 | # #%% 42 | # get_ipython().run_line_magic('load_ext', 'snakeviz') 43 | # get_ipython().run_line_magic('snakeviz', 'gen_df_state(list_table, set_initcond, set_runcontrol, set_input_runcontrol)') 44 | 45 | 46 | # %% 47 | print('generating df_forcing.csv ...') 48 | df_forcing = gen_df_forcing('SSss_YYYY_data_tt.csv') 49 | df_forcing.to_csv('df_forcing.csv') 50 | print('df_forcing.csv done!') 51 | 52 | 53 | # %% 54 | print('generating df_output.csv ...') 55 | df_output = gen_df_output( 56 | [ 57 | 'SSss_YYYY_SUEWS_TT.csv', 58 | 'SSss_DailyState.csv', 59 | 'SSss_YYYY_snow_TT.csv', 60 | 'SSss_YYYY_RSL_TT.csv', 61 | 'SSss_YYYY_SOLWEIG_TT.csv', 62 | ], 63 | ) 64 | df_output.to_csv('df_output.csv') 65 | print('df_output.csv done!') 66 | 67 | # %% [markdown] 68 | # ## generate option string for rst option file 69 | 70 | # %% 71 | 72 | 73 | def gen_opt_str(ser_rec: pd.Series)->str: 74 | '''generate rst option string 75 | 76 | Parameters 77 | ---------- 78 | ser_rec : 
pd.Series 79 | record for specifications 80 | 81 | Returns 82 | ------- 83 | str 84 | rst string 85 | ''' 86 | 87 | name = ser_rec.name 88 | indent = r' ' 89 | str_opt = f'.. option:: {name}'+'\n\n' 90 | for spec in ser_rec.sort_index().index: 91 | str_opt += indent+f':{spec}:'+'\n' 92 | spec_content = ser_rec[spec] 93 | str_opt += indent+indent+f'{spec_content}'+'\n' 94 | return str_opt 95 | 96 | 97 | # xx=df_var_info.set_index('variable').iloc[10] 98 | # print(gen_opt_str(xx)) 99 | 100 | 101 | # %% 102 | def gen_rst(path_rst, path_df_csv, rst_title): 103 | df_var_info = pd.read_csv(path_df_csv).set_index('variable') 104 | df_var_info['rst'] = df_var_info.copy().apply(gen_opt_str, axis=1) 105 | df_var_info = df_var_info.sort_index().reset_index(drop=True) 106 | rst_txt_x = '\n\n'.join(df_var_info.rst) 107 | rst_txt = '\n'.join([rst_title, rst_txt_x]) 108 | with open(path_rst, 'w') as f: 109 | print(rst_txt, file=f) 110 | 111 | return path_rst 112 | 113 | 114 | # gen_rst( 115 | # '../source/data-structure/test.rst', 116 | # df_state, 117 | # 'xx\n') 118 | 119 | 120 | # %% 121 | def gen_group_dict( 122 | group, 123 | path_rst_base=Path('../data-structure/') 124 | )->dict: 125 | '''generate dict of rst strings for df groups. 126 | 127 | ''' 128 | 129 | rst_title = f''' 130 | .. _df_{group}_var: 131 | 132 | ``df_{group}`` variables 133 | ============================ 134 | 135 | 136 | ''' 137 | dict_info_group={ 138 | 'output': '/data-structure/supy-io.ipynb#df_output:-model-output-results', 139 | 'forcing': '/data-structure/supy-io.ipynb#df_forcing:-forcing-data', 140 | 'state': '/data-structure/supy-io.ipynb#df_state_init:-model-initial-states', 141 | } 142 | rst_info_group = f''' 143 | .. note:: Data structure of ``df_{group}`` is explained :ref:`here <{dict_info_group[group]}>`. 
144 | ''' 145 | 146 | dict_group = { 147 | 'path_rst': path_rst_base/('df_'+group+'.rst'), 148 | 'path_df_csv': 'df_'+group+'.csv', 149 | 'rst_title': rst_title+rst_info_group, 150 | } 151 | 152 | return dict_group 153 | 154 | 155 | # print(gen_group_dict('state')) 156 | 157 | 158 | # %% 159 | 160 | dict_rst_out = {group: gen_group_dict(group) 161 | for group in ['state', 'forcing', 'output']} 162 | # dict_rst_out 163 | 164 | 165 | # %% 166 | for group in dict_rst_out: 167 | print('working on group:', group) 168 | print('file generated:', gen_rst(**dict_rst_out[group]), '\n') 169 | -------------------------------------------------------------------------------- /docs/source/proc_var_info/nml_rst_proc.py: -------------------------------------------------------------------------------- 1 | # %% 2 | import webbrowser 3 | import os 4 | import supy as sp 5 | import numpy as np 6 | from urlpath import URL 7 | import pandas as pd 8 | from pathlib import Path 9 | 10 | os.getcwd() 11 | 12 | # %% 13 | # code version 14 | tag_suews_ver = sp.__version_driver__ 15 | 16 | print(f"supy_driver version required: {tag_suews_ver}") 17 | 18 | # list of useful URLs 19 | def gen_url_base(tag): 20 | url_repo_base = URL( 21 | "https://github.com/" 22 | + "Urban-Meteorology-Reading/" 23 | + f"SUEWS/raw/{tag}/docs/source" 24 | ) 25 | return url_repo_base 26 | 27 | 28 | url_repo_base = ( 29 | gen_url_base(tag_suews_ver) 30 | if gen_url_base(tag_suews_ver).get().ok 31 | else gen_url_base("master") 32 | ) 33 | print(f''' 34 | ================================= 35 | SUEWS docs source: {url_repo_base} 36 | ================================= 37 | ''') 38 | 39 | url_repo_input = URL(url_repo_base) / "input_files" 40 | 41 | dict_base = { 42 | "docs": URL("https://suews-docs.readthedocs.io/en/latest/input_files/"), 43 | "github": url_repo_input, 44 | } 45 | 46 | # %% [markdown] 47 | # ### filter input variables 48 | # %% 49 | set_input = sp._load.set_var_input.copy() 50 | set_input.update(sp._load.set_var_input_multitsteps) 51 | df_init_sample, df_forcing_sample = sp.load_SampleData() 52 | set_input.difference_update(set(df_forcing_sample.columns)) 53 | # set_input, len(set_input) 54 | 55 | # %% [markdown] 56 | # #### retrieve SUEWS-related variables 57 | 58 | # %% 59 | dict_var2SiteSelect = sp._load.dict_var2SiteSelect 60 | 61 | dict_var_full = sp._load.exp_dict_full(dict_var2SiteSelect) 62 | 63 | 64 | def extract_var_suews(dict_var_full, k): 65 | x = sp._load.flatten_list(dict_var_full[k]) 66 | x = np.unique(x) 67 | x = [ 68 | xx 69 | for xx in x 70 | if xx not in ["base", "const", "0.0"] + [str(x) for x in range(24)] 71 | ] 72 | x = [xx for xx in x if "Code" not in xx] 73 | return x 74 | 75 | 76 | dict_var_ref_suews = {k: extract_var_suews(dict_var_full, k) for k in dict_var_full} 77 | 78 | df_var_ref_suews = pd.DataFrame( 79 | {k: ", ".join(dict_var_ref_suews[k]) for k in dict_var_ref_suews}, index=[0] 80 | ).T.rename({0: "SUEWS-related variables"}, axis=1) 81 | 82 | ser_input_site_exp = ( 83 | df_var_ref_suews.filter(items=set_input, axis=0) 84 | .loc[:, "SUEWS-related variables"] 85 | .str.lower() 86 | .str.split(",") 87 | ) 88 | 89 | set_site = set(x.lower().strip() for x in np.concatenate(ser_input_site_exp.values)) 90 | 91 | # set_site, len(set_site) 92 | # %% [markdown] 93 | # ### filter `runcontrol` related variables 94 | # %% 95 | # runcontrol variables for supy input 96 | path_runcontrol = sp._env.path_supy_module / "sample_run" / "Runcontrol.nml" 97 | dict_runcontrol = 
sp._load.load_SUEWS_dict_ModConfig(path_runcontrol).copy() 98 | set_runcontrol = set(dict_runcontrol.keys()) 99 | set_input_runcontrol = set_runcontrol.intersection(set_input) 100 | 101 | print(f''' 102 | ============================================================ 103 | set_input_runcontrol has {len(set_input_runcontrol)} variables: 104 | ''') 105 | for var in set_input_runcontrol: 106 | print(var) 107 | print(f''' 108 | ============================================================ 109 | ''') 110 | 111 | # %% [markdown] 112 | # ### filter `initialcondition` related variables 113 | # %% 114 | # initcond variables for supy input 115 | dict_initcond = sp._load.dict_InitCond_default.copy() 116 | set_initcond = set(dict_initcond.keys()) 117 | set_input_initcond = set_initcond.intersection(set_input) 118 | # set_input_initcond, len(set_input_initcond) 119 | 120 | 121 | # %% [markdown] 122 | # ### functions to process `nml` related variables 123 | # %% 124 | def form_option(str_opt): 125 | """generate option name based suffix for URL 126 | 127 | :param str_opt: opt name 128 | :type str_opt: str 129 | :return: URL suffix for the specified option 130 | :rtype: str 131 | """ 132 | 133 | str_base = "#cmdoption-arg-" 134 | str_opt_x = str_base + str_opt.lower().replace("_", "-").replace("(", "-").replace( 135 | ")", "" 136 | ) 137 | return str_opt_x 138 | 139 | 140 | # form_option('snowinitially') 141 | 142 | 143 | # %% 144 | def choose_page( 145 | str_opt, 146 | set_site=set_site, 147 | set_runcontrol=set_runcontrol, 148 | set_initcond=set_initcond, 149 | source="docs", 150 | ): 151 | # print('str_opt', str_opt) 152 | suffix_page = "html" if source == "docs" else "rst" 153 | # runcontrol variable: 154 | if str_opt in set_runcontrol: 155 | str_base = "RunControl" 156 | if str_opt.startswith("tstep"): 157 | name_page = "Time_related_options" 158 | else: 159 | name_page = "scheme_options" 160 | 161 | # initcondition variable: 162 | elif str_opt in set_initcond: 163 | str_base = "Initial_Conditions" 164 | # the following test sequence is IMPORTANT! 
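# (why the order matters: some initial-condition names match more than one
# test here; e.g. `soilstorepavedstate` starts with "soilstore" and also ends
# with "state", and `snowwaterpavedstate` starts with "snow" and also ends
# with "state", so the specific prefix checks must precede the generic
# endswith("state") branch below)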
165 | if str_opt.startswith("soilstore"): 166 | name_page = "Soil_moisture_states" 167 | elif str_opt.startswith("snow"): 168 | name_page = "Snow_related_parameters" 169 | elif str_opt.endswith("state"): 170 | name_page = "Above_ground_state" 171 | elif str_opt in ("dayssincerain", "temp_c0"): 172 | name_page = "Recent_meteorology" 173 | else: 174 | name_page = "Vegetation_parameters" 175 | 176 | # site characteristics variable: 177 | elif str_opt in set_site: 178 | str_base = "SUEWS_SiteInfo" 179 | name_page = "Input_Options" 180 | 181 | # defaults to empty strings 182 | else: 183 | str_base = "" 184 | name_page = "" 185 | 186 | str_page = ".".join([name_page, suffix_page]) 187 | str_page_full = str_base + "/" + str_page 188 | return str_page_full 189 | 190 | 191 | # for source in ['docs','github']: 192 | # print(source) 193 | # for x in sorted(list(set_site)+list(set_runcontrol)+list(set_initcond)): 194 | # print() 195 | # print(choose_page(x, source=source)) 196 | # choose_page('tstep', set_site, set_runcontrol, set_initcond) 197 | # choose_page('snowinitially', set_site, set_runcontrol, set_initcond) 198 | 199 | 200 | # %% 201 | def gen_url_option( 202 | str_opt, 203 | set_site=set_site, 204 | set_runcontrol=set_runcontrol, 205 | set_initcond=set_initcond, 206 | source="docs", 207 | ): 208 | """construct a URL for option based on source 209 | 210 | :param str_opt: option name, defaults to '' 211 | :param str_opt: str, optional 212 | :param source: URL source: 'docs' for readthedocs.org; 'github' for github repo, defaults to 'docs' 213 | :param source: str, optional 214 | :return: a valid URL pointing to the option related resources 215 | :rtype: urlpath.URL 216 | """ 217 | 218 | url_base = dict_base[source] 219 | 220 | url_page = choose_page( 221 | str_opt, set_site, set_runcontrol, set_initcond, source=source 222 | ) 223 | # print('str_opt', str_opt, url_base, url_page) 224 | str_opt_x = form_option(str_opt) 225 | url_opt = url_base / (url_page + str_opt_x) 226 | return url_opt 227 | 228 | 229 | # for source in [ 230 | # # 'docs', 231 | # 'github', 232 | # ]: 233 | # print(source) 234 | # for x in sorted(list(set_site)+list(set_runcontrol)+list(set_initcond)): 235 | # print() 236 | # print(gen_url_option(x, source=source)) 237 | 238 | # # webbrowser.open(str(gen_url_option(x, source=source))) 239 | 240 | # gen_url_option('sss', source='github') 241 | # gen_url_option('sss', source='github').get().ok 242 | # %% 243 | # test connectivity of all generated option URLs 244 | # for opt in list(set_initcond)+list(set_runcontrol): 245 | # for source in ['github', 'docs']: 246 | # url = gen_url_option(opt, source=source) 247 | # if not url.get().ok: 248 | # print() 249 | # print(opt) 250 | # print(url) 251 | 252 | # %% 253 | def parse_block(block): 254 | xx = block.reset_index(drop=True) 255 | name_block = xx.loc[0].replace(".. 
option::", "").strip() 256 | ind_field = xx.index[xx.str.startswith("\t:")] 257 | list_field = [ 258 | xx.iloc[slice(*x)].str.strip().reset_index(drop=True) 259 | for x in zip(ind_field, list(ind_field[1:]) + [None]) 260 | ] 261 | name_field = [field.loc[0].replace(":", "") for field in list_field] 262 | content_field = [field.loc[1:].str.join("") for field in list_field] 263 | ser_field = pd.Series( 264 | {field.loc[0].replace(":", ""): " ".join(field.loc[1:]) for field in list_field} 265 | ).rename(name_block) 266 | return ser_field 267 | 268 | 269 | def parse_option_rst(path_rst): 270 | ser_opts = pd.read_csv(path_rst, sep="\n", skipinitialspace=True) 271 | ser_opts = ser_opts.iloc[:, 0] 272 | ind_opt = ser_opts.index[ser_opts.str.contains(".. option::")] 273 | ser_opt_name = ser_opts[ind_opt].str.replace(".. option::", "").str.strip() 274 | list_block_opt = [ 275 | ser_opts.loc[slice(*x)] for x in zip(ind_opt, list(ind_opt[1:]) + [None]) 276 | ] 277 | df_opt = pd.concat([parse_block(block) for block in list_block_opt], axis=1).T 278 | return df_opt 279 | 280 | 281 | # url_test = gen_url_option('pavedstate', source='github') 282 | # parse_option_rst(url_test) 283 | 284 | 285 | # %% 286 | -------------------------------------------------------------------------------- /docs/source/sample_run: -------------------------------------------------------------------------------- 1 | ../../src/supy/sample_run -------------------------------------------------------------------------------- /docs/source/tutorial/tutorial.rst: -------------------------------------------------------------------------------- 1 | .. _tutorial_index: 2 | 3 | Tutorials 4 | ========= 5 | 6 | 7 | To familiarise users with SuPy urban climate modelling and to demonstrate the functionality of SuPy, we provide the following tutorials in `Jupyter notebooks `_: 8 | 9 | .. toctree:: 10 | :maxdepth: 1 11 | 12 | quick-start 13 | impact-studies 14 | external-interaction 15 | AMF-sim 16 | 17 | .. Note:: 18 | 1. The Anaconda distribution is suggested as the scientific Python 3 environment for its completeness in necessary packages. Please follow the official guide for its `installation `__. 19 | 2. Users with less experience in Python are suggested to go through the following section first before using SuPy. 20 | 21 | Python 101 before SuPy 22 | ---------------------- 23 | 24 | Admittedly, this header is somewhat misleading: given the enormity of Python, it's more challenging to get this section *correct* than coding SuPy per se. As such, here a collection of data analysis oriented links to useful Python resources is provided to help novices start using Python and **then** SuPy. 25 | 26 | - `The gist of Python `_: a quick introductory blog that covers Python basics for data analysis. 27 | 28 | - Jupyter Notebook: Jupyter Notebook provides a powerful notebook-based data analysis environment that SuPy users are strongly encouraged to use. Jupyter notebooks can run in browsers (desktop, mobile) either by easy local configuration or on remote servers with pre-set environments (e.g., `Google Colaboratory `_, `Microsoft Azure Notebooks `_). In addition, Jupyter notebooks allow great shareability by incorporating source code and detailed notes in one place, which helps users to organise their computation work. 29 | 30 | - Installation 31 | 32 | Jupyter notebooks can be installed with pip on any desktop/server system and open .ipynb notebook files locally: 33 | 34 | .. 
code-block:: shell 35 | 36 | python3 -m pip install jupyter -U 37 | 38 | - Extensions: To empower your Jupyter Notebook environment with better productivity, please check out the `Unofficial Jupyter Notebook Extensions `_. 39 | Quick introductory blogs can be found `here `_ and `here `_. 40 | 41 | 42 | - pandas: `pandas` is heavily used in SuPy and thus better understanding of pandas is essential in SuPy workflows. 43 | 44 | - Introductory blogs: 45 | 46 | * `Quick dive into Pandas for Data Science `_: introduction to pandas. 47 | * `Basic Time Series Manipulation with Pandas `_: pandas-based time series manipulation. 48 | * `Introduction to Data Visualization in Python `_: plotting using pandas and related libraries. 49 | 50 | - A detailed tutorial in Jupyter Notebooks: 51 | 52 | * `Introduction to pandas `_ 53 | * `pandas fundamentals `_ 54 | * `Data Wrangling with pandas `_ 55 | -------------------------------------------------------------------------------- /docs/source/version-history/20181215.rst: -------------------------------------------------------------------------------- 1 | .. _new_2018.12.15: 2 | 3 | Version 2018.12.15 (internal test release in December 2018) 4 | =========================================================== 5 | 6 | 7 | - **New** 8 | 9 | #. Preview release of SuPy based on the computation kernel of SUEWS 2018b 10 | 11 | 12 | - **Improvement** 13 | 14 | #. Improved calculation of OHM-related radiation terms 15 | 16 | 17 | - **Changes** 18 | 19 | None. 20 | 21 | 22 | - **Fix** 23 | 24 | None 25 | 26 | - **Known issue** 27 | 28 | #. The heat storage modules AnOHM and ESTM are not supported yet. 29 | -------------------------------------------------------------------------------- /docs/source/version-history/20190101.rst: -------------------------------------------------------------------------------- 1 | .. _new_20190101: 2 | 3 | Version 2019.1.1 (preview release, 01 Jan 2019) 4 | ====================================================== 5 | 6 | 7 | - **New** 8 | 9 | #. Slimmed the output groups by excluding unsupported :term:`ESTM` results 10 | 11 | #. SuPy documentation 12 | 13 | * Key IO data structures documented: 14 | 15 | - `df_output_var` (:issue:`9`) 16 | - `df_state_var` (:issue:`8`) 17 | - `df_forcing_var` (:issue:`7`) 18 | 19 | 20 | * Tutorial of parallel SuPy simulations for impact studies 21 | 22 | 23 | 24 | - **Improvement** 25 | 26 | #. Improved calculation of OHM-related radiation terms 27 | 28 | 29 | - **Changes** 30 | 31 | None. 32 | 33 | 34 | - **Fix** 35 | 36 | None 37 | 38 | - **Known issue** 39 | 40 | None 41 | -------------------------------------------------------------------------------- /docs/source/version-history/20190208.rst: -------------------------------------------------------------------------------- 1 | 2 | .. _new_20190208: 3 | 4 | Version 2019.2.8 5 | ====================================================== 6 | 7 | This is a release that fixes recent bugs found in SUEWS that 8 | may lead to abnormal simulation results of storage heat flux, 9 | in particular when `SnowUse` is enabled (i.e., `snowuse=1`). 10 | 11 | - **New** 12 | 13 | None. 14 | 15 | - **Improvement** 16 | 17 | Improved the performance in loading 18 | initial model state from a large number of grids (>1k) 19 | 20 | 21 | - **Changes** 22 | 23 | Updated `SampleRun` dataset by: 24 | 1. setting surface fractions (`sfr`) to a 25 | more realistic value based on London KCL case; 26 | 2. enabling snow module (`snowuse=1`). 27 | 28 | 29 | - **Fix** 30 | 31 | 1. 
Fixed a bug in the calculation of storage heat flux. 32 | 2. Fixed a bug in loading ``popdens`` for calculating anthropogenic heat flux. 33 | 34 | - **Known issue** 35 | 36 | None 37 | -------------------------------------------------------------------------------- /docs/source/version-history/20190219.rst: -------------------------------------------------------------------------------- 1 | 2 | .. _new_20190219: 3 | 4 | Version 2019.2.19 5 | ====================================================== 6 | 7 | This release improved exception handling for 8 | fatal errors in `supy_driver`. 9 | 10 | - **New** 11 | 12 | Added support for handling Python kernel crashes caused by 13 | fatal errors in the `supy_driver` kernel; the Python kernel 14 | no longer crashes even when `supy_driver` aborts. 15 | 16 | - **Improvement** 17 | 18 | None. 19 | 20 | 21 | - **Changes** 22 | 23 | None 24 | 25 | 26 | - **Fix** 27 | 28 | None. 29 | 30 | - **Known issue** 31 | 32 | None 33 | -------------------------------------------------------------------------------- /docs/source/version-history/20190224.rst: -------------------------------------------------------------------------------- 1 | 2 | .. _new_20190224: 3 | 4 | Version 2019.2.24 5 | ====================================================== 6 | 7 | This release added the ability to save output files. 8 | 9 | - **New** 10 | 11 | 1. Added support to save output files. See: :py:func:`supy.save_supy` 12 | 2. Added support to initialise SuPy from saved ``df_state.csv``. See: :py:func:`supy.init_supy` 13 | 14 | - **Improvement** 15 | 16 | None. 17 | 18 | 19 | - **Changes** 20 | 21 | None. 22 | 23 | 24 | - **Fix** 25 | 26 | None. 27 | 28 | - **Known issue** 29 | 30 | None 31 | -------------------------------------------------------------------------------- /docs/source/version-history/20190225.rst: -------------------------------------------------------------------------------- 1 | .. _new_20190225: 2 | 3 | Version 2019.2.25 4 | ====================================================== 5 | 6 | This release dropped support for Python 3.5 and below. 7 | 8 | - **New** 9 | 10 | None. 11 | 12 | - **Improvement** 13 | 14 | None. 15 | 16 | 17 | - **Changes** 18 | 19 | Dropped support for Python 3.5 and below. 20 | 21 | 22 | - **Fix** 23 | 24 | None. 25 | 26 | - **Known issue** 27 | 28 | None 29 | -------------------------------------------------------------------------------- /docs/source/version-history/20190314.rst: -------------------------------------------------------------------------------- 1 | 2 | .. _new_20190314: 3 | 4 | Version 2019.3.14 5 | ====================================================== 6 | 7 | This release improved memory usage. 8 | 9 | - **New** 10 | 11 | None. 12 | 13 | - **Improvement** 14 | 15 | Optimised memory consumption for long-term simulations. 16 | 17 | 18 | - **Changes** 19 | 20 | None. 21 | 22 | 23 | - **Fix** 24 | 25 | None. 26 | 27 | - **Known issue** 28 | 29 | None 30 | -------------------------------------------------------------------------------- /docs/source/version-history/20190321.rst: -------------------------------------------------------------------------------- 1 | 2 | .. _new_20190321: 3 | 4 | Version 2019.3.21 5 | ====================================================== 6 | 7 | TMY generation. 8 | 9 | - **New** 10 | 11 | Added preliminary support for generating a TMY dataset from SuPy output. 12 | 13 | - **Improvement** 14 | 15 | None. 16 | 17 | - **Changes** 18 | 19 | None. 20 | 21 | 22 | - **Fix** 23 | 24 | None.
25 | 26 | - **Known issue** 27 | 28 | None 29 | -------------------------------------------------------------------------------- /docs/source/version-history/20190415.rst: -------------------------------------------------------------------------------- 1 | .. _new_20190415: 2 | 3 | Version 2019.4.15 4 | ====================================================== 5 | 6 | ERA-5 download. 7 | 8 | - **New** 9 | 10 | Added experimental support for downloading and processing ERA-5 data to force SuPy simulations. 11 | 12 | - **Improvement** 13 | 14 | Improved compatibility with earlier `pandas` versions in resampling output. 15 | 16 | - **Changes** 17 | 18 | None. 19 | 20 | 21 | - **Fix** 22 | 23 | None. 24 | 25 | - **Known issue** 26 | 27 | None 28 | -------------------------------------------------------------------------------- /docs/source/version-history/20190417.rst: -------------------------------------------------------------------------------- 1 | .. _new_20190417: 2 | 3 | Version 2019.4.17 4 | ====================================================== 5 | 6 | UMEP compatibility tweaks. 7 | 8 | - **New** 9 | 10 | None. 11 | 12 | - **Improvement** 13 | 14 | None. 15 | 16 | - **Changes** 17 | 18 | `problems.txt` will be written out in addition to the console error message, similarly to the SUEWS binary. 19 | 20 | 21 | - **Fix** 22 | 23 | Incorrect caching of input libraries. 24 | 25 | - **Known issue** 26 | 27 | None 28 | -------------------------------------------------------------------------------- /docs/source/version-history/20190429.rst: -------------------------------------------------------------------------------- 1 | .. _new_20190429: 2 | 3 | Version 2019.4.29 4 | ====================================================== 5 | 6 | Parallel run. 7 | 8 | - **New** 9 | 10 | Added support for parallel runs on the fly. 11 | 12 | - **Improvement** 13 | 14 | None. 15 | 16 | - **Changes** 17 | 18 | None. 19 | 20 | 21 | - **Fix** 22 | 23 | None. 24 | 25 | - **Known issue** 26 | 27 | None 28 | -------------------------------------------------------------------------------- /docs/source/version-history/20190528.rst: -------------------------------------------------------------------------------- 1 | .. _new_20190528: 2 | 3 | Version 2019.5.28 4 | ====================================================== 5 | 6 | Spring house cleaning with long-awaited command line tools (more on the way!). 7 | 8 | - **New** 9 | 10 | 1. Added version info function: `show_version`. 11 | 2. Added command line tools: 12 | 13 | - `suews-run`: SuPy wrapper to mimic SUEWS-binary-based simulation. 14 | 15 | - `suews-convert`: convert input tables from older versions to newer ones (one-way only). 16 | 17 | 18 | - **Improvement** 19 | 20 | None. 21 | 22 | - **Changes** 23 | 24 | None. 25 | 26 | 27 | - **Fix** 28 | 29 | 1. Fixed a bug in writing out multi-grid output files 30 | caused by incorrect dropping of temporal information by pandas. 31 | 32 | - **Known issue** 33 | 34 | ESTM is not supported yet. 35 | 36 | 37 | 38 | -------------------------------------------------------------------------------- /docs/source/version-history/20190608.rst: -------------------------------------------------------------------------------- 1 | 2 | .. _new_20190608: 3 | 4 | Version 2019.6.8 5 | ====================================================== 6 | 7 | 8 | - **New** 9 | 10 | None. 11 | 12 | - **Improvement** 13 | 14 | None. 15 | 16 | - **Changes** 17 | 18 | None. 19 | 20 | 21 | - **Fix** 22 | 23 | 1. 
Fixed a bug in rescaling Kdown when loading forcing data. 24 | 25 | 26 | - **Known issue** 27 | 28 | ESTM is not supported yet. 29 | 30 | 31 | 32 | -------------------------------------------------------------------------------- /docs/source/version-history/20190717.rst: -------------------------------------------------------------------------------- 1 | .. _new_20190717: 2 | 3 | Version 2019.7.17 4 | ====================================================== 5 | 6 | 7 | - **New** 8 | 9 | 1. Added OHM-related functions. 10 | 2. Added surface-conductance-related functions. 11 | 12 | - **Improvement** 13 | 14 | None. 15 | 16 | - **Changes** 17 | 18 | None. 19 | 20 | 21 | - **Fix** 22 | 23 | 1. Fixed a bug in unit conversion for TMY data generation. 24 | 25 | 26 | - **Known issue** 27 | 28 | ESTM is not supported yet. 29 | 30 | 31 | 32 | -------------------------------------------------------------------------------- /docs/source/version-history/20190829.rst: -------------------------------------------------------------------------------- 1 | .. _new_latest: 2 | .. _new_20190829: 3 | 4 | 5 | Version 2019.8.29 6 | ====================================================== 7 | 8 | 9 | - **New** 10 | 11 | 1. Added WRF-SUEWS-related functions. 12 | 2. Added `diagnostics of canyon profiles `_. 13 | 14 | - **Improvement** 15 | 16 | None. 17 | 18 | - **Changes** 19 | 20 | 1. Synchronised with the v2019a interface: minimum supy_driver v2019a2. 21 | 22 | 23 | - **Fix** 24 | 25 | None. 26 | 27 | 28 | - **Known issue** 29 | 30 | 1. ESTM is not supported yet. 31 | 2. BLUEWS, a CBL module in SUEWS, is not supported yet. 32 | 3. Performance in parallel mode can sometimes be worse than 33 | in serial mode due to heavy (de)serialisation loads. 34 | 35 | 36 | 37 | -------------------------------------------------------------------------------- /docs/source/version-history/20200202.rst: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Version 2020.2.2 5 | ====================================================== 6 | 7 | 8 | - **New** 9 | 10 | 1. A checker to validate input `DataFrame`s. 11 | See option `check_input` in `run_supy`. 12 | 13 | 2. Utilities to generate forcing data using ERA-5 data. 14 | See `download_era5` and `gen_forcing_era5`. 15 | 16 | - **Improvement** 17 | 18 | 1. Improved performance of the parallel mode. 19 | 20 | - **Changes** 21 | 22 | None. 23 | 24 | 25 | - **Fix** 26 | 27 | None. 28 | 29 | 30 | - **Known issue** 31 | 32 | 1. ESTM is not supported yet. 33 | 2. BLUEWS, a CBL module in SUEWS, is not supported yet. 34 | 3. Simulation in parallel mode is NOT supported on Windows 35 | due to a system limitation. 36 | 37 | 38 | 39 | -------------------------------------------------------------------------------- /docs/source/version-history/20200529.rst: -------------------------------------------------------------------------------- 1 | 2 | Version 2020.5.29 3 | ====================================================== 4 | 5 | 6 | - **New** 7 | 8 | 1. Update supy-driver to the `2020a` iteration. 9 | 2. Add a function for plotting RSL variables: `supy.util.plot_rsl`. 10 | 11 | 12 | - **Improvement** 13 | 14 | None. 15 | 16 | - **Changes** 17 | 18 | None. 19 | 20 | 21 | - **Fix** 22 | 23 | 1. Fix the humidity variable in ERA5-based forcing generation. 24 | 2. Fix the impact study tutorial. 25 | 26 | 27 | - **Known issue** 28 | 29 | 1. ESTM is not supported yet. 30 | 2. BLUEWS, a CBL module in SUEWS, is not supported yet. 31 | 3. 
Simulation in parallel mode is NOT supported on Windows 32 | due to a system limitation. 33 | 34 | 35 | 36 | -------------------------------------------------------------------------------- /docs/source/version-history/20201103.rst: -------------------------------------------------------------------------------- 1 | .. _new_latest: 2 | .. _new_dev: 3 | 4 | Version 2020.11.3 5 | ====================================================== 6 | 7 | 8 | - **New** 9 | 10 | 1. Update supy-driver to the `2020b` iteration. 11 | 2. Add a function for plotting RSL variables: `supy.util.plot_rsl`. 12 | 13 | 14 | - **Improvement** 15 | 16 | 1. The RSL-related functions are more robust in dealing with a broader range of urban morphology settings. 17 | 2. Internal changes to conform with recent upgrades in `pandas`. 18 | 19 | - **Changes** 20 | 21 | None. 22 | 23 | 24 | - **Fix** 25 | 26 | 1. Fix an issue in `supy.util.read_forcing` where improper resampling could occur 27 | when the input temporal resolution is the same as the desired resampling time step `tstep_mod`. 28 | 29 | 30 | - **Known issue** 31 | 32 | 1. ESTM is not supported yet. 33 | 2. BLUEWS, a CBL module in SUEWS, is not supported yet. 34 | 3. Simulation in parallel mode is NOT supported on Windows 35 | due to a system limitation. 36 | 37 | 38 | 39 | -------------------------------------------------------------------------------- /docs/source/version-history/version-history.rst: -------------------------------------------------------------------------------- 1 | .. _version_history: 2 | 3 | Version History 4 | ================ 5 | 6 | 7 | .. toctree:: 8 | :maxdepth: 1 9 | 10 | 20201103 11 | 20200529 12 | 20200202 13 | 20190829 14 | 20190717 15 | 20190608 16 | 20190528 17 | 20190429 18 | 20190417 19 | 20190415 20 | 20190321 21 | 20190314 22 | 20190225 23 | 20190224 24 | 20190219 25 | 20190208 26 | 20190101 27 | 20181215 28 | 29 | 30 | 31 | 32 | -------------------------------------------------------------------------------- /sample_plot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sunt05/SuPy/d505c45e44b476563e49a64f272d965b93a81792/sample_plot.png -------------------------------------------------------------------------------- /src/Makefile: -------------------------------------------------------------------------------- 1 | # -*- makefile -*- 2 | .PHONY: main clean test pip supy 3 | 4 | # OS-specific configurations 5 | ifeq ($(OS),Windows_NT) 6 | PYTHON_exe = python.exe 7 | 8 | else 9 | UNAME_S := $(shell uname -s) 10 | 11 | 12 | ifeq ($(UNAME_S),Linux) # Linux 13 | PYTHON_exe=python 14 | 15 | endif 16 | 17 | ifeq ($(UNAME_S),Darwin) # macOS 18 | PYTHON_exe=python 19 | 20 | endif 21 | 22 | endif 23 | 24 | 25 | 26 | PYTHON := $(if $(PYTHON_exe),$(PYTHON_exe),python) 27 | # All the files which include modules used by other modules (these therefore 28 | # need to be compiled first) 29 | 30 | MODULE = supy 31 | 32 | main: 33 | $(MAKE) clean 34 | ${PYTHON} setup.py bdist_wheel 35 | 36 | # install package in dev mode and do pytest 37 | test: 38 | pip install -e . 
39 | pytest -s ${MODULE}/test 40 | 41 | 42 | # If wanted, clean all *.o files after build 43 | clean: 44 | rm -rf ${MODULE}/*.so ${MODULE}/*.pyc ${MODULE}/__pycache__ ${MODULE}/*.dSYM 45 | rm -rf ${MODULE}/test/*.pyc ${MODULE}/test/__pycache__ 46 | rm -rf ${MODULE}/util/*.pyc ${MODULE}/util/__pycache__ 47 | rm -rf ${MODULE}/cmd/*.pyc ${MODULE}/cmd/__pycache__ 48 | rm -rf SuPy.log SuPy.log.* 49 | rm -rf build dist 50 | rm -rf .eggs .pytest_cache 51 | 52 | # upload wheels to pypi using twine 53 | upload: 54 | twine upload --skip-existing dist/*whl 55 | -------------------------------------------------------------------------------- /src/data_test/multi-grid/51.5N0.125W-201310-ml.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sunt05/SuPy/d505c45e44b476563e49a64f272d965b93a81792/src/data_test/multi-grid/51.5N0.125W-201310-ml.nc -------------------------------------------------------------------------------- /src/data_test/multi-grid/51.5N0.125W-201310-sfc.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sunt05/SuPy/d505c45e44b476563e49a64f272d965b93a81792/src/data_test/multi-grid/51.5N0.125W-201310-sfc.nc -------------------------------------------------------------------------------- /src/data_test/multi-grid/51.5N0.125W-201311-ml.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sunt05/SuPy/d505c45e44b476563e49a64f272d965b93a81792/src/data_test/multi-grid/51.5N0.125W-201311-ml.nc -------------------------------------------------------------------------------- /src/data_test/multi-grid/51.5N0.125W-201311-sfc.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sunt05/SuPy/d505c45e44b476563e49a64f272d965b93a81792/src/data_test/multi-grid/51.5N0.125W-201311-sfc.nc -------------------------------------------------------------------------------- /src/data_test/multi-grid/51.5N0.125W-201312-ml.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sunt05/SuPy/d505c45e44b476563e49a64f272d965b93a81792/src/data_test/multi-grid/51.5N0.125W-201312-ml.nc -------------------------------------------------------------------------------- /src/data_test/multi-grid/51.5N0.125W-201312-sfc.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sunt05/SuPy/d505c45e44b476563e49a64f272d965b93a81792/src/data_test/multi-grid/51.5N0.125W-201312-sfc.nc -------------------------------------------------------------------------------- /src/data_test/single-grid/57.75N12.0E-200301-ml.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sunt05/SuPy/d505c45e44b476563e49a64f272d965b93a81792/src/data_test/single-grid/57.75N12.0E-200301-ml.nc -------------------------------------------------------------------------------- /src/data_test/single-grid/57.75N12.0E-200301-sfc.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sunt05/SuPy/d505c45e44b476563e49a64f272d965b93a81792/src/data_test/single-grid/57.75N12.0E-200301-sfc.nc -------------------------------------------------------------------------------- /src/data_test/single-grid/57.75N12.0E-200302-ml.nc: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/sunt05/SuPy/d505c45e44b476563e49a64f272d965b93a81792/src/data_test/single-grid/57.75N12.0E-200302-ml.nc -------------------------------------------------------------------------------- /src/data_test/single-grid/57.75N12.0E-200302-sfc.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sunt05/SuPy/d505c45e44b476563e49a64f272d965b93a81792/src/data_test/single-grid/57.75N12.0E-200302-sfc.nc -------------------------------------------------------------------------------- /src/data_test/single-grid/57.75N12.0E-200303-ml.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sunt05/SuPy/d505c45e44b476563e49a64f272d965b93a81792/src/data_test/single-grid/57.75N12.0E-200303-ml.nc -------------------------------------------------------------------------------- /src/data_test/single-grid/57.75N12.0E-200303-sfc.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sunt05/SuPy/d505c45e44b476563e49a64f272d965b93a81792/src/data_test/single-grid/57.75N12.0E-200303-sfc.nc -------------------------------------------------------------------------------- /src/data_test/single-grid/57.75N12.0E-200304-ml.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sunt05/SuPy/d505c45e44b476563e49a64f272d965b93a81792/src/data_test/single-grid/57.75N12.0E-200304-ml.nc -------------------------------------------------------------------------------- /src/data_test/single-grid/57.75N12.0E-200304-sfc.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sunt05/SuPy/d505c45e44b476563e49a64f272d965b93a81792/src/data_test/single-grid/57.75N12.0E-200304-sfc.nc -------------------------------------------------------------------------------- /src/data_test/single-grid/57.75N12.0E-200305-ml.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sunt05/SuPy/d505c45e44b476563e49a64f272d965b93a81792/src/data_test/single-grid/57.75N12.0E-200305-ml.nc -------------------------------------------------------------------------------- /src/data_test/single-grid/57.75N12.0E-200305-sfc.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sunt05/SuPy/d505c45e44b476563e49a64f272d965b93a81792/src/data_test/single-grid/57.75N12.0E-200305-sfc.nc -------------------------------------------------------------------------------- /src/data_test/single-grid/57.75N12.0E-200306-ml.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sunt05/SuPy/d505c45e44b476563e49a64f272d965b93a81792/src/data_test/single-grid/57.75N12.0E-200306-ml.nc -------------------------------------------------------------------------------- /src/data_test/single-grid/57.75N12.0E-200306-sfc.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sunt05/SuPy/d505c45e44b476563e49a64f272d965b93a81792/src/data_test/single-grid/57.75N12.0E-200306-sfc.nc -------------------------------------------------------------------------------- /src/data_test/single-grid/57.75N12.0E-200307-ml.nc: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/sunt05/SuPy/d505c45e44b476563e49a64f272d965b93a81792/src/data_test/single-grid/57.75N12.0E-200307-ml.nc -------------------------------------------------------------------------------- /src/data_test/single-grid/57.75N12.0E-200307-sfc.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sunt05/SuPy/d505c45e44b476563e49a64f272d965b93a81792/src/data_test/single-grid/57.75N12.0E-200307-sfc.nc -------------------------------------------------------------------------------- /src/data_test/single-grid/57.75N12.0E-200308-ml.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sunt05/SuPy/d505c45e44b476563e49a64f272d965b93a81792/src/data_test/single-grid/57.75N12.0E-200308-ml.nc -------------------------------------------------------------------------------- /src/data_test/single-grid/57.75N12.0E-200308-sfc.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sunt05/SuPy/d505c45e44b476563e49a64f272d965b93a81792/src/data_test/single-grid/57.75N12.0E-200308-sfc.nc -------------------------------------------------------------------------------- /src/data_test/single-grid/57.75N12.0E-200309-ml.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sunt05/SuPy/d505c45e44b476563e49a64f272d965b93a81792/src/data_test/single-grid/57.75N12.0E-200309-ml.nc -------------------------------------------------------------------------------- /src/data_test/single-grid/57.75N12.0E-200309-sfc.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sunt05/SuPy/d505c45e44b476563e49a64f272d965b93a81792/src/data_test/single-grid/57.75N12.0E-200309-sfc.nc -------------------------------------------------------------------------------- /src/data_test/single-grid/57.75N12.0E-200310-ml.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sunt05/SuPy/d505c45e44b476563e49a64f272d965b93a81792/src/data_test/single-grid/57.75N12.0E-200310-ml.nc -------------------------------------------------------------------------------- /src/data_test/single-grid/57.75N12.0E-200310-sfc.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sunt05/SuPy/d505c45e44b476563e49a64f272d965b93a81792/src/data_test/single-grid/57.75N12.0E-200310-sfc.nc -------------------------------------------------------------------------------- /src/data_test/single-grid/57.75N12.0E-200311-ml.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sunt05/SuPy/d505c45e44b476563e49a64f272d965b93a81792/src/data_test/single-grid/57.75N12.0E-200311-ml.nc -------------------------------------------------------------------------------- /src/data_test/single-grid/57.75N12.0E-200311-sfc.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sunt05/SuPy/d505c45e44b476563e49a64f272d965b93a81792/src/data_test/single-grid/57.75N12.0E-200311-sfc.nc -------------------------------------------------------------------------------- /src/data_test/single-grid/57.75N12.0E-200312-ml.nc: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/sunt05/SuPy/d505c45e44b476563e49a64f272d965b93a81792/src/data_test/single-grid/57.75N12.0E-200312-ml.nc -------------------------------------------------------------------------------- /src/data_test/single-grid/57.75N12.0E-200312-sfc.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sunt05/SuPy/d505c45e44b476563e49a64f272d965b93a81792/src/data_test/single-grid/57.75N12.0E-200312-sfc.nc -------------------------------------------------------------------------------- /src/setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup 2 | import pandas as pd 3 | 4 | ser_ver = pd.read_json("./supy/supy_version.json", typ="series", convert_dates=False) 5 | print(ser_ver) 6 | __version__ = f"{ser_ver.ver_milestone}.{ser_ver.ver_major}.{ser_ver.ver_minor}{ser_ver.ver_remark}" 7 | 8 | 9 | def readme(): 10 | try: 11 | with open("../README.md", encoding="utf-8") as f: 12 | return f.read() 13 | except: 14 | return f'SuPy package' 15 | 16 | 17 | 18 | setup( 19 | name="supy", 20 | version=__version__, 21 | description="the SUEWS model that speaks python", 22 | long_description=readme(), 23 | long_description_content_type="text/markdown", 24 | url="https://github.com/sunt05/SuPy", 25 | author=", ".join(["Dr Ting Sun", "Dr Hamidreza Omidvar", "Prof Sue Grimmond",]), 26 | author_email=", ".join( 27 | [ 28 | "ting.sun@reading.ac.uk", 29 | "h.omidvar@reading.ac.uk", 30 | "c.s.grimmond@reading.ac.uk", 31 | ] 32 | ), 33 | license="GPL-V3.0", 34 | packages=["supy"], 35 | package_data={ 36 | "supy": ["sample_run/*", "sample_run/Input/*", "*.json", "util/*", "cmd/*",] 37 | }, 38 | # distclass=BinaryDistribution, 39 | ext_modules=[], 40 | install_requires=[ 41 | "pandas>=0.25.1", 42 | "tables", # for dumping in hdf5 43 | "scipy", 44 | "dask", # needs dask for parallel tasks 45 | "f90nml", 46 | "matplotlib", 47 | "seaborn", 48 | "atmosp", # my own `atmosp` module forked from `atmos-python` 49 | "cdsapi", 50 | "xarray", 51 | "multiprocess", # a better multiprocessing library 52 | "click", # cmd tool 53 | "lmfit", # optimiser 54 | 'pvlib', # TMY-related solar radiation calculations 55 | "platypus-opt==1.0.4", # a multi-objective optimiser 56 | "supy_driver==2020b10", # a separate f2py-based driver 57 | ], 58 | entry_points={ 59 | # command line tools 60 | "console_scripts": [ 61 | "suews-run=supy.cmd.SUEWS:SUEWS", 62 | "suews-convert=supy.cmd.table_converter:convert_table_cmd", 63 | ] 64 | }, 65 | include_package_data=True, 66 | test_suite="nose.collector", 67 | tests_require=["nose"], 68 | python_requires="~=3.6", 69 | classifiers=[ 70 | "Programming Language :: Python :: 3 :: Only", 71 | "Programming Language :: Python :: 3.6", 72 | "Programming Language :: Python :: 3.7", 73 | "Programming Language :: Python :: 3.8", 74 | "Intended Audience :: Education", 75 | "Intended Audience :: Science/Research", 76 | "Operating System :: MacOS :: MacOS X", 77 | "Operating System :: Microsoft :: Windows", 78 | "Operating System :: POSIX :: Linux", 79 | ], 80 | zip_safe=False, 81 | ) 82 | -------------------------------------------------------------------------------- /src/supy/__init__.py: -------------------------------------------------------------------------------- 1 | ########################################################################### 2 | # SuPy: SUEWS that speaks Python 
3 | # Authors: 4 | # Ting Sun, ting.sun@reading.ac.uk 5 | # History: 6 | # 20 Jan 2018: first alpha release 7 | # 01 Feb 2018: performance improvement 8 | # 03 Feb 2018: improvement in output processing 9 | # 08 Mar 2018: pypi packaging 10 | # 01 Jan 2019: public release 11 | # 22 May 2019: restructure of module layout 12 | # 02 Oct 2019: logger restructured 13 | ########################################################################### 14 | 15 | 16 | # core functions 17 | from ._supy_module import ( 18 | init_supy, 19 | load_SampleData, 20 | load_forcing_grid, 21 | run_supy, 22 | save_supy, 23 | check_forcing, 24 | check_state, 25 | ) 26 | 27 | 28 | # utilities 29 | from . import util 30 | 31 | 32 | # version info 33 | from ._version import show_version, __version__, __version_driver__ 34 | 35 | 36 | # module docs 37 | __doc__ = """ 38 | supy - SUEWS that speaks Python 39 | =============================== 40 | 41 | **SuPy** is a Python-enhanced urban climate model with SUEWS as its computation core. 42 | 43 | """ 44 | -------------------------------------------------------------------------------- /src/supy/_check.py: -------------------------------------------------------------------------------- 1 | # functions to check validity of forcing and state DataFrames 2 | # import yaml 3 | # yaml.warnings({'YAMLLoadWarning': False}) 4 | import json 5 | from typing import Dict, List, Tuple 6 | 7 | import numpy as np 8 | import pandas as pd 9 | 10 | from ._env import logger_supy, path_supy_module 11 | from ._load import dict_var_type_forcing 12 | 13 | # the check list file with ranges and logics 14 | path_rules_indiv = path_supy_module / "checker_rules_indiv.json" 15 | 16 | 17 | # opening the check list file 18 | def load_rules(path_rules) -> Dict: 19 | 20 | with open(path_rules) as cf: 21 | dict_rules = json.load(cf) 22 | 23 | # making the keys lowercase to be consistent with supy 24 | dict_rules_lower = {} 25 | for key in dict_rules.keys(): 26 | # for some reason pop() did not work here!! 
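# (most likely because pop()-ing entries while iterating over the dict's keys mutates the dict mid-iteration, which is unsafe in Python 3; building a fresh dict, as done here, avoids that)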
27 | dict_rules_lower[key.lower()] = dict_rules[key] 28 | 29 | return dict_rules_lower 30 | 31 | 32 | # store rules as a dict 33 | dict_rules_indiv = load_rules(path_rules_indiv) 34 | 35 | # checking the range of each parameter 36 | def check_range(ser_to_check: pd.Series, rule_var: dict) -> Tuple: 37 | 38 | var = ser_to_check.name.lower() 39 | 40 | min_v = rule_var[var]["param"]["min"] 41 | max_v = rule_var[var]["param"]["max"] 42 | min_v = -np.inf if isinstance(min_v, str) else min_v 43 | max_v = np.inf if isinstance(max_v, str) else max_v 44 | description = "" 45 | is_accepted_flag = False 46 | 47 | for ind, value in ser_to_check.items(): 48 | if min_v <= value <= max_v: 49 | is_accepted_flag = True 50 | elif value == -999.0: 51 | # default `na` value as such option is unnecessary in SUEWS 52 | is_accepted_flag = True 53 | else: 54 | is_accepted_flag = False 55 | description = f"`{var}` should be between [{min_v}, {max_v}] but `{value}` is found at {ind}" 56 | break 57 | 58 | if not is_accepted_flag: 59 | is_accepted = is_accepted_flag 60 | suggestion = "change the parameter to fall into the acceptable range" 61 | else: 62 | is_accepted = is_accepted_flag 63 | suggestion = "" 64 | 65 | return var, is_accepted, description, suggestion 66 | 67 | 68 | def check_zd_zh(var, values, cr): 69 | 70 | return 0 71 | 72 | 73 | # check if a valid method is set 74 | def check_method(ser_to_check: pd.Series, rule_var: dict) -> Tuple: 75 | var = ser_to_check.name.lower() 76 | 77 | list_val = rule_var[var]["param"]["allowed"] 78 | description = "" 79 | 80 | is_accepted_flag = True 81 | for value in np.nditer(ser_to_check.values): 82 | # reject on the first disallowed value; without the break, a later valid value would wrongly reset the flag 83 | if value not in list_val: 84 | is_accepted_flag = False 85 | description = f"`{var}` should be one of {list_val} but is set as `{value}`" 86 | break 87 | if not is_accepted_flag: 88 | is_accepted = is_accepted_flag 89 | suggestion = "change the parameter to an allowed value" 90 | else: 91 | is_accepted = is_accepted_flag 92 | suggestion = "" 93 | 94 | return var, is_accepted, description, suggestion 95 | 96 | 97 | # # checks for suews parameters 98 | # def check_var_suews(var, values, cr, df_sum): 99 | 100 | # logic = cr[var]['logic'] 101 | 102 | # if logic == 'range': 103 | # out_list = check_range(var, values, cr) 104 | # elif logic == 'zd-zh': 105 | # out_list = check_zd_zh(var, values, cr) 106 | 107 | # df_sum.loc[len(df_sum)] = out_list 108 | 109 | # return df_sum 110 | list_col_forcing = list(dict_var_type_forcing.keys()) 111 | 112 | 113 | def check_forcing(df_forcing: pd.DataFrame): 114 | logger_supy.info("SuPy is validating `df_forcing`...") 115 | # collect issues 116 | list_issues = [] 117 | flag_valid = True 118 | # check the following: 119 | # 1. correct columns 120 | col_df = df_forcing.columns 121 | # 1.1 if all columns are present 122 | set_diff = set(list_col_forcing).difference(col_df) 123 | if len(set_diff) > 0: 124 | str_issue = f"Missing columns found: {set_diff}" 125 | list_issues.append(str_issue) 126 | flag_valid = False 127 | # 1.2 if all columns are in the right position 128 | for col_v, col in zip(list_col_forcing, col_df): 129 | if col_v != col: 130 | str_issue = f"Column {col} is not in the expected position" 131 | list_issues.append(str_issue) 132 | flag_valid = False 133 | 134 | # 2. 
valid timestamps: 135 | ind_df = df_forcing.index 136 | # 2.1 must be a temporal index 137 | if not isinstance(ind_df, pd.DatetimeIndex): 138 | str_issue = f"Index must be {pd.DatetimeIndex}" 139 | list_issues.append(str_issue) 140 | flag_valid = False 141 | # 2.2 no duplicates 142 | if ind_df.has_duplicates: 143 | ind_dup = ind_df[ind_df.duplicated()] 144 | str_issue = f"Timestamps have duplicates: {ind_dup}" 145 | list_issues.append(str_issue) 146 | flag_valid = False 147 | 148 | # 2.3 monotonically increasing 149 | if not ind_df.is_monotonic_increasing: 150 | str_issue = f"Timestamps must be monotonically increasing" 151 | list_issues.append(str_issue) 152 | flag_valid = False 153 | 154 | # 2.4 must have a valid `freq` attribute 155 | if hasattr(ind_df, "freq"): 156 | if ind_df.freq is None: 157 | str_issue = f"Temporal index must have a valid `freq`" 158 | list_issues.append(str_issue) 159 | flag_valid = False 160 | else: 161 | str_issue = f"Temporal index must have `freq` attribute" 162 | list_issues.append(str_issue) 163 | flag_valid = False 164 | 165 | # 3. valid physical ranges 166 | for var in col_df: 167 | if var not in ["iy", "id", "it", "imin", "isec"]: 168 | ser_var = df_forcing[var] 169 | res_check = check_range(ser_var, dict_rules_indiv) 170 | if not res_check[1]: 171 | str_issue = res_check[2] 172 | list_issues.append(str_issue) 173 | flag_valid = False 174 | 175 | if not flag_valid: 176 | str_issue = "\n".join(["Issues found in `df_forcing`:"] + list_issues) 177 | logger_supy.error(str_issue) 178 | return list_issues 179 | else: 180 | logger_supy.info("All checks for `df_forcing` passed!") 181 | 182 | 183 | def check_state(df_state: pd.DataFrame) -> List: 184 | logger_supy.info("SuPy is validating `df_state`...") 185 | # collect issues 186 | list_issues = [] 187 | flag_valid = True 188 | list_col_state = set(dict_rules_indiv.keys()).difference( 189 | [x.lower() for x in list_col_forcing] 190 | ) 191 | 192 | # check the following: 193 | # 1. correct columns 194 | col_df = df_state.columns.get_level_values("var") 195 | # 1.1 if all columns are present 196 | set_diff = set(list_col_state).difference(col_df) 197 | if len(set_diff) > 0: 198 | str_issue = f"Mandatory columns missing from df_state: {set_diff}" 199 | list_issues.append(str_issue) 200 | flag_valid = False 201 | # 1.2 if all columns are included in the checking list 202 | set_diff = set(col_df).difference(list_col_state) 203 | if len(set_diff) > 0: 204 | str_issue = f"Columns not included in checking list: {set_diff}" 205 | list_issues.append(str_issue) 206 | flag_valid = False 207 | 208 | # 2. 
check based on logic types 209 | list_to_check = set(col_df).intersection(list_col_state) 210 | for var in list_to_check: 211 | # pack 212 | val = dict_rules_indiv[var] 213 | df_var = df_state[var] 214 | # 'NA' implies no checking required 215 | if val["logic"] == "NA": 216 | continue 217 | if val["logic"] == "range": 218 | for ind in df_var.index: 219 | ser_var = df_var.loc[ind].rename(var) 220 | res_check = check_range(ser_var, dict_rules_indiv) 221 | if not res_check[1]: 222 | str_issue = res_check[2] + f" at index `{ind}`" 223 | list_issues.append(str_issue) 224 | flag_valid = False 225 | if val["logic"] == "method": 226 | for ind in df_var.index: 227 | ser_var = df_var.loc[ind].rename(var) 228 | res_check = check_method(ser_var, dict_rules_indiv) 229 | if not res_check[1]: 230 | str_issue = res_check[2] + f" at index `{ind}`" 231 | list_issues.append(str_issue) 232 | flag_valid = False 233 | 234 | if not flag_valid: 235 | str_issue = "\n".join(["Issues found in `df_state`:"] + list_issues) 236 | logger_supy.error(str_issue) 237 | return list_issues 238 | else: 239 | logger_supy.info("All checks for `df_state` passed!") 240 | 241 | 242 | # flatten columns from MultiIndex to Index with compound notation 243 | def flatten_col(df_state: pd.DataFrame): 244 | # original MultiIndex columns 245 | col_mi = df_state.columns 246 | # flattened columns 247 | col_flat = col_mi.map( 248 | lambda s: ( 249 | "_".join(s) 250 | .replace("_0", "") 251 | .replace("(", "") 252 | .replace(", ", "_") 253 | .replace(",)", "") 254 | .replace(")", "") 255 | ) 256 | ) 257 | # replace the columns (axis=1), not the index, with the flattened ones 258 | df_state_flat = df_state.set_axis(col_flat, axis=1) 259 | return df_state_flat 260 | -------------------------------------------------------------------------------- /src/supy/_env.py: -------------------------------------------------------------------------------- 1 | from logging.handlers import TimedRotatingFileHandler 2 | import sys 3 | import logging 4 | import inspect 5 | from pathlib import Path 6 | import tempfile 7 | 8 | ######################################################################## 9 | # this file provides variables and functions useful for the whole module.
10 | ######################################################################## 11 | 12 | # define local path for loading resources in this package 13 | path_supy_module = Path(inspect.getsourcefile(lambda: 0)).resolve().parent 14 | 15 | # set up logger format, note `u` to guarantee UTF-8 encoding 16 | FORMATTER = logging.Formatter(u"%(asctime)s - %(name)s - %(levelname)s - %(message)s") 17 | 18 | # log file name 19 | LOG_FILE = "SuPy.log" 20 | 21 | 22 | def get_console_handler(): 23 | console_handler = logging.StreamHandler(sys.stdout) 24 | console_handler.setFormatter(FORMATTER) 25 | return console_handler 26 | 27 | 28 | def get_file_handler(): 29 | try: 30 | path_logfile = Path(LOG_FILE) 31 | path_logfile.touch() 32 | except Exception: 33 | tempdir = tempfile.gettempdir() 34 | path_logfile = Path(tempdir) / LOG_FILE 35 | 36 | file_handler = TimedRotatingFileHandler( 37 | path_logfile, when="midnight", encoding="utf-8", 38 | ) 39 | file_handler.setFormatter(FORMATTER) 40 | return file_handler 41 | 42 | 43 | def get_logger(logger_name, level=logging.DEBUG): 44 | logger = logging.getLogger(logger_name) 45 | # better to have too much log than not enough 46 | logger.setLevel(level) 47 | logger.addHandler(get_console_handler()) 48 | logger.addHandler(get_file_handler()) 49 | 50 | # with this pattern, it's rarely necessary to propagate the error up to parent 51 | logger.propagate = False 52 | return logger 53 | 54 | 55 | logger_supy = get_logger("SuPy", logging.INFO) 56 | logger_supy.debug("a debug message from SuPy") 57 | -------------------------------------------------------------------------------- /src/supy/_misc.py: -------------------------------------------------------------------------------- 1 | import urllib.request # a bare `import urllib` does not expose the `request` submodule used below 2 | import os 3 | 4 | # from pathlib import Path 5 | 6 | 7 | ############################################################################## 8 | # an auxiliary function to resolve path case issues 9 | # credit: https://stackoverflow.com/a/8462613/920789 10 | def path_insensitive(path): 11 | """ 12 | Get a case-insensitive path for use on a case-sensitive system. 13 | 14 | >>> path_insensitive('/Home') 15 | '/home' 16 | >>> path_insensitive('/Home/chris') 17 | '/home/chris' 18 | >>> path_insensitive('/HoME/CHris/') 19 | '/home/chris/' 20 | >>> path_insensitive('/home/CHRIS') 21 | '/home/chris' 22 | >>> path_insensitive('/Home/CHRIS/.gtk-bookmarks') 23 | '/home/chris/.gtk-bookmarks' 24 | >>> path_insensitive('/home/chris/.GTK-bookmarks') 25 | '/home/chris/.gtk-bookmarks' 26 | >>> path_insensitive('/HOME/Chris/.GTK-bookmarks') 27 | '/home/chris/.gtk-bookmarks' 28 | >>> path_insensitive("/HOME/Chris/I HOPE this doesn't exist") 29 | "/HOME/Chris/I HOPE this doesn't exist" 30 | """ 31 | 32 | return _path_insensitive(path) or path 33 | 34 | 35 | def _path_insensitive(path): 36 | """ 37 | Recursive part of path_insensitive to do the work. 38 | """ 39 | path = str(path) 40 | if path == "" or os.path.exists(path): 41 | return path 42 | 43 | base = os.path.basename(path) # may be a directory or a file 44 | dirname = os.path.dirname(path) 45 | 46 | suffix = "" 47 | if not base: # dir ends with a slash?
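# (os.path.basename() returns '' when the path has a trailing separator, so the trailing part is remembered in `suffix` here and re-appended to the resolved path at the end)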
48 | if len(dirname) < len(path): 49 | suffix = path[: len(path) - len(dirname)] 50 | 51 | base = os.path.basename(dirname) 52 | dirname = os.path.dirname(dirname) 53 | 54 | if not os.path.exists(dirname): 55 | dirname = _path_insensitive(dirname) 56 | if not dirname: 57 | return 58 | 59 | # at this point, the directory exists but not the file 60 | 61 | try: # we are expecting dirname to be a directory, but it could be a file 62 | files = os.listdir(dirname) 63 | except OSError: 64 | return 65 | 66 | baselow = base.lower() 67 | try: 68 | basefinal = next(fl for fl in files if fl.lower() == baselow) 69 | except StopIteration: 70 | return 71 | 72 | if basefinal: 73 | return os.path.join(dirname, basefinal) + suffix 74 | else: 75 | return 76 | 77 | 78 | ############################################################################## 79 | # an auxiliary function to test URL connectivity 80 | # credit: https://stackoverflow.com/a/8462613/920789 81 | # https://gist.github.com/dehowell/884204#gistcomment-1771089 82 | def url_is_alive(url): 83 | """ 84 | Checks that a given URL is reachable. 85 | :param url: A URL 86 | :rtype: bool 87 | """ 88 | request = urllib.request.Request(url) 89 | request.get_method = lambda: "HEAD" 90 | 91 | try: 92 | urllib.request.urlopen(request) 93 | return True 94 | except urllib.request.HTTPError: 95 | return False 96 | -------------------------------------------------------------------------------- /src/supy/_post.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pandas as pd 3 | from supy_driver import suews_driver as sd 4 | 5 | 6 | ############################################################################## 7 | # post-processing part 8 | # get variable information from Fortran 9 | def get_output_info_df(): 10 | size_var_list = sd.output_size() 11 | var_list_x = [np.array(sd.output_name_n(i)) for i in np.arange(size_var_list) + 1] 12 | 13 | df_var_list = pd.DataFrame(var_list_x, columns=["var", "group", "aggm", "outlevel"]) 14 | df_var_list = df_var_list.applymap(lambda x: x.decode().strip()) 15 | df_var_list_x = df_var_list.replace(r"^\s*$", np.nan, regex=True).dropna() 16 | var_dfm = df_var_list_x.set_index(["group", "var"]) 17 | return var_dfm 18 | 19 | 20 | # get variable info as a DataFrame 21 | # save `var_df` for later use 22 | var_df = get_output_info_df() 23 | 24 | # dict as var_df but keys in lowercase 25 | var_df_lower = {group.lower(): group for group in var_df.index.levels[0].str.strip()} 26 | 27 | # generate dict of functions to apply for each variable 28 | dict_func_aggm = { 29 | "T": "last", 30 | "A": "mean", 31 | "S": "sum", 32 | "L": "last", 33 | } 34 | var_df["func"] = var_df.aggm.apply(lambda x: dict_func_aggm[x]) 35 | 36 | # dict of resampling rules: 37 | # {group: {var: agg_method}} 38 | dict_var_aggm = { 39 | group: var_df.loc[group, "func"].to_dict() for group in var_df.index.levels[0] 40 | } 41 | 42 | 43 | # generate index for variables in different model groups 44 | def gen_group_cols(group_x): 45 | # get correct group name by cleaning and swapping case 46 | group = group_x.replace("dataoutline", "").replace("line", "") 47 | # print group 48 | group = var_df_lower[group] 49 | header_group = np.apply_along_axis( 50 | list, 0, var_df.loc[["datetime", group]].index.values 51 | )[:, 1] 52 | 53 | # generate MultiIndex if not `datetimeline` 54 | if group_x != "datetimeline": 55 | index_group = pd.MultiIndex.from_product( 56 | [[group], header_group], names=["group", "var"], 
sortorder=None 57 | ) 58 | else: 59 | index_group = header_group 60 | 61 | return index_group 62 | 63 | 64 | # merge_grid: useful for both `dict_output` and `dict_state` 65 | def pack_df_grid(dict_output): 66 | # pack all grid and times into index/columns 67 | df_xx = pd.DataFrame.from_dict(dict_output, orient="index") 68 | # pack 69 | df_xx0 = df_xx.applymap(pd.Series) 70 | df_xx1 = df_xx0.applymap(pd.DataFrame.from_dict) 71 | df_xx2 = pd.concat( 72 | { 73 | grid: pd.concat(df_xx1[grid].to_dict()).unstack().dropna(axis=1) 74 | for grid in df_xx1.columns 75 | } 76 | ) 77 | # drop redundant levels 78 | df_xx2.columns = df_xx2.columns.droplevel(0) 79 | # regroup by `grid` 80 | df_xx2.index.names = ["grid", "time"] 81 | gb_xx2 = df_xx2.groupby(level="grid") 82 | # merge results of each grid 83 | xx3 = gb_xx2.agg(lambda x: tuple(x.values)).applymap(np.array) 84 | 85 | return xx3 86 | 87 | 88 | # generate MultiIndex for variable groups 89 | def gen_index(varline_x): 90 | var_x = varline_x.replace("dataoutline", "").replace("line", "") 91 | group = var_df_lower[var_x] 92 | var = var_df.loc[group].index.tolist() 93 | mindex = pd.MultiIndex.from_product([[group], var], names=["group", "var"]) 94 | return mindex 95 | 96 | 97 | # generate one MultiIndex from a whole dict 98 | def gen_MultiIndex(dict_x): 99 | x_keys = dict_x.keys() 100 | mindex = pd.concat([gen_index(k).to_frame() for k in x_keys]).index 101 | return mindex 102 | 103 | 104 | # generate one Series from a dict entry 105 | def gen_Series(dict_x, varline_x): 106 | m_index = gen_index(varline_x) 107 | res_Series = pd.Series(dict_x[varline_x], index=m_index) 108 | return res_Series 109 | 110 | 111 | # merge a whole dict into one Series 112 | def comb_gen_Series(dict_x): 113 | x_keys = dict_x.keys() 114 | res_Series = pd.concat([gen_Series(dict_x, k) for k in x_keys]) 115 | return res_Series 116 | 117 | 118 | # pack up output of `run_suews` 119 | def pack_df_output(dict_output): 120 | # TODO: add output levels as in the Fortran version 121 | df_output = pd.DataFrame(dict_output).T 122 | # df_output = pd.concat(dict_output).to_frame().unstack() 123 | # set index level names 124 | index = df_output.index.set_names(["datetime", "grid"]) 125 | # clean columns 126 | columns = gen_MultiIndex(df_output.iloc[0]) 127 | values = np.apply_along_axis(np.hstack, 1, df_output.values) 128 | df_output = pd.DataFrame(values, index=index, columns=columns) 129 | return df_output 130 | 131 | 132 | def pack_df_state(dict_state): 133 | df_state = pd.DataFrame(dict_state).T 134 | # df_state = pd.concat(dict_state).to_frame().unstack() 135 | # set index level names 136 | df_state.index = df_state.index.set_names(["datetime", "grid"]) 137 | 138 | return df_state 139 | 140 | 141 | def pack_df_output_array(dict_output_array, df_forcing): 142 | grid_list = list(dict_output_array.keys()) 143 | grid_start = grid_list[0] 144 | col_df = gen_MultiIndex(dict_output_array[grid_start]) 145 | dict_df = {} 146 | for grid in grid_list: 147 | array_grid = np.hstack([v[:, 5:] for v in dict_output_array[grid].values()]) 148 | df_grid = pd.DataFrame(array_grid, columns=col_df, index=df_forcing.index) 149 | 150 | dict_df.update({grid: df_grid}) 151 | 152 | # join results of all grids 153 | df_grid_res = pd.concat(dict_df, keys=dict_df.keys()) 154 | 155 | # set index level names 156 | df_grid_res.index.set_names(["grid", "datetime"], inplace=True) 157 | 158 | return df_grid_res 159 | 160 | 161 | # resample supy output 162 | def resample_output(df_output, freq="60T", 
dict_aggm=dict_var_aggm): 163 | 164 | # get grid and group names 165 | list_grid = df_output.index.get_level_values("grid").unique() 166 | list_group = df_output.columns.get_level_values("group").unique() 167 | 168 | # resampling output according to different rules defined in dict_aggm 169 | # note the setting in .resample: (closed='right', label='right'), 170 | # which conforms to the SUEWS convention 171 | # that a timestamp refers to the end of the preceding period 172 | df_rsmp = pd.concat( 173 | { 174 | grid: pd.concat( 175 | { 176 | group: df_output.loc[grid, group] 177 | .resample(freq, closed="right", label="right",) 178 | .agg(dict_aggm[group]) 179 | for group in list_group 180 | }, 181 | axis=1, 182 | names=["group", "var"], 183 | ) 184 | for grid in list_grid 185 | }, 186 | names=["grid"], 187 | ) 188 | 189 | # clean results 190 | df_rsmp = df_rsmp.dropna(how="all", axis=0) 191 | 192 | return df_rsmp 193 | 194 | 195 | def proc_df_rsl(df_output, debug=False): 196 | try: 197 | # if we work on the whole output with multi-index columns 198 | df_rsl_raw = df_output["RSL"].copy() 199 | except KeyError: 200 | # if we directly work on the RSL output 201 | df_rsl_raw = df_output.copy() 202 | 203 | try: 204 | # drop unnecessary columns if existing 205 | df_rsl_data = df_rsl_raw.drop(["Year", "DOY", "Hour", "Min", "Dectime"], axis=1) 206 | except KeyError: 207 | df_rsl_data = df_rsl_raw 208 | 209 | # retrieve data for plotting 210 | df_rsl = df_rsl_data.iloc[:, : 30 * 4] 211 | df_rsl.columns = ( 212 | df_rsl.columns.str.split("_") 213 | .map(lambda l: tuple([l[0], int(l[1])])) 214 | .rename(["var", "level"]) 215 | ) 216 | df_rsl_proc = df_rsl.stack() 217 | if debug: 218 | # retrieve debug variables 219 | df_rsl_debug = df_rsl_data.iloc[:, 120:] 220 | return df_rsl_proc, df_rsl_debug 221 | else: 222 | return df_rsl_proc 223 | -------------------------------------------------------------------------------- /src/supy/_version.py: -------------------------------------------------------------------------------- 1 | # version info for supy 2 | 3 | from supy_driver import __version__ as sd_ver 4 | from ._env import path_supy_module 5 | 6 | import pandas as pd 7 | 8 | ser_ver = pd.read_json( 9 | path_supy_module / "supy_version.json", typ="series", convert_dates=False 10 | ) 11 | __version__ = f"{ser_ver.ver_milestone}.{ser_ver.ver_major}.{ser_ver.ver_minor}{ser_ver.ver_remark}" 12 | __version_driver__ = sd_ver 13 | 14 | 15 | def show_version(): 16 | """Print `supy` and `supy_driver` version information. 
-------------------------------------------------------------------------------- /src/supy/_version.py: --------------------------------------------------------------------------------
1 | # version info for supy
2 | 
3 | from supy_driver import __version__ as sd_ver
4 | from ._env import path_supy_module
5 | 
6 | import pandas as pd
7 | 
8 | ser_ver = pd.read_json(
9 |     path_supy_module / "supy_version.json", typ="series", convert_dates=False
10 | )
11 | __version__ = f"{ser_ver.ver_milestone}.{ser_ver.ver_major}.{ser_ver.ver_minor}{ser_ver.ver_remark}"
12 | __version_driver__ = sd_ver
13 | 
14 | 
15 | def show_version():
16 |     """print `supy` and `supy_driver` version information.
17 |     """
18 |     print("SuPy versions")
19 |     print("-------------")
20 |     print(f"supy: {__version__}")
21 |     print(f"supy_driver: {__version_driver__}")
22 |     print("\n=================")
23 |     print("SYSTEM DEPENDENCY")
24 |     pd.show_versions()
-------------------------------------------------------------------------------- /src/supy/checker_rules_joint.json: --------------------------------------------------------------------------------
1 | {
2 |     "forcing height": {
3 |         "expr": "(sfr_1*bldgh+sfr_2*dectreeh+sfr_3*evetreeh)*3 3e4 else x)
107 |         )
108 | 
109 |         df_state_init, df_forcing_tstep = sp.load_SampleData()
110 |         # multi-step results
111 |         df_output_m, df_state_m = sp.run_supy(
112 |             df_forcing_part, df_state_init, save_state=False
113 |         )
114 |         df_res_m = (
115 |             df_output_m.loc[:, ["SUEWS", "DailyState", "snow",]]
116 |             .fillna(-999.0)
117 |             .sort_index(axis=1)
118 |             .round(6)
119 |             .applymap(lambda x: -999.0 if np.abs(x) > 3e4 else x)
120 |         )
121 |         # print(df_res_m.iloc[:3, 86], df_res_s.iloc[:3, 86])
122 |         pd.testing.assert_frame_equal(
123 |             left=df_res_s, right=df_res_m,
124 |         )
125 |         # test_equal_mode = df_res_s.eq(df_res_m).all(None)
126 |         # self.assertTrue(test_equal_mode)
127 | 
128 |     # # test saving output files working
129 |     # def test_is_supy_save_working(self):
130 |     #     df_state_init, df_forcing_tstep = sp.load_SampleData()
131 |     #     # df_state_init = pd.concat([df_state_init for x in range(6)])
132 |     #     df_forcing_part = df_forcing_tstep.iloc[: 288 * 2]
133 |     #     t_start = time()
134 |     #     df_output, df_state = sp.run_supy(df_forcing_part, df_state_init)
135 |     #     t_end = time()
136 |     #     with tempfile.TemporaryDirectory() as dir_temp:
137 |     #         list_outfile = sp.save_supy(df_output, df_state, path_dir_save=dir_temp)
138 | 
139 |     #     # only print to screen on macOS due to incompatibility on Windows
140 |     #     if platform.system() == "Darwin":
141 |     #         capturedOutput = io.StringIO()  # Create StringIO object
142 |     #         sys.stdout = capturedOutput  # and redirect stdout.
143 |     #         # Call function.
144 |     #         n_grid = df_state_init.index.size
145 |     #         print(f"Running time: {t_end-t_start:.2f} s for {n_grid} grids")
146 |     #         sys.stdout = sys.__stdout__  # Reset redirect.
147 |     #         # Now works as before.
148 |     #         print("Captured:\n", capturedOutput.getvalue())
149 | 
150 |     #     test_non_empty = np.all([isinstance(fn, Path) for fn in list_outfile])
151 |     #     self.assertTrue(test_non_empty)
152 | 
153 |     # test the completeness of the checking rules
154 |     def test_is_checking_complete(self):
155 |         df_state_init, df_forcing_tstep = sp.load_SampleData()
156 |         dict_rules = sp._check.dict_rules_indiv
157 | 
158 |         # variables in loaded dataframe
159 |         set_var_df_init = set(df_state_init.columns.get_level_values("var"))
160 | 
161 |         # variables in dict_rules
162 |         set_var_dict_rules = set(list(dict_rules.keys()))
163 | 
164 |         # common variables
165 |         set_var_common = set_var_df_init.intersection(set_var_dict_rules)
166 | 
167 |         # test if common variables are all those in `df_state_init`
168 |         test_common_all = set_var_df_init == set_var_common
169 |         self.assertTrue(test_common_all)
170 | 
171 |     # test ERA5 forcing generation
172 |     def test_gen_forcing(self):
173 |         import xarray as xr
174 | 
175 |         # # mimic downloading
176 |         # dict_era5_file = sp.util.download_era5(
177 |         #     57.7081,
178 |         #     11.9653,
179 |         #     "20030101",
180 |         #     "20031231",
181 |         #     dir_save="./data_test/single-grid",
182 |         # )
183 |         # list_fn_ml = [k for k in dict_era5_file.keys() if "ml" in k]
184 |         # list_fn_sfc = [k for k in dict_era5_file.keys() if "sfc" in k]
185 |         # test forcing generation
186 |         list_fn_fc = sp.util.gen_forcing_era5(
187 |             57.7081,
188 |             11.9653,
189 |             "20030101",
190 |             "20031231",
191 |             dir_save="./data_test/multi-grid",
192 |             force_download=False,
193 |         )
194 |         df_forcing = sp.util.read_suews(list_fn_fc[0])
195 |         ser_tair = df_forcing.Tair
196 |         # ds_sfc = xr.open_mfdataset(list_fn_sfc)
197 |         # ser_t2 = ds_sfc.t2m.to_series()
198 |         # res_dif = ((df_forcing.Tair + 273.15 - ser_t2.values) / 98).round(4)
199 |         test_dif = -30 < ser_tair.max() < 100  # sanity check: generated Tair within a plausible range [degC]
200 |         self.assertTrue(test_dif)
-------------------------------------------------------------------------------- /src/supy/util/__init__.py: --------------------------------------------------------------------------------
1 | # supy utilities
2 | 
3 | 
4 | from ._tmy import gen_epw, read_epw
5 | 
6 | 
7 | from ._era5 import download_era5, gen_forcing_era5
8 | 
9 | from ._gap_filler import fill_gap_all
10 | 
11 | 
12 | from ._plot import plot_comp, plot_day_clm, plot_rsl
13 | 
14 | 
15 | from ._ohm import derive_ohm_coef, sim_ohm, replace_ohm_coeffs
16 | 
17 | from ._atm import (
18 |     cal_des_dta,
19 |     cal_rs_obs,
20 |     cal_g_dq,
21 |     cal_g_kd,
22 |     cal_g_lai,
23 |     cal_g_smd,
24 |     cal_g_ta,
25 |     cal_gs_mod,
26 |     cal_gs_obs,
27 |     calib_g,
28 | )
29 | from ._io import read_suews, read_forcing
30 | 
31 | from ._wrf import extract_reclassification, plot_reclassification
32 | 
33 | from ._roughness import optimize_MO, cal_neutral
34 | 
35 | from ._debug import diag_rsl, diag_rsl_prm
36 | 
-------------------------------------------------------------------------------- /src/supy/util/_debug.py: --------------------------------------------------------------------------------
1 | import supy_driver as sd
2 | import pandas as pd
3 | 
4 | 
5 | def diag_rsl(df_forcing, df_state, df_output, include_rsl=False):
6 |     """
7 |     Diagnose near-surface meteorological variables using RSL scheme as in `suews_driver`.
8 | 
9 |     Parameters
10 |     ----------
11 |     df_forcing : pandas.Dataframe
12 |         Forcing as used in SuPy run.
13 |     df_state : pandas.Dataframe
14 |         Model states as used in SuPy run.
15 |     df_output : pandas.Dataframe
16 |         Model output produced by SuPy run.
17 | include_rsl : bool, optional 18 | Flag to determine if full RSL output at all levels should be included, by default False 19 | 20 | Returns 21 | ------- 22 | df_sfc (if `include_rsl=False`) or (df_sfc, df_rsl) 23 | df_sfc: DataFrame with only near-surface level variables 24 | df_rsl: DataFrame with only RSL results at all levels 25 | """ 26 | grid = df_state.index[0] 27 | 28 | # get SUEWS group from `df_output` 29 | try: 30 | df_suews = df_output.loc[grid, "SUEWS"] 31 | except: 32 | df_suews = df_output 33 | 34 | sfr = df_state.sfr.values[0] 35 | zmeas = df_state.z.values[0] 36 | zh = df_state[["bldgh", "evetreeh", "dectreeh"]].dot(sfr[[1, 2, 3]]) 37 | fai = df_state[["faibldg", "faievetree", "faidectree",]].dot(sfr[[1, 2, 3]]) 38 | stabilitymethod = df_state.stabilitymethod.values[0] 39 | dict_sfc = {} 40 | dict_rsl = {} 41 | for idx in df_suews.index: 42 | z0m, zdm, l_mod, qh, qe = df_suews.loc[idx, ["z0m", "zdm", "Lob", "QH", "QE"]] 43 | temp_c, press_hpa, avrh, avu1 = df_forcing.loc[idx, ["Tair", "pres", "RH", "U"]] 44 | ( 45 | lv_j_kg, 46 | lvs_j_kg, 47 | es_hpa, 48 | ea_hpa, 49 | vpd_hpa, 50 | vpd_pa, 51 | dq, 52 | dens_dry, 53 | avcp, 54 | avdens, 55 | ) = sd.atmmoiststab_module.cal_atmmoist(temp_c, press_hpa, avrh, 0.0) 56 | res_rsl_idx = sd.rsl_module.rslprofile( 57 | zh, 58 | z0m, 59 | zdm, 60 | l_mod, 61 | sfr, 62 | fai, 63 | stabilitymethod, 64 | avcp, 65 | lv_j_kg, 66 | avdens, 67 | avu1, 68 | temp_c, 69 | avrh, 70 | press_hpa, 71 | zmeas, 72 | qh, 73 | qe, 74 | ) 75 | dict_sfc.update({idx.isoformat(): res_rsl_idx[:4]}) 76 | dict_rsl.update({idx.isoformat(): res_rsl_idx[4:]}) 77 | 78 | # post-process results 79 | df_sfc = pd.DataFrame.from_dict( 80 | dict_sfc, orient="index", columns=["T2", "q2", "U10", "RH2"] 81 | ) 82 | df_sfc.index = pd.to_datetime(df_sfc.index) 83 | df_rsl = pd.DataFrame.from_dict(dict_rsl, orient="index") 84 | df_rsl.index = pd.to_datetime(df_rsl.index) 85 | 86 | if include_rsl: 87 | return df_sfc, df_rsl 88 | else: 89 | return df_sfc 90 | 91 | 92 | def diag_rsl_prm(df_state, df_output): 93 | """ 94 | Diagnose near-surface meteorological variables using RSL scheme as in `suews_driver`. 95 | 96 | Parameters 97 | ---------- 98 | df_state : pandas.Dataframe 99 | Model states as used in SuPy run. 100 | df_output : pandas.Dataframe 101 | Model output produced by SuPy run. 
104 | 
105 |     Returns
106 |     -------
107 |     df_prm: pandas.DataFrame
108 |         DataFrame of RSL-derived profile parameters
109 |         (l_mod_rsl, zh_rsl, lc, beta, zd, z0, elm, scc, f, PAI).
110 |     """
111 |     grid = df_state.index[0]
112 | 
113 |     # get SUEWS group from `df_output`
114 |     try:
115 |         df_suews = df_output.loc[grid, "SUEWS"]
116 |     except KeyError:
117 |         df_suews = df_output
118 | 
119 |     # print(df_suews.head())
120 | 
121 |     zh_min = 0.15
122 |     sfr = df_state.loc[:, "sfr"]
123 |     sfr_obj = sfr.iloc[:, 1:4].values
124 |     zmeas = df_state.z.values
125 |     fai = df_state.loc[:, ["faibldg", "faievetree", "faidectree"]].values
126 |     h_obj = df_state.loc[:, ["bldgh", "evetreeh", "dectreeh"]].values
127 |     zh = pd.Series(
128 |         [pd.Series(h).dot(sfr) for h, sfr in zip(h_obj, sfr_obj)], index=df_state.index
129 |     )
130 |     fai = pd.Series(
131 |         [pd.Series(fai).dot(sfr) for fai, sfr in zip(fai, sfr_obj)],
132 |         index=df_state.index,
133 |     )
134 |     stabilitymethod = df_state.stabilitymethod.values
135 | 
136 |     dict_prm = {}
137 |     zh = zh.iloc[0]
138 |     fai = fai.iloc[0]
139 |     sfr = sfr.iloc[0]
140 |     # print(zh,fai,sfr)
141 |     for idx in df_suews.index:
144 |         z0m, zdm, l_mod, qh, qe = df_suews.loc[idx, ["z0m", "zdm", "Lob", "QH", "QE"]]
145 |         # zh_x = zh.loc[idx]
146 |         # fai_x = fai.loc[idx]
147 |         # temp_c, press_hpa, avrh, avu1 = df_forcing.loc[idx, ["Tair", "pres", "RH", "U"]]
148 |         # (
149 |         #     lv_j_kg,
150 |         #     lvs_j_kg,
151 |         #     es_hpa,
152 |         #     ea_hpa,
153 |         #     vpd_hpa,
154 |         #     vpd_pa,
155 |         #     dq,
156 |         #     dens_dry,
157 |         #     avcp,
158 |         #     avdens,
159 |         # ) = sd.atmmoiststab_module.cal_atmmoist(temp_c, press_hpa, avrh, 0.0)
160 |         (
161 |             l_mod_rsl,
162 |             zh_rsl,
163 |             lc,
164 |             beta,
165 |             zd,
166 |             z0,
167 |             elm,
168 |             scc,
169 |             f,
170 |             PAI,
171 |         ) = sd.rsl_module.rsl_cal_prms(stabilitymethod, zh, l_mod, sfr, fai,)
172 |         dict_prm.update(
173 |             {idx.isoformat(): [l_mod_rsl, zh_rsl, lc, beta, zd, z0, elm, scc, f, PAI]}
174 |         )
175 | 
176 |     # post-process results
177 |     df_prm = pd.DataFrame.from_dict(
178 |         dict_prm,
179 |         orient="index",
180 |         columns=[
181 |             "l_mod_rsl",
182 |             "zh_rsl",
183 |             "lc",
184 |             "beta",
185 |             "zd",
186 |             "z0",
187 |             "elm",
188 |             "scc",
189 |             "f",
190 |             "PAI",
191 |         ],
192 |     )
193 |     df_prm.index = pd.to_datetime(df_prm.index)
194 | 
195 |     return df_prm
196 | 
-------------------------------------------------------------------------------- /src/supy/util/_gap_filler.py: --------------------------------------------------------------------------------
1 | import pandas as pd
2 | import numpy as np
3 | 
4 | 
5 | # locate the first position of a period with in-between gaps
6 | def loc_gap(ser_test, freq="1D", pattern="010"):
7 |     rsmp = ser_test.resample(freq)
8 |     ser_TF_10 = rsmp.apply(lambda ser: ser.isna().any()) * 1
9 |     str_TF_10 = ser_TF_10.astype(str).str.cat()
10 |     pos_gap = str_TF_10.find(pattern)
11 |     loc_ser = ser_TF_10.iloc[pos_gap : pos_gap + len(pattern)].index
12 |     return loc_ser
13 | 
14 | 
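As an illustration of how `loc_gap` above encodes gapped periods (a hypothetical sketch, not repository code): each division of `freq` is flagged "1" if it contains any NaN and "0" otherwise, and the concatenated flags are searched for the requested pattern.

```python
import numpy as np
import pandas as pd

# three days of half-hourly data with the middle day fully missing
idx = pd.date_range("2012-01-01", periods=48 * 3, freq="30T")
ser = pd.Series(1.0, index=idx)
ser.iloc[48:96] = np.nan

# daily NaN flags concatenate to "010": a gapped day between two complete days
flags = ser.resample("1D").apply(lambda s: s.isna().any()) * 1
print(flags.astype(str).str.cat())  # "010"
```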
15 | # fill gap with neighbouring days
16 | def fill_gap_one(ser_test, freq="1D", pattern="010"):
17 |     # resample into daily periods
18 |     rsmp = ser_test.resample(freq)
19 |     # locate the gaps according to gap pattern: 0 for NO gap, 1 for gapped
20 |     loc_ser = loc_gap(ser_test, freq, pattern)
21 | 
22 |     # generator of the groups covering the located periods
23 |     ser_find = (rsmp.get_group(x) for x in loc_ser)
24 |     if len(loc_ser) == 0:
25 |         return ser_test
26 | 
27 |     # assign series:
28 |     # ser_prev: series prior to gapped period
29 |     # ser_gap: series with gaps
30 |     # ser_post: series after gapped period
31 |     if pattern == "010":
32 |         ser_prev, ser_gap, ser_post = ser_find
33 |     elif pattern == "01":
34 |         ser_prev, ser_gap = ser_find
35 |         ser_post = pd.Series([], dtype="float64")
36 |     elif pattern == "10":
37 |         ser_gap, ser_post = ser_find
38 |         ser_prev = pd.Series([], dtype="float64")
39 | 
40 |     # base series for gap filling
41 |     ser_fill_base = pd.concat([ser_prev, ser_post])
42 |     ser_fill = (
43 |         ser_fill_base.groupby(
44 |             [
45 |                 ser_fill_base.index.hour.rename("hr"),
46 |                 ser_fill_base.index.minute.rename("min"),
47 |             ]
48 |         )
49 |         .median()
50 |         .reset_index(drop=True)
51 |     )
52 |     ser_fill.index = ser_gap.index
53 | 
54 |     # calculate rescaling factor only when enough values exist to robustly rescale
55 |     if (pattern == "010") and (ser_gap.count() > len(ser_gap) / 2):
56 |         scale_fill = (ser_fill / ser_gap).median()
57 |         # correct scale_fill for edge cases
58 |         scale_fill = 1 if abs(scale_fill) > 10 else scale_fill
59 |         scale_fill = 1 if abs(scale_fill) < 0.1 else scale_fill
60 |         scale_fill = 1 if np.isnan(scale_fill) else scale_fill
61 |     else:
62 |         scale_fill = 1
63 |     # rescale fill based on median ratio of fill:orig at available timesteps
64 |     ser_fill_gap = ser_fill / scale_fill
65 | 
66 |     # fill in gaps with rescaled values of the filling data
67 |     ser_gap.loc[ser_gap.isna()] = ser_fill_gap.loc[ser_gap.isna()]
68 |     ser_filled = pd.concat([ser_prev, ser_gap, ser_post])
69 | 
70 |     # fill the original gapped series
71 |     ser_test_filled = ser_test.copy()
72 |     ser_test_filled.loc[ser_filled.index] = ser_filled
73 |     return ser_test_filled
74 | 
75 | 
76 | # fill gaps iteratively
77 | def fill_gap_all(ser_to_fill: pd.Series, freq="1D") -> pd.Series:
78 |     """Fill all gaps in a time series using data from neighbouring divisions of 'freq'
79 | 
80 |     Parameters
81 |     ----------
82 |     ser_to_fill : pd.Series
83 |         Time series to gap-fill
84 |     freq : str, optional
85 |         Frequency to identify gapped divisions, by default '1D'
86 | 
87 |     Returns
88 |     -------
89 |     ser_test_filled: pd.Series
90 |         Gap-filled time series.
91 | 
92 |     Patterns
93 |     --------
94 |     010: missing data in division between others with no missing data
95 |     01: missing data in division after one with no missing data
96 |     10: division with missing data before one with no missing data
97 |     """
98 | 
99 |     ser_test_filled = ser_to_fill.copy()
100 |     ptn_list = ["010", "01", "10"]
101 |     while ser_test_filled.isna().any():
102 |         # try the gap patterns in turn and fill any that are found
103 |         try:
104 |             ptn_gap = next(
105 |                 ptn for ptn in ptn_list if len(loc_gap(ser_test_filled, freq, ptn)) > 0
106 |             )
107 |             ser_test_filled = fill_gap_one(ser_test_filled, freq, ptn_gap)
108 |         except StopIteration:
109 |             break  # no recognised gap pattern left: stop to avoid an infinite loop
110 |     return ser_test_filled
111 | 
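A hedged usage sketch of `fill_gap_all` above, reusing the synthetic three-day setup: the fully-gapped middle day is filled with the half-hourly medians of its neighbouring days.

```python
import numpy as np
import pandas as pd
from supy.util import fill_gap_all

idx = pd.date_range("2012-01-01", periods=48 * 3, freq="30T")
ser = pd.Series(np.sin(np.arange(48 * 3) * 2 * np.pi / 48), index=idx)
ser.iloc[48:96] = np.nan  # the "010" pattern: a gapped day between complete days

ser_filled = fill_gap_all(ser)
assert not ser_filled.isna().any()
```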
-------------------------------------------------------------------------------- /src/supy/util/_io.py: --------------------------------------------------------------------------------
1 | import numpy as np
2 | from pathlib import Path
3 | import pandas as pd
4 | from .._load import (
5 |     load_SUEWS_Forcing_met_df_pattern,
6 |     resample_forcing_met,
7 |     set_index_dt,
8 | )
9 | 
10 | 
11 | def read_suews(path_suews_file: str) -> pd.DataFrame:
12 |     """read in a SUEWS input/output file as a datetime-aware DataFrame.
13 | 
14 |     Parameters
15 |     ----------
16 |     path_suews_file : str
17 |         a string that can be converted into a valid path to a SUEWS file.
18 | 
19 |     Returns
20 |     -------
21 |     pd.DataFrame
22 |         datetime-aware DataFrame
23 |     """
24 | 
25 |     path_suews_file = Path(path_suews_file).resolve()
26 |     df_raw = pd.read_csv(
27 |         path_suews_file, delim_whitespace=True, comment="!", error_bad_lines=True
28 |     )
29 |     df_suews = set_index_dt(df_raw)
30 |     return df_suews
31 | 
32 | 
33 | def read_forcing(path_suews_file: str, tstep_mod=300) -> pd.DataFrame:
34 |     """read in SUEWS forcing files as a DataFrame ready for SuPy simulation.
35 | 
36 |     Parameters
37 |     ----------
38 |     path_suews_file : str
39 |         a wildcard pattern that locates the SUEWS forcing files to read, which should follow the `SUEWS convention `_.
40 | 
41 |     tstep_mod: int or None, optional
42 |         time step [s] for resampling, by default 300.
43 |         If `None`, resampling will be skipped.
44 | 
45 |     Returns
46 |     -------
47 |     pd.DataFrame
48 |         datetime-aware DataFrame
49 |     """
50 | 
51 |     path_suews_file = Path(path_suews_file)
52 |     path_input = path_suews_file.parent
53 |     str_pattern = path_suews_file.name
54 | 
55 |     df_forcing_raw = load_SUEWS_Forcing_met_df_pattern(path_input, str_pattern)
56 |     tstep_met_in = df_forcing_raw.index.to_series().diff()[-1] / pd.Timedelta("1s")  # input time step inferred from the last two records
57 |     df_forcing_raw = df_forcing_raw.asfreq(f"{tstep_met_in:.0f}s")
58 | 
59 |     df_forcing = df_forcing_raw
60 | 
61 |     # resampling only when necessary
62 |     if (tstep_mod is not None):
63 |         if (tstep_mod < tstep_met_in):
64 |             df_forcing = df_forcing_raw.replace(-999, np.nan)
65 |             df_forcing = resample_forcing_met(
66 |                 df_forcing, tstep_met_in, tstep_mod, kdownzen=0
67 |             )
68 |             df_forcing = df_forcing.replace(np.nan, -999)
69 | 
70 | 
71 | 
72 |     return df_forcing
73 | 
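A usage sketch of `read_forcing` above; the file path is purely illustrative, assuming the `{site}_{year}_data_{tstep}.txt` naming of SUEWS forcing files.

```python
from supy.util import read_forcing

# hypothetical pattern locating the 60-min forcing file(s) of one site;
# a wildcard such as "TestSite_*_data_60.txt" would pick up several years
df_forcing = read_forcing("./Input/TestSite_2012_data_60.txt", tstep_mod=300)

# records coarser than 300 s are downscaled to the model time step;
# pass tstep_mod=None to keep the raw resolution instead
```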
-------------------------------------------------------------------------------- /src/supy/util/_ohm.py: --------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | Created on Mon Dec 17 11:13:44 2018
4 | 
5 | Authors:
6 | George Meachim,
7 | Ting Sun
8 | """
9 | 
10 | import numpy as np
11 | import pandas as pd
12 | 
13 | from ._plot import plot_comp, plot_day_clm
14 | from .._env import logger_supy
15 | 
16 | 
17 | # Linear fitting of QS, QN, deltaQN/dt (entire timeseries)
18 | def derive_ohm_coef(ser_QS, ser_QN):
19 |     """A function to linearly fit two independent variables (QN and dQN/dt) to a dependent one (QS).
20 |     Input params: ser_QS: The dependent variable QS (surface heat storage). Pandas Series.
21 |                   ser_QN: The independent variable QN (net all-wave radiation). Pandas Series.
22 |     Note: the time interval for the dQN/dt term is derived from the frequency of `ser_QN` (hours).
23 |     Returns: a1, a2 coefficients and a3 (intercept)
24 |     """
25 |     from sklearn.linear_model import LinearRegression
26 |     # derive dt in hours
27 |     dt_hr = ser_QN.index.freq / pd.Timedelta("1H")
28 | 
29 |     # Calculate difference between neighbouring QN values
30 |     ser_delta_QN_dt = ser_QN.diff() / dt_hr
31 | 
32 |     # Drop NaNs and infinite values
33 |     ser_QS = ser_QS.replace([np.inf, -np.inf], np.nan).dropna(how="all")
34 |     ser_QN = ser_QN.loc[ser_QS.index]
35 |     ser_delta_QN_dt = ser_delta_QN_dt.loc[ser_QS.index]
36 | 
37 |     # Create DataFrame with regression quantities and rename cols
38 |     frames = [ser_QS, ser_QN, ser_delta_QN_dt]
39 |     regression_df = pd.concat(frames, axis=1)
40 |     regression_df.columns = ["QS", "QN", "delta_QN_dt"]
41 | 
42 |     # Reindex after dropping NaNs
43 |     regression_df.reset_index(drop=True, inplace=True)
44 |     regression_df.fillna(regression_df.mean(), inplace=True)
45 | 
46 |     feature_cols = ["QN", "delta_QN_dt"]
47 | 
48 |     X = regression_df[feature_cols].replace([np.inf, -np.inf], np.nan).dropna(how="all")
49 |     y = regression_df.QS
50 | 
51 |     lm = LinearRegression()
52 |     lm.fit(X, y)
53 | 
54 |     a1 = lm.coef_[0]
55 |     a2 = lm.coef_[1]
56 |     a3 = lm.intercept_
57 | 
58 |     return a1, a2, a3
59 | 
60 | 
61 | def replace_ohm_coeffs(df_state_init, coefs, land_cover_type):
62 |     """
63 |     This function takes as input parameters the model initial state DataFrame,
64 |     the new OHM coefficients as calculated by performing linear regression on
65 |     AMF observations, and the land cover type for which they were calculated.
66 | 
67 |     Input params: df_state_init: pandas df returned by supy after running SUEWS.
68 |                   coefs: tuple containing new a1, a2, a3 coefficients.
69 |                   land_cover_type: String specifying one of seven SUEWS land cover types.
70 |     Returns: df_state_init_copy: A copy of df_state_init with changed OHM params.
71 |     """
72 | 
73 |     land_cover_type_dict = {
74 |         "Paved": "1",
75 |         "Bldgs": "2",
76 |         "EveTr": "3",
77 |         "DecTr": "4",
78 |         "Grass": "5",
79 |         "BSoil": "6",
80 |         "Water": "7",
81 |     }
82 | 
83 |     try:
84 |         lc_index = int(land_cover_type_dict.get(land_cover_type)) - 1
85 |     except (TypeError, ValueError):  # raised when land_cover_type is not a recognised key
86 |         list_lc = list(land_cover_type_dict.keys())
87 |         logger_supy.error(
88 |             f"land_cover_type must be one of {list_lc}, instead of {land_cover_type}"
89 |         )
90 |     else:
91 |         # Instantiate a 4x3 matrix of zeros to hold the new coefficients
92 |         coef_matrix = np.zeros((4, 3))
93 |         coef_matrix[:, 0] = coefs[0]
94 |         coef_matrix[:, 1] = coefs[1]
95 |         coef_matrix[:, 2] = coefs[2]
96 | 
97 |         # Copy ohm_coef part of df_state_init
98 |         df_ohm = df_state_init.loc[:, "ohm_coef"].copy()
99 |         # Reshape values into matrix form
100 |         values_ohm = df_ohm.values.reshape((8, 4, 3))
101 |         # Get ohm values corresponding to user specified land cover and assign to matrix
102 |         values_ohm[lc_index] = coef_matrix
103 |         # Place new ohm values into df_ohm
104 |         df_ohm.loc[:, :] = values_ohm.flatten()
105 |         # Make copy of df_state_init
106 |         df_state_init_copy = df_state_init.copy()
107 |         # Replace ohm_coef part of df_state_init with new values
108 |         df_state_init_copy.loc[:, "ohm_coef"] = df_ohm.values
109 | 
110 |         return df_state_init_copy
111 | 
112 | 
113 | def sim_ohm(ser_qn: pd.Series, a1: float, a2: float, a3: float) -> pd.Series:
114 |     """Calculate QS using OHM (Objective Hysteresis Model).
115 | 
116 |     Parameters
117 |     ----------
118 |     ser_qn : pd.Series
119 |         net all-wave radiation.
120 |     a1 : float
121 |         a1 of OHM coefficients.
122 |     a2 : float
123 |         a2 of OHM coefficients.
124 |     a3 : float
125 |         a3 of OHM coefficients.
126 | 
127 |     Returns
128 |     -------
129 |     pd.Series
130 |         heat storage flux calculated by OHM.
131 |     """
132 | 
133 |     # derive delta t in hours
134 |     try:
135 |         dt_hr = ser_qn.index.freq / pd.Timedelta("1h")
136 |     except AttributeError as ex:
137 |         raise RuntimeError("frequency info is missing from input `ser_qn`") from ex
138 | 
139 |     # Calculate rate of change of net all-wave radiation
140 |     ser_qn_dt = ser_qn.diff() / dt_hr
141 | 
142 |     # Derive QS from OBS quantities
143 |     ser_qs = a1 * ser_qn + a2 * ser_qn_dt + a3
144 | 
145 |     return ser_qs
146 | 
147 | 
148 | def compare_heat_storage(ser_qn_obs, ser_qs_obs, a1, a2, a3):
149 |     """This function compares the storage heat flux calculated with AMF
150 |     QN and linear regression coefficients with that output by SUEWS.
151 |     Input params: ser_qn_obs: A series of observed net all-wave radiation.
152 |                   ser_qs_obs: A series of SUEWS storage heat flux values.
153 |                   a1, a2, a3: Linear regression coefficients from derive_ohm_coef.
154 |     Returns: MPL plot of diurnal comparison, MPL plot with 1:1 line and fitted line.
155 |     """
156 | 
157 |     # calculate qs using OHM
158 |     ser_qs_sim = sim_ohm(ser_qn_obs, a1, a2, a3).rename("Sim")
159 | 
160 |     # re-organise obs and sim into one dataframe
161 |     ser_qs_obs = ser_qs_obs.rename("Obs")
162 |     df_qs = pd.concat([ser_qs_sim, ser_qs_obs], axis=1)
163 |     # Plotting
164 |     plot1 = plot_day_clm(df_qs)
165 |     plot2 = plot_comp(df_qs)
166 | 
167 |     return plot1, plot2
168 | 
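A worked sketch of the OHM relation implemented in `sim_ohm` above, QS = a1*QN + a2*dQN/dt + a3, using made-up coefficients and an idealised clear-sky QN curve (all values illustrative only):

```python
import numpy as np
import pandas as pd
from supy.util import sim_ohm

# idealised hourly net all-wave radiation over one day [W m-2]
idx = pd.date_range("2012-07-01", periods=24, freq="1H")
ser_qn = pd.Series(400 * np.sin(np.linspace(0, np.pi, 24)), index=idx)

# illustrative coefficients: a1 [-], a2 [h], a3 [W m-2]
ser_qs = sim_ohm(ser_qn, a1=0.5, a2=0.3, a3=-30.0)
# with a2 > 0, QS peaks before QN: the hysteresis behaviour OHM encodes
```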
-------------------------------------------------------------------------------- /src/supy/util/_roughness.py: --------------------------------------------------------------------------------
1 | import numpy as np
2 | 
3 | from .._env import logger_supy
4 | from ._atm import cal_cp
5 | 
6 | # saturation vapour pressure [hPa]
7 | def cal_vap_sat(Temp_C, Press_hPa):
8 |     # temp_c= 0.001 if np.abs(Temp_C)<0.001 else Temp_C
9 |     Press_kPa = Press_hPa / 10
10 | 
11 |     if 0.001000 <= Temp_C < 50:
12 |         e_mb = 6.1121 * np.exp(((18.678 - Temp_C / 234.5) * Temp_C) / (Temp_C + 257.14))
13 |         f = 1.00072 + Press_kPa * (3.2e-6 + 5.9e-10 * Temp_C ** 2)
14 |         es_hPa = e_mb * f
15 | 
16 |     if -40 < Temp_C <= -0.001000:
17 |         e_mb = 6.1115 * np.exp(((23.036 - Temp_C / 333.7) * Temp_C) / (Temp_C + 279.82))
18 |         f = 1.00022 + Press_kPa * (3.83e-6 + 6.4e-10 * Temp_C ** 2)
19 |         es_hPa = e_mb * f
20 | 
21 |     if -0.001 < Temp_C < 0.001:
22 |         es_hPa = cal_vap_sat(0.001, Press_hPa)
23 | 
24 |     return es_hPa
25 | 
26 | 
27 | # density of dry air [kg m-3]
28 | def cal_dens_dry(RH_pct, Temp_C, Press_hPa):
29 |     gas_ct_dry = 8.31451 / 0.028965  # dry_gas/molar
30 |     es_hPa = cal_vap_sat(Temp_C, Press_hPa)
31 |     Ea_hPa = RH_pct / 100 * es_hPa
32 |     dens_dry = ((Press_hPa - Ea_hPa) * 100) / (gas_ct_dry * (273.16 + Temp_C))
33 |     return dens_dry
34 | 
35 | 
36 | # density of vapour [kg m-3]
37 | def cal_dens_vap(RH_pct, Temp_C, Press_hPa):
38 |     gas_ct_wv = 8.31451 / 0.0180153  # dry_gas/molar_wat_vap
39 |     es_hPa = cal_vap_sat(Temp_C, Press_hPa)
40 |     Ea_hPa = RH_pct / 100 * es_hPa
41 |     vap_dens = Ea_hPa * 100 / ((Temp_C + 273.16) * gas_ct_wv)
42 |     return vap_dens
43 | 
44 | 
45 | #
46 | # # specific heat capacity of air mass [J kg-1 K-1]
47 | # def cal_cpa(Temp_C, RH_pct, Press_hPa):
48 | #     # heat capacity of dry air depending on air temperature
49 | #     cpd = 1005.0 + ((Temp_C + 23.16) ** 2) / 3364.0
50 | #     # heat capacity of vapour
51 | #     cpm = (
52 | #         1859
53 | #         + 0.13 * RH_pct
54 | #         + (19.3 + 0.569 * RH_pct) * (Temp_C / 100.0)
55 | #         + (10.0 + 0.5 * RH_pct) * (Temp_C / 100.0) ** 2
56 | #     )
57 | #
58 | #     # density of dry air
59 | #     rho_d = cal_dens_dry(RH_pct, Temp_C, Press_hPa)
60 | #
61 | #     # density of vapour
62 | #     rho_v = cal_dens_vap(RH_pct, Temp_C, Press_hPa)
63 | #
64 | #     # specific heat
65 | #     cpa = cpd * (rho_d / (rho_d + rho_v)) + cpm * (rho_v / (rho_d + rho_v))
66 | #     return cpa
67 | 
68 | 
69 | # air density [kg m-3]
70 | def cal_dens_air(Press_hPa, Temp_C):
71 |     # dry_gas/molar
72 |     gas_ct_dry = 8.31451 / 0.028965
73 | 
74 |     # air density [kg m-3]
75 |     dens_air = (Press_hPa * 100) / (gas_ct_dry * (Temp_C + 273.16))
76 |     return dens_air
77 | 
78 | 
79 | # Obukhov length
80 | def cal_Lob(QH, UStar, Temp_C, RH_pct, Press_hPa, g=9.8, k=0.4):
81 |     # gravity constant/(Temperature*Von Karman Constant)
82 |     G_T_K = (g / (Temp_C + 273.16)) * k
83 | 
84 |     # air density [kg m-3]
85 |     rho = cal_dens_air(Press_hPa, Temp_C)
86 | 
87 |     # specific heat capacity of air mass [J kg-1 K-1]
88 |     cpa = cal_cp(Temp_C, RH_pct, Press_hPa)
89 | 
90 |     # kinematic sensible heat flux [K m s-1]
91 |     H = QH / (rho * cpa)
92 | 
93 |     # temperature scale
94 |     uStar = np.max([0.01, UStar])
95 |     TStar = -H / uStar
96 | 
97 |     # Obukhov length
98 |     Lob = (uStar ** 2) / (G_T_K * TStar)
99 | 
100 |     return Lob
101 | 
102 | 
103 | def cal_neutral(df_val, z_meas, h_sfc):
104 |     """Select the observations made under neutral conditions (|z/L| <= 0.01)
105 | 
106 | 
107 |     Parameters
108 |     ----------
109 |     df_val: pd.DataFrame
110 |         Index should be time with columns: 'H', 'USTAR', 'TA', 'RH', 'PA', 'WS'
111 |     z_meas
112 |         measurement height in m
113 |     h_sfc
114 |         vegetation height in m
115 | 
116 |     Returns
117 |     -------
118 |     ser_ws: pd.Series
119 |         observation time series of WS (neutral conditions)
120 |     ser_ustar: pd.Series
121 |         observation time series of u* (neutral conditions)
122 |     """
123 | 
124 |     # calculate Obukhov length
125 |     ser_Lob = df_val.apply(
126 |         lambda ser: cal_Lob(ser.H, ser.USTAR, ser.TA, ser.RH, ser.PA * 10), axis=1
127 |     )
128 | 
129 |     # zero-plane displacement: estimated using the rule of thumb `d=0.7*h_sfc`
130 | 
131 |     z_d = 0.7 * h_sfc
132 | 
133 |     if z_d >= z_meas:
134 |         logger_supy.exception(
135 |             "vegetation height is greater than the measurement height. Please fix this before continuing . . ."
136 |         )
137 | 
138 |     # calculate stability scale
139 |     ser_zL = (z_meas - z_d) / ser_Lob
140 | 
141 |     # determine periods under quasi-neutral conditions
142 |     limit_neutral = 0.01
143 |     ind_neutral = ser_zL.between(-limit_neutral, limit_neutral)
144 | 
145 |     ind_neutral = ind_neutral[ind_neutral]
146 | 
147 |     df_sel = df_val.loc[ind_neutral.index, ["WS", "USTAR"]].dropna()
148 |     ser_ustar = df_sel.USTAR
149 |     ser_ws = df_sel.WS
150 | 
151 |     return ser_ws, ser_ustar
152 | 
153 | 
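A hypothetical sketch of `cal_neutral` above, assuming a flux DataFrame with the required columns; the two synthetic rows are constructed so that |z/L| stays within the 0.01 threshold and both are therefore kept.

```python
import pandas as pd
from supy.util import cal_neutral

df_val = pd.DataFrame(
    {
        "H": [5.0, -4.0],      # sensible heat flux [W m-2]
        "USTAR": [0.5, 0.45],  # friction velocity [m s-1]
        "TA": [12.0, 11.5],    # air temperature [degC]
        "RH": [70.0, 75.0],    # relative humidity [%]
        "PA": [101.3, 101.2],  # pressure [kPa]; converted to hPa internally
        "WS": [3.2, 2.8],      # wind speed [m s-1]
    },
    index=pd.date_range("2012-06-01", periods=2, freq="30T"),
)

ser_ws, ser_ustar = cal_neutral(df_val, z_meas=20.0, h_sfc=10.0)
```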
154 | # Optimization for calculating z0 and d
155 | def optimize_MO(df_val, z_meas, h_sfc):
156 |     """Calculates surface roughness and zero-plane displacement height.
157 |     Refer to https://suews-parameters-docs.readthedocs.io/en/latest/steps/roughness-SuPy.html for an example.
158 | 
159 |     Parameters
160 |     ----------
161 |     df_val: pd.DataFrame
162 |         Index should be time with columns: 'H', 'USTAR', 'TA', 'RH', 'PA', 'WS'
163 |     z_meas
164 |         measurement height in m
165 |     h_sfc
166 |         vegetation height in m
167 | 
168 |     Returns
169 |     -------
170 |     z0
171 |         surface roughness
172 |     d
173 |         zero displacement height
174 |     ser_ws: pd.Series
175 |         observation time series of WS (neutral conditions)
176 |     ser_ustar: pd.Series
177 |         observation time series of u* (neutral conditions)
178 |     """
179 | 
180 |     from platypus.core import Problem
181 |     from platypus.types import Real, random
182 |     from platypus.algorithms import NSGAIII
183 | 
184 |     # select observations made under neutral conditions
185 |     ser_ws, ser_ustar = cal_neutral(df_val, z_meas, h_sfc)
186 | 
187 |     # function to optimize
188 |     def func_uz(params):
189 |         z0 = params[0]
190 |         d = params[1]
191 |         z = z_meas
192 |         k = 0.4
193 |         uz = (ser_ustar / k) * np.log((z - d) / z0)  # logarithmic law
194 | 
195 |         o1 = abs(1 - np.std(uz) / np.std(ser_ws))  # objective 1: normalised STD
196 |         # objective 2: normalised MAE
197 |         o2 = np.mean(abs(uz - ser_ws)) / (np.mean(ser_ws))
198 | 
199 |         return [o1, o2], [uz.min(), d - z0]
200 | 
201 |     problem = Problem(2, 2, 2)  # 2 decision variables, 2 objectives, 2 constraints
202 |     problem.types[0] = Real(0, 10)  # bounds for first parameter (z0)
203 |     problem.types[1] = Real(0, h_sfc)  # bounds for second parameter (zd)
204 | 
205 |     problem.constraints[0] = ">=0"  # first constraint: modelled wind speed must be non-negative (uz.min())
206 |     problem.constraints[1] = ">=0"  # second constraint: displacement must not fall below roughness length (d - z0)
207 | 
208 |     problem.function = func_uz
209 |     random.seed(12345)
210 |     algorithm = NSGAIII(problem, divisions_outer=50)
211 |     algorithm.run(30000)
212 | 
213 |     z0s = []
214 |     ds = []
215 |     os1 = []
216 |     os2 = []
217 |     # getting the solution variables
218 |     for s in algorithm.result:
219 |         z0s.append(s.variables[0])
220 |         ds.append(s.variables[1])
221 |         os1.append(s.objectives[0])
222 |         os2.append(s.objectives[1])
223 |     # select the solution whose obj2 is closest to the mean of all obj2 values (can be changed)
224 |     idx = os2.index(min(os2, key=lambda x: abs(x - np.mean(os2))))
225 |     z0 = z0s[idx]
226 |     d = ds[idx]
227 | 
228 |     return z0, d, ser_ws, ser_ustar
229 | 
-------------------------------------------------------------------------------- /src/supy/util/_wrf.py: --------------------------------------------------------------------------------
1 | # WRF-SUEWS related utilities
2 | import pandas as pd
3 | import numpy as np
4 | from .._load import load_SUEWS_nml
5 | from pathlib import Path
6 | 
7 | 
8 | dict_modis_20 = {
9 |     1: "Evergreen Needleleaf Forest",
10 |     2: "Evergreen Broadleaf Forest",
11 |     3: "Deciduous Needleleaf Forest",
12 |     4: "Deciduous Broadleaf Forest",
13 |     5: "Mixed Forests",
14 |     6: "Closed Shrublands",
15 |     7: "Open Shrublands",
16 |     8: "Woody Savannas",
17 |     9: "Savannas",
18 |     10: "Grasslands",
19 |     11: "Permanent Wetlands",
20 |     12: "Croplands",
21 |     13: "Urban and Built-Up",
22 |     14: "Cropland/Natural Vegetation Mosaic",
23 |     15: "Snow and Ice",
24 |     16: "Barren or Sparsely Vegetated",
25 |     17: "Water",
26 |     18: "Wooded Tundra",
27 |     19: "Mixed Tundra",
28 |     20: "Barren Tundra",
29 | }
30 | 
31 | list_cat_suews = [
32 |     # built-up
33 |     "Paved",
34 |     "Bldgs",
35 |     # vegetated
36 |     "EveTr",
37 |     "DecTr",
38 |     "Grass",
39 |     # soil
40 |     "Bsoil",
41 |     # water
42 |     "Water",
43 |     # not-used
44 |     "Extra",
45 | ]
46 | 
47 | 
48 | def extract_reclassification(path_nml: str) -> pd.DataFrame:
49 | 
"""Extract reclassification info from `path_nml` as a DataFrame. 50 | 51 | Parameters 52 | ---------- 53 | path_nml : str 54 | Path to `namelist.suews` 55 | 56 | Returns 57 | ------- 58 | pd.DataFrame 59 | Reclassification DataFrame with rows for WRF land covers while columns for SUEWS. 60 | """ 61 | df_lc = load_SUEWS_nml(path_nml).landuse 62 | 63 | ser_cat_suews = pd.Series(list_cat_suews, name="lc_suews") 64 | df_ind = pd.DataFrame(df_lc.loc["suews_cat_ind"], columns=ser_cat_suews) 65 | df_frac = pd.DataFrame(df_lc.loc["suews_cat_frac"], columns=ser_cat_suews) 66 | 67 | df_rcl = pd.concat([df_ind, df_frac], axis=1, keys=["lc_wrf", "frac"]) 68 | df_rcl = df_rcl.stack(-1).reset_index("lc_suews") 69 | df_rcl = df_rcl.pivot_table(index="lc_wrf", columns="lc_suews", values="frac") 70 | df_rcl = df_rcl.drop(-999, axis=0) 71 | df_rcl = df_rcl.drop(list_cat_suews[-1], axis=1) 72 | df_rcl = df_rcl.replace(np.nan, 0) 73 | df_rcl = df_rcl.rename(dict_modis_20, axis=0) 74 | return df_rcl 75 | 76 | 77 | def gen_df_sankey(path_nml: str): 78 | # load reclassification data 79 | df_rcl = extract_reclassification(path_nml) 80 | 81 | # create flow data 82 | df_flow = df_rcl.T.reset_index().melt(id_vars=["lc_suews"], value_name="frac") 83 | 84 | df_flow = df_flow.rename( 85 | {"lc_suews": "target", "lc_wrf": "source", "frac": "value"}, axis=1 86 | ) 87 | 88 | # label conversion types 89 | 90 | def cat_type(x: str) -> str: 91 | if x in ["Bldgs", "Paved"]: 92 | return "Built-up" 93 | elif x in ["DecTr", "EveTr", "Grass"]: 94 | return "Vegetated" 95 | else: 96 | return x 97 | 98 | df_flow["type"] = df_flow.target.apply(cat_type) 99 | 100 | # create process data 101 | df_process = df_flow.loc[df_flow.value > 0.1] 102 | df_process = pd.concat( 103 | [ 104 | df_process[["target", "type"]].rename({"target": "id"}, axis=1), 105 | df_process[["source", "type"]].rename({"source": "id"}, axis=1), 106 | ], 107 | sort=False, 108 | ) 109 | df_process = df_process.drop_duplicates().groupby("id").first() 110 | df_process["name"] = df_process.index 111 | 112 | return df_flow, df_process 113 | 114 | 115 | def in_ipynb(): 116 | try: 117 | from IPython import get_ipython 118 | cfg = get_ipython().has_trait("kernel") 119 | if cfg: 120 | return True 121 | else: 122 | return False 123 | except NameError: 124 | return False 125 | 126 | 127 | def plot_reclassification( 128 | path_nml: str, 129 | path_save="LC-WRF-SUEWS.png", 130 | width=800, 131 | height=360, 132 | top=10, 133 | bottom=10, 134 | left=260, 135 | right=60, 136 | ): 137 | """Produce Sankey Diagram to visualise the reclassification specified in `path_nml` 138 | 139 | Parameters 140 | ---------- 141 | path_nml : str 142 | Path to `namelist.suews` 143 | path_save : str, optional 144 | Path to save Sankey diagram, by default 'LC-WRF-SUEWS.png' 145 | width : int, optional 146 | Width of diagram, by default 800 147 | height : int, optional 148 | Height of diagram, by default 360 149 | top : int, optional 150 | Top margin of diagram, by default 10 151 | bottom : int, optional 152 | Bottom margin of diagram, by default 10 153 | left : int, optional 154 | Left margin of diagram, by default 260 155 | right : int, optional 156 | Right margin of diagram, by default 60 157 | 158 | Returns 159 | ------- 160 | Sankey Diagram 161 | Sankey Diagram showing the reclassification. 
162 |     """
163 |     try:
164 |         from floweaver import (
165 |             Bundle,
166 |             Dataset,
167 |             Partition,
168 |             ProcessGroup,
169 |             Waypoint,
170 |             SankeyDefinition,
171 |             weave,
172 |         )
173 |     except Exception as ie:
174 |         raise ImportError("Please install `floweaver` by `pip install floweaver`.") from ie
175 | 
176 | 
177 |     # derive DataFrames required by Sankey
178 |     df_flow, df_process = gen_df_sankey(path_nml)
179 | 
180 |     # set the default size to fit the documentation better.
181 |     size = dict(width=width, height=height)
182 |     margins = dict(top=top, bottom=bottom, left=left, right=right)
183 | 
184 |     # create Sankey data
185 |     dataset = Dataset(df_flow, dim_process=df_process)
186 |     # SUEWS LCs
187 |     list_suews = df_flow.target.unique().tolist()
188 |     # WRF LCs
189 |     list_wrf = df_flow.source.unique().tolist()
190 |     list_type = df_flow.type.unique().tolist()
191 |     # LC types
192 |     lc_by_type = Partition.Simple("type", list_type)
193 | 
194 |     nodes = {
195 |         "SUEWS": ProcessGroup(list_suews),
196 |         "WRF": ProcessGroup(list_wrf),
197 |     }
198 |     nodes["SUEWS"].partition = Partition.Simple("process", list_cat_suews)
199 |     nodes["WRF"].partition = Partition.Simple("process", list_wrf)
200 |     nodes["type"] = Waypoint(lc_by_type)
201 |     ordering = [
202 |         ["WRF"],  # put "WRF" on the left...
203 |         ["type"],  # ... the "type" waypoint in the middle...
204 |         ["SUEWS"],  # ... and "SUEWS" on the right.
205 |     ]
206 |     bundles = [
207 |         Bundle("WRF", "SUEWS", waypoints=["type"]),
208 |     ]
209 | 
210 |     # Set the colours for the labels in the partition.
211 |     palette = {
212 |         "Built-up": "slategrey",
213 |         "Bsoil": "tan",
214 |         "Vegetated": "forestgreen",
215 |         "Water": "royalblue",
216 |     }
217 | 
218 |     sdd = SankeyDefinition(nodes, bundles, ordering, flow_partition=lc_by_type)
219 | 
220 |     data_sankey = weave(sdd, dataset, palette=palette)
221 |     sankey = data_sankey.to_widget(**size, margins=margins)
222 |     if in_ipynb():
223 |         path_save = Path(path_save)
224 |         suffix = path_save.suffix  # NB: includes the leading dot, e.g. ".png"
225 |         if suffix == ".png":
226 |             print("Saving figure in png:")
227 |             sankey.auto_save_png(path_save)
228 |         elif suffix == ".svg":
229 |             print("Saving figure in svg:")
230 |             sankey.auto_save_svg(path_save)
231 |         else:
232 |             print("Saving figure in png: ")
233 |             sankey.auto_save_png(path_save)
234 |         print(path_save.resolve())
235 |     else:
236 |         print("Please run this function in a Jupyter notebook for visualisation.")
237 | 
238 |     return sankey
239 | 
--------------------------------------------------------------------------------
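To close, a hypothetical end-to-end sketch of the WRF reclassification utilities above; the namelist path is illustrative.

```python
from supy.util import extract_reclassification, plot_reclassification

# tabulate the WRF -> SUEWS land-cover mapping from a WRF-SUEWS namelist
df_rcl = extract_reclassification("namelist.suews")
print(df_rcl.sum(axis=1))  # fractions of each WRF category should sum to ~1

# visualise the same mapping as a Sankey diagram (requires floweaver and,
# for saving, a Jupyter notebook session, as noted above)
sankey = plot_reclassification("namelist.suews", path_save="LC-WRF-SUEWS.png")
```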